after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def penn_tokenize(self, text, return_str=False):
"""
This is a Python port of the Penn treebank tokenizer adapted by the Moses
machine translation community. It's a little different from the
version in nltk.tokenize.treebank.
"""
# Converts input string into unicode.
text = text_type(text)
# Perform a chain of regex substituitions using MOSES_PENN_REGEXES_1
for regexp, substitution in self.MOSES_PENN_REGEXES_1:
text = re.sub(regexp, substitution, text)
# Handles nonbreaking prefixes.
text = self.handles_nonbreaking_prefixes(text)
# Restore ellipsis, clean extra spaces, escape XML symbols.
for regexp, substitution in self.MOSES_PENN_REGEXES_2:
text = re.sub(regexp, substitution, text)
return text if return_str else text.split()
|
def penn_tokenize(self, text, return_str=False):
"""
This is a Python port of the Penn treebank tokenizer adapted by the Moses
machine translation community. It's a little different from the
version in nltk.tokenize.treebank.
"""
# Converts input string into unicode.
text = text_type(text)
# Perform a chain of regex substituitions using MOSES_PENN_REGEXES_1
for regexp, subsitution in self.MOSES_PENN_REGEXES_1:
text = re.sub(regexp, subsitution, text)
# Handles nonbreaking prefixes.
text = handles_nonbreaking_prefixes(text)
# Restore ellipsis, clean extra spaces, escape XML symbols.
for regexp, subsitution in self.MOSES_PENN_REGEXES_2:
text = re.sub(regexp, subsitution, text)
return text if return_str else text.split()
|
https://github.com/nltk/nltk/issues/1551
|
$ python -c 'from nltk.tokenize.moses import MosesTokenizer; m = MosesTokenizer(); m.penn_tokenize("this aint funny")'
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "nltk/tokenize/moses.py", line 299, in penn_tokenize
text = re.sub(regexp, subsitution, text)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 155, in sub
return _compile(pattern, flags).sub(repl, string, count)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 251, in _compile
raise error, v # invalid expression
sre_constants.error: unbalanced parenthesis
|
sre_constants.error
|
def tokenize(self, text, agressive_dash_splits=False, return_str=False):
"""
Python port of the Moses tokenizer.
>>> mtokenizer = MosesTokenizer()
>>> text = u'Is 9.5 or 525,600 my favorite number?'
>>> print (mtokenizer.tokenize(text, return_str=True))
Is 9.5 or 525,600 my favorite number ?
>>> text = u'The https://github.com/jonsafari/tok-tok/blob/master/tok-tok.pl is a website with/and/or slashes and sort of weird : things'
>>> print (mtokenizer.tokenize(text, return_str=True))
The https : / / github.com / jonsafari / tok-tok / blob / master / tok-tok.pl is a website with / and / or slashes and sort of weird : things
>>> text = u'This, is a sentence with weird\xbb symbols\u2026 appearing everywhere\xbf'
>>> expected = u'This , is a sentence with weird \xbb symbols \u2026 appearing everywhere \xbf'
>>> assert mtokenizer.tokenize(text, return_str=True) == expected
:param tokens: A single string, i.e. sentence text.
:type tokens: str
:param agressive_dash_splits: Option to trigger dash split rules .
:type agressive_dash_splits: bool
"""
# Converts input string into unicode.
text = text_type(text)
# De-duplicate spaces and clean ASCII junk
for regexp, substitution in [self.DEDUPLICATE_SPACE, self.ASCII_JUNK]:
text = re.sub(regexp, substitution, text)
# Strips heading and trailing spaces.
text = text.strip()
# Separate special characters outside of IsAlnum character set.
regexp, substitution = self.PAD_NOT_ISALNUM
text = re.sub(regexp, substitution, text)
# Aggressively splits dashes
if agressive_dash_splits:
regexp, substitution = self.AGGRESSIVE_HYPHEN_SPLIT
text = re.sub(regexp, substitution, text)
# Replaces multidots with "DOTDOTMULTI" literal strings.
text = self.replace_multidots(text)
# Separate out "," except if within numbers e.g. 5,300
for regexp, substitution in [self.COMMA_SEPARATE_1, self.COMMA_SEPARATE_2]:
text = re.sub(regexp, substitution, text)
# (Language-specific) apostrophe tokenization.
if self.lang == "en":
for regexp, substitution in self.ENGLISH_SPECIFIC_APOSTROPHE:
text = re.sub(regexp, substitution, text)
elif self.lang in ["fr", "it"]:
for regexp, substitution in self.FR_IT_SPECIFIC_APOSTROPHE:
text = re.sub(regexp, substitution, text)
else:
regexp, substitution = self.NON_SPECIFIC_APOSTROPHE
text = re.sub(regexp, substitution, text)
# Handles nonbreaking prefixes.
text = self.handles_nonbreaking_prefixes(text)
# Cleans up extraneous spaces.
regexp, substitution = self.DEDUPLICATE_SPACE
text = re.sub(regexp, substitution, text).strip()
# Restore multidots.
text = self.restore_multidots(text)
# Escape XML symbols.
text = self.escape_xml(text)
return text if return_str else text.split()
|
def tokenize(self, text, agressive_dash_splits=False, return_str=False):
"""
Python port of the Moses tokenizer.
>>> mtokenizer = MosesTokenizer()
>>> text = u'Is 9.5 or 525,600 my favorite number?'
>>> print (mtokenizer.tokenize(text, return_str=True))
Is 9.5 or 525,600 my favorite number ?
>>> text = u'The https://github.com/jonsafari/tok-tok/blob/master/tok-tok.pl is a website with/and/or slashes and sort of weird : things'
>>> print (mtokenizer.tokenize(text, return_str=True))
The https : / / github.com / jonsafari / tok-tok / blob / master / tok-tok.pl is a website with / and / or slashes and sort of weird : things
>>> text = u'This, is a sentence with weird\xbb symbols\u2026 appearing everywhere\xbf'
>>> expected = u'This , is a sentence with weird \xbb symbols \u2026 appearing everywhere \xbf'
>>> assert mtokenizer.tokenize(text, return_str=True) == expected
:param tokens: A single string, i.e. sentence text.
:type tokens: str
:param agressive_dash_splits: Option to trigger dash split rules .
:type agressive_dash_splits: bool
"""
# Converts input string into unicode.
text = text_type(text)
# De-duplicate spaces and clean ASCII junk
for regexp, subsitution in [self.DEDUPLICATE_SPACE, self.ASCII_JUNK]:
text = re.sub(regexp, subsitution, text)
# Strips heading and trailing spaces.
text = text.strip()
# Separate special characters outside of IsAlnum character set.
regexp, subsitution = self.PAD_NOT_ISALNUM
text = re.sub(regexp, subsitution, text)
# Aggressively splits dashes
if agressive_dash_splits:
regexp, subsitution = self.AGGRESSIVE_HYPHEN_SPLIT
text = re.sub(regexp, subsitution, text)
# Replaces multidots with "DOTDOTMULTI" literal strings.
text = self.replace_multidots(text)
# Separate out "," except if within numbers e.g. 5,300
for regexp, subsitution in [self.COMMA_SEPARATE_1, self.COMMA_SEPARATE_2]:
text = re.sub(regexp, subsitution, text)
# (Language-specific) apostrophe tokenization.
if self.lang == "en":
for regexp, subsitution in self.ENGLISH_SPECIFIC_APOSTROPHE:
text = re.sub(regexp, subsitution, text)
elif self.lang in ["fr", "it"]:
for regexp, subsitution in self.FR_IT_SPECIFIC_APOSTROPHE:
text = re.sub(regexp, subsitution, text)
else:
regexp, subsitution = self.NON_SPECIFIC_APOSTROPHE
text = re.sub(regexp, subsitution, text)
# Handles nonbreaking prefixes.
text = self.handles_nonbreaking_prefixes(text)
# Cleans up extraneous spaces.
regexp, subsitution = self.DEDUPLICATE_SPACE
text = re.sub(regexp, subsitution, text).strip()
# Restore multidots.
text = self.restore_multidots(text)
# Escape XML symbols.
text = self.escape_xml(text)
return text if return_str else text.split()
|
https://github.com/nltk/nltk/issues/1551
|
$ python -c 'from nltk.tokenize.moses import MosesTokenizer; m = MosesTokenizer(); m.penn_tokenize("this aint funny")'
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "nltk/tokenize/moses.py", line 299, in penn_tokenize
text = re.sub(regexp, subsitution, text)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 155, in sub
return _compile(pattern, flags).sub(repl, string, count)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 251, in _compile
raise error, v # invalid expression
sre_constants.error: unbalanced parenthesis
|
sre_constants.error
|
def unescape_xml(self, text):
for regexp, substitution in self.MOSES_UNESCAPE_XML_REGEXES:
text = re.sub(regexp, substitution, text)
return text
|
def unescape_xml(self, text):
for regexp, subsitution in self.MOSES_UNESCAPE_XML_REGEXES:
text = re.sub(regexp, subsitution, text)
return text
|
https://github.com/nltk/nltk/issues/1551
|
$ python -c 'from nltk.tokenize.moses import MosesTokenizer; m = MosesTokenizer(); m.penn_tokenize("this aint funny")'
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "nltk/tokenize/moses.py", line 299, in penn_tokenize
text = re.sub(regexp, subsitution, text)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 155, in sub
return _compile(pattern, flags).sub(repl, string, count)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 251, in _compile
raise error, v # invalid expression
sre_constants.error: unbalanced parenthesis
|
sre_constants.error
|
def tokenize(self, tokens, return_str=False):
"""
Python port of the Moses detokenizer.
:param tokens: A list of strings, i.e. tokenized text.
:type tokens: list(str)
:return: str
"""
# Convert the list of tokens into a string and pad it with spaces.
text = " {} ".format(" ".join(tokens))
# Converts input string into unicode.
text = text_type(text)
# Detokenize the agressive hyphen split.
regexp, substitution = self.AGGRESSIVE_HYPHEN_SPLIT
text = re.sub(regexp, substitution, text)
# Unescape the XML symbols.
text = self.unescape_xml(text)
# Keep track of no. of quotation marks.
quote_counts = {"'": 0, '"': 0, "``": 0, "`": 0, "''": 0}
# The *prepend_space* variable is used to control the "effects" of
# detokenization as the function loops through the list of tokens and
# changes the *prepend_space* accordingly as it sequentially checks
# through the language specific and language independent conditions.
prepend_space = " "
detokenized_text = ""
tokens = text.split()
# Iterate through every token and apply language specific detokenization rule(s).
for i, token in enumerate(iter(tokens)):
# Check if the first char is CJK.
if is_cjk(token[0]):
# Perform left shift if this is a second consecutive CJK word.
if i > 0 and is_cjk(token[-1]):
detokenized_text += token
# But do nothing special if this is a CJK word that doesn't follow a CJK word
else:
detokenized_text += prepend_space + token
prepend_space = " "
# If it's a currency symbol.
elif token in self.IsSc:
# Perform right shift on currency and other random punctuation items
detokenized_text += prepend_space + token
prepend_space = ""
elif re.search(r"^[\,\.\?\!\:\;\\\%\}\]\)]+$", token):
# In French, these punctuations are prefixed with a non-breakable space.
if self.lang == "fr" and re.search(r"^[\?\!\:\;\\\%]$", token):
detokenized_text += " "
# Perform left shift on punctuation items.
detokenized_text += token
prepend_space = " "
elif (
self.lang == "en"
and i > 0
and re.search("^['][{}]".format(self.IsAlpha), token)
):
# and re.search(u'[{}]$'.format(self.IsAlnum), tokens[i-1])):
# For English, left-shift the contraction.
detokenized_text += token
prepend_space = " "
elif (
self.lang == "cs"
and i > 1
and re.search(
r"^[0-9]+$", tokens[-2]
) # If the previous previous token is a number.
and re.search(r"^[.,]$", tokens[-1]) # If previous token is a dot.
and re.search(r"^[0-9]+$", token)
): # If the current token is a number.
# In Czech, left-shift floats that are decimal numbers.
detokenized_text += token
prepend_space = " "
elif (
self.lang in ["fr", "it"]
and i <= len(tokens) - 2
and re.search("[{}][']$".format(self.IsAlpha), token)
and re.search("^[{}]$".format(self.IsAlpha), tokens[i + 1])
): # If the next token is alpha.
# For French and Italian, right-shift the contraction.
detokenized_text += prepend_space + token
prepend_space = ""
elif (
self.lang == "cs"
and i <= len(tokens) - 3
and re.search("[{}][']$".format(self.IsAlpha), token)
and re.search("^[-β]$", tokens[i + 1])
and re.search("^li$|^mail.*", tokens[i + 2], re.IGNORECASE)
): # In Perl, ($words[$i+2] =~ /^li$|^mail.*/i)
# In Czech, right-shift "-li" and a few Czech dashed words (e.g. e-mail)
detokenized_text += prepend_space + token + tokens[i + 1]
next(tokens, None) # Advance over the dash
prepend_space = ""
# Combine punctuation smartly.
elif re.search(r"""^[\'\"ββ`]+$""", token):
normalized_quo = token
if re.search(r"^[βββ]+$", token):
normalized_quo = '"'
quote_counts.get(normalized_quo, 0)
if self.lang == "cs" and token == "β":
quote_counts[normalized_quo] = 0
if self.lang == "cs" and token == "β":
quote_counts[normalized_quo] = 1
if quote_counts[normalized_quo] % 2 == 0:
if (
self.lang == "en"
and token == "'"
and i > 0
and re.search(r"[s]$", tokens[i - 1])
):
# Left shift on single quote for possessives ending
# in "s", e.g. "The Jones' house"
detokenized_text += token
prepend_space = " "
else:
# Right shift.
detokenized_text += prepend_space + token
prepend_space = ""
quote_counts[normalized_quo] += 1
else:
# Left shift.
text += token
prepend_space = " "
quote_counts[normalized_quo] += 1
elif (
self.lang == "fi"
and re.search(r":$", tokens[i - 1])
and re.search(self.FINNISH_REGEX, token)
):
# Finnish : without intervening space if followed by case suffix
# EU:N EU:n EU:ssa EU:sta EU:hun EU:iin ...
detokenized_text += prepend_space + token
prepend_space = " "
else:
detokenized_text += prepend_space + token
prepend_space = " "
# Merge multiple spaces.
regexp, substitution = self.ONE_SPACE
detokenized_text = re.sub(regexp, substitution, detokenized_text)
# Removes heading and trailing spaces.
detokenized_text = detokenized_text.strip()
return detokenized_text if return_str else detokenized_text.split()
|
def tokenize(self, tokens, return_str=False):
"""
Python port of the Moses detokenizer.
:param tokens: A list of strings, i.e. tokenized text.
:type tokens: list(str)
:return: str
"""
# Convert the list of tokens into a string and pad it with spaces.
text = " {} ".format(" ".join(tokens))
# Converts input string into unicode.
text = text_type(text)
# Detokenize the agressive hyphen split.
regexp, subsitution = self.AGGRESSIVE_HYPHEN_SPLIT
text = re.sub(regexp, subsitution, text)
# Unescape the XML symbols.
text = self.unescape_xml(text)
# Keep track of no. of quotation marks.
quote_counts = {"'": 0, '"': 0, "``": 0, "`": 0, "''": 0}
# The *prepend_space* variable is used to control the "effects" of
# detokenization as the function loops through the list of tokens and
# changes the *prepend_space* accordingly as it sequentially checks
# through the language specific and language independent conditions.
prepend_space = " "
detokenized_text = ""
tokens = text.split()
# Iterate through every token and apply language specific detokenization rule(s).
for i, token in enumerate(iter(tokens)):
# Check if the first char is CJK.
if is_cjk(token[0]):
# Perform left shift if this is a second consecutive CJK word.
if i > 0 and is_cjk(token[-1]):
detokenized_text += token
# But do nothing special if this is a CJK word that doesn't follow a CJK word
else:
detokenized_text += prepend_space + token
prepend_space = " "
# If it's a currency symbol.
elif token in self.IsSc:
# Perform right shift on currency and other random punctuation items
detokenized_text += prepend_space + token
prepend_space = ""
elif re.match(r"^[\,\.\?\!\:\;\\\%\}\]\)]+$", token):
# In French, these punctuations are prefixed with a non-breakable space.
if self.lang == "fr" and re.match(r"^[\?\!\:\;\\\%]$", token):
detokenized_text += " "
# Perform left shift on punctuation items.
detokenized_text += token
prepend_space = " "
elif (
self.lang == "en"
and i > 0
and re.match("^['][{}]".format(self.IsAlpha), token)
and re.match("[{}]".format(self.IsAlnum), token)
):
# For English, left-shift the contraction.
detokenized_text += token
prepend_space = " "
elif (
self.lang == "cs"
and i > 1
and re.match(
r"^[0-9]+$", tokens[-2]
) # If the previous previous token is a number.
and re.match(r"^[.,]$", tokens[-1]) # If previous token is a dot.
and re.match(r"^[0-9]+$", token)
): # If the current token is a number.
# In Czech, left-shift floats that are decimal numbers.
detokenized_text += token
prepend_space = " "
elif (
self.lang in ["fr", "it"]
and i <= len(tokens) - 2
and re.match("[{}][']$".format(self.IsAlpha), token)
and re.match("^[{}]$".format(self.IsAlpha), tokens[i + 1])
): # If the next token is alpha.
# For French and Italian, right-shift the contraction.
detokenized_text += prepend_space + token
prepend_space = ""
elif (
self.lang == "cs"
and i <= len(tokens) - 3
and re.match("[{}][']$".format(self.IsAlpha), token)
and re.match("^[-β]$", tokens[i + 1])
and re.match("^li$|^mail.*", tokens[i + 2], re.IGNORECASE)
): # In Perl, ($words[$i+2] =~ /^li$|^mail.*/i)
# In Czech, right-shift "-li" and a few Czech dashed words (e.g. e-mail)
detokenized_text += prepend_space + token + tokens[i + 1]
next(tokens, None) # Advance over the dash
prepend_space = ""
# Combine punctuation smartly.
elif re.match(r"""^[\'\"ββ`]+$""", token):
normalized_quo = token
if re.match(r"^[βββ]+$", token):
normalized_quo = '"'
quote_counts.get(normalized_quo, 0)
if self.lang == "cs" and token == "β":
quote_counts[normalized_quo] = 0
if self.lang == "cs" and token == "β":
quote_counts[normalized_quo] = 1
if quote_counts[normalized_quo] % 2 == 0:
if (
self.lang == "en"
and token == "'"
and i > 0
and re.match(r"[s]$", tokens[i - 1])
):
# Left shift on single quote for possessives ending
# in "s", e.g. "The Jones' house"
detokenized_text += token
prepend_space = " "
else:
# Right shift.
detokenized_text += prepend_space + token
prepend_space = ""
quote_counts[normalized_quo] += 1
else:
# Left shift.
text += token
prepend_space = " "
quote_counts[normalized_quo] += 1
elif (
self.lang == "fi"
and re.match(r":$", tokens[i - 1])
and re.match(self.FINNISH_REGEX, token)
):
# Finnish : without intervening space if followed by case suffix
# EU:N EU:n EU:ssa EU:sta EU:hun EU:iin ...
detokenized_text += prepend_space + token
prepend_space = " "
else:
detokenized_text += prepend_space + token
prepend_space = " "
# Merge multiple spaces.
regexp, subsitution = self.ONE_SPACE
detokenized_text = re.sub(regexp, subsitution, detokenized_text)
# Removes heading and trailing spaces.
detokenized_text = detokenized_text.strip()
return detokenized_text if return_str else detokenized_text.split()
|
https://github.com/nltk/nltk/issues/1551
|
$ python -c 'from nltk.tokenize.moses import MosesTokenizer; m = MosesTokenizer(); m.penn_tokenize("this aint funny")'
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "nltk/tokenize/moses.py", line 299, in penn_tokenize
text = re.sub(regexp, subsitution, text)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 155, in sub
return _compile(pattern, flags).sub(repl, string, count)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 251, in _compile
raise error, v # invalid expression
sre_constants.error: unbalanced parenthesis
|
sre_constants.error
|
def handles_nonbreaking_prefixes(self, text):
# Splits the text into tokens to check for nonbreaking prefixes.
tokens = text.split()
num_tokens = len(tokens)
for i, token in enumerate(tokens):
# Checks if token ends with a fullstop.
token_ends_with_period = re.search(r"^(\S+)\.$", text)
if token_ends_with_period:
prefix = token_ends_with_period.group(0)
# Checks for 3 conditions if
# i. the prefix is a token made up of chars within the IsAlpha
# ii. the prefix is in the list of nonbreaking prefixes and
# does not contain #NUMERIC_ONLY#
# iii. the token is not the last token and that the
# next token contains all lowercase.
if (
(prefix and self.isalpha(prefix))
or (
prefix in self.NONBREAKING_PREFIXES
and prefix not in self.NUMERIC_ONLY_PREFIXES
)
or (i != num_tokens - 1 and self.islower(tokens[i + 1]))
):
pass # No change to the token.
# Checks if the prefix is in NUMERIC_ONLY_PREFIXES
# and ensures that the next word is a digit.
elif prefix in self.NUMERIC_ONLY_PREFIXES and re.search(
r"^[0-9]+", token[i + 1]
):
pass # No change to the token.
else: # Otherwise, adds a space after the tokens before a dot.
tokens[i] = prefix + " ."
return " ".join(tokens) # Stitch the tokens back.
|
def handles_nonbreaking_prefixes(self, text):
# Splits the text into tokens to check for nonbreaking prefixes.
tokens = text.split()
num_tokens = len(tokens)
for i, token in enumerate(tokens):
# Checks if token ends with a fullstop.
token_ends_with_period = re.match(r"^(\S+)\.$", text)
if token_ends_with_period:
prefix = token_ends_with_period.group(0)
# Checks for 3 conditions if
# i. the prefix is a token made up of chars within the IsAlpha
# ii. the prefix is in the list of nonbreaking prefixes and
# does not contain #NUMERIC_ONLY#
# iii. the token is not the last token and that the
# next token contains all lowercase.
if (
(prefix and self.isalpha(prefix))
or (
prefix in self.NONBREAKING_PREFIXES
and prefix not in self.NUMERIC_ONLY_PREFIXES
)
or (i != num_tokens - 1 and self.islower(tokens[i + 1]))
):
pass # No change to the token.
# Checks if the prefix is in NUMERIC_ONLY_PREFIXES
# and ensures that the next word is a digit.
elif prefix in self.NUMERIC_ONLY_PREFIXES and re.match(
r"^[0-9]+", token[i + 1]
):
pass # No change to the token.
else: # Otherwise, adds a space after the tokens before a dot.
tokens[i] = prefix + " ."
return " ".join(tokens) # Stitch the tokens back.
|
https://github.com/nltk/nltk/issues/1551
|
$ python -c 'from nltk.tokenize.moses import MosesTokenizer; m = MosesTokenizer(); m.penn_tokenize("this aint funny")'
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "nltk/tokenize/moses.py", line 299, in penn_tokenize
text = re.sub(regexp, subsitution, text)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 155, in sub
return _compile(pattern, flags).sub(repl, string, count)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 251, in _compile
raise error, v # invalid expression
sre_constants.error: unbalanced parenthesis
|
sre_constants.error
|
def tokenize(self, tokens, return_str=False):
"""
Python port of the Moses detokenizer.
:param tokens: A list of strings, i.e. tokenized text.
:type tokens: list(str)
:return: str
"""
# Convert the list of tokens into a string and pad it with spaces.
text = " {} ".format(" ".join(tokens))
# Converts input string into unicode.
text = text_type(text)
# Detokenize the agressive hyphen split.
regexp, substitution = self.AGGRESSIVE_HYPHEN_SPLIT
text = re.sub(regexp, substitution, text)
# Unescape the XML symbols.
text = self.unescape_xml(text)
# Keep track of no. of quotation marks.
quote_counts = {"'": 0, '"': 0, "``": 0, "`": 0, "''": 0}
# The *prepend_space* variable is used to control the "effects" of
# detokenization as the function loops through the list of tokens and
# changes the *prepend_space* accordingly as it sequentially checks
# through the language specific and language independent conditions.
prepend_space = " "
detokenized_text = ""
tokens = text.split()
# Iterate through every token and apply language specific detokenization rule(s).
for i, token in enumerate(iter(tokens)):
# Check if the first char is CJK.
if is_cjk(token[0]):
# Perform left shift if this is a second consecutive CJK word.
if i > 0 and is_cjk(token[-1]):
detokenized_text += token
# But do nothing special if this is a CJK word that doesn't follow a CJK word
else:
detokenized_text += prepend_space + token
prepend_space = " "
# If it's a currency symbol.
elif token in self.IsSc:
# Perform right shift on currency and other random punctuation items
detokenized_text += prepend_space + token
prepend_space = ""
elif re.search(r"^[\,\.\?\!\:\;\\\%\}\]\)]+$", token):
# In French, these punctuations are prefixed with a non-breakable space.
if self.lang == "fr" and re.search(r"^[\?\!\:\;\\\%]$", token):
detokenized_text += " "
# Perform left shift on punctuation items.
detokenized_text += token
prepend_space = " "
elif (
self.lang == "en"
and i > 0
and re.search("^['][{}]".format(self.IsAlpha), token)
):
# and re.search(u'[{}]$'.format(self.IsAlnum), tokens[i-1])):
# For English, left-shift the contraction.
detokenized_text += token
prepend_space = " "
elif (
self.lang == "cs"
and i > 1
and re.search(
r"^[0-9]+$", tokens[-2]
) # If the previous previous token is a number.
and re.search(r"^[.,]$", tokens[-1]) # If previous token is a dot.
and re.search(r"^[0-9]+$", token)
): # If the current token is a number.
# In Czech, left-shift floats that are decimal numbers.
detokenized_text += token
prepend_space = " "
elif (
self.lang in ["fr", "it"]
and i <= len(tokens) - 2
and re.search("[{}][']$".format(self.IsAlpha), token)
and re.search("^[{}]$".format(self.IsAlpha), tokens[i + 1])
): # If the next token is alpha.
# For French and Italian, right-shift the contraction.
detokenized_text += prepend_space + token
prepend_space = ""
elif (
self.lang == "cs"
and i <= len(tokens) - 3
and re.search("[{}][']$".format(self.IsAlpha), token)
and re.search("^[-β]$", tokens[i + 1])
and re.search("^li$|^mail.*", tokens[i + 2], re.IGNORECASE)
): # In Perl, ($words[$i+2] =~ /^li$|^mail.*/i)
# In Czech, right-shift "-li" and a few Czech dashed words (e.g. e-mail)
detokenized_text += prepend_space + token + tokens[i + 1]
next(tokens, None) # Advance over the dash
prepend_space = ""
# Combine punctuation smartly.
elif re.search(r"""^[\'\"ββ`]+$""", token):
normalized_quo = token
if re.search(r"^[βββ]+$", token):
normalized_quo = '"'
quote_counts.get(normalized_quo, 0)
if self.lang == "cs" and token == "β":
quote_counts[normalized_quo] = 0
if self.lang == "cs" and token == "β":
quote_counts[normalized_quo] = 1
if quote_counts[normalized_quo] % 2 == 0:
if (
self.lang == "en"
and token == "'"
and i > 0
and re.search(r"[s]$", tokens[i - 1])
):
# Left shift on single quote for possessives ending
# in "s", e.g. "The Jones' house"
detokenized_text += token
prepend_space = " "
else:
# Right shift.
detokenized_text += prepend_space + token
prepend_space = ""
quote_counts[normalized_quo] += 1
else:
# Left shift.
text += token
prepend_space = " "
quote_counts[normalized_quo] += 1
elif (
self.lang == "fi"
and re.search(r":$", tokens[i - 1])
and re.search(self.FINNISH_REGEX, token)
):
# Finnish : without intervening space if followed by case suffix
# EU:N EU:n EU:ssa EU:sta EU:hun EU:iin ...
detokenized_text += prepend_space + token
prepend_space = " "
else:
detokenized_text += prepend_space + token
prepend_space = " "
# Merge multiple spaces.
regexp, substitution = self.ONE_SPACE
detokenized_text = re.sub(regexp, substitution, detokenized_text)
# Removes heading and trailing spaces.
detokenized_text = detokenized_text.strip()
return detokenized_text if return_str else detokenized_text.split()
|
def tokenize(self, tokens, return_str=False):
"""
Python port of the Moses detokenizer.
:param tokens: A list of strings, i.e. tokenized text.
:type tokens: list(str)
:return: str
"""
# Convert the list of tokens into a string and pad it with spaces.
text = " {} ".format(" ".join(tokens))
# Converts input string into unicode.
text = text_type(text)
# Detokenize the agressive hyphen split.
regexp, substitution = self.AGGRESSIVE_HYPHEN_SPLIT
text = re.sub(regexp, substitution, text)
# Unescape the XML symbols.
text = self.unescape_xml(text)
# Keep track of no. of quotation marks.
quote_counts = {"'": 0, '"': 0, "``": 0, "`": 0, "''": 0}
# The *prepend_space* variable is used to control the "effects" of
# detokenization as the function loops through the list of tokens and
# changes the *prepend_space* accordingly as it sequentially checks
# through the language specific and language independent conditions.
prepend_space = " "
detokenized_text = ""
tokens = text.split()
# Iterate through every token and apply language specific detokenization rule(s).
for i, token in enumerate(iter(tokens)):
# Check if the first char is CJK.
if is_cjk(token[0]):
# Perform left shift if this is a second consecutive CJK word.
if i > 0 and is_cjk(token[-1]):
detokenized_text += token
# But do nothing special if this is a CJK word that doesn't follow a CJK word
else:
detokenized_text += prepend_space + token
prepend_space = " "
# If it's a currency symbol.
elif token in self.IsSc:
# Perform right shift on currency and other random punctuation items
detokenized_text += prepend_space + token
prepend_space = ""
elif re.match(r"^[\,\.\?\!\:\;\\\%\}\]\)]+$", token):
# In French, these punctuations are prefixed with a non-breakable space.
if self.lang == "fr" and re.match(r"^[\?\!\:\;\\\%]$", token):
detokenized_text += " "
# Perform left shift on punctuation items.
detokenized_text += token
prepend_space = " "
elif (
self.lang == "en"
and i > 0
and re.match("^['][{}]".format(self.IsAlpha), token)
and re.match("[{}]".format(self.IsAlnum), token)
):
# For English, left-shift the contraction.
detokenized_text += token
prepend_space = " "
elif (
self.lang == "cs"
and i > 1
and re.match(
r"^[0-9]+$", tokens[-2]
) # If the previous previous token is a number.
and re.match(r"^[.,]$", tokens[-1]) # If previous token is a dot.
and re.match(r"^[0-9]+$", token)
): # If the current token is a number.
# In Czech, left-shift floats that are decimal numbers.
detokenized_text += token
prepend_space = " "
elif (
self.lang in ["fr", "it"]
and i <= len(tokens) - 2
and re.match("[{}][']$".format(self.IsAlpha), token)
and re.match("^[{}]$".format(self.IsAlpha), tokens[i + 1])
): # If the next token is alpha.
# For French and Italian, right-shift the contraction.
detokenized_text += prepend_space + token
prepend_space = ""
elif (
self.lang == "cs"
and i <= len(tokens) - 3
and re.match("[{}][']$".format(self.IsAlpha), token)
and re.match("^[-β]$", tokens[i + 1])
and re.match("^li$|^mail.*", tokens[i + 2], re.IGNORECASE)
): # In Perl, ($words[$i+2] =~ /^li$|^mail.*/i)
# In Czech, right-shift "-li" and a few Czech dashed words (e.g. e-mail)
detokenized_text += prepend_space + token + tokens[i + 1]
next(tokens, None) # Advance over the dash
prepend_space = ""
# Combine punctuation smartly.
elif re.match(r"""^[\'\"ββ`]+$""", token):
normalized_quo = token
if re.match(r"^[βββ]+$", token):
normalized_quo = '"'
quote_counts.get(normalized_quo, 0)
if self.lang == "cs" and token == "β":
quote_counts[normalized_quo] = 0
if self.lang == "cs" and token == "β":
quote_counts[normalized_quo] = 1
if quote_counts[normalized_quo] % 2 == 0:
if (
self.lang == "en"
and token == "'"
and i > 0
and re.match(r"[s]$", tokens[i - 1])
):
# Left shift on single quote for possessives ending
# in "s", e.g. "The Jones' house"
detokenized_text += token
prepend_space = " "
else:
# Right shift.
detokenized_text += prepend_space + token
prepend_space = ""
quote_counts[normalized_quo] += 1
else:
# Left shift.
text += token
prepend_space = " "
quote_counts[normalized_quo] += 1
elif (
self.lang == "fi"
and re.match(r":$", tokens[i - 1])
and re.match(self.FINNISH_REGEX, token)
):
# Finnish : without intervening space if followed by case suffix
# EU:N EU:n EU:ssa EU:sta EU:hun EU:iin ...
detokenized_text += prepend_space + token
prepend_space = " "
else:
detokenized_text += prepend_space + token
prepend_space = " "
# Merge multiple spaces.
regexp, substitution = self.ONE_SPACE
detokenized_text = re.sub(regexp, substitution, detokenized_text)
# Removes heading and trailing spaces.
detokenized_text = detokenized_text.strip()
return detokenized_text if return_str else detokenized_text.split()
|
https://github.com/nltk/nltk/issues/1551
|
$ python -c 'from nltk.tokenize.moses import MosesTokenizer; m = MosesTokenizer(); m.penn_tokenize("this aint funny")'
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "nltk/tokenize/moses.py", line 299, in penn_tokenize
text = re.sub(regexp, subsitution, text)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 155, in sub
return _compile(pattern, flags).sub(repl, string, count)
File "/System/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/re.py", line 251, in _compile
raise error, v # invalid expression
sre_constants.error: unbalanced parenthesis
|
sre_constants.error
|
def _update_index(self, url=None):
"""A helper function that ensures that self._index is
up-to-date. If the index is older than self.INDEX_TIMEOUT,
then download it again."""
# Check if the index is aleady up-to-date. If so, do nothing.
if not (
self._index is None
or url is not None
or time.time() - self._index_timestamp > self.INDEX_TIMEOUT
):
return
# If a URL was specified, then update our URL.
self._url = url or self._url
# Download the index file.
self._index = nltk.internals.ElementWrapper(
ElementTree.parse(compat.urlopen(self._url)).getroot()
)
self._index_timestamp = time.time()
# Build a dictionary of packages.
packages = [Package.fromxml(p) for p in self._index.findall("packages/package")]
self._packages = dict((p.id, p) for p in packages)
# Build a dictionary of collections.
collections = [
Collection.fromxml(c) for c in self._index.findall("collections/collection")
]
self._collections = dict((c.id, c) for c in collections)
# Replace identifiers with actual children in collection.children.
for collection in self._collections.values():
for i, child_id in enumerate(collection.children):
if child_id in self._packages:
collection.children[i] = self._packages[child_id]
elif child_id in self._collections:
collection.children[i] = self._collections[child_id]
else:
print("removing collection member with no package: {}".format(child_id))
del collection.children[i]
# Fill in collection.packages for each collection.
for collection in self._collections.values():
packages = {}
queue = [collection]
for child in queue:
if isinstance(child, Collection):
queue.extend(child.children)
else:
packages[child.id] = child
collection.packages = packages.values()
# Flush the status cache
self._status_cache.clear()
|
def _update_index(self, url=None):
"""A helper function that ensures that self._index is
up-to-date. If the index is older than self.INDEX_TIMEOUT,
then download it again."""
# Check if the index is aleady up-to-date. If so, do nothing.
if not (
self._index is None
or url is not None
or time.time() - self._index_timestamp > self.INDEX_TIMEOUT
):
return
# If a URL was specified, then update our URL.
self._url = url or self._url
# Download the index file.
self._index = nltk.internals.ElementWrapper(
ElementTree.parse(compat.urlopen(self._url)).getroot()
)
self._index_timestamp = time.time()
# Build a dictionary of packages.
packages = [Package.fromxml(p) for p in self._index.findall("packages/package")]
self._packages = dict((p.id, p) for p in packages)
# Build a dictionary of collections.
collections = [
Collection.fromxml(c) for c in self._index.findall("collections/collection")
]
self._collections = dict((c.id, c) for c in collections)
# Replace identifiers with actual children in collection.children.
for collection in self._collections.values():
for i, child_id in enumerate(collection.children):
if child_id in self._packages:
collection.children[i] = self._packages[child_id]
if child_id in self._collections:
collection.children[i] = self._collections[child_id]
# Fill in collection.packages for each collection.
for collection in self._collections.values():
packages = {}
queue = [collection]
for child in queue:
if isinstance(child, Collection):
queue.extend(child.children)
else:
packages[child.id] = child
collection.packages = packages.values()
# Flush the status cache
self._status_cache.clear()
|
https://github.com/nltk/nltk/issues/882
|
$ sudo python -m nltk.downloader
Traceback (most recent call last):
File "/opt/local/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/runpy.py", line 170, in _run_module_as_main
"__main__", mod_spec)
File "/opt/local/Library/Frameworks/Python.framework/Versions/3.4/lib/python3.4/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/Users/sb/git/nltk/nltk/downloader.py", line 2266, in <module>
halt_on_error=options.halt_on_error)
File "/Users/sb/git/nltk/nltk/downloader.py", line 655, in download
self._interactive_download()
File "/Users/sb/git/nltk/nltk/downloader.py", line 967, in _interactive_download
DownloaderGUI(self).mainloop()
File "/Users/sb/git/nltk/nltk/downloader.py", line 1227, in __init__
self._fill_table()
File "/Users/sb/git/nltk/nltk/downloader.py", line 1523, in _fill_table
items = self._ds.collections()
File "/Users/sb/git/nltk/nltk/downloader.py", line 499, in collections
self._update_index()
File "/Users/sb/git/nltk/nltk/downloader.py", line 854, in _update_index
packages[child.id] = child
AttributeError: 'str' object has no attribute 'id'
|
AttributeError
|
def getattr_value(self, val):
if isinstance(val, string_types):
val = getattr(self, val)
if isinstance(val, tt.TensorVariable):
return val.tag.test_value
if isinstance(val, tt.sharedvar.SharedVariable):
return val.get_value()
if isinstance(val, theano_constant):
return val.value
return val
|
def getattr_value(self, val):
if isinstance(val, string_types):
val = getattr(self, val)
if isinstance(val, tt.TensorVariable):
return val.tag.test_value
if isinstance(val, tt.sharedvar.TensorSharedVariable):
return val.get_value()
if isinstance(val, theano_constant):
return val.value
return val
|
https://github.com/pymc-devs/pymc3/issues/3139
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-6-6131815c61f7> in <module>()
4 a = pm.Lognormal('a',mu=product_mu_shared, sd=product_sd)
5 b = pm.Normal('b',mu=0.0, sd=product_sd)
----> 6 d = pm.Normal('d', mu=product_mu_shared, sd=product_sd)
7
8
C:\ProgramData\Anaconda3\lib\site-packages\pymc3\distributions\distribution.py in __new__(cls, name, *args, **kwargs)
40 total_size = kwargs.pop('total_size', None)
41 dist = cls.dist(*args, **kwargs)
---> 42 return model.Var(name, dist, data, total_size)
43 else:
44 raise TypeError("Name needs to be a string but got: {}".format(name))
C:\ProgramData\Anaconda3\lib\site-packages\pymc3\model.py in Var(self, name, dist, data, total_size)
806 with self:
807 var = FreeRV(name=name, distribution=dist,
--> 808 total_size=total_size, model=self)
809 self.free_RVs.append(var)
810 else:
C:\ProgramData\Anaconda3\lib\site-packages\pymc3\model.py in __init__(self, type, owner, index, name, distribution, total_size, model)
1205 self.distribution = distribution
1206 self.tag.test_value = np.ones(
-> 1207 distribution.shape, distribution.dtype) * distribution.default()
1208 self.logp_elemwiset = distribution.logp(self)
1209 # The logp might need scaling in minibatches.
C:\ProgramData\Anaconda3\lib\site-packages\pymc3\distributions\distribution.py in default(self)
65
66 def default(self):
---> 67 return np.asarray(self.get_test_val(self.testval, self.defaults), self.dtype)
68
69 def get_test_val(self, val, defaults):
C:\ProgramData\Anaconda3\lib\site-packages\pymc3\distributions\distribution.py in get_test_val(self, val, defaults)
70 if val is None:
71 for v in defaults:
---> 72 if hasattr(self, v) and np.all(np.isfinite(self.getattr_value(v))):
73 return self.getattr_value(v)
74 else:
TypeError: ufunc 'isfinite' not supported for the input types, and the inputs could not be safely coerced to any supported types according to the casting rule ''safe''```
**Please provide any additional information below.**
## Versions and main components
* PyMC3 Version: 3.5
* Theano Version: 1.0.2
* Python Version: 3.6
* Operating system: Windows 10
* How did you install PyMC3: (conda/pip) conda-forge
|
TypeError
|
def sample(
draws=1000,
step=None,
init="auto",
n_init=200000,
start=None,
trace=None,
chain_idx=0,
chains=None,
cores=None,
tune=1000,
progressbar=True,
model=None,
random_seed=None,
discard_tuned_samples=True,
compute_convergence_checks=True,
callback=None,
*,
return_inferencedata=None,
idata_kwargs: dict = None,
mp_ctx=None,
pickle_backend: str = "pickle",
**kwargs,
):
r"""Draw samples from the posterior using the given step methods.
Multiple step methods are supported via compound step methods.
Parameters
----------
draws : int
The number of samples to draw. Defaults to 1000. The number of tuned samples are discarded
by default. See ``discard_tuned_samples``.
init : str
Initialization method to use for auto-assigned NUTS samplers.
* auto: Choose a default initialization method automatically.
Currently, this is ``jitter+adapt_diag``, but this can change in the future.
If you depend on the exact behaviour, choose an initialization method explicitly.
* adapt_diag: Start with a identity mass matrix and then adapt a diagonal based on the
variance of the tuning samples. All chains use the test value (usually the prior mean)
as starting point.
* jitter+adapt_diag: Same as ``adapt_diag``, but add uniform jitter in [-1, 1] to the
starting point in each chain.
* advi+adapt_diag: Run ADVI and then adapt the resulting diagonal mass matrix based on the
sample variance of the tuning samples.
* advi+adapt_diag_grad: Run ADVI and then adapt the resulting diagonal mass matrix based
on the variance of the gradients during tuning. This is **experimental** and might be
removed in a future release.
* advi: Run ADVI to estimate posterior mean and diagonal mass matrix.
* advi_map: Initialize ADVI with MAP and use MAP as starting point.
* map: Use the MAP as starting point. This is discouraged.
* adapt_full: Adapt a dense mass matrix using the sample covariances
step : function or iterable of functions
A step function or collection of functions. If there are variables without step methods,
step methods for those variables will be assigned automatically. By default the NUTS step
method will be used, if appropriate to the model; this is a good default for beginning
users.
n_init : int
Number of iterations of initializer. Only works for 'ADVI' init methods.
start : dict, or array of dict
Starting point in parameter space (or partial point)
Defaults to ``trace.point(-1))`` if there is a trace provided and model.test_point if not
(defaults to empty dict). Initialization methods for NUTS (see ``init`` keyword) can
overwrite the default.
trace : backend, list, or MultiTrace
This should be a backend instance, a list of variables to track, or a MultiTrace object
with past values. If a MultiTrace object is given, it must contain samples for the chain
number ``chain``. If None or a list of variables, the NDArray backend is used.
chain_idx : int
Chain number used to store sample in backend. If ``chains`` is greater than one, chain
numbers will start here.
chains : int
The number of chains to sample. Running independent chains is important for some
convergence statistics and can also reveal multiple modes in the posterior. If ``None``,
then set to either ``cores`` or 2, whichever is larger.
cores : int
The number of chains to run in parallel. If ``None``, set to the number of CPUs in the
system, but at most 4.
tune : int
Number of iterations to tune, defaults to 1000. Samplers adjust the step sizes, scalings or
similar during tuning. Tuning samples will be drawn in addition to the number specified in
the ``draws`` argument, and will be discarded unless ``discard_tuned_samples`` is set to
False.
progressbar : bool, optional default=True
Whether or not to display a progress bar in the command line. The bar shows the percentage
of completion, the sampling speed in samples per second (SPS), and the estimated remaining
time until completion ("expected time of arrival"; ETA).
model : Model (optional if in ``with`` context)
random_seed : int or list of ints
A list is accepted if ``cores`` is greater than one.
discard_tuned_samples : bool
Whether to discard posterior samples of the tune interval.
compute_convergence_checks : bool, default=True
Whether to compute sampler statistics like Gelman-Rubin and ``effective_n``.
callback : function, default=None
A function which gets called for every sample from the trace of a chain. The function is
called with the trace and the current draw and will contain all samples for a single trace.
the ``draw.chain`` argument can be used to determine which of the active chains the sample
is drawn from.
Sampling can be interrupted by throwing a ``KeyboardInterrupt`` in the callback.
return_inferencedata : bool, default=False
Whether to return the trace as an :class:`arviz:arviz.InferenceData` (True) object or a `MultiTrace` (False)
Defaults to `False`, but we'll switch to `True` in an upcoming release.
idata_kwargs : dict, optional
Keyword arguments for :func:`arviz:arviz.from_pymc3`
mp_ctx : multiprocessing.context.BaseContent
A multiprocessing context for parallel sampling. See multiprocessing
documentation for details.
pickle_backend : str
One of `'pickle'` or `'dill'`. The library used to pickle models
in parallel sampling if the multiprocessing context is not of type
`fork`.
Returns
-------
trace : pymc3.backends.base.MultiTrace or arviz.InferenceData
A ``MultiTrace`` or ArviZ ``InferenceData`` object that contains the samples.
Notes
-----
Optional keyword arguments can be passed to ``sample`` to be delivered to the
``step_method``\ s used during sampling.
If your model uses only one step method, you can address step method kwargs
directly. In particular, the NUTS step method has several options including:
* target_accept : float in [0, 1]. The step size is tuned such that we
approximate this acceptance rate. Higher values like 0.9 or 0.95 often
work better for problematic posteriors
* max_treedepth : The maximum depth of the trajectory tree
* step_scale : float, default 0.25
The initial guess for the step size scaled down by :math:`1/n**(1/4)`
If your model uses multiple step methods, aka a Compound Step, then you have
two ways to address arguments to each step method:
A. If you let ``sample()`` automatically assign the ``step_method``\ s,
and you can correctly anticipate what they will be, then you can wrap
step method kwargs in a dict and pass that to sample() with a kwarg set
to the name of the step method.
e.g. for a CompoundStep comprising NUTS and BinaryGibbsMetropolis,
you could send:
1. ``target_accept`` to NUTS: nuts={'target_accept':0.9}
2. ``transit_p`` to BinaryGibbsMetropolis: binary_gibbs_metropolis={'transit_p':.7}
Note that available names are:
``nuts``, ``hmc``, ``metropolis``, ``binary_metropolis``,
``binary_gibbs_metropolis``, ``categorical_gibbs_metropolis``,
``DEMetropolis``, ``DEMetropolisZ``, ``slice``
B. If you manually declare the ``step_method``\ s, within the ``step``
kwarg, then you can address the ``step_method`` kwargs directly.
e.g. for a CompoundStep comprising NUTS and BinaryGibbsMetropolis,
you could send ::
step=[pm.NUTS([freeRV1, freeRV2], target_accept=0.9),
pm.BinaryGibbsMetropolis([freeRV3], transit_p=.7)]
You can find a full list of arguments in the docstring of the step methods.
Examples
--------
.. code:: ipython
In [1]: import pymc3 as pm
...: n = 100
...: h = 61
...: alpha = 2
...: beta = 2
In [2]: with pm.Model() as model: # context management
...: p = pm.Beta("p", alpha=alpha, beta=beta)
...: y = pm.Binomial("y", n=n, p=p, observed=h)
...: trace = pm.sample()
In [3]: pm.summary(trace, kind="stats")
Out[3]:
mean sd hdi_3% hdi_97%
p 0.609 0.047 0.528 0.699
"""
model = modelcontext(model)
if start is None:
start = model.test_point
else:
if isinstance(start, dict):
update_start_vals(start, model.test_point, model)
else:
for chain_start_vals in start:
update_start_vals(chain_start_vals, model.test_point, model)
check_start_vals(start, model)
if cores is None:
cores = min(4, _cpu_count())
if chains is None:
chains = max(2, cores)
if isinstance(start, dict):
start = [start] * chains
if random_seed == -1:
random_seed = None
if chains == 1 and isinstance(random_seed, int):
random_seed = [random_seed]
if random_seed is None or isinstance(random_seed, int):
if random_seed is not None:
np.random.seed(random_seed)
random_seed = [np.random.randint(2**30) for _ in range(chains)]
if not isinstance(random_seed, Iterable):
raise TypeError("Invalid value for `random_seed`. Must be tuple, list or int")
if not discard_tuned_samples and not return_inferencedata:
warnings.warn(
"Tuning samples will be included in the returned `MultiTrace` object, which can lead to"
" complications in your downstream analysis. Please consider to switch to `InferenceData`:\n"
"`pm.sample(..., return_inferencedata=True)`",
UserWarning,
)
if return_inferencedata is None:
v = packaging.version.parse(pm.__version__)
if v.release[0] > 3 or v.release[1] >= 10: # type: ignore
warnings.warn(
"In an upcoming release, pm.sample will return an `arviz.InferenceData` object instead of a `MultiTrace` by default. "
"You can pass return_inferencedata=True or return_inferencedata=False to be safe and silence this warning.",
FutureWarning,
)
# set the default
return_inferencedata = False
if start is not None:
for start_vals in start:
_check_start_shape(model, start_vals)
# small trace warning
if draws == 0:
msg = "Tuning was enabled throughout the whole trace."
_log.warning(msg)
elif draws < 500:
msg = "Only %s samples in chain." % draws
_log.warning(msg)
draws += tune
if model.ndim == 0:
raise ValueError("The model does not contain any free variables.")
if step is None and init is not None and all_continuous(model.vars):
try:
# By default, try to use NUTS
_log.info("Auto-assigning NUTS sampler...")
start_, step = init_nuts(
init=init,
chains=chains,
n_init=n_init,
model=model,
random_seed=random_seed,
progressbar=progressbar,
**kwargs,
)
check_start_vals(start_, model)
if start is None:
start = start_
except (AttributeError, NotImplementedError, tg.NullTypeGradError):
# gradient computation failed
_log.info(
"Initializing NUTS failed. Falling back to elementwise auto-assignment."
)
_log.debug("Exception in init nuts", exec_info=True)
step = assign_step_methods(model, step, step_kwargs=kwargs)
else:
step = assign_step_methods(model, step, step_kwargs=kwargs)
if isinstance(step, list):
step = CompoundStep(step)
if start is None:
start = {}
if isinstance(start, dict):
start = [start] * chains
sample_args = {
"draws": draws,
"step": step,
"start": start,
"trace": trace,
"chain": chain_idx,
"chains": chains,
"tune": tune,
"progressbar": progressbar,
"model": model,
"random_seed": random_seed,
"cores": cores,
"callback": callback,
"discard_tuned_samples": discard_tuned_samples,
}
parallel_args = {
"pickle_backend": pickle_backend,
"mp_ctx": mp_ctx,
}
sample_args.update(kwargs)
has_population_samplers = np.any(
[
isinstance(m, arraystep.PopulationArrayStepShared)
for m in (step.methods if isinstance(step, CompoundStep) else [step])
]
)
parallel = cores > 1 and chains > 1 and not has_population_samplers
t_start = time.time()
if parallel:
_log.info(f"Multiprocess sampling ({chains} chains in {cores} jobs)")
_print_step_hierarchy(step)
try:
trace = _mp_sample(**sample_args, **parallel_args)
except pickle.PickleError:
_log.warning("Could not pickle model, sampling singlethreaded.")
_log.debug("Pickling error:", exec_info=True)
parallel = False
except AttributeError as e:
if str(e).startswith("AttributeError: Can't pickle"):
_log.warning("Could not pickle model, sampling singlethreaded.")
_log.debug("Pickling error:", exec_info=True)
parallel = False
else:
raise
if not parallel:
if has_population_samplers:
has_demcmc = np.any(
[
isinstance(m, DEMetropolis)
for m in (
step.methods if isinstance(step, CompoundStep) else [step]
)
]
)
_log.info(f"Population sampling ({chains} chains)")
if has_demcmc and chains < 3:
raise ValueError(
"DEMetropolis requires at least 3 chains. "
"For this {}-dimensional model you should use β₯{} chains".format(
model.ndim, model.ndim + 1
)
)
if has_demcmc and chains <= model.ndim:
warnings.warn(
"DEMetropolis should be used with more chains than dimensions! "
"(The model has {} dimensions.)".format(model.ndim),
UserWarning,
)
_print_step_hierarchy(step)
trace = _sample_population(parallelize=cores > 1, **sample_args)
else:
_log.info(f"Sequential sampling ({chains} chains in 1 job)")
_print_step_hierarchy(step)
trace = _sample_many(**sample_args)
t_sampling = time.time() - t_start
# count the number of tune/draw iterations that happened
# ideally via the "tune" statistic, but not all samplers record it!
if "tune" in trace.stat_names:
stat = trace.get_sampler_stats("tune", chains=0)
# when CompoundStep is used, the stat is 2 dimensional!
if len(stat.shape) == 2:
stat = stat[:, 0]
stat = tuple(stat)
n_tune = stat.count(True)
n_draws = stat.count(False)
else:
# these may be wrong when KeyboardInterrupt happened, but they're better than nothing
n_tune = min(tune, len(trace))
n_draws = max(0, len(trace) - n_tune)
if discard_tuned_samples:
trace = trace[n_tune:]
# save metadata in SamplerReport
trace.report._n_tune = n_tune
trace.report._n_draws = n_draws
trace.report._t_sampling = t_sampling
if "variable_inclusion" in trace.stat_names:
variable_inclusion = np.stack(
trace.get_sampler_stats("variable_inclusion")
).mean(0)
trace.report.variable_importance = variable_inclusion / variable_inclusion.sum()
n_chains = len(trace.chains)
_log.info(
f"Sampling {n_chains} chain{'s' if n_chains > 1 else ''} for {n_tune:_d} tune and {n_draws:_d} draw iterations "
f"({n_tune * n_chains:_d} + {n_draws * n_chains:_d} draws total) "
f"took {trace.report.t_sampling:.0f} seconds."
)
idata = None
if compute_convergence_checks or return_inferencedata:
ikwargs = dict(model=model, save_warmup=not discard_tuned_samples)
if idata_kwargs:
ikwargs.update(idata_kwargs)
idata = arviz.from_pymc3(trace, **ikwargs)
if compute_convergence_checks:
if draws - tune < 100:
warnings.warn(
"The number of samples is too small to check convergence reliably."
)
else:
trace.report._run_convergence_checks(idata, model)
trace.report._log_summary()
if return_inferencedata:
return idata
else:
return trace
|
def sample(
draws=1000,
step=None,
init="auto",
n_init=200000,
start=None,
trace=None,
chain_idx=0,
chains=None,
cores=None,
tune=1000,
progressbar=True,
model=None,
random_seed=None,
discard_tuned_samples=True,
compute_convergence_checks=True,
callback=None,
*,
return_inferencedata=None,
idata_kwargs: dict = None,
mp_ctx=None,
pickle_backend: str = "pickle",
**kwargs,
):
"""Draw samples from the posterior using the given step methods.
Multiple step methods are supported via compound step methods.
Parameters
----------
draws : int
The number of samples to draw. Defaults to 1000. The number of tuned samples are discarded
by default. See ``discard_tuned_samples``.
init : str
Initialization method to use for auto-assigned NUTS samplers.
* auto: Choose a default initialization method automatically.
Currently, this is ``jitter+adapt_diag``, but this can change in the future.
If you depend on the exact behaviour, choose an initialization method explicitly.
* adapt_diag: Start with a identity mass matrix and then adapt a diagonal based on the
variance of the tuning samples. All chains use the test value (usually the prior mean)
as starting point.
* jitter+adapt_diag: Same as ``adapt_diag``, but add uniform jitter in [-1, 1] to the
starting point in each chain.
* advi+adapt_diag: Run ADVI and then adapt the resulting diagonal mass matrix based on the
sample variance of the tuning samples.
* advi+adapt_diag_grad: Run ADVI and then adapt the resulting diagonal mass matrix based
on the variance of the gradients during tuning. This is **experimental** and might be
removed in a future release.
* advi: Run ADVI to estimate posterior mean and diagonal mass matrix.
* advi_map: Initialize ADVI with MAP and use MAP as starting point.
* map: Use the MAP as starting point. This is discouraged.
* adapt_full: Adapt a dense mass matrix using the sample covariances
step : function or iterable of functions
A step function or collection of functions. If there are variables without step methods,
step methods for those variables will be assigned automatically. By default the NUTS step
method will be used, if appropriate to the model; this is a good default for beginning
users.
n_init : int
Number of iterations of initializer. Only works for 'ADVI' init methods.
start : dict, or array of dict
Starting point in parameter space (or partial point)
Defaults to ``trace.point(-1))`` if there is a trace provided and model.test_point if not
(defaults to empty dict). Initialization methods for NUTS (see ``init`` keyword) can
overwrite the default.
trace : backend, list, or MultiTrace
This should be a backend instance, a list of variables to track, or a MultiTrace object
with past values. If a MultiTrace object is given, it must contain samples for the chain
number ``chain``. If None or a list of variables, the NDArray backend is used.
chain_idx : int
Chain number used to store sample in backend. If ``chains`` is greater than one, chain
numbers will start here.
chains : int
The number of chains to sample. Running independent chains is important for some
convergence statistics and can also reveal multiple modes in the posterior. If ``None``,
then set to either ``cores`` or 2, whichever is larger.
cores : int
The number of chains to run in parallel. If ``None``, set to the number of CPUs in the
system, but at most 4.
tune : int
Number of iterations to tune, defaults to 1000. Samplers adjust the step sizes, scalings or
similar during tuning. Tuning samples will be drawn in addition to the number specified in
the ``draws`` argument, and will be discarded unless ``discard_tuned_samples`` is set to
False.
progressbar : bool, optional default=True
Whether or not to display a progress bar in the command line. The bar shows the percentage
of completion, the sampling speed in samples per second (SPS), and the estimated remaining
time until completion ("expected time of arrival"; ETA).
model : Model (optional if in ``with`` context)
random_seed : int or list of ints
A list is accepted if ``cores`` is greater than one.
discard_tuned_samples : bool
Whether to discard posterior samples of the tune interval.
compute_convergence_checks : bool, default=True
Whether to compute sampler statistics like Gelman-Rubin and ``effective_n``.
callback : function, default=None
A function which gets called for every sample from the trace of a chain. The function is
called with the trace and the current draw and will contain all samples for a single trace.
the ``draw.chain`` argument can be used to determine which of the active chains the sample
is drawn from.
Sampling can be interrupted by throwing a ``KeyboardInterrupt`` in the callback.
return_inferencedata : bool, default=False
Whether to return the trace as an :class:`arviz:arviz.InferenceData` (True) object or a `MultiTrace` (False)
Defaults to `False`, but we'll switch to `True` in an upcoming release.
idata_kwargs : dict, optional
Keyword arguments for :func:`arviz:arviz.from_pymc3`
mp_ctx : multiprocessing.context.BaseContent
A multiprocessing context for parallel sampling. See multiprocessing
documentation for details.
pickle_backend : str
One of `'pickle'` or `'dill'`. The library used to pickle models
in parallel sampling if the multiprocessing context is not of type
`fork`.
Returns
-------
trace : pymc3.backends.base.MultiTrace or arviz.InferenceData
A ``MultiTrace`` or ArviZ ``InferenceData`` object that contains the samples.
Notes
-----
Optional keyword arguments can be passed to ``sample`` to be delivered to the
``step_method``s used during sampling.
If your model uses only one step method, you can address step method kwargs
directly. In particular, the NUTS step method has several options including:
* target_accept : float in [0, 1]. The step size is tuned such that we
approximate this acceptance rate. Higher values like 0.9 or 0.95 often
work better for problematic posteriors
* max_treedepth : The maximum depth of the trajectory tree
* step_scale : float, default 0.25
The initial guess for the step size scaled down by :math:`1/n**(1/4)`
If your model uses multiple step methods, aka a Compound Step, then you have
two ways to address arguments to each step method:
A: If you let ``sample()`` automatically assign the ``step_method``s,
and you can correctly anticipate what they will be, then you can wrap
step method kwargs in a dict and pass that to sample() with a kwarg set
to the name of the step method.
e.g. for a CompoundStep comprising NUTS and BinaryGibbsMetropolis,
you could send:
1. ``target_accept`` to NUTS: nuts={'target_accept':0.9}
2. ``transit_p`` to BinaryGibbsMetropolis: binary_gibbs_metropolis={'transit_p':.7}
Note that available names are:
``nuts``, ``hmc``, ``metropolis``, ``binary_metropolis``,
``binary_gibbs_metropolis``, ``categorical_gibbs_metropolis``,
``DEMetropolis``, ``DEMetropolisZ``, ``slice``
B: If you manually declare the ``step_method``s, within the ``step``
kwarg, then you can address the ``step_method`` kwargs directly.
e.g. for a CompoundStep comprising NUTS and BinaryGibbsMetropolis,
you could send:
step=[pm.NUTS([freeRV1, freeRV2], target_accept=0.9),
pm.BinaryGibbsMetropolis([freeRV3], transit_p=.7)]
You can find a full list of arguments in the docstring of the step methods.
Examples
--------
.. code:: ipython
>>> import pymc3 as pm
... n = 100
... h = 61
... alpha = 2
... beta = 2
.. code:: ipython
>>> with pm.Model() as model: # context management
... p = pm.Beta('p', alpha=alpha, beta=beta)
... y = pm.Binomial('y', n=n, p=p, observed=h)
... trace = pm.sample()
>>> pm.summary(trace)
mean sd mc_error hpd_2.5 hpd_97.5
p 0.604625 0.047086 0.00078 0.510498 0.694774
"""
model = modelcontext(model)
if start is None:
start = model.test_point
else:
if isinstance(start, dict):
update_start_vals(start, model.test_point, model)
else:
for chain_start_vals in start:
update_start_vals(chain_start_vals, model.test_point, model)
check_start_vals(start, model)
if cores is None:
cores = min(4, _cpu_count())
if chains is None:
chains = max(2, cores)
if isinstance(start, dict):
start = [start] * chains
if random_seed == -1:
random_seed = None
if chains == 1 and isinstance(random_seed, int):
random_seed = [random_seed]
if random_seed is None or isinstance(random_seed, int):
if random_seed is not None:
np.random.seed(random_seed)
random_seed = [np.random.randint(2**30) for _ in range(chains)]
if not isinstance(random_seed, Iterable):
raise TypeError("Invalid value for `random_seed`. Must be tuple, list or int")
if not discard_tuned_samples and not return_inferencedata:
warnings.warn(
"Tuning samples will be included in the returned `MultiTrace` object, which can lead to"
" complications in your downstream analysis. Please consider to switch to `InferenceData`:\n"
"`pm.sample(..., return_inferencedata=True)`",
UserWarning,
)
if return_inferencedata is None:
v = packaging.version.parse(pm.__version__)
if v.release[0] > 3 or v.release[1] >= 10: # type: ignore
warnings.warn(
"In an upcoming release, pm.sample will return an `arviz.InferenceData` object instead of a `MultiTrace` by default. "
"You can pass return_inferencedata=True or return_inferencedata=False to be safe and silence this warning.",
FutureWarning,
)
# set the default
return_inferencedata = False
if start is not None:
for start_vals in start:
_check_start_shape(model, start_vals)
# small trace warning
if draws == 0:
msg = "Tuning was enabled throughout the whole trace."
_log.warning(msg)
elif draws < 500:
msg = "Only %s samples in chain." % draws
_log.warning(msg)
draws += tune
if model.ndim == 0:
raise ValueError("The model does not contain any free variables.")
if step is None and init is not None and all_continuous(model.vars):
try:
# By default, try to use NUTS
_log.info("Auto-assigning NUTS sampler...")
start_, step = init_nuts(
init=init,
chains=chains,
n_init=n_init,
model=model,
random_seed=random_seed,
progressbar=progressbar,
**kwargs,
)
check_start_vals(start_, model)
if start is None:
start = start_
except (AttributeError, NotImplementedError, tg.NullTypeGradError):
# gradient computation failed
_log.info(
"Initializing NUTS failed. Falling back to elementwise auto-assignment."
)
_log.debug("Exception in init nuts", exec_info=True)
step = assign_step_methods(model, step, step_kwargs=kwargs)
else:
step = assign_step_methods(model, step, step_kwargs=kwargs)
if isinstance(step, list):
step = CompoundStep(step)
if start is None:
start = {}
if isinstance(start, dict):
start = [start] * chains
sample_args = {
"draws": draws,
"step": step,
"start": start,
"trace": trace,
"chain": chain_idx,
"chains": chains,
"tune": tune,
"progressbar": progressbar,
"model": model,
"random_seed": random_seed,
"cores": cores,
"callback": callback,
"discard_tuned_samples": discard_tuned_samples,
}
parallel_args = {
"pickle_backend": pickle_backend,
"mp_ctx": mp_ctx,
}
sample_args.update(kwargs)
has_population_samplers = np.any(
[
isinstance(m, arraystep.PopulationArrayStepShared)
for m in (step.methods if isinstance(step, CompoundStep) else [step])
]
)
parallel = cores > 1 and chains > 1 and not has_population_samplers
t_start = time.time()
if parallel:
_log.info(f"Multiprocess sampling ({chains} chains in {cores} jobs)")
_print_step_hierarchy(step)
try:
trace = _mp_sample(**sample_args, **parallel_args)
except pickle.PickleError:
_log.warning("Could not pickle model, sampling singlethreaded.")
_log.debug("Pickling error:", exec_info=True)
parallel = False
except AttributeError as e:
if str(e).startswith("AttributeError: Can't pickle"):
_log.warning("Could not pickle model, sampling singlethreaded.")
_log.debug("Pickling error:", exec_info=True)
parallel = False
else:
raise
if not parallel:
if has_population_samplers:
has_demcmc = np.any(
[
isinstance(m, DEMetropolis)
for m in (
step.methods if isinstance(step, CompoundStep) else [step]
)
]
)
_log.info(f"Population sampling ({chains} chains)")
if has_demcmc and chains < 3:
raise ValueError(
"DEMetropolis requires at least 3 chains. "
"For this {}-dimensional model you should use β₯{} chains".format(
model.ndim, model.ndim + 1
)
)
if has_demcmc and chains <= model.ndim:
warnings.warn(
"DEMetropolis should be used with more chains than dimensions! "
"(The model has {} dimensions.)".format(model.ndim),
UserWarning,
)
_print_step_hierarchy(step)
trace = _sample_population(parallelize=cores > 1, **sample_args)
else:
_log.info(f"Sequential sampling ({chains} chains in 1 job)")
_print_step_hierarchy(step)
trace = _sample_many(**sample_args)
t_sampling = time.time() - t_start
# count the number of tune/draw iterations that happened
# ideally via the "tune" statistic, but not all samplers record it!
if "tune" in trace.stat_names:
stat = trace.get_sampler_stats("tune", chains=0)
# when CompoundStep is used, the stat is 2 dimensional!
if len(stat.shape) == 2:
stat = stat[:, 0]
stat = tuple(stat)
n_tune = stat.count(True)
n_draws = stat.count(False)
else:
# these may be wrong when KeyboardInterrupt happened, but they're better than nothing
n_tune = min(tune, len(trace))
n_draws = max(0, len(trace) - n_tune)
if discard_tuned_samples:
trace = trace[n_tune:]
# save metadata in SamplerReport
trace.report._n_tune = n_tune
trace.report._n_draws = n_draws
trace.report._t_sampling = t_sampling
if "variable_inclusion" in trace.stat_names:
variable_inclusion = np.stack(
trace.get_sampler_stats("variable_inclusion")
).mean(0)
trace.report.variable_importance = variable_inclusion / variable_inclusion.sum()
n_chains = len(trace.chains)
_log.info(
f"Sampling {n_chains} chain{'s' if n_chains > 1 else ''} for {n_tune:_d} tune and {n_draws:_d} draw iterations "
f"({n_tune * n_chains:_d} + {n_draws * n_chains:_d} draws total) "
f"took {trace.report.t_sampling:.0f} seconds."
)
idata = None
if compute_convergence_checks or return_inferencedata:
ikwargs = dict(model=model, save_warmup=not discard_tuned_samples)
if idata_kwargs:
ikwargs.update(idata_kwargs)
idata = arviz.from_pymc3(trace, **ikwargs)
if compute_convergence_checks:
if draws - tune < 100:
warnings.warn(
"The number of samples is too small to check convergence reliably."
)
else:
trace.report._run_convergence_checks(idata, model)
trace.report._log_summary()
if return_inferencedata:
return idata
else:
return trace
|
https://github.com/pymc-devs/pymc3/issues/4276
|
WARNING: autodoc: failed to import function 't_stick_breaking' from module 'pymc3.distributions.transforms'; the following exception was raised:
Traceback (most recent call last):
File "/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/sphinx/util/inspect.py", line 334, in safe_getattr
return getattr(obj, name, *defargs)
AttributeError: module 'pymc3.distributions.transforms' has no attribute 't_stick_breaking'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/sphinx/ext/autodoc/importer.py", line 106, in import_object
obj = attrgetter(obj, mangled_name)
File "/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/sphinx/ext/autodoc/__init__.py", line 292, in get_attr
return autodoc_attrgetter(self.env.app, obj, name, *defargs)
File "/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/sphinx/ext/autodoc/__init__.py", line 2242, in autodoc_attrgetter
return safe_getattr(obj, name, *defargs)
File "/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/sphinx/util/inspect.py", line 350, in safe_getattr
raise AttributeError(name) from exc
AttributeError: t_stick_breaking
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/distributions/transforms.py:docstring of pymc3.distributions.transforms.StickBreaking:4: WARNING: Unexpected indentation.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/sampling.py:docstring of pymc3.sampling.init_nuts:31: WARNING: Inline literal start-string without end-string.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/sampling.py:docstring of pymc3.sampling.sample:127: WARNING: Inline literal start-string without end-string.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/sampling.py:docstring of pymc3.sampling.sample:155: WARNING: Inline literal start-string without end-string.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/sampling.py:docstring of pymc3.sampling.sample:149: WARNING: Unexpected indentation.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/sampling.py:docstring of pymc3.sampling.sample:161: WARNING: Unexpected indentation.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/backends/base.py:docstring of pymc3.backends.base.MultiTrace:56: WARNING: Block quote ends without a blank line; unexpected unindent.
Extension error:
Handler <function mangle_signature at 0x13d3d4200> for event 'autodoc-process-signature' threw an exception (exception: The section Notes appears twice in the docstring of Elemwise{clip,no_inplace} in None.)
|
AttributeError
|
def init_nuts(
init="auto",
chains=1,
n_init=500000,
model=None,
random_seed=None,
progressbar=True,
**kwargs,
):
"""Set up the mass matrix initialization for NUTS.
NUTS convergence and sampling speed is extremely dependent on the
choice of mass/scaling matrix. This function implements different
methods for choosing or adapting the mass matrix.
Parameters
----------
init : str
Initialization method to use.
* auto: Choose a default initialization method automatically.
Currently, this is `'jitter+adapt_diag'`, but this can change in the future. If you
depend on the exact behaviour, choose an initialization method explicitly.
* adapt_diag: Start with a identity mass matrix and then adapt a diagonal based on the
variance of the tuning samples. All chains use the test value (usually the prior mean)
as starting point.
* jitter+adapt_diag: Same as ``adapt_diag``, but use test value plus a uniform jitter in
[-1, 1] as starting point in each chain.
* advi+adapt_diag: Run ADVI and then adapt the resulting diagonal mass matrix based on the
sample variance of the tuning samples.
* advi+adapt_diag_grad: Run ADVI and then adapt the resulting diagonal mass matrix based
on the variance of the gradients during tuning. This is **experimental** and might be
removed in a future release.
* advi: Run ADVI to estimate posterior mean and diagonal mass matrix.
* advi_map: Initialize ADVI with MAP and use MAP as starting point.
* map: Use the MAP as starting point. This is discouraged.
* adapt_full: Adapt a dense mass matrix using the sample covariances. All chains use the
test value (usually the prior mean) as starting point.
* jitter+adapt_full: Same as ``adapt_full``, but use test value plus a uniform jitter in
[-1, 1] as starting point in each chain.
chains : int
Number of jobs to start.
n_init : int
Number of iterations of initializer. Only works for 'ADVI' init methods.
model : Model (optional if in ``with`` context)
progressbar : bool
Whether or not to display a progressbar for advi sampling.
**kwargs : keyword arguments
Extra keyword arguments are forwarded to pymc3.NUTS.
Returns
-------
start : ``pymc3.model.Point``
Starting point for sampler
nuts_sampler : ``pymc3.step_methods.NUTS``
Instantiated and initialized NUTS sampler object
"""
model = modelcontext(model)
vars = kwargs.get("vars", model.vars)
if set(vars) != set(model.vars):
raise ValueError("Must use init_nuts on all variables of a model.")
if not all_continuous(vars):
raise ValueError(
"init_nuts can only be used for models with only continuous variables."
)
if not isinstance(init, str):
raise TypeError("init must be a string.")
if init is not None:
init = init.lower()
if init == "auto":
init = "jitter+adapt_diag"
_log.info(f"Initializing NUTS using {init}...")
if random_seed is not None:
random_seed = int(np.atleast_1d(random_seed)[0])
np.random.seed(random_seed)
cb = [
pm.callbacks.CheckParametersConvergence(tolerance=1e-2, diff="absolute"),
pm.callbacks.CheckParametersConvergence(tolerance=1e-2, diff="relative"),
]
if init == "adapt_diag":
start = [model.test_point] * chains
mean = np.mean([model.dict_to_array(vals) for vals in start], axis=0)
var = np.ones_like(mean)
potential = quadpotential.QuadPotentialDiagAdapt(model.ndim, mean, var, 10)
elif init == "jitter+adapt_diag":
start = []
for _ in range(chains):
mean = {var: val.copy() for var, val in model.test_point.items()}
for val in mean.values():
val[...] += 2 * np.random.rand(*val.shape) - 1
start.append(mean)
mean = np.mean([model.dict_to_array(vals) for vals in start], axis=0)
var = np.ones_like(mean)
potential = quadpotential.QuadPotentialDiagAdapt(model.ndim, mean, var, 10)
elif init == "advi+adapt_diag_grad":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
) # type: pm.MeanField
start = approx.sample(draws=chains)
start = list(start)
stds = approx.bij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
mean = approx.bij.rmap(approx.mean.get_value())
mean = model.dict_to_array(mean)
weight = 50
potential = quadpotential.QuadPotentialDiagAdaptGrad(
model.ndim, mean, cov, weight
)
elif init == "advi+adapt_diag":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
) # type: pm.MeanField
start = approx.sample(draws=chains)
start = list(start)
stds = approx.bij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
mean = approx.bij.rmap(approx.mean.get_value())
mean = model.dict_to_array(mean)
weight = 50
potential = quadpotential.QuadPotentialDiagAdapt(model.ndim, mean, cov, weight)
elif init == "advi":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
) # type: pm.MeanField
start = approx.sample(draws=chains)
start = list(start)
stds = approx.bij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
potential = quadpotential.QuadPotentialDiag(cov)
elif init == "advi_map":
start = pm.find_MAP(include_transformed=True)
approx = pm.MeanField(model=model, start=start)
pm.fit(
random_seed=random_seed,
n=n_init,
method=pm.KLqp(approx),
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
)
start = approx.sample(draws=chains)
start = list(start)
stds = approx.bij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
potential = quadpotential.QuadPotentialDiag(cov)
elif init == "map":
start = pm.find_MAP(include_transformed=True)
cov = pm.find_hessian(point=start)
start = [start] * chains
potential = quadpotential.QuadPotentialFull(cov)
elif init == "adapt_full":
start = [model.test_point] * chains
mean = np.mean([model.dict_to_array(vals) for vals in start], axis=0)
cov = np.eye(model.ndim)
potential = quadpotential.QuadPotentialFullAdapt(model.ndim, mean, cov, 10)
elif init == "jitter+adapt_full":
start = []
for _ in range(chains):
mean = {var: val.copy() for var, val in model.test_point.items()}
for val in mean.values():
val[...] += 2 * np.random.rand(*val.shape) - 1
start.append(mean)
mean = np.mean([model.dict_to_array(vals) for vals in start], axis=0)
cov = np.eye(model.ndim)
potential = quadpotential.QuadPotentialFullAdapt(model.ndim, mean, cov, 10)
else:
raise ValueError(f"Unknown initializer: {init}.")
step = pm.NUTS(potential=potential, model=model, **kwargs)
return start, step
|
def init_nuts(
init="auto",
chains=1,
n_init=500000,
model=None,
random_seed=None,
progressbar=True,
**kwargs,
):
"""Set up the mass matrix initialization for NUTS.
NUTS convergence and sampling speed is extremely dependent on the
choice of mass/scaling matrix. This function implements different
methods for choosing or adapting the mass matrix.
Parameters
----------
init : str
Initialization method to use.
* auto: Choose a default initialization method automatically.
Currently, this is `'jitter+adapt_diag'`, but this can change in the future. If you
depend on the exact behaviour, choose an initialization method explicitly.
* adapt_diag: Start with a identity mass matrix and then adapt a diagonal based on the
variance of the tuning samples. All chains use the test value (usually the prior mean)
as starting point.
* jitter+adapt_diag: Same as ``adapt_diag``, but use test value plus a uniform jitter in
[-1, 1] as starting point in each chain.
* advi+adapt_diag: Run ADVI and then adapt the resulting diagonal mass matrix based on the
sample variance of the tuning samples.
* advi+adapt_diag_grad: Run ADVI and then adapt the resulting diagonal mass matrix based
on the variance of the gradients during tuning. This is **experimental** and might be
removed in a future release.
* advi: Run ADVI to estimate posterior mean and diagonal mass matrix.
* advi_map: Initialize ADVI with MAP and use MAP as starting point.
* map: Use the MAP as starting point. This is discouraged.
* adapt_full: Adapt a dense mass matrix using the sample covariances. All chains use the
test value (usually the prior mean) as starting point.
* jitter+adapt_full: Same as ``adapt_full`, but use test value plus a uniform jitter in
[-1, 1] as starting point in each chain.
chains : int
Number of jobs to start.
n_init : int
Number of iterations of initializer. Only works for 'ADVI' init methods.
model : Model (optional if in ``with`` context)
progressbar : bool
Whether or not to display a progressbar for advi sampling.
**kwargs : keyword arguments
Extra keyword arguments are forwarded to pymc3.NUTS.
Returns
-------
start : ``pymc3.model.Point``
Starting point for sampler
nuts_sampler : ``pymc3.step_methods.NUTS``
Instantiated and initialized NUTS sampler object
"""
model = modelcontext(model)
vars = kwargs.get("vars", model.vars)
if set(vars) != set(model.vars):
raise ValueError("Must use init_nuts on all variables of a model.")
if not all_continuous(vars):
raise ValueError(
"init_nuts can only be used for models with only continuous variables."
)
if not isinstance(init, str):
raise TypeError("init must be a string.")
if init is not None:
init = init.lower()
if init == "auto":
init = "jitter+adapt_diag"
_log.info(f"Initializing NUTS using {init}...")
if random_seed is not None:
random_seed = int(np.atleast_1d(random_seed)[0])
np.random.seed(random_seed)
cb = [
pm.callbacks.CheckParametersConvergence(tolerance=1e-2, diff="absolute"),
pm.callbacks.CheckParametersConvergence(tolerance=1e-2, diff="relative"),
]
if init == "adapt_diag":
start = [model.test_point] * chains
mean = np.mean([model.dict_to_array(vals) for vals in start], axis=0)
var = np.ones_like(mean)
potential = quadpotential.QuadPotentialDiagAdapt(model.ndim, mean, var, 10)
elif init == "jitter+adapt_diag":
start = []
for _ in range(chains):
mean = {var: val.copy() for var, val in model.test_point.items()}
for val in mean.values():
val[...] += 2 * np.random.rand(*val.shape) - 1
start.append(mean)
mean = np.mean([model.dict_to_array(vals) for vals in start], axis=0)
var = np.ones_like(mean)
potential = quadpotential.QuadPotentialDiagAdapt(model.ndim, mean, var, 10)
elif init == "advi+adapt_diag_grad":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
) # type: pm.MeanField
start = approx.sample(draws=chains)
start = list(start)
stds = approx.bij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
mean = approx.bij.rmap(approx.mean.get_value())
mean = model.dict_to_array(mean)
weight = 50
potential = quadpotential.QuadPotentialDiagAdaptGrad(
model.ndim, mean, cov, weight
)
elif init == "advi+adapt_diag":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
) # type: pm.MeanField
start = approx.sample(draws=chains)
start = list(start)
stds = approx.bij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
mean = approx.bij.rmap(approx.mean.get_value())
mean = model.dict_to_array(mean)
weight = 50
potential = quadpotential.QuadPotentialDiagAdapt(model.ndim, mean, cov, weight)
elif init == "advi":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
) # type: pm.MeanField
start = approx.sample(draws=chains)
start = list(start)
stds = approx.bij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
potential = quadpotential.QuadPotentialDiag(cov)
elif init == "advi_map":
start = pm.find_MAP(include_transformed=True)
approx = pm.MeanField(model=model, start=start)
pm.fit(
random_seed=random_seed,
n=n_init,
method=pm.KLqp(approx),
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
)
start = approx.sample(draws=chains)
start = list(start)
stds = approx.bij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
potential = quadpotential.QuadPotentialDiag(cov)
elif init == "map":
start = pm.find_MAP(include_transformed=True)
cov = pm.find_hessian(point=start)
start = [start] * chains
potential = quadpotential.QuadPotentialFull(cov)
elif init == "adapt_full":
start = [model.test_point] * chains
mean = np.mean([model.dict_to_array(vals) for vals in start], axis=0)
cov = np.eye(model.ndim)
potential = quadpotential.QuadPotentialFullAdapt(model.ndim, mean, cov, 10)
elif init == "jitter+adapt_full":
start = []
for _ in range(chains):
mean = {var: val.copy() for var, val in model.test_point.items()}
for val in mean.values():
val[...] += 2 * np.random.rand(*val.shape) - 1
start.append(mean)
mean = np.mean([model.dict_to_array(vals) for vals in start], axis=0)
cov = np.eye(model.ndim)
potential = quadpotential.QuadPotentialFullAdapt(model.ndim, mean, cov, 10)
else:
raise ValueError(f"Unknown initializer: {init}.")
step = pm.NUTS(potential=potential, model=model, **kwargs)
return start, step
|
https://github.com/pymc-devs/pymc3/issues/4276
|
WARNING: autodoc: failed to import function 't_stick_breaking' from module 'pymc3.distributions.transforms'; the following exception was raised:
Traceback (most recent call last):
File "/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/sphinx/util/inspect.py", line 334, in safe_getattr
return getattr(obj, name, *defargs)
AttributeError: module 'pymc3.distributions.transforms' has no attribute 't_stick_breaking'
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/sphinx/ext/autodoc/importer.py", line 106, in import_object
obj = attrgetter(obj, mangled_name)
File "/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/sphinx/ext/autodoc/__init__.py", line 292, in get_attr
return autodoc_attrgetter(self.env.app, obj, name, *defargs)
File "/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/sphinx/ext/autodoc/__init__.py", line 2242, in autodoc_attrgetter
return safe_getattr(obj, name, *defargs)
File "/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/sphinx/util/inspect.py", line 350, in safe_getattr
raise AttributeError(name) from exc
AttributeError: t_stick_breaking
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/distributions/transforms.py:docstring of pymc3.distributions.transforms.StickBreaking:4: WARNING: Unexpected indentation.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/sampling.py:docstring of pymc3.sampling.init_nuts:31: WARNING: Inline literal start-string without end-string.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/sampling.py:docstring of pymc3.sampling.sample:127: WARNING: Inline literal start-string without end-string.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/sampling.py:docstring of pymc3.sampling.sample:155: WARNING: Inline literal start-string without end-string.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/sampling.py:docstring of pymc3.sampling.sample:149: WARNING: Unexpected indentation.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/sampling.py:docstring of pymc3.sampling.sample:161: WARNING: Unexpected indentation.
/Users/rpg/.virtualenvs/pymc3/lib/python3.7/site-packages/pymc3/backends/base.py:docstring of pymc3.backends.base.MultiTrace:56: WARNING: Block quote ends without a blank line; unexpected unindent.
Extension error:
Handler <function mangle_signature at 0x13d3d4200> for event 'autodoc-process-signature' threw an exception (exception: The section Notes appears twice in the docstring of Elemwise{clip,no_inplace} in None.)
|
AttributeError
|
def __str__(self, **kwargs):
try:
return self._str_repr(formatting="plain", **kwargs)
except:
return super().__str__()
|
def __str__(self, **kwargs):
return self._str_repr(formatting="plain", **kwargs)
|
https://github.com/pymc-devs/pymc3/issues/4240
|
vals
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/sayam/.local/lib/python3.8/site-packages/theano/gof/graph.py", line 449, in __repr__
to_print = [str(self)]
File "/home/sayam/Desktop/pymc/pymc3/pymc3/model.py", line 83, in __str__
return self._str_repr(formatting="plain", **kwargs)
File "/home/sayam/Desktop/pymc/pymc3/pymc3/model.py", line 77, in _str_repr
return self.distribution._str_repr(name=name, dist=dist, formatting=formatting)
File "/home/sayam/Desktop/pymc/pymc3/pymc3/distributions/distribution.py", line 176, in _str_repr
param_values = [
File "/home/sayam/Desktop/pymc/pymc3/pymc3/distributions/distribution.py", line 177, in <listcomp>
get_repr_for_variable(getattr(dist, x), formatting=formatting) for x in param_names
File "/home/sayam/Desktop/pymc/pymc3/pymc3/util.py", line 131, in get_repr_for_variable
name = variable.name if variable is not None else None
AttributeError: 'list' object has no attribute 'name'
|
AttributeError
|
def _distr_parameters_for_repr(self):
return ["mu"]
|
def _distr_parameters_for_repr(self):
return ["a"]
|
https://github.com/pymc-devs/pymc3/issues/4240
|
vals
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home/sayam/.local/lib/python3.8/site-packages/theano/gof/graph.py", line 449, in __repr__
to_print = [str(self)]
File "/home/sayam/Desktop/pymc/pymc3/pymc3/model.py", line 83, in __str__
return self._str_repr(formatting="plain", **kwargs)
File "/home/sayam/Desktop/pymc/pymc3/pymc3/model.py", line 77, in _str_repr
return self.distribution._str_repr(name=name, dist=dist, formatting=formatting)
File "/home/sayam/Desktop/pymc/pymc3/pymc3/distributions/distribution.py", line 176, in _str_repr
param_values = [
File "/home/sayam/Desktop/pymc/pymc3/pymc3/distributions/distribution.py", line 177, in <listcomp>
get_repr_for_variable(getattr(dist, x), formatting=formatting) for x in param_names
File "/home/sayam/Desktop/pymc/pymc3/pymc3/util.py", line 131, in get_repr_for_variable
name = variable.name if variable is not None else None
AttributeError: 'list' object has no attribute 'name'
|
AttributeError
|
def __init__(self, w, comp_dists, *args, **kwargs):
# comp_dists type checking
if not (
isinstance(comp_dists, Distribution)
or (
isinstance(comp_dists, Iterable)
and all((isinstance(c, Distribution) for c in comp_dists))
)
):
raise TypeError(
"Supplied Mixture comp_dists must be a "
"Distribution or an iterable of "
"Distributions. Got {} instead.".format(
type(comp_dists)
if not isinstance(comp_dists, Iterable)
else [type(c) for c in comp_dists]
)
)
shape = kwargs.pop("shape", ())
self.w = w = tt.as_tensor_variable(w)
self.comp_dists = comp_dists
defaults = kwargs.pop("defaults", [])
if all_discrete(comp_dists):
default_dtype = _conversion_map[theano.config.floatX]
else:
default_dtype = theano.config.floatX
try:
self.mean = (w * self._comp_means()).sum(axis=-1)
if "mean" not in defaults:
defaults.append("mean")
except AttributeError:
pass
dtype = kwargs.pop("dtype", default_dtype)
try:
if isinstance(comp_dists, Distribution):
comp_mode_logps = comp_dists.logp(comp_dists.mode)
else:
comp_mode_logps = tt.stack([cd.logp(cd.mode) for cd in comp_dists])
mode_idx = tt.argmax(tt.log(w) + comp_mode_logps, axis=-1)
self.mode = self._comp_modes()[mode_idx]
if "mode" not in defaults:
defaults.append("mode")
except (AttributeError, ValueError, IndexError):
pass
super().__init__(shape, dtype, defaults=defaults, *args, **kwargs)
|
def __init__(self, w, comp_dists, *args, **kwargs):
# comp_dists type checking
if not (
isinstance(comp_dists, Distribution)
or (
isinstance(comp_dists, Iterable)
and all((isinstance(c, Distribution) for c in comp_dists))
)
):
raise TypeError(
"Supplied Mixture comp_dists must be a "
"Distribution or an iterable of "
"Distributions. Got {} instead.".format(
type(comp_dists)
if not isinstance(comp_dists, Iterable)
else [type(c) for c in comp_dists]
)
)
shape = kwargs.pop("shape", ())
self.w = w = tt.as_tensor_variable(w)
self.comp_dists = comp_dists
defaults = kwargs.pop("defaults", [])
if all_discrete(comp_dists):
default_dtype = _conversion_map[theano.config.floatX]
else:
default_dtype = theano.config.floatX
try:
self.mean = (w * self._comp_means()).sum(axis=-1)
if "mean" not in defaults:
defaults.append("mean")
except AttributeError:
pass
dtype = kwargs.pop("dtype", default_dtype)
try:
comp_modes = self._comp_modes()
comp_mode_logps = self.logp(comp_modes)
self.mode = comp_modes[tt.argmax(w * comp_mode_logps, axis=-1)]
if "mode" not in defaults:
defaults.append("mode")
except (AttributeError, ValueError, IndexError):
pass
super().__init__(shape, dtype, defaults=defaults, *args, **kwargs)
|
https://github.com/pymc-devs/pymc3/issues/3994
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/.local/lib/python3.8/site-packages/pymc3/distributions/mixture.py in _comp_modes(self)
289 try:
--> 290 return tt.as_tensor_variable(self.comp_dists.mode)
291 except AttributeError:
AttributeError: 'list' object has no attribute 'mode'
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-8-dedf5c958f15> in <module>
8
9 w2 = pm.Dirichlet('w2', np.array([1, 1]))
---> 10 like = pm.Mixture = pm.Mixture('like', w=w2, comp_dists=[mix, a3], observed=np.random.randn(20))
~/.local/lib/python3.8/site-packages/pymc3/distributions/distribution.py in __new__(cls, name, *args, **kwargs)
44 raise TypeError("observed needs to be data but got: {}".format(type(data)))
45 total_size = kwargs.pop('total_size', None)
---> 46 dist = cls.dist(*args, **kwargs)
47 return model.Var(name, dist, data, total_size)
48 else:
~/.local/lib/python3.8/site-packages/pymc3/distributions/distribution.py in dist(cls, *args, **kwargs)
55 def dist(cls, *args, **kwargs):
56 dist = object.__new__(cls)
---> 57 dist.__init__(*args, **kwargs)
58 return dist
59
~/.local/lib/python3.8/site-packages/pymc3/distributions/mixture.py in __init__(self, w, comp_dists, *args, **kwargs)
139
140 try:
--> 141 comp_modes = self._comp_modes()
142 comp_mode_logps = self.logp(comp_modes)
143 self.mode = comp_modes[tt.argmax(w * comp_mode_logps, axis=-1)]
~/.local/lib/python3.8/site-packages/pymc3/distributions/mixture.py in _comp_modes(self)
290 return tt.as_tensor_variable(self.comp_dists.mode)
291 except AttributeError:
--> 292 return tt.squeeze(tt.stack([comp_dist.mode
293 for comp_dist in self.comp_dists],
294 axis=-1))
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in stack(*tensors, **kwargs)
4726 dtype = scal.upcast(*[i.dtype for i in tensors])
4727 return theano.tensor.opt.MakeVector(dtype)(*tensors)
-> 4728 return join(axis, *[shape_padaxis(t, axis) for t in tensors])
4729
4730
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in join(axis, *tensors_list)
4500 return tensors_list[0]
4501 else:
-> 4502 return join_(axis, *tensors_list)
4503
4504
~/.local/lib/python3.8/site-packages/theano/gof/op.py in __call__(self, *inputs, **kwargs)
613 """
614 return_list = kwargs.pop('return_list', False)
--> 615 node = self.make_node(*inputs, **kwargs)
616
617 if config.compute_test_value != 'off':
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in make_node(self, *axis_and_tensors)
4232 return tensor(dtype=out_dtype, broadcastable=bcastable)
4233
-> 4234 return self._make_node_internal(
4235 axis, tensors, as_tensor_variable_args, output_maker)
4236
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in _make_node_internal(self, axis, tensors, as_tensor_variable_args, output_maker)
4299 if not python_all([x.ndim == len(bcastable)
4300 for x in as_tensor_variable_args[1:]]):
-> 4301 raise TypeError("Join() can only join tensors with the same "
4302 "number of dimensions.")
4303
TypeError: Join() can only join tensors with the same number of dimensions.
|
AttributeError
|
def logp(self, value):
"""
Calculate log-probability of defined Mixture distribution at specified value.
Parameters
----------
value: numeric
Value(s) for which log-probability is calculated. If the log probabilities for multiple
values are desired the values must be provided in a numpy array or theano tensor
Returns
-------
TensorVariable
"""
w = self.w
return bound(
logsumexp(tt.log(w) + self._comp_logp(value), axis=-1, keepdims=False),
w >= 0,
w <= 1,
tt.allclose(w.sum(axis=-1), 1),
broadcast_conditions=False,
)
|
def logp(self, value):
"""
Calculate log-probability of defined Mixture distribution at specified value.
Parameters
----------
value: numeric
Value(s) for which log-probability is calculated. If the log probabilities for multiple
values are desired the values must be provided in a numpy array or theano tensor
Returns
-------
TensorVariable
"""
w = self.w
return bound(
logsumexp(tt.log(w) + self._comp_logp(value), axis=-1),
w >= 0,
w <= 1,
tt.allclose(w.sum(axis=-1), 1),
broadcast_conditions=False,
)
|
https://github.com/pymc-devs/pymc3/issues/3994
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/.local/lib/python3.8/site-packages/pymc3/distributions/mixture.py in _comp_modes(self)
289 try:
--> 290 return tt.as_tensor_variable(self.comp_dists.mode)
291 except AttributeError:
AttributeError: 'list' object has no attribute 'mode'
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-8-dedf5c958f15> in <module>
8
9 w2 = pm.Dirichlet('w2', np.array([1, 1]))
---> 10 like = pm.Mixture = pm.Mixture('like', w=w2, comp_dists=[mix, a3], observed=np.random.randn(20))
~/.local/lib/python3.8/site-packages/pymc3/distributions/distribution.py in __new__(cls, name, *args, **kwargs)
44 raise TypeError("observed needs to be data but got: {}".format(type(data)))
45 total_size = kwargs.pop('total_size', None)
---> 46 dist = cls.dist(*args, **kwargs)
47 return model.Var(name, dist, data, total_size)
48 else:
~/.local/lib/python3.8/site-packages/pymc3/distributions/distribution.py in dist(cls, *args, **kwargs)
55 def dist(cls, *args, **kwargs):
56 dist = object.__new__(cls)
---> 57 dist.__init__(*args, **kwargs)
58 return dist
59
~/.local/lib/python3.8/site-packages/pymc3/distributions/mixture.py in __init__(self, w, comp_dists, *args, **kwargs)
139
140 try:
--> 141 comp_modes = self._comp_modes()
142 comp_mode_logps = self.logp(comp_modes)
143 self.mode = comp_modes[tt.argmax(w * comp_mode_logps, axis=-1)]
~/.local/lib/python3.8/site-packages/pymc3/distributions/mixture.py in _comp_modes(self)
290 return tt.as_tensor_variable(self.comp_dists.mode)
291 except AttributeError:
--> 292 return tt.squeeze(tt.stack([comp_dist.mode
293 for comp_dist in self.comp_dists],
294 axis=-1))
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in stack(*tensors, **kwargs)
4726 dtype = scal.upcast(*[i.dtype for i in tensors])
4727 return theano.tensor.opt.MakeVector(dtype)(*tensors)
-> 4728 return join(axis, *[shape_padaxis(t, axis) for t in tensors])
4729
4730
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in join(axis, *tensors_list)
4500 return tensors_list[0]
4501 else:
-> 4502 return join_(axis, *tensors_list)
4503
4504
~/.local/lib/python3.8/site-packages/theano/gof/op.py in __call__(self, *inputs, **kwargs)
613 """
614 return_list = kwargs.pop('return_list', False)
--> 615 node = self.make_node(*inputs, **kwargs)
616
617 if config.compute_test_value != 'off':
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in make_node(self, *axis_and_tensors)
4232 return tensor(dtype=out_dtype, broadcastable=bcastable)
4233
-> 4234 return self._make_node_internal(
4235 axis, tensors, as_tensor_variable_args, output_maker)
4236
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in _make_node_internal(self, axis, tensors, as_tensor_variable_args, output_maker)
4299 if not python_all([x.ndim == len(bcastable)
4300 for x in as_tensor_variable_args[1:]]):
-> 4301 raise TypeError("Join() can only join tensors with the same "
4302 "number of dimensions.")
4303
TypeError: Join() can only join tensors with the same number of dimensions.
|
AttributeError
|
def logsumexp(x, axis=None, keepdims=True):
# Adapted from https://github.com/Theano/Theano/issues/1563
x_max = tt.max(x, axis=axis, keepdims=True)
res = tt.log(tt.sum(tt.exp(x - x_max), axis=axis, keepdims=True)) + x_max
return res if keepdims else res.squeeze()
|
def logsumexp(x, axis=None):
# Adapted from https://github.com/Theano/Theano/issues/1563
x_max = tt.max(x, axis=axis, keepdims=True)
return tt.log(tt.sum(tt.exp(x - x_max), axis=axis, keepdims=True)) + x_max
|
https://github.com/pymc-devs/pymc3/issues/3994
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
~/.local/lib/python3.8/site-packages/pymc3/distributions/mixture.py in _comp_modes(self)
289 try:
--> 290 return tt.as_tensor_variable(self.comp_dists.mode)
291 except AttributeError:
AttributeError: 'list' object has no attribute 'mode'
During handling of the above exception, another exception occurred:
TypeError Traceback (most recent call last)
<ipython-input-8-dedf5c958f15> in <module>
8
9 w2 = pm.Dirichlet('w2', np.array([1, 1]))
---> 10 like = pm.Mixture = pm.Mixture('like', w=w2, comp_dists=[mix, a3], observed=np.random.randn(20))
~/.local/lib/python3.8/site-packages/pymc3/distributions/distribution.py in __new__(cls, name, *args, **kwargs)
44 raise TypeError("observed needs to be data but got: {}".format(type(data)))
45 total_size = kwargs.pop('total_size', None)
---> 46 dist = cls.dist(*args, **kwargs)
47 return model.Var(name, dist, data, total_size)
48 else:
~/.local/lib/python3.8/site-packages/pymc3/distributions/distribution.py in dist(cls, *args, **kwargs)
55 def dist(cls, *args, **kwargs):
56 dist = object.__new__(cls)
---> 57 dist.__init__(*args, **kwargs)
58 return dist
59
~/.local/lib/python3.8/site-packages/pymc3/distributions/mixture.py in __init__(self, w, comp_dists, *args, **kwargs)
139
140 try:
--> 141 comp_modes = self._comp_modes()
142 comp_mode_logps = self.logp(comp_modes)
143 self.mode = comp_modes[tt.argmax(w * comp_mode_logps, axis=-1)]
~/.local/lib/python3.8/site-packages/pymc3/distributions/mixture.py in _comp_modes(self)
290 return tt.as_tensor_variable(self.comp_dists.mode)
291 except AttributeError:
--> 292 return tt.squeeze(tt.stack([comp_dist.mode
293 for comp_dist in self.comp_dists],
294 axis=-1))
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in stack(*tensors, **kwargs)
4726 dtype = scal.upcast(*[i.dtype for i in tensors])
4727 return theano.tensor.opt.MakeVector(dtype)(*tensors)
-> 4728 return join(axis, *[shape_padaxis(t, axis) for t in tensors])
4729
4730
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in join(axis, *tensors_list)
4500 return tensors_list[0]
4501 else:
-> 4502 return join_(axis, *tensors_list)
4503
4504
~/.local/lib/python3.8/site-packages/theano/gof/op.py in __call__(self, *inputs, **kwargs)
613 """
614 return_list = kwargs.pop('return_list', False)
--> 615 node = self.make_node(*inputs, **kwargs)
616
617 if config.compute_test_value != 'off':
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in make_node(self, *axis_and_tensors)
4232 return tensor(dtype=out_dtype, broadcastable=bcastable)
4233
-> 4234 return self._make_node_internal(
4235 axis, tensors, as_tensor_variable_args, output_maker)
4236
~/.local/lib/python3.8/site-packages/theano/tensor/basic.py in _make_node_internal(self, axis, tensors, as_tensor_variable_args, output_maker)
4299 if not python_all([x.ndim == len(bcastable)
4300 for x in as_tensor_variable_args[1:]]):
-> 4301 raise TypeError("Join() can only join tensors with the same "
4302 "number of dimensions.")
4303
TypeError: Join() can only join tensors with the same number of dimensions.
|
AttributeError
|
def pandas_to_array(data):
if hasattr(data, "values"): # pandas
if data.isnull().any().any(): # missing values
ret = np.ma.MaskedArray(data.values, data.isnull().values)
else:
ret = data.values
elif hasattr(data, "mask"):
if data.mask.any():
ret = data
else: # empty mask
ret = data.filled()
elif isinstance(data, theano.gof.graph.Variable):
ret = data
elif sps.issparse(data):
ret = data
elif isgenerator(data):
ret = generator(data)
else:
ret = np.asarray(data)
return pm.floatX(ret)
|
def pandas_to_array(data):
if hasattr(data, "values"): # pandas
if data.isnull().any().any(): # missing values
ret = np.ma.MaskedArray(data.values, data.isnull().values)
else:
ret = data.values
elif hasattr(data, "mask"):
ret = data
elif isinstance(data, theano.gof.graph.Variable):
ret = data
elif sps.issparse(data):
ret = data
elif isgenerator(data):
ret = generator(data)
else:
ret = np.asarray(data)
return pm.floatX(ret)
|
https://github.com/pymc-devs/pymc3/issues/3576
|
/usr/local/lib/python3.6/dist-packages/pymc3/model.py:1331: UserWarning: Data in X_t contains missing values and will be automatically imputed from the sampling distribution.
warnings.warn(impute_message, UserWarning)
Auto-assigning NUTS sampler...
Initializing NUTS using adapt_diag...
Multiprocess sampling (2 chains in 2 jobs)
NUTS: [coef, Intercept_t, X_t_missing, Xmu_t]
Sampling 2 chains: 100%|ββββββββββ| 3000/3000 [00:15<00:00, 195.87draws/s]
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-44-457eee33d21f> in <module>()
9 y_prob = pm.math.sigmoid(intercept + coef * X_modeled)
10 y_t = pm.Bernoulli('y', y_prob, observed=y)
---> 11 trace = pm.sample(1000, tune=500, chains=2, cores=2, init='adapt_diag')
3 frames
/usr/local/lib/python3.6/dist-packages/pymc3/sampling.py in sample(draws, step, init, n_init, start, trace, chain_idx, chains, cores, tune, progressbar, model, random_seed, discard_tuned_samples, compute_convergence_checks, **kwargs)
464 warnings.warn("The number of samples is too small to check convergence reliably.")
465 else:
--> 466 trace.report._run_convergence_checks(trace, model)
467
468 trace.report._log_summary()
/usr/local/lib/python3.6/dist-packages/pymc3/backends/report.py in _run_convergence_checks(self, trace, model)
86
87 warnings = []
---> 88 rhat_max = max(val.max() for val in gelman_rubin.values())
89 if rhat_max > 1.4:
90 msg = ("The gelman-rubin statistic is larger than 1.4 for some "
/usr/local/lib/python3.6/dist-packages/pymc3/backends/report.py in <genexpr>(.0)
86
87 warnings = []
---> 88 rhat_max = max(val.max() for val in gelman_rubin.values())
89 if rhat_max > 1.4:
90 msg = ("The gelman-rubin statistic is larger than 1.4 for some "
/usr/local/lib/python3.6/dist-packages/numpy/core/_methods.py in _amax(a, axis, out, keepdims, initial)
26 def _amax(a, axis=None, out=None, keepdims=False,
27 initial=_NoValue):
---> 28 return umr_maximum(a, axis, None, out, keepdims, initial)
29
30 def _amin(a, axis=None, out=None, keepdims=False,
ValueError: zero-size array to reduction operation maximum which has no identity
|
ValueError
|
def random(self, point=None, size=None):
"""
Draw random values from TruncatedNormal distribution.
Parameters
----------
point : dict, optional
Dict of variable values on which random values are to be
conditioned (uses default point if not specified).
size : int, optional
Desired size of random sample (returns one sample if not
specified).
Returns
-------
array
"""
mu, sigma, lower, upper = draw_values(
[self.mu, self.sigma, self.lower, self.upper], point=point, size=size
)
return generate_samples(
self._random,
mu=mu,
sigma=sigma,
lower=lower,
upper=upper,
dist_shape=self.shape,
size=size,
)
|
def random(self, point=None, size=None):
"""
Draw random values from TruncatedNormal distribution.
Parameters
----------
point : dict, optional
Dict of variable values on which random values are to be
conditioned (uses default point if not specified).
size : int, optional
Desired size of random sample (returns one sample if not
specified).
Returns
-------
array
"""
mu_v, std_v, a_v, b_v = draw_values(
[self.mu, self.sigma, self.lower, self.upper], point=point, size=size
)
return generate_samples(
stats.truncnorm.rvs,
a=(a_v - mu_v) / std_v,
b=(b_v - mu_v) / std_v,
loc=mu_v,
scale=std_v,
dist_shape=self.shape,
size=size,
)
|
https://github.com/pymc-devs/pymc3/issues/3481
|
ValueError Traceback (most recent call last)
~/projects/xplan/xplan-experiment-analysis/sample_prior_predictive_error.py in <module>
8
9 with model:
---> 10 pre_trace = pm.sample_prior_predictive()
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, var_names, random_seed)
1320 names = get_default_varnames(model.named_vars, include_transformed=False)
1321 # draw_values fails with auto-transformed variables. transform them later!
-> 1322 values = draw_values([model[name] for name in names], size=samples)
1323
1324 data = {k: v for k, v in zip(names, values)}
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/distribution.py in draw_values(params, point, size)
393 point=point,
394 givens=temp_givens,
--> 395 size=size)
396 givens[next_.name] = (next_, value)
397 drawn[(next_, size)] = value
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/distribution.py in _draw_value(param, point, givens, size)
579 else:
580 dist_tmp.shape = val.shape
--> 581 return dist_tmp.random(point=point, size=size)
582 else:
583 return param.distribution.random(point=point, size=size)
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/continuous.py in random(self, point, size)
668 [self.mu, self.sigma, self.lower, self.upper], point=point, size=size)
669 return generate_samples(stats.truncnorm.rvs,
--> 670 a=(a_v - mu_v)/std_v,
671 b=(b_v - mu_v) / std_v,
672 loc=mu_v,
ValueError: operands could not be broadcast together with shapes (500,1031) (500,)
|
ValueError
|
def random(self, point=None, size=None):
"""
Draw random values from Triangular distribution.
Parameters
----------
point : dict, optional
Dict of variable values on which random values are to be
conditioned (uses default point if not specified).
size : int, optional
Desired size of random sample (returns one sample if not
specified).
Returns
-------
array
"""
c, lower, upper = draw_values(
[self.c, self.lower, self.upper], point=point, size=size
)
return generate_samples(
self._random, c=c, lower=lower, upper=upper, size=size, dist_shape=self.shape
)
|
def random(self, point=None, size=None):
"""
Draw random values from Triangular distribution.
Parameters
----------
point : dict, optional
Dict of variable values on which random values are to be
conditioned (uses default point if not specified).
size : int, optional
Desired size of random sample (returns one sample if not
specified).
Returns
-------
array
"""
c, lower, upper = draw_values(
[self.c, self.lower, self.upper], point=point, size=size
)
scale = upper - lower
c_ = (c - lower) / scale
return generate_samples(
stats.triang.rvs, c=c_, loc=lower, scale=scale, size=size, dist_shape=self.shape
)
|
https://github.com/pymc-devs/pymc3/issues/3481
|
ValueError Traceback (most recent call last)
~/projects/xplan/xplan-experiment-analysis/sample_prior_predictive_error.py in <module>
8
9 with model:
---> 10 pre_trace = pm.sample_prior_predictive()
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, var_names, random_seed)
1320 names = get_default_varnames(model.named_vars, include_transformed=False)
1321 # draw_values fails with auto-transformed variables. transform them later!
-> 1322 values = draw_values([model[name] for name in names], size=samples)
1323
1324 data = {k: v for k, v in zip(names, values)}
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/distribution.py in draw_values(params, point, size)
393 point=point,
394 givens=temp_givens,
--> 395 size=size)
396 givens[next_.name] = (next_, value)
397 drawn[(next_, size)] = value
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/distribution.py in _draw_value(param, point, givens, size)
579 else:
580 dist_tmp.shape = val.shape
--> 581 return dist_tmp.random(point=point, size=size)
582 else:
583 return param.distribution.random(point=point, size=size)
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/continuous.py in random(self, point, size)
668 [self.mu, self.sigma, self.lower, self.upper], point=point, size=size)
669 return generate_samples(stats.truncnorm.rvs,
--> 670 a=(a_v - mu_v)/std_v,
671 b=(b_v - mu_v) / std_v,
672 loc=mu_v,
ValueError: operands could not be broadcast together with shapes (500,1031) (500,)
|
ValueError
|
def random(self, point=None, size=None):
"""
Draw random values from Rice distribution.
Parameters
----------
point : dict, optional
Dict of variable values on which random values are to be
conditioned (uses default point if not specified).
size : int, optional
Desired size of random sample (returns one sample if not
specified).
Returns
-------
array
"""
nu, sigma = draw_values([self.nu, self.sigma], point=point, size=size)
return generate_samples(
self._random, nu=nu, sigma=sigma, dist_shape=self.shape, size=size
)
|
def random(self, point=None, size=None):
"""
Draw random values from Rice distribution.
Parameters
----------
point : dict, optional
Dict of variable values on which random values are to be
conditioned (uses default point if not specified).
size : int, optional
Desired size of random sample (returns one sample if not
specified).
Returns
-------
array
"""
nu, sigma = draw_values([self.nu, self.sigma], point=point, size=size)
return generate_samples(
stats.rice.rvs,
b=nu / sigma,
scale=sigma,
loc=0,
dist_shape=self.shape,
size=size,
)
|
https://github.com/pymc-devs/pymc3/issues/3481
|
ValueError Traceback (most recent call last)
~/projects/xplan/xplan-experiment-analysis/sample_prior_predictive_error.py in <module>
8
9 with model:
---> 10 pre_trace = pm.sample_prior_predictive()
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, var_names, random_seed)
1320 names = get_default_varnames(model.named_vars, include_transformed=False)
1321 # draw_values fails with auto-transformed variables. transform them later!
-> 1322 values = draw_values([model[name] for name in names], size=samples)
1323
1324 data = {k: v for k, v in zip(names, values)}
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/distribution.py in draw_values(params, point, size)
393 point=point,
394 givens=temp_givens,
--> 395 size=size)
396 givens[next_.name] = (next_, value)
397 drawn[(next_, size)] = value
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/distribution.py in _draw_value(param, point, givens, size)
579 else:
580 dist_tmp.shape = val.shape
--> 581 return dist_tmp.random(point=point, size=size)
582 else:
583 return param.distribution.random(point=point, size=size)
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/continuous.py in random(self, point, size)
668 [self.mu, self.sigma, self.lower, self.upper], point=point, size=size)
669 return generate_samples(stats.truncnorm.rvs,
--> 670 a=(a_v - mu_v)/std_v,
671 b=(b_v - mu_v) / std_v,
672 loc=mu_v,
ValueError: operands could not be broadcast together with shapes (500,1031) (500,)
|
ValueError
|
def random(self, point=None, size=None):
"""
Draw random values from ZeroInflatedNegativeBinomial distribution.
Parameters
----------
point : dict, optional
Dict of variable values on which random values are to be
conditioned (uses default point if not specified).
size : int, optional
Desired size of random sample (returns one sample if not
specified).
Returns
-------
array
"""
mu, alpha, psi = draw_values(
[self.mu, self.alpha, self.psi], point=point, size=size
)
g = generate_samples(
self._random, mu=mu, alpha=alpha, dist_shape=self.shape, size=size
)
g[g == 0] = np.finfo(float).eps # Just in case
g, psi = broadcast_distribution_samples([g, psi], size=size)
return stats.poisson.rvs(g) * (np.random.random(g.shape) < psi)
|
def random(self, point=None, size=None):
"""
Draw random values from ZeroInflatedNegativeBinomial distribution.
Parameters
----------
point : dict, optional
Dict of variable values on which random values are to be
conditioned (uses default point if not specified).
size : int, optional
Desired size of random sample (returns one sample if not
specified).
Returns
-------
array
"""
mu, alpha, psi = draw_values(
[self.mu, self.alpha, self.psi], point=point, size=size
)
g = generate_samples(
stats.gamma.rvs, alpha, scale=mu / alpha, dist_shape=self.shape, size=size
)
g[g == 0] = np.finfo(float).eps # Just in case
g, psi = broadcast_distribution_samples([g, psi], size=size)
return stats.poisson.rvs(g) * (np.random.random(g.shape) < psi)
|
https://github.com/pymc-devs/pymc3/issues/3481
|
ValueError Traceback (most recent call last)
~/projects/xplan/xplan-experiment-analysis/sample_prior_predictive_error.py in <module>
8
9 with model:
---> 10 pre_trace = pm.sample_prior_predictive()
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, var_names, random_seed)
1320 names = get_default_varnames(model.named_vars, include_transformed=False)
1321 # draw_values fails with auto-transformed variables. transform them later!
-> 1322 values = draw_values([model[name] for name in names], size=samples)
1323
1324 data = {k: v for k, v in zip(names, values)}
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/distribution.py in draw_values(params, point, size)
393 point=point,
394 givens=temp_givens,
--> 395 size=size)
396 givens[next_.name] = (next_, value)
397 drawn[(next_, size)] = value
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/distribution.py in _draw_value(param, point, givens, size)
579 else:
580 dist_tmp.shape = val.shape
--> 581 return dist_tmp.random(point=point, size=size)
582 else:
583 return param.distribution.random(point=point, size=size)
/usr/local/Cellar/python/3.7.3/Frameworks/Python.framework/Versions/3.7/lib/python3.7/site-packages/pymc3/distributions/continuous.py in random(self, point, size)
668 [self.mu, self.sigma, self.lower, self.upper], point=point, size=size)
669 return generate_samples(stats.truncnorm.rvs,
--> 670 a=(a_v - mu_v)/std_v,
671 b=(b_v - mu_v) / std_v,
672 loc=mu_v,
ValueError: operands could not be broadcast together with shapes (500,1031) (500,)
|
ValueError
|
def _repr_cov_params(self, dist=None):
if dist is None:
dist = self
if self._cov_type == "chol":
chol = get_variable_name(self.chol_cov)
return r"\mathit{{chol}}={}".format(chol)
elif self._cov_type == "cov":
cov = get_variable_name(self.cov)
return r"\mathit{{cov}}={}".format(cov)
elif self._cov_type == "tau":
tau = get_variable_name(self.tau)
return r"\mathit{{tau}}={}".format(tau)
|
def _repr_cov_params(self, dist=None):
if dist is None:
dist = self
if self._cov_type == "chol":
chol = get_variable_name(self.chol)
return r"\mathit{{chol}}={}".format(chol)
elif self._cov_type == "cov":
cov = get_variable_name(self.cov)
return r"\mathit{{cov}}={}".format(cov)
elif self._cov_type == "tau":
tau = get_variable_name(self.tau)
return r"\mathit{{tau}}={}".format(tau)
|
https://github.com/pymc-devs/pymc3/issues/3450
|
Traceback (most recent call last):
File "fail.py", line 9, in <module>
print(d.distribution._repr_latex_())
File "/nix/store/4c6ihiawh232fszikcyxhdk32rzk4l28-python3-3.7.2-env/lib/python3.7/site-packages/pymc3/distributions/multivariate.py", line 286, in _repr_latex_
.format(name, name_mu, self._repr_cov_params(dist)))
File "/nix/store/4c6ihiawh232fszikcyxhdk32rzk4l28-python3-3.7.2-env/lib/python3.7/site-packages/pymc3/distributions/multivariate.py", line 145, in _repr_cov_params
chol = get_variable_name(self.chol)
AttributeError: 'MvNormal' object has no attribute 'chol'
|
AttributeError
|
def __init__(
self,
mu=0,
sigma=None,
tau=None,
lower=None,
upper=None,
transform="auto",
sd=None,
*args,
**kwargs,
):
if sd is not None:
sigma = sd
tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
self.sigma = self.sd = tt.as_tensor_variable(sigma)
self.tau = tt.as_tensor_variable(tau)
self.lower_check = (
tt.as_tensor_variable(floatX(lower)) if lower is not None else lower
)
self.upper_check = (
tt.as_tensor_variable(floatX(upper)) if upper is not None else upper
)
self.lower = (
tt.as_tensor_variable(floatX(lower))
if lower is not None
else tt.as_tensor_variable(-np.inf)
)
self.upper = (
tt.as_tensor_variable(floatX(upper))
if upper is not None
else tt.as_tensor_variable(np.inf)
)
self.mu = tt.as_tensor_variable(floatX(mu))
if self.lower_check is None and self.upper_check is None:
self._defaultval = mu
elif self.lower_check is None and self.upper_check is not None:
self._defaultval = self.upper - 1.0
elif self.lower_check is not None and self.upper_check is None:
self._defaultval = self.lower + 1.0
else:
self._defaultval = (self.lower + self.upper) / 2
assert_negative_support(sigma, "sigma", "TruncatedNormal")
assert_negative_support(tau, "tau", "TruncatedNormal")
super().__init__(
defaults=("_defaultval",),
transform=transform,
lower=lower,
upper=upper,
*args,
**kwargs,
)
|
def __init__(
self,
mu=0,
sigma=None,
tau=None,
lower=None,
upper=None,
transform="auto",
sd=None,
*args,
**kwargs,
):
if sd is not None:
sigma = sd
tau, sigma = get_tau_sigma(tau=tau, sigma=sigma)
self.sigma = self.sd = tt.as_tensor_variable(sigma)
self.tau = tt.as_tensor_variable(tau)
self.lower = tt.as_tensor_variable(floatX(lower)) if lower is not None else lower
self.upper = tt.as_tensor_variable(floatX(upper)) if upper is not None else upper
self.mu = tt.as_tensor_variable(floatX(mu))
if self.lower is None and self.upper is None:
self._defaultval = mu
elif self.lower is None and self.upper is not None:
self._defaultval = self.upper - 1.0
elif self.lower is not None and self.upper is None:
self._defaultval = self.lower + 1.0
else:
self._defaultval = (self.lower + self.upper) / 2
assert_negative_support(sigma, "sigma", "TruncatedNormal")
assert_negative_support(tau, "tau", "TruncatedNormal")
super().__init__(
defaults=("_defaultval",),
transform=transform,
lower=lower,
upper=upper,
*args,
**kwargs,
)
|
https://github.com/pymc-devs/pymc3/issues/3248
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Applications/anaconda3/envs/Fit2/lib/python3.6/site-packages/pymc3/distributions/continuous.py", line 578, in random
[self.mu, self.sd, self.lower, self.upper], point=point, size=size)
File "/Applications/anaconda3/envs/Fit2/lib/python3.6/site-packages/pymc3/distributions/distribution.py", line 321, in draw_values
evaluated[param_idx] = _draw_value(param, point=point, givens=givens.values(), size=size)
File "/Applications/anaconda3/envs/Fit2/lib/python3.6/site-packages/pymc3/distributions/distribution.py", line 418, in _draw_value
raise ValueError('Unexpected type in draw_value: %s' % type(param))
ValueError: Unexpected type in draw_value: <class 'NoneType'>
|
ValueError
|
def logp(self, value):
"""
Calculate log-probability of TruncatedNormal distribution at specified value.
Parameters
----------
value : numeric
Value(s) for which log-probability is calculated. If the log probabilities for multiple
values are desired the values must be provided in a numpy array or theano tensor
Returns
-------
TensorVariable
"""
mu = self.mu
sigma = self.sigma
norm = self._normalization()
logp = Normal.dist(mu=mu, sigma=sigma).logp(value) - norm
bounds = [sigma > 0]
if self.lower_check is not None:
bounds.append(value >= self.lower)
if self.upper_check is not None:
bounds.append(value <= self.upper)
return bound(logp, *bounds)
|
def logp(self, value):
"""
Calculate log-probability of TruncatedNormal distribution at specified value.
Parameters
----------
value : numeric
Value(s) for which log-probability is calculated. If the log probabilities for multiple
values are desired the values must be provided in a numpy array or theano tensor
Returns
-------
TensorVariable
"""
mu = self.mu
sigma = self.sigma
norm = self._normalization()
logp = Normal.dist(mu=mu, sigma=sigma).logp(value) - norm
bounds = [sigma > 0]
if self.lower is not None:
bounds.append(value >= self.lower)
if self.upper is not None:
bounds.append(value <= self.upper)
return bound(logp, *bounds)
|
https://github.com/pymc-devs/pymc3/issues/3248
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Applications/anaconda3/envs/Fit2/lib/python3.6/site-packages/pymc3/distributions/continuous.py", line 578, in random
[self.mu, self.sd, self.lower, self.upper], point=point, size=size)
File "/Applications/anaconda3/envs/Fit2/lib/python3.6/site-packages/pymc3/distributions/distribution.py", line 321, in draw_values
evaluated[param_idx] = _draw_value(param, point=point, givens=givens.values(), size=size)
File "/Applications/anaconda3/envs/Fit2/lib/python3.6/site-packages/pymc3/distributions/distribution.py", line 418, in _draw_value
raise ValueError('Unexpected type in draw_value: %s' % type(param))
ValueError: Unexpected type in draw_value: <class 'NoneType'>
|
ValueError
|
def _normalization(self):
mu, sigma = self.mu, self.sigma
if self.lower_check is None and self.upper_check is None:
return 0.0
if self.lower_check is not None and self.upper_check is not None:
lcdf_a = normal_lcdf(mu, sigma, self.lower)
lcdf_b = normal_lcdf(mu, sigma, self.upper)
lsf_a = normal_lccdf(mu, sigma, self.lower)
lsf_b = normal_lccdf(mu, sigma, self.upper)
return tt.switch(
self.lower > 0,
logdiffexp(lsf_a, lsf_b),
logdiffexp(lcdf_b, lcdf_a),
)
if self.lower_check is not None:
return normal_lccdf(mu, sigma, self.lower)
else:
return normal_lcdf(mu, sigma, self.upper)
|
def _normalization(self):
mu, sigma = self.mu, self.sigma
if self.lower is None and self.upper is None:
return 0.0
if self.lower is not None and self.upper is not None:
lcdf_a = normal_lcdf(mu, sigma, self.lower)
lcdf_b = normal_lcdf(mu, sigma, self.upper)
lsf_a = normal_lccdf(mu, sigma, self.lower)
lsf_b = normal_lccdf(mu, sigma, self.upper)
return tt.switch(
self.lower > 0,
logdiffexp(lsf_a, lsf_b),
logdiffexp(lcdf_b, lcdf_a),
)
if self.lower is not None:
return normal_lccdf(mu, sigma, self.lower)
else:
return normal_lcdf(mu, sigma, self.upper)
|
https://github.com/pymc-devs/pymc3/issues/3248
|
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/Applications/anaconda3/envs/Fit2/lib/python3.6/site-packages/pymc3/distributions/continuous.py", line 578, in random
[self.mu, self.sd, self.lower, self.upper], point=point, size=size)
File "/Applications/anaconda3/envs/Fit2/lib/python3.6/site-packages/pymc3/distributions/distribution.py", line 321, in draw_values
evaluated[param_idx] = _draw_value(param, point=point, givens=givens.values(), size=size)
File "/Applications/anaconda3/envs/Fit2/lib/python3.6/site-packages/pymc3/distributions/distribution.py", line 418, in _draw_value
raise ValueError('Unexpected type in draw_value: %s' % type(param))
ValueError: Unexpected type in draw_value: <class 'NoneType'>
|
ValueError
|
def __new__(cls, *args, **kwargs):
# resolves the parent instance
instance = super().__new__(cls)
if cls.get_contexts():
potential_parent = cls.get_contexts()[-1]
# We have to make sure that the context is a _DrawValuesContext
# and not a Model
if isinstance(potential_parent, _DrawValuesContext):
instance._parent = potential_parent
else:
instance._parent = None
else:
instance._parent = None
return instance
|
def __new__(cls, *args, **kwargs):
# resolves the parent instance
instance = super().__new__(cls)
if cls.get_contexts():
potential_parent = cls.get_contexts()[-1]
# We have to make sure that the context is a _DrawValuesContext
# and not a Model
if isinstance(potential_parent, cls):
instance._parent = potential_parent
else:
instance._parent = None
else:
instance._parent = None
return instance
|
https://github.com/pymc-devs/pymc3/issues/3246
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-7300cc3c60ce> in <module>()
8
9 with model:
---> 10 pm.sample_prior_predictive(50)
11
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1332 names = get_default_varnames(model.named_vars, include_transformed=False)
1333 # draw_values fails with auto-transformed variables. transform them later!
-> 1334 values = draw_values([model[name] for name in names], size=samples)
1335
1336 data = {k: v for k, v in zip(names, values)}
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/distributions/distribution.py in draw_values(params, point, size)
310 while to_eval or missing_inputs:
311 if to_eval == missing_inputs:
--> 312 raise ValueError('Cannot resolve inputs for {}'.format([str(params[j]) for j in to_eval]))
313 to_eval = set(missing_inputs)
314 missing_inputs = set()
ValueError: Cannot resolve inputs for ['chol_packed']
|
ValueError
|
def __init__(self):
if self.parent is not None:
# All _DrawValuesContext instances that are in the context of
# another _DrawValuesContext will share the reference to the
# drawn_vars dictionary. This means that separate branches
# in the nested _DrawValuesContext context tree will see the
# same drawn values.
# The drawn_vars keys shall be (RV, size) tuples
self.drawn_vars = self.parent.drawn_vars
else:
self.drawn_vars = dict()
|
def __init__(self):
if self.parent is not None:
# All _DrawValuesContext instances that are in the context of
# another _DrawValuesContext will share the reference to the
# drawn_vars dictionary. This means that separate branches
# in the nested _DrawValuesContext context tree will see the
# same drawn values
self.drawn_vars = self.parent.drawn_vars
else:
self.drawn_vars = dict()
|
https://github.com/pymc-devs/pymc3/issues/3246
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-7300cc3c60ce> in <module>()
8
9 with model:
---> 10 pm.sample_prior_predictive(50)
11
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1332 names = get_default_varnames(model.named_vars, include_transformed=False)
1333 # draw_values fails with auto-transformed variables. transform them later!
-> 1334 values = draw_values([model[name] for name in names], size=samples)
1335
1336 data = {k: v for k, v in zip(names, values)}
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/distributions/distribution.py in draw_values(params, point, size)
310 while to_eval or missing_inputs:
311 if to_eval == missing_inputs:
--> 312 raise ValueError('Cannot resolve inputs for {}'.format([str(params[j]) for j in to_eval]))
313 to_eval = set(missing_inputs)
314 missing_inputs = set()
ValueError: Cannot resolve inputs for ['chol_packed']
|
ValueError
|
def draw_values(params, point=None, size=None):
"""
Draw (fix) parameter values. Handles a number of cases:
1) The parameter is a scalar
2) The parameter is an *RV
a) parameter can be fixed to the value in the point
b) parameter can be fixed by sampling from the *RV
c) parameter can be fixed using tag.test_value (last resort)
3) The parameter is a tensor variable/constant. Can be evaluated using
theano.function, but a variable may contain nodes which
a) are named parameters in the point
b) are *RVs with a random method
"""
# Get fast drawable values (i.e. things in point or numbers, arrays,
# constants or shares, or things that were already drawn in related
# contexts)
if point is None:
point = {}
with _DrawValuesContext() as context:
params = dict(enumerate(params))
drawn = context.drawn_vars
evaluated = {}
symbolic_params = []
for i, p in params.items():
# If the param is fast drawable, then draw the value immediately
if is_fast_drawable(p):
v = _draw_value(p, point=point, size=size)
evaluated[i] = v
continue
name = getattr(p, "name", None)
if (p, size) in drawn:
# param was drawn in related contexts
v = drawn[(p, size)]
evaluated[i] = v
elif name is not None and name in point:
# param.name is in point
v = point[name]
evaluated[i] = drawn[(p, size)] = v
else:
# param still needs to be drawn
symbolic_params.append((i, p))
if not symbolic_params:
# We only need to enforce the correct order if there are symbolic
# params that could be drawn in variable order
return [evaluated[i] for i in params]
# Distribution parameters may be nodes which have named node-inputs
# specified in the point. Need to find the node-inputs, their
# parents and children to replace them.
leaf_nodes = {}
named_nodes_parents = {}
named_nodes_children = {}
for _, param in symbolic_params:
if hasattr(param, "name"):
# Get the named nodes under the `param` node
nn, nnp, nnc = get_named_nodes_and_relations(param)
leaf_nodes.update(nn)
# Update the discovered parental relationships
for k in nnp.keys():
if k not in named_nodes_parents.keys():
named_nodes_parents[k] = nnp[k]
else:
named_nodes_parents[k].update(nnp[k])
# Update the discovered child relationships
for k in nnc.keys():
if k not in named_nodes_children.keys():
named_nodes_children[k] = nnc[k]
else:
named_nodes_children[k].update(nnc[k])
# Init givens and the stack of nodes to try to `_draw_value` from
givens = {
p.name: (p, v)
for (p, size), v in drawn.items()
if getattr(p, "name", None) is not None
}
stack = list(leaf_nodes.values()) # A queue would be more appropriate
while stack:
next_ = stack.pop(0)
if (next_, size) in drawn:
# If the node already has a givens value, skip it
continue
elif isinstance(next_, (tt.TensorConstant, tt.sharedvar.SharedVariable)):
# If the node is a theano.tensor.TensorConstant or a
# theano.tensor.sharedvar.SharedVariable, its value will be
# available automatically in _compile_theano_function so
# we can skip it. Furthermore, if this node was treated as a
# TensorVariable that should be compiled by theano in
# _compile_theano_function, it would raise a `TypeError:
# ('Constants not allowed in param list', ...)` for
# TensorConstant, and a `TypeError: Cannot use a shared
# variable (...) as explicit input` for SharedVariable.
continue
else:
# If the node does not have a givens value, try to draw it.
# The named node's children givens values must also be taken
# into account.
children = named_nodes_children[next_]
temp_givens = [givens[k] for k in givens if k in children]
try:
# This may fail for autotransformed RVs, which don't
# have the random method
value = _draw_value(
next_, point=point, givens=temp_givens, size=size
)
givens[next_.name] = (next_, value)
drawn[(next_, size)] = value
except theano.gof.fg.MissingInputError:
# The node failed, so we must add the node's parents to
# the stack of nodes to try to draw from. We exclude the
# nodes in the `params` list.
stack.extend(
[
node
for node in named_nodes_parents[next_]
if node is not None
and (node, size) not in drawn
and node not in params
]
)
# the below makes sure the graph is evaluated in order
# test_distributions_random::TestDrawValues::test_draw_order fails without it
# The remaining params that must be drawn are all hashable
to_eval = set()
missing_inputs = set([j for j, p in symbolic_params])
while to_eval or missing_inputs:
if to_eval == missing_inputs:
raise ValueError(
"Cannot resolve inputs for {}".format(
[str(params[j]) for j in to_eval]
)
)
to_eval = set(missing_inputs)
missing_inputs = set()
for param_idx in to_eval:
param = params[param_idx]
if (param, size) in drawn:
evaluated[param_idx] = drawn[(param, size)]
else:
try: # might evaluate in a bad order,
value = _draw_value(
param, point=point, givens=givens.values(), size=size
)
evaluated[param_idx] = drawn[(param, size)] = value
givens[param.name] = (param, value)
except theano.gof.fg.MissingInputError:
missing_inputs.add(param_idx)
return [evaluated[j] for j in params] # set the order back
|
def draw_values(params, point=None, size=None):
"""
Draw (fix) parameter values. Handles a number of cases:
1) The parameter is a scalar
2) The parameter is an *RV
a) parameter can be fixed to the value in the point
b) parameter can be fixed by sampling from the *RV
c) parameter can be fixed using tag.test_value (last resort)
3) The parameter is a tensor variable/constant. Can be evaluated using
theano.function, but a variable may contain nodes which
a) are named parameters in the point
b) are *RVs with a random method
"""
# Get fast drawable values (i.e. things in point or numbers, arrays,
# constants or shares, or things that were already drawn in related
# contexts)
if point is None:
point = {}
with _DrawValuesContext() as context:
params = dict(enumerate(params))
drawn = context.drawn_vars
evaluated = {}
symbolic_params = []
for i, p in params.items():
# If the param is fast drawable, then draw the value immediately
if is_fast_drawable(p):
v = _draw_value(p, point=point, size=size)
evaluated[i] = v
continue
name = getattr(p, "name", None)
if p in drawn:
# param was drawn in related contexts
v = drawn[p]
evaluated[i] = v
elif name is not None and name in point:
# param.name is in point
v = point[name]
evaluated[i] = drawn[p] = v
else:
# param still needs to be drawn
symbolic_params.append((i, p))
if not symbolic_params:
# We only need to enforce the correct order if there are symbolic
# params that could be drawn in variable order
return [evaluated[i] for i in params]
# Distribution parameters may be nodes which have named node-inputs
# specified in the point. Need to find the node-inputs, their
# parents and children to replace them.
leaf_nodes = {}
named_nodes_parents = {}
named_nodes_children = {}
for _, param in symbolic_params:
if hasattr(param, "name"):
# Get the named nodes under the `param` node
nn, nnp, nnc = get_named_nodes_and_relations(param)
leaf_nodes.update(nn)
# Update the discovered parental relationships
for k in nnp.keys():
if k not in named_nodes_parents.keys():
named_nodes_parents[k] = nnp[k]
else:
named_nodes_parents[k].update(nnp[k])
# Update the discovered child relationships
for k in nnc.keys():
if k not in named_nodes_children.keys():
named_nodes_children[k] = nnc[k]
else:
named_nodes_children[k].update(nnc[k])
# Init givens and the stack of nodes to try to `_draw_value` from
givens = {
p.name: (p, v)
for p, v in drawn.items()
if getattr(p, "name", None) is not None
}
stack = list(leaf_nodes.values()) # A queue would be more appropriate
while stack:
next_ = stack.pop(0)
if next_ in drawn:
# If the node already has a givens value, skip it
continue
elif isinstance(next_, (tt.TensorConstant, tt.sharedvar.SharedVariable)):
# If the node is a theano.tensor.TensorConstant or a
# theano.tensor.sharedvar.SharedVariable, its value will be
# available automatically in _compile_theano_function so
# we can skip it. Furthermore, if this node was treated as a
# TensorVariable that should be compiled by theano in
# _compile_theano_function, it would raise a `TypeError:
# ('Constants not allowed in param list', ...)` for
# TensorConstant, and a `TypeError: Cannot use a shared
# variable (...) as explicit input` for SharedVariable.
continue
else:
# If the node does not have a givens value, try to draw it.
# The named node's children givens values must also be taken
# into account.
children = named_nodes_children[next_]
temp_givens = [givens[k] for k in givens if k in children]
try:
# This may fail for autotransformed RVs, which don't
# have the random method
value = _draw_value(
next_, point=point, givens=temp_givens, size=size
)
givens[next_.name] = (next_, value)
drawn[next_] = value
except theano.gof.fg.MissingInputError:
# The node failed, so we must add the node's parents to
# the stack of nodes to try to draw from. We exclude the
# nodes in the `params` list.
stack.extend(
[
node
for node in named_nodes_parents[next_]
if node is not None
and node.name not in drawn
and node not in params
]
)
# the below makes sure the graph is evaluated in order
# test_distributions_random::TestDrawValues::test_draw_order fails without it
# The remaining params that must be drawn are all hashable
to_eval = set()
missing_inputs = set([j for j, p in symbolic_params])
while to_eval or missing_inputs:
if to_eval == missing_inputs:
raise ValueError(
"Cannot resolve inputs for {}".format(
[str(params[j]) for j in to_eval]
)
)
to_eval = set(missing_inputs)
missing_inputs = set()
for param_idx in to_eval:
param = params[param_idx]
if param in drawn:
evaluated[param_idx] = drawn[param]
else:
try: # might evaluate in a bad order,
value = _draw_value(
param, point=point, givens=givens.values(), size=size
)
evaluated[param_idx] = drawn[param] = value
givens[param.name] = (param, value)
except theano.gof.fg.MissingInputError:
missing_inputs.add(param_idx)
return [evaluated[j] for j in params] # set the order back
|
https://github.com/pymc-devs/pymc3/issues/3246
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-7300cc3c60ce> in <module>()
8
9 with model:
---> 10 pm.sample_prior_predictive(50)
11
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1332 names = get_default_varnames(model.named_vars, include_transformed=False)
1333 # draw_values fails with auto-transformed variables. transform them later!
-> 1334 values = draw_values([model[name] for name in names], size=samples)
1335
1336 data = {k: v for k, v in zip(names, values)}
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/distributions/distribution.py in draw_values(params, point, size)
310 while to_eval or missing_inputs:
311 if to_eval == missing_inputs:
--> 312 raise ValueError('Cannot resolve inputs for {}'.format([str(params[j]) for j in to_eval]))
313 to_eval = set(missing_inputs)
314 missing_inputs = set()
ValueError: Cannot resolve inputs for ['chol_packed']
|
ValueError
|
def _draw_value(param, point=None, givens=None, size=None):
"""Draw a random value from a distribution or return a constant.
Parameters
----------
param : number, array like, theano variable or pymc3 random variable
The value or distribution. Constants or shared variables
will be converted to an array and returned. Theano variables
are evaluated. If `param` is a pymc3 random variables, draw
a new value from it and return that, unless a value is specified
in `point`.
point : dict, optional
A dictionary from pymc3 variable names to their values.
givens : dict, optional
A dictionary from theano variables to their values. These values
are used to evaluate `param` if it is a theano variable.
size : int, optional
Number of samples
"""
if isinstance(param, (numbers.Number, np.ndarray)):
return param
elif isinstance(param, tt.TensorConstant):
return param.value
elif isinstance(param, tt.sharedvar.SharedVariable):
return param.get_value()
elif isinstance(param, (tt.TensorVariable, MultiObservedRV)):
if point and hasattr(param, "model") and param.name in point:
return point[param.name]
elif hasattr(param, "random") and param.random is not None:
return param.random(point=point, size=size)
elif (
hasattr(param, "distribution")
and hasattr(param.distribution, "random")
and param.distribution.random is not None
):
if hasattr(param, "observations"):
# shape inspection for ObservedRV
dist_tmp = param.distribution
try:
distshape = param.observations.shape.eval()
except AttributeError:
distshape = param.observations.shape
dist_tmp.shape = distshape
try:
dist_tmp.random(point=point, size=size)
except (ValueError, TypeError):
# reset shape to account for shape changes
# with theano.shared inputs
dist_tmp.shape = np.array([])
# We want to draw values to infer the dist_shape,
# we don't want to store these drawn values to the context
with _DrawValuesContextBlocker():
val = np.atleast_1d(dist_tmp.random(point=point, size=None))
# Sometimes point may change the size of val but not the
# distribution's shape
if point and size is not None:
temp_size = np.atleast_1d(size)
if all(val.shape[: len(temp_size)] == temp_size):
dist_tmp.shape = val.shape[len(temp_size) :]
else:
dist_tmp.shape = val.shape
return dist_tmp.random(point=point, size=size)
else:
return param.distribution.random(point=point, size=size)
else:
if givens:
variables, values = list(zip(*givens))
else:
variables = values = []
# We only truly care if the ancestors of param that were given
# value have the matching dshape and val.shape
param_ancestors = set(
theano.gof.graph.ancestors([param], blockers=list(variables))
)
inputs = [
(var, val)
for var, val in zip(variables, values)
if var in param_ancestors
]
if inputs:
input_vars, input_vals = list(zip(*inputs))
else:
input_vars = []
input_vals = []
func = _compile_theano_function(param, input_vars)
if size is not None:
size = np.atleast_1d(size)
dshaped_variables = all((hasattr(var, "dshape") for var in input_vars))
if (
values
and dshaped_variables
and not all(
var.dshape == getattr(val, "shape", tuple())
for var, val in zip(input_vars, input_vals)
)
):
output = np.array([func(*v) for v in zip(*input_vals)])
elif size is not None and any(
(val.ndim > var.ndim) for var, val in zip(input_vars, input_vals)
):
output = np.array([func(*v) for v in zip(*input_vals)])
else:
output = func(*input_vals)
return output
raise ValueError("Unexpected type in draw_value: %s" % type(param))
|
def _draw_value(param, point=None, givens=None, size=None):
"""Draw a random value from a distribution or return a constant.
Parameters
----------
param : number, array like, theano variable or pymc3 random variable
The value or distribution. Constants or shared variables
will be converted to an array and returned. Theano variables
are evaluated. If `param` is a pymc3 random variables, draw
a new value from it and return that, unless a value is specified
in `point`.
point : dict, optional
A dictionary from pymc3 variable names to their values.
givens : dict, optional
A dictionary from theano variables to their values. These values
are used to evaluate `param` if it is a theano variable.
size : int, optional
Number of samples
"""
if isinstance(param, (numbers.Number, np.ndarray)):
return param
elif isinstance(param, tt.TensorConstant):
return param.value
elif isinstance(param, tt.sharedvar.SharedVariable):
return param.get_value()
elif isinstance(param, (tt.TensorVariable, MultiObservedRV)):
if point and hasattr(param, "model") and param.name in point:
return point[param.name]
elif hasattr(param, "random") and param.random is not None:
return param.random(point=point, size=size)
elif (
hasattr(param, "distribution")
and hasattr(param.distribution, "random")
and param.distribution.random is not None
):
if hasattr(param, "observations"):
# shape inspection for ObservedRV
dist_tmp = param.distribution
try:
distshape = param.observations.shape.eval()
except AttributeError:
distshape = param.observations.shape
dist_tmp.shape = distshape
try:
dist_tmp.random(point=point, size=size)
except (ValueError, TypeError):
# reset shape to account for shape changes
# with theano.shared inputs
dist_tmp.shape = np.array([])
val = np.atleast_1d(dist_tmp.random(point=point, size=None))
# Sometimes point may change the size of val but not the
# distribution's shape
if point and size is not None:
temp_size = np.atleast_1d(size)
if all(val.shape[: len(temp_size)] == temp_size):
dist_tmp.shape = val.shape[len(temp_size) :]
else:
dist_tmp.shape = val.shape
return dist_tmp.random(point=point, size=size)
else:
return param.distribution.random(point=point, size=size)
else:
if givens:
variables, values = list(zip(*givens))
else:
variables = values = []
func = _compile_theano_function(param, variables)
if size is not None:
size = np.atleast_1d(size)
dshaped_variables = all((hasattr(var, "dshape") for var in variables))
if (
values
and dshaped_variables
and not all(
var.dshape == getattr(val, "shape", tuple())
for var, val in zip(variables, values)
)
):
output = np.array([func(*v) for v in zip(*values)])
elif size is not None and any(
(val.ndim > var.ndim) for var, val in zip(variables, values)
):
output = np.array([func(*v) for v in zip(*values)])
else:
output = func(*values)
return output
raise ValueError("Unexpected type in draw_value: %s" % type(param))
|
https://github.com/pymc-devs/pymc3/issues/3246
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-7300cc3c60ce> in <module>()
8
9 with model:
---> 10 pm.sample_prior_predictive(50)
11
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1332 names = get_default_varnames(model.named_vars, include_transformed=False)
1333 # draw_values fails with auto-transformed variables. transform them later!
-> 1334 values = draw_values([model[name] for name in names], size=samples)
1335
1336 data = {k: v for k, v in zip(names, values)}
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/distributions/distribution.py in draw_values(params, point, size)
310 while to_eval or missing_inputs:
311 if to_eval == missing_inputs:
--> 312 raise ValueError('Cannot resolve inputs for {}'.format([str(params[j]) for j in to_eval]))
313 to_eval = set(missing_inputs)
314 missing_inputs = set()
ValueError: Cannot resolve inputs for ['chol_packed']
|
ValueError
|
def to_tuple(shape):
"""Convert ints, arrays, and Nones to tuples"""
if shape is None:
return tuple()
temp = np.atleast_1d(shape)
if temp.size == 0:
return tuple()
else:
return tuple(temp)
|
def to_tuple(shape):
"""Convert ints, arrays, and Nones to tuples"""
if shape is None:
return tuple()
return tuple(np.atleast_1d(shape))
|
https://github.com/pymc-devs/pymc3/issues/3246
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-7300cc3c60ce> in <module>()
8
9 with model:
---> 10 pm.sample_prior_predictive(50)
11
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1332 names = get_default_varnames(model.named_vars, include_transformed=False)
1333 # draw_values fails with auto-transformed variables. transform them later!
-> 1334 values = draw_values([model[name] for name in names], size=samples)
1335
1336 data = {k: v for k, v in zip(names, values)}
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/distributions/distribution.py in draw_values(params, point, size)
310 while to_eval or missing_inputs:
311 if to_eval == missing_inputs:
--> 312 raise ValueError('Cannot resolve inputs for {}'.format([str(params[j]) for j in to_eval]))
313 to_eval = set(missing_inputs)
314 missing_inputs = set()
ValueError: Cannot resolve inputs for ['chol_packed']
|
ValueError
|
def _comp_samples(self, point=None, size=None):
if self._comp_dists_vect or size is None:
try:
return self.comp_dists.random(point=point, size=size)
except AttributeError:
samples = np.array(
[
comp_dist.random(point=point, size=size)
for comp_dist in self.comp_dists
]
)
samples = np.moveaxis(samples, 0, samples.ndim - 1)
else:
# We must iterate the calls to random manually
size = to_tuple(size)
_size = int(np.prod(size))
try:
samples = np.array(
[self.comp_dists.random(point=point, size=None) for _ in range(_size)]
)
samples = np.reshape(samples, size + samples.shape[1:])
except AttributeError:
samples = np.array(
[
[comp_dist.random(point=point, size=None) for _ in range(_size)]
for comp_dist in self.comp_dists
]
)
samples = np.moveaxis(samples, 0, samples.ndim - 1)
samples = np.reshape(samples, size + samples[1:])
if samples.shape[-1] == 1:
return samples[..., 0]
else:
return samples
|
def _comp_samples(self, point=None, size=None):
try:
samples = self.comp_dists.random(point=point, size=size)
except AttributeError:
samples = np.column_stack(
[comp_dist.random(point=point, size=size) for comp_dist in self.comp_dists]
)
return np.squeeze(samples)
|
https://github.com/pymc-devs/pymc3/issues/3246
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-7300cc3c60ce> in <module>()
8
9 with model:
---> 10 pm.sample_prior_predictive(50)
11
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1332 names = get_default_varnames(model.named_vars, include_transformed=False)
1333 # draw_values fails with auto-transformed variables. transform them later!
-> 1334 values = draw_values([model[name] for name in names], size=samples)
1335
1336 data = {k: v for k, v in zip(names, values)}
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/distributions/distribution.py in draw_values(params, point, size)
310 while to_eval or missing_inputs:
311 if to_eval == missing_inputs:
--> 312 raise ValueError('Cannot resolve inputs for {}'.format([str(params[j]) for j in to_eval]))
313 to_eval = set(missing_inputs)
314 missing_inputs = set()
ValueError: Cannot resolve inputs for ['chol_packed']
|
ValueError
|
def random(self, point=None, size=None):
# Convert size to tuple
size = to_tuple(size)
# Draw mixture weights and a sample from each mixture to infer shape
with _DrawValuesContext() as draw_context:
# We first need to check w and comp_tmp shapes and re compute size
w = draw_values([self.w], point=point, size=size)[0]
with _DrawValuesContextBlocker():
# We don't want to store the values drawn here in the context
# because they wont have the correct size
comp_tmp = self._comp_samples(point=point, size=None)
# When size is not None, it's hard to tell the w parameter shape
if size is not None and w.shape[: len(size)] == size:
w_shape = w.shape[len(size) :]
else:
w_shape = w.shape
# Try to determine parameter shape and dist_shape
param_shape = np.broadcast(np.empty(w_shape), comp_tmp).shape
if np.asarray(self.shape).size != 0:
dist_shape = np.broadcast(
np.empty(self.shape), np.empty(param_shape[:-1])
).shape
else:
dist_shape = param_shape[:-1]
# When size is not None, maybe dist_shape partially overlaps with size
if size is not None:
if size == dist_shape:
size = None
elif size[-len(dist_shape) :] == dist_shape:
size = size[: len(size) - len(dist_shape)]
# We get an integer _size instead of a tuple size for drawing the
# mixture, then we just reshape the output
if size is None:
_size = None
else:
_size = int(np.prod(size))
# Now we must broadcast w to the shape that considers size, dist_shape
# and param_shape. However, we must take care with the cases in which
# dist_shape and param_shape overlap
if size is not None and w.shape[: len(size)] == size:
if w.shape[: len(size + dist_shape)] != (size + dist_shape):
# To allow w to broadcast, we insert new axis in between the
# "size" axis and the "mixture" axis
_w = w[
(slice(None),) * len(size) # Index the size axis
+ (np.newaxis,) * len(dist_shape) # Add new axis for the dist_shape
+ (slice(None),)
] # Close with the slice of mixture components
w = np.broadcast_to(_w, size + dist_shape + (param_shape[-1],))
elif size is not None:
w = np.broadcast_to(w, size + dist_shape + (param_shape[-1],))
else:
w = np.broadcast_to(w, dist_shape + (param_shape[-1],))
# Compute the total size of the mixture's random call with size
if _size is not None:
output_size = int(_size * np.prod(dist_shape) * param_shape[-1])
else:
output_size = int(np.prod(dist_shape) * param_shape[-1])
# Get the size we need for the mixture's random call
mixture_size = int(output_size // np.prod(comp_tmp.shape))
if mixture_size == 1 and _size is None:
mixture_size = None
# Semiflatten the mixture weights. The last axis is the number of
# mixture mixture components, and the rest is all about size,
# dist_shape and broadcasting
w = np.reshape(w, (-1, w.shape[-1]))
# Normalize mixture weights
w = w / w.sum(axis=-1, keepdims=True)
w_samples = generate_samples(
random_choice,
p=w,
broadcast_shape=w.shape[:-1] or (1,),
dist_shape=w.shape[:-1] or (1,),
size=size,
)
# Sample from the mixture
with draw_context:
mixed_samples = self._comp_samples(point=point, size=mixture_size)
w_samples = w_samples.flatten()
# Semiflatten the mixture to be able to zip it with w_samples
mixed_samples = np.reshape(mixed_samples, (-1, comp_tmp.shape[-1]))
# Select the samples from the mixture
samples = np.array(
[mixed[choice] for choice, mixed in zip(w_samples, mixed_samples)]
)
# Reshape the samples to the correct output shape
if size is None:
samples = np.reshape(samples, dist_shape)
else:
samples = np.reshape(samples, size + dist_shape)
return samples
|
def random(self, point=None, size=None):
with _DrawValuesContext() as draw_context:
w = draw_values([self.w], point=point)[0]
comp_tmp = self._comp_samples(point=point, size=None)
if np.asarray(self.shape).size == 0:
distshape = np.asarray(np.broadcast(w, comp_tmp).shape)[..., :-1]
else:
distshape = np.asarray(self.shape)
# Normalize inputs
w /= w.sum(axis=-1, keepdims=True)
w_samples = generate_samples(
random_choice,
p=w,
broadcast_shape=w.shape[:-1] or (1,),
dist_shape=distshape,
size=size,
).squeeze()
if (size is None) or (distshape.size == 0):
with draw_context:
comp_samples = self._comp_samples(point=point, size=size)
if comp_samples.ndim > 1:
samples = np.squeeze(
comp_samples[np.arange(w_samples.size), ..., w_samples]
)
else:
samples = np.squeeze(comp_samples[w_samples])
else:
if w_samples.ndim == 1:
w_samples = np.reshape(np.tile(w_samples, size), (size,) + w_samples.shape)
samples = np.zeros((size,) + tuple(distshape))
with draw_context:
for i in range(size):
w_tmp = w_samples[i, :]
comp_tmp = self._comp_samples(point=point, size=None)
if comp_tmp.ndim > 1:
samples[i, :] = np.squeeze(
comp_tmp[np.arange(w_tmp.size), ..., w_tmp]
)
else:
samples[i, :] = np.squeeze(comp_tmp[w_tmp])
return samples
|
https://github.com/pymc-devs/pymc3/issues/3246
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-7300cc3c60ce> in <module>()
8
9 with model:
---> 10 pm.sample_prior_predictive(50)
11
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1332 names = get_default_varnames(model.named_vars, include_transformed=False)
1333 # draw_values fails with auto-transformed variables. transform them later!
-> 1334 values = draw_values([model[name] for name in names], size=samples)
1335
1336 data = {k: v for k, v in zip(names, values)}
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/distributions/distribution.py in draw_values(params, point, size)
310 while to_eval or missing_inputs:
311 if to_eval == missing_inputs:
--> 312 raise ValueError('Cannot resolve inputs for {}'.format([str(params[j]) for j in to_eval]))
313 to_eval = set(missing_inputs)
314 missing_inputs = set()
ValueError: Cannot resolve inputs for ['chol_packed']
|
ValueError
|
def random(self, point=None, size=None):
if size is None:
size = tuple()
else:
if not isinstance(size, tuple):
try:
size = tuple(size)
except TypeError:
size = (size,)
if self._cov_type == "cov":
mu, cov = draw_values([self.mu, self.cov], point=point, size=size)
if mu.shape[-1] != cov.shape[-1]:
raise ValueError("Shapes for mu and cov don't match")
try:
dist = stats.multivariate_normal(mean=mu, cov=cov, allow_singular=True)
except ValueError:
size += (mu.shape[-1],)
return np.nan * np.zeros(size)
return dist.rvs(size)
elif self._cov_type == "chol":
mu, chol = draw_values([self.mu, self.chol_cov], point=point, size=size)
if size and mu.ndim == len(size) and mu.shape == size:
mu = mu[..., np.newaxis]
if mu.shape[-1] != chol.shape[-1] and mu.shape[-1] != 1:
raise ValueError("Shapes for mu and chol don't match")
broadcast_shape = np.broadcast(
np.empty(mu.shape[:-1]), np.empty(chol.shape[:-2])
).shape
mu = np.broadcast_to(mu, broadcast_shape + (chol.shape[-1],))
chol = np.broadcast_to(chol, broadcast_shape + chol.shape[-2:])
# If mu and chol were fixed by the point, only the standard normal
# should change
if mu.shape[: len(size)] != size:
std_norm_shape = size + mu.shape
else:
std_norm_shape = mu.shape
standard_normal = np.random.standard_normal(std_norm_shape)
return mu + np.tensordot(standard_normal, chol, axes=[[-1], [-1]])
else:
mu, tau = draw_values([self.mu, self.tau], point=point, size=size)
if mu.shape[-1] != tau[0].shape[-1]:
raise ValueError("Shapes for mu and tau don't match")
size += (mu.shape[-1],)
try:
chol = linalg.cholesky(tau, lower=True)
except linalg.LinAlgError:
return np.nan * np.zeros(size)
standard_normal = np.random.standard_normal(size)
transformed = linalg.solve_triangular(chol, standard_normal.T, lower=True)
return mu + transformed.T
|
def random(self, point=None, size=None):
if size is None:
size = []
else:
try:
size = list(size)
except TypeError:
size = [size]
if self._cov_type == "cov":
mu, cov = draw_values([self.mu, self.cov], point=point, size=size)
if mu.shape[-1] != cov.shape[-1]:
raise ValueError("Shapes for mu and cov don't match")
try:
dist = stats.multivariate_normal(mean=mu, cov=cov, allow_singular=True)
except ValueError:
size.append(mu.shape[-1])
return np.nan * np.zeros(size)
return dist.rvs(size)
elif self._cov_type == "chol":
mu, chol = draw_values([self.mu, self.chol_cov], point=point, size=size)
if mu.shape[-1] != chol[0].shape[-1]:
raise ValueError("Shapes for mu and chol don't match")
size.append(mu.shape[-1])
standard_normal = np.random.standard_normal(size)
return mu + np.dot(standard_normal, chol.T)
else:
mu, tau = draw_values([self.mu, self.tau], point=point, size=size)
if mu.shape[-1] != tau[0].shape[-1]:
raise ValueError("Shapes for mu and tau don't match")
size.append(mu.shape[-1])
try:
chol = linalg.cholesky(tau, lower=True)
except linalg.LinAlgError:
return np.nan * np.zeros(size)
standard_normal = np.random.standard_normal(size)
transformed = linalg.solve_triangular(chol, standard_normal.T, lower=True)
return mu + transformed.T
|
https://github.com/pymc-devs/pymc3/issues/3246
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-7300cc3c60ce> in <module>()
8
9 with model:
---> 10 pm.sample_prior_predictive(50)
11
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1332 names = get_default_varnames(model.named_vars, include_transformed=False)
1333 # draw_values fails with auto-transformed variables. transform them later!
-> 1334 values = draw_values([model[name] for name in names], size=samples)
1335
1336 data = {k: v for k, v in zip(names, values)}
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/distributions/distribution.py in draw_values(params, point, size)
310 while to_eval or missing_inputs:
311 if to_eval == missing_inputs:
--> 312 raise ValueError('Cannot resolve inputs for {}'.format([str(params[j]) for j in to_eval]))
313 to_eval = set(missing_inputs)
314 missing_inputs = set()
ValueError: Cannot resolve inputs for ['chol_packed']
|
ValueError
|
def __init__(self, eta, n, sd_dist, *args, **kwargs):
self.n = n
self.eta = eta
if "transform" in kwargs and kwargs["transform"] is not None:
raise ValueError("Invalid parameter: transform.")
if "shape" in kwargs:
raise ValueError("Invalid parameter: shape.")
shape = n * (n + 1) // 2
if sd_dist.shape.ndim not in [0, 1]:
raise ValueError("Invalid shape for sd_dist.")
transform = transforms.CholeskyCovPacked(n)
kwargs["shape"] = shape
kwargs["transform"] = transform
super().__init__(*args, **kwargs)
self.sd_dist = sd_dist
self.diag_idxs = transform.diag_idxs
self.mode = floatX(np.zeros(shape))
self.mode[self.diag_idxs] = 1
|
def __init__(self, eta, n, sd_dist, *args, **kwargs):
self.n = n
self.eta = eta
if "transform" in kwargs:
raise ValueError("Invalid parameter: transform.")
if "shape" in kwargs:
raise ValueError("Invalid parameter: shape.")
shape = n * (n + 1) // 2
if sd_dist.shape.ndim not in [0, 1]:
raise ValueError("Invalid shape for sd_dist.")
transform = transforms.CholeskyCovPacked(n)
kwargs["shape"] = shape
kwargs["transform"] = transform
super().__init__(*args, **kwargs)
self.sd_dist = sd_dist
self.diag_idxs = transform.diag_idxs
self.mode = floatX(np.zeros(shape))
self.mode[self.diag_idxs] = 1
|
https://github.com/pymc-devs/pymc3/issues/3246
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-7300cc3c60ce> in <module>()
8
9 with model:
---> 10 pm.sample_prior_predictive(50)
11
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1332 names = get_default_varnames(model.named_vars, include_transformed=False)
1333 # draw_values fails with auto-transformed variables. transform them later!
-> 1334 values = draw_values([model[name] for name in names], size=samples)
1335
1336 data = {k: v for k, v in zip(names, values)}
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/distributions/distribution.py in draw_values(params, point, size)
310 while to_eval or missing_inputs:
311 if to_eval == missing_inputs:
--> 312 raise ValueError('Cannot resolve inputs for {}'.format([str(params[j]) for j in to_eval]))
313 to_eval = set(missing_inputs)
314 missing_inputs = set()
ValueError: Cannot resolve inputs for ['chol_packed']
|
ValueError
|
def forward_val(self, y, point=None):
y[..., self.diag_idxs] = np.log(y[..., self.diag_idxs])
return y
|
def forward_val(self, y, point=None):
y[self.diag_idxs] = np.log(y[self.diag_idxs])
return y
|
https://github.com/pymc-devs/pymc3/issues/3246
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-7300cc3c60ce> in <module>()
8
9 with model:
---> 10 pm.sample_prior_predictive(50)
11
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1332 names = get_default_varnames(model.named_vars, include_transformed=False)
1333 # draw_values fails with auto-transformed variables. transform them later!
-> 1334 values = draw_values([model[name] for name in names], size=samples)
1335
1336 data = {k: v for k, v in zip(names, values)}
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/distributions/distribution.py in draw_values(params, point, size)
310 while to_eval or missing_inputs:
311 if to_eval == missing_inputs:
--> 312 raise ValueError('Cannot resolve inputs for {}'.format([str(params[j]) for j in to_eval]))
313 to_eval = set(missing_inputs)
314 missing_inputs = set()
ValueError: Cannot resolve inputs for ['chol_packed']
|
ValueError
|
def _get_named_nodes_and_relations(
graph, parent, leaf_nodes, node_parents, node_children
):
if getattr(graph, "owner", None) is None: # Leaf node
if graph.name is not None: # Named leaf node
leaf_nodes.update({graph.name: graph})
if parent is not None: # Is None for the root node
try:
node_parents[graph].add(parent)
except KeyError:
node_parents[graph] = {parent}
node_children[parent].add(graph)
else:
node_parents[graph] = set()
# Flag that the leaf node has no children
node_children[graph] = set()
else: # Intermediate node
if graph.name is not None: # Intermediate named node
if parent is not None: # Is only None for the root node
try:
node_parents[graph].add(parent)
except KeyError:
node_parents[graph] = {parent}
node_children[parent].add(graph)
else:
node_parents[graph] = set()
# The current node will be set as the parent of the next
# nodes only if it is a named node
parent = graph
# Init the nodes children to an empty set
node_children[graph] = set()
for i in graph.owner.inputs:
temp_nodes, temp_inter, temp_tree = _get_named_nodes_and_relations(
i, parent, leaf_nodes, node_parents, node_children
)
leaf_nodes.update(temp_nodes)
node_parents.update(temp_inter)
node_children.update(temp_tree)
return leaf_nodes, node_parents, node_children
|
def _get_named_nodes_and_relations(
graph, parent, leaf_nodes, node_parents, node_children
):
if getattr(graph, "owner", None) is None: # Leaf node
if graph.name is not None: # Named leaf node
leaf_nodes.update({graph.name: graph})
if parent is not None: # Is None for the root node
try:
node_parents[graph].add(parent)
except KeyError:
node_parents[graph] = {parent}
node_children[parent].add(graph)
# Flag that the leaf node has no children
node_children[graph] = set()
else: # Intermediate node
if graph.name is not None: # Intermediate named node
if parent is not None: # Is only None for the root node
try:
node_parents[graph].add(parent)
except KeyError:
node_parents[graph] = {parent}
node_children[parent].add(graph)
# The current node will be set as the parent of the next
# nodes only if it is a named node
parent = graph
# Init the nodes children to an empty set
node_children[graph] = set()
for i in graph.owner.inputs:
temp_nodes, temp_inter, temp_tree = _get_named_nodes_and_relations(
i, parent, leaf_nodes, node_parents, node_children
)
leaf_nodes.update(temp_nodes)
node_parents.update(temp_inter)
node_children.update(temp_tree)
return leaf_nodes, node_parents, node_children
|
https://github.com/pymc-devs/pymc3/issues/3246
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-5-7300cc3c60ce> in <module>()
8
9 with model:
---> 10 pm.sample_prior_predictive(50)
11
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1332 names = get_default_varnames(model.named_vars, include_transformed=False)
1333 # draw_values fails with auto-transformed variables. transform them later!
-> 1334 values = draw_values([model[name] for name in names], size=samples)
1335
1336 data = {k: v for k, v in zip(names, values)}
~/anaconda3/lib/python3.6/site-packages/pymc3-3.5-py3.6.egg/pymc3/distributions/distribution.py in draw_values(params, point, size)
310 while to_eval or missing_inputs:
311 if to_eval == missing_inputs:
--> 312 raise ValueError('Cannot resolve inputs for {}'.format([str(params[j]) for j in to_eval]))
313 to_eval = set(missing_inputs)
314 missing_inputs = set()
ValueError: Cannot resolve inputs for ['chol_packed']
|
ValueError
|
def _random(self, n, p, size=None):
original_dtype = p.dtype
# Set float type to float64 for numpy. This change is related to numpy issue #8317 (https://github.com/numpy/numpy/issues/8317)
p = p.astype("float64")
# Now, re-normalize all of the values in float64 precision. This is done inside the conditionals
# np.random.multinomial needs `n` to be a scalar int and `p` a
# sequence
if p.ndim == 1 and (n.ndim == 0 or (n.ndim == 1 and n.shape[0] == 1)):
# If `n` is already a scalar and `p` is a sequence, then just
# return np.multinomial with some size handling
p = p / p.sum()
if size is not None:
if size == p.shape:
size = None
elif size[-len(p.shape) :] == p.shape:
size = size[: len(size) - len(p.shape)]
randnum = np.random.multinomial(n, p, size=size)
return randnum.astype(original_dtype)
# The shapes of `p` and `n` must be broadcasted by hand depending on
# their ndim. We will assume that the last axis of the `p` array will
# be the sequence to feed into np.random.multinomial. The other axis
# will only have to be iterated over.
if n.ndim == p.ndim:
# p and n have the same ndim, so n.shape[-1] must be 1
if n.shape[-1] != 1:
raise ValueError(
"If n and p have the same number of "
"dimensions, the last axis of n must be "
"have len 1. Got {} instead.\n"
"n.shape = {}\n"
"p.shape = {}.".format(n.shape[-1], n.shape, p.shape)
)
n_p_shape = np.broadcast(np.empty(p.shape[:-1]), np.empty(n.shape[:-1])).shape
p = np.broadcast_to(p, n_p_shape + (p.shape[-1],))
n = np.broadcast_to(n, n_p_shape + (1,))
elif n.ndim == p.ndim - 1:
# n has the number of dimensions of p for the iteration, these must
# broadcast together
n_p_shape = np.broadcast(np.empty(p.shape[:-1]), n).shape
p = np.broadcast_to(p, n_p_shape + (p.shape[-1],))
n = np.broadcast_to(n, n_p_shape + (1,))
elif p.ndim == 1:
# p only has the sequence array. We extend it with the dimensions
# of n
n_p_shape = n.shape
p = np.broadcast_to(p, n_p_shape + (p.shape[-1],))
n = np.broadcast_to(n, n_p_shape + (1,))
elif n.ndim == 0 or (n.dim == 1 and n.shape[0] == 1):
# n is a scalar. We extend it with the dimensions of p
n_p_shape = p.shape[:-1]
n = np.broadcast_to(n, n_p_shape + (1,))
else:
# There is no clear rule to broadcast p and n so we raise an error
raise ValueError(
"Incompatible shapes of n and p.\nn.shape = {}\np.shape = {}".format(
n.shape, p.shape
)
)
# Check what happens with size
if size is not None:
if size == p.shape:
size = None
_size = 1
elif size[-len(p.shape) :] == p.shape:
size = size[: len(size) - len(p.shape)]
_size = np.prod(size)
else:
_size = np.prod(size)
else:
_size = 1
# We now flatten p and n up to the last dimension
p_shape = p.shape
p = np.reshape(p, (np.prod(n_p_shape), -1))
n = np.reshape(n, (np.prod(n_p_shape), -1))
# We renormalize p
p = p / p.sum(axis=1, keepdims=True)
# We iterate calls to np.random.multinomial
randnum = np.asarray(
[np.random.multinomial(nn, pp, size=_size) for (nn, pp) in zip(n, p)]
)
# We swap the iteration axis with the _size axis
randnum = np.moveaxis(randnum, 1, 0)
# We reshape the random numbers to the corresponding size + p_shape
if size is None:
randnum = np.reshape(randnum, p_shape)
else:
randnum = np.reshape(randnum, size + p_shape)
# We cast back to the original dtype
return randnum.astype(original_dtype)
|
def _random(self, n, p, size=None):
original_dtype = p.dtype
# Set float type to float64 for numpy. This change is related to numpy issue #8317 (https://github.com/numpy/numpy/issues/8317)
p = p.astype("float64")
# Now, re-normalize all of the values in float64 precision. This is done inside the conditionals
if size == p.shape:
size = None
elif size[-len(p.shape) :] == p.shape:
size = size[: len(size) - len(p.shape)]
n_dim = n.squeeze().ndim
if (n_dim == 0) and (p.ndim == 1):
p = p / p.sum()
randnum = np.random.multinomial(n, p.squeeze(), size=size)
elif (n_dim == 0) and (p.ndim > 1):
p = p / p.sum(axis=1, keepdims=True)
randnum = np.asarray(
[np.random.multinomial(n.squeeze(), pp, size=size) for pp in p]
)
randnum = np.moveaxis(randnum, 1, 0)
elif (n_dim > 0) and (p.ndim == 1):
p = p / p.sum()
randnum = np.asarray(
[np.random.multinomial(nn, p.squeeze(), size=size) for nn in n]
)
randnum = np.moveaxis(randnum, 1, 0)
else:
p = p / p.sum(axis=1, keepdims=True)
randnum = np.asarray(
[np.random.multinomial(nn, pp, size=size) for (nn, pp) in zip(n, p)]
)
randnum = np.moveaxis(randnum, 1, 0)
return randnum.astype(original_dtype)
|
https://github.com/pymc-devs/pymc3/issues/3271
|
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-10-06599b7f288c> in <module>()
----> 1 sim_priors = pm.sample_prior_predictive(samples=1000, model=dm_model, random_seed=RANDOM_SEED)
/anaconda/envs/cdf/lib/python3.6/site-packages/pymc3/sampling.py in sample_prior_predictive(samples, model, vars, random_seed)
1314 names = get_default_varnames(model.named_vars, include_transformed=False)
1315 # draw_values fails with auto-transformed variables. transform them later!
-> 1316 values = draw_values([model[name] for name in names], size=samples)
1317
1318 data = {k: v for k, v in zip(names, values)}
/anaconda/envs/cdf/lib/python3.6/site-packages/pymc3/distributions/distribution.py in draw_values(params, point, size)
319 else:
320 try: # might evaluate in a bad order,
--> 321 evaluated[param_idx] = _draw_value(param, point=point, givens=givens.values(), size=size)
322 if isinstance(param, collections.Hashable) and named_nodes_parents.get(param):
323 givens[param.name] = (param, evaluated[param_idx])
/anaconda/envs/cdf/lib/python3.6/site-packages/pymc3/distributions/distribution.py in _draw_value(param, point, givens, size)
403 val = dist_tmp.random(point=point, size=None)
404 dist_tmp.shape = val.shape
--> 405 return dist_tmp.random(point=point, size=size)
406 else:
407 return param.distribution.random(point=point, size=size)
/anaconda/envs/cdf/lib/python3.6/site-packages/pymc3/distributions/multivariate.py in random(self, point, size)
571 samples = generate_samples(self._random, n, p,
572 dist_shape=self.shape,
--> 573 size=size)
574 return samples
575
/anaconda/envs/cdf/lib/python3.6/site-packages/pymc3/distributions/distribution.py in generate_samples(generator, *args, **kwargs)
512 elif broadcast_shape[:len(size_tup)] == size_tup:
513 suffix = broadcast_shape[len(size_tup):] + dist_shape
--> 514 samples = [generator(*args, **kwargs).reshape(size_tup + (1,)) for _ in range(np.prod(suffix, dtype=int))]
515 samples = np.hstack(samples).reshape(size_tup + suffix)
516 else:
/anaconda/envs/cdf/lib/python3.6/site-packages/pymc3/distributions/distribution.py in <listcomp>(.0)
512 elif broadcast_shape[:len(size_tup)] == size_tup:
513 suffix = broadcast_shape[len(size_tup):] + dist_shape
--> 514 samples = [generator(*args, **kwargs).reshape(size_tup + (1,)) for _ in range(np.prod(suffix, dtype=int))]
515 samples = np.hstack(samples).reshape(size_tup + suffix)
516 else:
/anaconda/envs/cdf/lib/python3.6/site-packages/pymc3/distributions/multivariate.py in _random(self, n, p, size)
536 if size == p.shape:
537 size = None
--> 538 elif size[-len(p.shape):] == p.shape:
539 size = size[:len(size) - len(p.shape)]
540
TypeError: 'NoneType' object is not subscriptable
|
TypeError
|
def __call__(self, name, *args, **kwargs):
if "observed" in kwargs:
raise ValueError(
"Observed Bound distributions are not supported. "
"If you want to model truncated data "
"you can use a pm.Potential in combination "
"with the cumulative probability function. See "
"pymc3/examples/censored_data.py for an example."
)
if issubclass(self.distribution, Continuous):
return _ContinuousBounded(
name, self.distribution, self.lower, self.upper, *args, **kwargs
)
elif issubclass(self.distribution, Discrete):
return _DiscreteBounded(
name, self.distribution, self.lower, self.upper, *args, **kwargs
)
else:
raise ValueError("Distribution is neither continuous nor discrete.")
|
def __call__(self, *args, **kwargs):
if "observed" in kwargs:
raise ValueError(
"Observed Bound distributions are not supported. "
"If you want to model truncated data "
"you can use a pm.Potential in combination "
"with the cumulative probability function. See "
"pymc3/examples/censored_data.py for an example."
)
first, args = args[0], args[1:]
if issubclass(self.distribution, Continuous):
return _ContinuousBounded(
first, self.distribution, self.lower, self.upper, *args, **kwargs
)
elif issubclass(self.distribution, Discrete):
return _DiscreteBounded(
first, self.distribution, self.lower, self.upper, *args, **kwargs
)
else:
raise ValueError("Distribution is neither continuous nor discrete.")
|
https://github.com/pymc-devs/pymc3/issues/3149
|
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
<ipython-input-18-c9645cb7d458> in <module>()
3 with example:
4 BoundPoisson = pm.Bound(pm.Poisson, upper = 6)
----> 5 y = BoundPoisson(name = "y", mu = 1)
~/miniconda3/lib/python3.6/site-packages/pymc3/distributions/bound.py in __call__(self, *args, **kwargs)
209 'with the cumulative probability function. See '
210 'pymc3/examples/censored_data.py for an example.')
--> 211 first, args = args[0], args[1:]
212
213 if issubclass(self.distribution, Continuous):
IndexError: tuple index out of range
|
IndexError
|
def _run_convergence_checks(self, trace, model):
if trace.nchains == 1:
msg = (
"Only one chain was sampled, this makes it impossible to "
"run some convergence checks"
)
warn = SamplerWarning(WarningType.BAD_PARAMS, msg, "info", None, None, None)
self._add_warnings([warn])
return
from pymc3 import diagnostics
valid_name = [rv.name for rv in model.free_RVs + model.deterministics]
varnames = []
for rv in model.free_RVs:
rv_name = rv.name
if is_transformed_name(rv_name):
rv_name2 = get_untransformed_name(rv_name)
rv_name = rv_name2 if rv_name2 in valid_name else rv_name
if rv_name in trace.varnames:
varnames.append(rv_name)
self._effective_n = effective_n = diagnostics.effective_n(trace, varnames)
self._gelman_rubin = gelman_rubin = diagnostics.gelman_rubin(trace, varnames)
warnings = []
rhat_max = max(val.max() for val in gelman_rubin.values())
if rhat_max > 1.4:
msg = (
"The gelman-rubin statistic is larger than 1.4 for some "
"parameters. The sampler did not converge."
)
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "error", None, None, gelman_rubin
)
warnings.append(warn)
elif rhat_max > 1.2:
msg = "The gelman-rubin statistic is larger than 1.2 for some parameters."
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "warn", None, None, gelman_rubin
)
warnings.append(warn)
elif rhat_max > 1.05:
msg = (
"The gelman-rubin statistic is larger than 1.05 for some "
"parameters. This indicates slight problems during "
"sampling."
)
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "info", None, None, gelman_rubin
)
warnings.append(warn)
eff_min = min(val.min() for val in effective_n.values())
n_samples = len(trace) * trace.nchains
if eff_min < 200 and n_samples >= 500:
msg = (
"The estimated number of effective samples is smaller than "
"200 for some parameters."
)
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "error", None, None, effective_n
)
warnings.append(warn)
elif eff_min / n_samples < 0.1:
msg = "The number of effective samples is smaller than 10% for some parameters."
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "warn", None, None, effective_n
)
warnings.append(warn)
elif eff_min / n_samples < 0.25:
msg = "The number of effective samples is smaller than 25% for some parameters."
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "info", None, None, effective_n
)
warnings.append(warn)
self._add_warnings(warnings)
|
def _run_convergence_checks(self, trace, model):
if trace.nchains == 1:
msg = (
"Only one chain was sampled, this makes it impossible to "
"run some convergence checks"
)
warn = SamplerWarning(WarningType.BAD_PARAMS, msg, "info", None, None, None)
self._add_warnings([warn])
return
from pymc3 import diagnostics
valid_name = [rv.name for rv in model.free_RVs + model.deterministics]
varnames = []
for rv in model.free_RVs:
rv_name = rv.name
if is_transformed_name(rv_name):
rv_name2 = get_untransformed_name(rv_name)
rv_name = rv_name2 if rv_name2 in valid_name else rv_name
varnames.append(rv_name)
self._effective_n = effective_n = diagnostics.effective_n(trace, varnames)
self._gelman_rubin = gelman_rubin = diagnostics.gelman_rubin(trace, varnames)
warnings = []
rhat_max = max(val.max() for val in gelman_rubin.values())
if rhat_max > 1.4:
msg = (
"The gelman-rubin statistic is larger than 1.4 for some "
"parameters. The sampler did not converge."
)
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "error", None, None, gelman_rubin
)
warnings.append(warn)
elif rhat_max > 1.2:
msg = "The gelman-rubin statistic is larger than 1.2 for some parameters."
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "warn", None, None, gelman_rubin
)
warnings.append(warn)
elif rhat_max > 1.05:
msg = (
"The gelman-rubin statistic is larger than 1.05 for some "
"parameters. This indicates slight problems during "
"sampling."
)
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "info", None, None, gelman_rubin
)
warnings.append(warn)
eff_min = min(val.min() for val in effective_n.values())
n_samples = len(trace) * trace.nchains
if eff_min < 200 and n_samples >= 500:
msg = (
"The estimated number of effective samples is smaller than "
"200 for some parameters."
)
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "error", None, None, effective_n
)
warnings.append(warn)
elif eff_min / n_samples < 0.1:
msg = "The number of effective samples is smaller than 10% for some parameters."
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "warn", None, None, effective_n
)
warnings.append(warn)
elif eff_min / n_samples < 0.25:
msg = "The number of effective samples is smaller than 25% for some parameters."
warn = SamplerWarning(
WarningType.CONVERGENCE, msg, "info", None, None, effective_n
)
warnings.append(warn)
self._add_warnings(warnings)
|
https://github.com/pymc-devs/pymc3/issues/2933
|
---------------------------------------------------------------------------
KeyError Traceback (most recent call last)
<ipython-input-7-45e332f1b8ef> in <module>()
1 with model:
----> 2 trace = sample(1000, trace=[dL0])
~/Repos/pymc3/pymc3/sampling.py in sample(draws, step, init, n_init, start, trace, chain_idx, chains, cores, tune, nuts_kwargs, step_kwargs, progressbar, model, random_seed, live_plot, discard_tuned_samples, live_plot_kwargs, compute_convergence_checks, use_mmap, **kwargs)
471 "convergence reliably.")
472 else:
--> 473 trace.report._run_convergence_checks(trace, model)
474
475 trace.report._log_summary()
~/Repos/pymc3/pymc3/backends/report.py in _run_convergence_checks(self, trace, model)
80 varnames.append(rv_name)
81
---> 82 self._effective_n = effective_n = diagnostics.effective_n(trace, varnames)
83 self._gelman_rubin = gelman_rubin = diagnostics.gelman_rubin(trace, varnames)
84
~/Repos/pymc3/pymc3/diagnostics.py in effective_n(mtrace, varnames, include_transformed)
298
299 for var in varnames:
--> 300 n_eff[var] = generate_neff(mtrace.get_values(var, combine=False))
301
302 return n_eff
~/Repos/pymc3/pymc3/backends/base.py in get_values(self, varname, burn, thin, combine, chains, squeeze)
426 try:
427 results = [self._straces[chain].get_values(varname, burn, thin)
--> 428 for chain in chains]
429 except TypeError: # Single chain passed.
430 results = [self._straces[chains].get_values(varname, burn, thin)]
~/Repos/pymc3/pymc3/backends/base.py in <listcomp>(.0)
426 try:
427 results = [self._straces[chain].get_values(varname, burn, thin)
--> 428 for chain in chains]
429 except TypeError: # Single chain passed.
430 results = [self._straces[chains].get_values(varname, burn, thin)]
~/Repos/pymc3/pymc3/backends/ndarray.py in get_values(self, varname, burn, thin)
141 A NumPy array
142 """
--> 143 return self.samples[varname][burn::thin]
144
145 def _slice(self, idx):
KeyError: 'beta0'
|
KeyError
|
def __init__(self, distribution, lower, upper, transform="infer", *args, **kwargs):
self.dist = distribution.dist(*args, **kwargs)
self.__dict__.update(self.dist.__dict__)
self.__dict__.update(locals())
if hasattr(self.dist, "mode"):
self.mode = self.dist.mode
if transform == "infer":
self.transform, self.testval = self._infer(lower, upper)
|
def __init__(self, distribution, lower, upper, transform="infer", *args, **kwargs):
self.dist = distribution.dist(*args, **kwargs)
self.__dict__.update(self.dist.__dict__)
self.__dict__.update(locals())
if hasattr(self.dist, "mode"):
self.mode = self.dist.mode
if transform == "infer":
default = self.dist.default()
if not np.isinf(lower) and not np.isinf(upper):
self.transform = transforms.interval(lower, upper)
if default <= lower or default >= upper:
self.testval = 0.5 * (upper + lower)
if not np.isinf(lower) and np.isinf(upper):
self.transform = transforms.lowerbound(lower)
if default <= lower:
self.testval = lower + 1
if np.isinf(lower) and not np.isinf(upper):
self.transform = transforms.upperbound(upper)
if default >= upper:
self.testval = upper - 1
|
https://github.com/pymc-devs/pymc3/issues/1491
|
Traceback (most recent call last):
File "garch_example.py", line 40, in <module>
beta1 = BoundedNormal('beta1', 0, sd=1e6)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/distributions/continuous.py", line 1102, in __call__
*args, **kwargs)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/distributions/distribution.py", line 27, in __new__
return model.Var(name, dist, data)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/model.py", line 288, in Var
transform=dist.transform)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/model.py", line 689, in __init__
transformed_name = "{}_{}_".format(name, transform.name)
AttributeError: 'int' object has no attribute 'name'
|
AttributeError
|
def __init__(self, *args, **kwargs):
first, args = args[0], args[1:]
super(self, _BoundedDist).__init__(
first, distribution, lower, upper, *args, **kwargs
)
|
def __init__(self, distribution, lower=-np.inf, upper=np.inf):
self.distribution = distribution
self.lower = lower
self.upper = upper
|
https://github.com/pymc-devs/pymc3/issues/1491
|
Traceback (most recent call last):
File "garch_example.py", line 40, in <module>
beta1 = BoundedNormal('beta1', 0, sd=1e6)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/distributions/continuous.py", line 1102, in __call__
*args, **kwargs)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/distributions/distribution.py", line 27, in __new__
return model.Var(name, dist, data)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/model.py", line 288, in Var
transform=dist.transform)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/model.py", line 689, in __init__
transformed_name = "{}_{}_".format(name, transform.name)
AttributeError: 'int' object has no attribute 'name'
|
AttributeError
|
def dist(cls, *args, **kwargs):
return Bounded.dist(distribution, lower, upper, *args, **kwargs)
|
def dist(self, *args, **kwargs):
return Bounded.dist(self.distribution, self.lower, self.upper, *args, **kwargs)
|
https://github.com/pymc-devs/pymc3/issues/1491
|
Traceback (most recent call last):
File "garch_example.py", line 40, in <module>
beta1 = BoundedNormal('beta1', 0, sd=1e6)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/distributions/continuous.py", line 1102, in __call__
*args, **kwargs)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/distributions/distribution.py", line 27, in __new__
return model.Var(name, dist, data)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/model.py", line 288, in Var
transform=dist.transform)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/model.py", line 689, in __init__
transformed_name = "{}_{}_".format(name, transform.name)
AttributeError: 'int' object has no attribute 'name'
|
AttributeError
|
def __init__(self, *args, **kwargs):
first, args = args[0], args[1:]
super(self, _BoundedDist).__init__(
first, distribution, lower, upper, *args, **kwargs
)
|
def __init__(self, mu=0.0, sd=None, tau=None, alpha=1, *args, **kwargs):
super(SkewNormal, self).__init__(*args, **kwargs)
self.mu = mu
self.tau, self.sd = get_tau_sd(tau=tau, sd=sd)
self.alpha = alpha
self.mean = mu + self.sd * (2 / np.pi) ** 0.5 * alpha / (1 + alpha**2) ** 0.5
self.variance = self.sd**2 * (1 - (2 * alpha**2) / ((1 + alpha**2) * np.pi))
assert_negative_support(tau, "tau", "SkewNormal")
assert_negative_support(sd, "sd", "SkewNormal")
|
https://github.com/pymc-devs/pymc3/issues/1491
|
Traceback (most recent call last):
File "garch_example.py", line 40, in <module>
beta1 = BoundedNormal('beta1', 0, sd=1e6)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/distributions/continuous.py", line 1102, in __call__
*args, **kwargs)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/distributions/distribution.py", line 27, in __new__
return model.Var(name, dist, data)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/model.py", line 288, in Var
transform=dist.transform)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/model.py", line 689, in __init__
transformed_name = "{}_{}_".format(name, transform.name)
AttributeError: 'int' object has no attribute 'name'
|
AttributeError
|
def run(n=1000):
if n == "short":
n = 50
with get_garch_model():
tr = sample(n, n_init=10000)
return tr
|
def run(n=1000):
if n == "short":
n = 50
with garch:
tr = sample(n)
|
https://github.com/pymc-devs/pymc3/issues/1491
|
Traceback (most recent call last):
File "garch_example.py", line 40, in <module>
beta1 = BoundedNormal('beta1', 0, sd=1e6)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/distributions/continuous.py", line 1102, in __call__
*args, **kwargs)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/distributions/distribution.py", line 27, in __new__
return model.Var(name, dist, data)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/model.py", line 288, in Var
transform=dist.transform)
File "/Users/**/anaconda3/envs/py35/lib/python3.5/site-packages/pymc3/model.py", line 689, in __init__
transformed_name = "{}_{}_".format(name, transform.name)
AttributeError: 'int' object has no attribute 'name'
|
AttributeError
|
def __init__(self, n, p, *args, **kwargs):
super(Multinomial, self).__init__(*args, **kwargs)
p = p / tt.sum(p, axis=-1, keepdims=True)
n = np.squeeze(n) # works also if n is a tensor
if len(self.shape) > 1:
m = self.shape[-2]
try:
assert n.shape == (m,)
except (AttributeError, AssertionError):
n = n * tt.ones(m)
self.n = tt.shape_padright(n)
self.p = p if p.ndim > 1 else tt.shape_padleft(p)
elif n.ndim == 1:
self.n = tt.shape_padright(n)
self.p = p if p.ndim > 1 else tt.shape_padleft(p)
else:
# n is a scalar, p is a 1d array
self.n = tt.as_tensor_variable(n)
self.p = tt.as_tensor_variable(p)
self.mean = self.n * self.p
mode = tt.cast(tt.round(self.mean), "int32")
diff = self.n - tt.sum(mode, axis=-1, keepdims=True)
inc_bool_arr = tt.abs_(diff) > 0
mode = tt.inc_subtensor(mode[inc_bool_arr.nonzero()], diff[inc_bool_arr.nonzero()])
self.mode = mode
|
def __init__(self, n, p, *args, **kwargs):
super(Multinomial, self).__init__(*args, **kwargs)
p = p / tt.sum(p, axis=-1, keepdims=True)
lst = range(self.shape[-1])
if len(self.shape) > 1:
m = self.shape[-2]
try:
assert n.shape == (m,)
except AttributeError:
n *= tt.ones(m)
self.n = tt.shape_padright(n)
self.p = p if p.ndim > 1 else tt.shape_padleft(p)
lst = list(lst for _ in range(m))
else:
# n is a scalar, p is a 1d array
self.n = tt.as_tensor_variable(n)
self.p = tt.as_tensor_variable(p)
self.mean = self.n * self.p
mode = tt.cast(tt.round(self.mean), "int32")
diff = self.n - tt.sum(mode, axis=-1, keepdims=True)
inc_bool_arr = tt.as_tensor_variable(lst) < diff
mode = tt.inc_subtensor(mode[inc_bool_arr.nonzero()], 1)
dec_bool_arr = tt.as_tensor_variable(lst) < -diff
mode = tt.inc_subtensor(mode[dec_bool_arr.nonzero()], -1)
self.mode = mode
|
https://github.com/pymc-devs/pymc3/issues/2550
|
import numpy as np
import pandas as pd
import pymc3 as pm
import matplotlib.pyplot as plt
import seaborn as sns
sns.set(color_codes=True)
import theano
from scipy.stats import norm
def hierarchical_normal(name, shape, mu=0.,cs=5.):
delta = pm.Normal('delta_{}'.format(name), 0., 1., shape=shape)
sigma = pm.HalfCauchy('sigma_{}'.format(name), cs)
return pm.Deterministic(name, mu + delta * sigma)
NUTS_KWARGS = {'target_accept': 0.99}
SEED = 4260026 # from random.org, for reproducibility
np.random.seed(SEED)
ndraws = 1000
counts =[[19, 50, 37],
[21, 67, 55],
[11, 53, 38],
[17, 54, 45],
[24, 93, 66],
[27, 53, 70]]
counts = pd.DataFrame(counts,columns=["A","B","C"])
counts["n"] = counts[["A","B","C"]].sum(axis=1)
print counts
group = counts.index.values
n_group = np.unique(group).size
obs_n = np.reshape(counts.n.values,(6,1))
obs_n_ = theano.shared(obs_n)
obs_ABC = counts[["A","B","C"]].values
with pm.Model() as model:
#Zeros for coefficients for A
ref = pm.Flat("ref",shape=n_group)
#For B
beta0 = pm.Normal('beta0', 0.,sd=5.)
beta_group = hierarchical_normal('beta_group', n_group)
#For C
#alpha0 = pm.Normal('alpha0', 0.,sd=5.)
alpha_group = hierarchical_normal('alpha_group', n_group)
eta_B = beta0 + beta_group [group]
#eta_C = alpha0 + alpha_group[group]
eta_C = alpha_group[group]
p = theano.tensor.nnet.softmax(theano.tensor.stack(ref,eta_B,eta_C).T)
like = pm.Multinomial('obs_ABC', obs_n, p, observed=obs_ABC)
trace = pm.sample(draws=ndraws, random_seed=SEED,nuts_kwargs=NUTS_KWARGS)
plt.figure()
axs = pm.forestplot(trace,varnames=['beta0','beta_group','alpha_group'])
plt.savefig("Forest.png")
plt.close()
plt.figure()
axs = pm.traceplot(trace,varnames=['beta0','beta_group','alpha_group'])
plt.savefig("Trace.png")
plt.close()
obs_n_.set_value(np.reshape(np.array([10000]*6),(6,1)))
pp_trace = pm.sample_ppc(trace, model=model)
with open('softmax_PP.pkl', 'wb') as buff:
pickle.dump(pp_trace, buff)
with open('softmax_PP.pkl', 'rb') as buff:
pp_trace = pickle.load(buff)
print pp_trace["obs_ABC"]
_________________________________________________________________________________
A B C n
0 19 50 37 106
1 21 67 55 143
2 11 53 38 102
3 17 54 45 116
4 24 93 66 183
5 27 53 70 150
Auto-assigning NUTS sampler...
Initializing NUTS using ADVI...
Average Loss = 49.994: 12%|ββββββββββ | 23062/200000 [00:04<00:36, 4843.61it/s]
Convergence archived at 23500
Interrupted at 23,500 [11%]: Average Loss = 195
100%|ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ| 1500/1500 [00:18<00:00, 81.01it/s]
/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/matplotlib/tight_layout.py:222: UserWarning: tight_layout : falling back to Agg renderer
warnings.warn("tight_layout : falling back to Agg renderer")
0%| | 0/1000 [00:00<?, ?it/s]
Traceback (most recent call last):
File "softmax.py", line 77, in <module>
pp_trace = pm.sample_ppc(trace, model=model)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/pymc3/sampling.py", line 539, in sample_ppc
vals = var.distribution.random(point=param, size=size)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/pymc3/distributions/multivariate.py", line 506, in random
size=size)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/pymc3/distributions/distribution.py", line 397, in generate_samples
*args, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/pymc3/distributions/distribution.py", line 322, in replicate_samples
samples = generator(size=size, *args, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/pymc3/distributions/multivariate.py", line 500, in _random
return np.random.multinomial(n, p, size=size)
File "mtrand.pyx", line 4530, in mtrand.RandomState.multinomial (numpy/random/mtrand/mtrand.c:37665)
TypeError: only length-1 arrays can be converted to Python scalars
|
TypeError
|
def _random(self, n, p, size=None):
original_dtype = p.dtype
# Set float type to float64 for numpy. This change is related to numpy issue #8317 (https://github.com/numpy/numpy/issues/8317)
p = p.astype("float64")
# Now, re-normalize all of the values in float64 precision. This is done inside the conditionals
if size == p.shape:
size = None
if (n.ndim == 0) and (p.ndim == 1):
p = p / p.sum()
randnum = np.random.multinomial(n, p.squeeze(), size=size)
elif (n.ndim == 0) and (p.ndim > 1):
p = p / p.sum(axis=1, keepdims=True)
randnum = np.asarray(
[np.random.multinomial(n.squeeze(), pp, size=size) for pp in p]
)
elif (n.ndim > 0) and (p.ndim == 1):
p = p / p.sum()
randnum = np.asarray(
[np.random.multinomial(nn, p.squeeze(), size=size) for nn in n]
)
else:
p = p / p.sum(axis=1, keepdims=True)
randnum = np.asarray(
[np.random.multinomial(nn, pp, size=size) for (nn, pp) in zip(n, p)]
)
return randnum.astype(original_dtype)
|
def _random(self, n, p, size=None):
original_dtype = p.dtype
# Set float type to float64 for numpy. This change is related to numpy issue #8317 (https://github.com/numpy/numpy/issues/8317)
p = p.astype("float64")
# Now, re-normalize all of the values in float64 precision. This is done inside the conditionals
if size == p.shape:
size = None
if p.ndim == 1:
p = p / p.sum()
randnum = np.random.multinomial(n, p.squeeze(), size=size)
elif p.ndim == 2:
p = p / p.sum(axis=1, keepdims=True)
randnum = np.asarray(
[np.random.multinomial(nn, pp, size=size) for (nn, pp) in zip(n, p)]
)
else:
raise ValueError(
"Outcome probabilities must be 1- or 2-dimensional "
"(supplied `p` has {} dimensions)".format(p.ndim)
)
return randnum.astype(original_dtype)
|
https://github.com/pymc-devs/pymc3/issues/2550
|
import numpy as np
import pandas as pd
import pymc3 as pm
import matplotlib.pyplot as plt
import seaborn as sns
sns.set(color_codes=True)
import theano
from scipy.stats import norm
def hierarchical_normal(name, shape, mu=0.,cs=5.):
delta = pm.Normal('delta_{}'.format(name), 0., 1., shape=shape)
sigma = pm.HalfCauchy('sigma_{}'.format(name), cs)
return pm.Deterministic(name, mu + delta * sigma)
NUTS_KWARGS = {'target_accept': 0.99}
SEED = 4260026 # from random.org, for reproducibility
np.random.seed(SEED)
ndraws = 1000
counts =[[19, 50, 37],
[21, 67, 55],
[11, 53, 38],
[17, 54, 45],
[24, 93, 66],
[27, 53, 70]]
counts = pd.DataFrame(counts,columns=["A","B","C"])
counts["n"] = counts[["A","B","C"]].sum(axis=1)
print counts
group = counts.index.values
n_group = np.unique(group).size
obs_n = np.reshape(counts.n.values,(6,1))
obs_n_ = theano.shared(obs_n)
obs_ABC = counts[["A","B","C"]].values
with pm.Model() as model:
#Zeros for coefficients for A
ref = pm.Flat("ref",shape=n_group)
#For B
beta0 = pm.Normal('beta0', 0.,sd=5.)
beta_group = hierarchical_normal('beta_group', n_group)
#For C
#alpha0 = pm.Normal('alpha0', 0.,sd=5.)
alpha_group = hierarchical_normal('alpha_group', n_group)
eta_B = beta0 + beta_group [group]
#eta_C = alpha0 + alpha_group[group]
eta_C = alpha_group[group]
p = theano.tensor.nnet.softmax(theano.tensor.stack(ref,eta_B,eta_C).T)
like = pm.Multinomial('obs_ABC', obs_n, p, observed=obs_ABC)
trace = pm.sample(draws=ndraws, random_seed=SEED,nuts_kwargs=NUTS_KWARGS)
plt.figure()
axs = pm.forestplot(trace,varnames=['beta0','beta_group','alpha_group'])
plt.savefig("Forest.png")
plt.close()
plt.figure()
axs = pm.traceplot(trace,varnames=['beta0','beta_group','alpha_group'])
plt.savefig("Trace.png")
plt.close()
obs_n_.set_value(np.reshape(np.array([10000]*6),(6,1)))
pp_trace = pm.sample_ppc(trace, model=model)
with open('softmax_PP.pkl', 'wb') as buff:
pickle.dump(pp_trace, buff)
with open('softmax_PP.pkl', 'rb') as buff:
pp_trace = pickle.load(buff)
print pp_trace["obs_ABC"]
_________________________________________________________________________________
A B C n
0 19 50 37 106
1 21 67 55 143
2 11 53 38 102
3 17 54 45 116
4 24 93 66 183
5 27 53 70 150
Auto-assigning NUTS sampler...
Initializing NUTS using ADVI...
Average Loss = 49.994: 12%|ββββββββββ | 23062/200000 [00:04<00:36, 4843.61it/s]
Convergence archived at 23500
Interrupted at 23,500 [11%]: Average Loss = 195
100%|ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ| 1500/1500 [00:18<00:00, 81.01it/s]
/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/matplotlib/tight_layout.py:222: UserWarning: tight_layout : falling back to Agg renderer
warnings.warn("tight_layout : falling back to Agg renderer")
0%| | 0/1000 [00:00<?, ?it/s]
Traceback (most recent call last):
File "softmax.py", line 77, in <module>
pp_trace = pm.sample_ppc(trace, model=model)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/pymc3/sampling.py", line 539, in sample_ppc
vals = var.distribution.random(point=param, size=size)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/pymc3/distributions/multivariate.py", line 506, in random
size=size)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/pymc3/distributions/distribution.py", line 397, in generate_samples
*args, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/pymc3/distributions/distribution.py", line 322, in replicate_samples
samples = generator(size=size, *args, **kwargs)
File "/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/pymc3/distributions/multivariate.py", line 500, in _random
return np.random.multinomial(n, p, size=size)
File "mtrand.pyx", line 4530, in mtrand.RandomState.multinomial (numpy/random/mtrand/mtrand.c:37665)
TypeError: only length-1 arrays can be converted to Python scalars
|
TypeError
|
def init_nuts(
init="auto",
njobs=1,
n_init=500000,
model=None,
random_seed=-1,
progressbar=True,
**kwargs,
):
"""Set up the mass matrix initialization for NUTS.
NUTS convergence and sampling speed is extremely dependent on the
choice of mass/scaling matrix. This function implements different
methods for choosing or adapting the mass matrix.
Parameters
----------
init : str
Initialization method to use.
* auto : Choose a default initialization method automatically.
Currently, this is `'advi+adapt_diag'`, but this can change in
the future. If you depend on the exact behaviour, choose an
initialization method explicitly.
* adapt_diag : Start with a identity mass matrix and then adapt
a diagonal based on the variance of the tuning samples.
* advi+adapt_diag : Run ADVI and then adapt the resulting diagonal
mass matrix based on the sample variance of the tuning samples.
* advi+adapt_diag_grad : Run ADVI and then adapt the resulting
diagonal mass matrix based on the variance of the gradients
during tuning. This is **experimental** and might be removed
in a future release.
* advi : Run ADVI to estimate posterior mean and diagonal mass
matrix.
* advi_map: Initialize ADVI with MAP and use MAP as starting point.
* map : Use the MAP as starting point. This is discouraged.
* nuts : Run NUTS and estimate posterior mean and mass matrix from
the trace.
njobs : int
Number of parallel jobs to start.
n_init : int
Number of iterations of initializer
If 'ADVI', number of iterations, if 'nuts', number of draws.
model : Model (optional if in `with` context)
progressbar : bool
Whether or not to display a progressbar for advi sampling.
**kwargs : keyword arguments
Extra keyword arguments are forwarded to pymc3.NUTS.
Returns
-------
start : pymc3.model.Point
Starting point for sampler
nuts_sampler : pymc3.step_methods.NUTS
Instantiated and initialized NUTS sampler object
"""
model = pm.modelcontext(model)
vars = kwargs.get("vars", model.vars)
if set(vars) != set(model.vars):
raise ValueError("Must use init_nuts on all variables of a model.")
if not pm.model.all_continuous(vars):
raise ValueError(
"init_nuts can only be used for models with only continuous variables."
)
if not isinstance(init, str):
raise TypeError("init must be a string.")
if init is not None:
init = init.lower()
if init == "auto":
init = "advi+adapt_diag"
pm._log.info("Initializing NUTS using {}...".format(init))
random_seed = int(np.atleast_1d(random_seed)[0])
cb = [
pm.callbacks.CheckParametersConvergence(tolerance=1e-2, diff="absolute"),
pm.callbacks.CheckParametersConvergence(tolerance=1e-2, diff="relative"),
]
if init == "adapt_diag":
start = [model.test_point] * njobs
mean = np.mean([model.dict_to_array(vals) for vals in start], axis=0)
var = np.ones_like(mean)
potential = quadpotential.QuadPotentialDiagAdapt(model.ndim, mean, var, 10)
if njobs == 1:
start = start[0]
elif init == "advi+adapt_diag_grad":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
)
start = approx.sample(draws=njobs)
start = list(start)
stds = approx.gbij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
mean = approx.gbij.rmap(approx.mean.get_value())
mean = model.dict_to_array(mean)
weight = 50
potential = quadpotential.QuadPotentialDiagAdaptGrad(
model.ndim, mean, cov, weight
)
if njobs == 1:
start = start[0]
elif init == "advi+adapt_diag":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
)
start = approx.sample(draws=njobs)
start = list(start)
stds = approx.gbij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
mean = approx.gbij.rmap(approx.mean.get_value())
mean = model.dict_to_array(mean)
weight = 50
potential = quadpotential.QuadPotentialDiagAdapt(model.ndim, mean, cov, weight)
if njobs == 1:
start = start[0]
elif init == "advi":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
) # type: pm.MeanField
start = approx.sample(draws=njobs)
start = list(start)
stds = approx.gbij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
potential = quadpotential.QuadPotentialDiag(cov)
if njobs == 1:
start = start[0]
elif init == "advi_map":
start = pm.find_MAP()
approx = pm.MeanField(model=model, start=start)
pm.fit(
random_seed=random_seed,
n=n_init,
method=pm.ADVI.from_mean_field(approx),
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
)
start = approx.sample(draws=njobs)
start = list(start)
stds = approx.gbij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
potential = quadpotential.QuadPotentialDiag(cov)
if njobs == 1:
start = start[0]
elif init == "map":
start = pm.find_MAP()
cov = pm.find_hessian(point=start)
start = [start] * njobs
potential = quadpotential.QuadPotentialFull(cov)
if njobs == 1:
start = start[0]
elif init == "nuts":
init_trace = pm.sample(
draws=n_init, step=pm.NUTS(), tune=n_init // 2, random_seed=random_seed
)
cov = np.atleast_1d(pm.trace_cov(init_trace))
start = list(np.random.choice(init_trace, njobs))
potential = quadpotential.QuadPotentialFull(cov)
if njobs == 1:
start = start[0]
else:
raise NotImplementedError("Initializer {} is not supported.".format(init))
step = pm.NUTS(potential=potential, **kwargs)
return start, step
|
def init_nuts(
init="auto",
njobs=1,
n_init=500000,
model=None,
random_seed=-1,
progressbar=True,
**kwargs,
):
"""Set up the mass matrix initialization for NUTS.
NUTS convergence and sampling speed is extremely dependent on the
choice of mass/scaling matrix. This function implements different
methods for choosing or adapting the mass matrix.
Parameters
----------
init : str
Initialization method to use.
* auto : Choose a default initialization method automatically.
Currently, this is `'advi+adapt_diag'`, but this can change in
the future. If you depend on the exact behaviour, choose an
initialization method explicitly.
* adapt_diag : Start with a identity mass matrix and then adapt
a diagonal based on the variance of the tuning samples.
* advi+adapt_diag : Run ADVI and then adapt the resulting diagonal
mass matrix based on the sample variance of the tuning samples.
* advi+adapt_diag_grad : Run ADVI and then adapt the resulting
diagonal mass matrix based on the variance of the gradients
during tuning. This is **experimental** and might be removed
in a future release.
* advi : Run ADVI to estimate posterior mean and diagonal mass
matrix.
* advi_map: Initialize ADVI with MAP and use MAP as starting point.
* map : Use the MAP as starting point. This is discouraged.
* nuts : Run NUTS and estimate posterior mean and mass matrix from
the trace.
njobs : int
Number of parallel jobs to start.
n_init : int
Number of iterations of initializer
If 'ADVI', number of iterations, if 'nuts', number of draws.
model : Model (optional if in `with` context)
progressbar : bool
Whether or not to display a progressbar for advi sampling.
**kwargs : keyword arguments
Extra keyword arguments are forwarded to pymc3.NUTS.
Returns
-------
start : pymc3.model.Point
Starting point for sampler
nuts_sampler : pymc3.step_methods.NUTS
Instantiated and initialized NUTS sampler object
"""
model = pm.modelcontext(model)
vars = kwargs.get("vars", model.vars)
if set(vars) != set(model.vars):
raise ValueError("Must use init_nuts on all variables of a model.")
if not pm.model.all_continuous(vars):
raise ValueError(
"init_nuts can only be used for models with only continuous variables."
)
if not isinstance(init, str):
raise TypeError("init must be a string.")
if init is not None:
init = init.lower()
if init == "auto":
init = "advi+adapt_diag"
pm._log.info("Initializing NUTS using {}...".format(init))
random_seed = int(np.atleast_1d(random_seed)[0])
cb = [
pm.callbacks.CheckParametersConvergence(tolerance=1e-2, diff="absolute"),
pm.callbacks.CheckParametersConvergence(tolerance=1e-2, diff="relative"),
]
if init == "adapt_diag":
start = []
for _ in range(njobs):
vals = distribution.draw_values(model.free_RVs)
point = {var.name: vals[i] for i, var in enumerate(model.free_RVs)}
start.append(point)
mean = np.mean([model.dict_to_array(vals) for vals in start], axis=0)
var = np.ones_like(mean)
potential = quadpotential.QuadPotentialDiagAdapt(model.ndim, mean, var, 10)
if njobs == 1:
start = start[0]
elif init == "advi+adapt_diag_grad":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
)
start = approx.sample(draws=njobs)
start = list(start)
stds = approx.gbij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
mean = approx.gbij.rmap(approx.mean.get_value())
mean = model.dict_to_array(mean)
weight = 50
potential = quadpotential.QuadPotentialDiagAdaptGrad(
model.ndim, mean, cov, weight
)
if njobs == 1:
start = start[0]
elif init == "advi+adapt_diag":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
)
start = approx.sample(draws=njobs)
start = list(start)
stds = approx.gbij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
mean = approx.gbij.rmap(approx.mean.get_value())
mean = model.dict_to_array(mean)
weight = 50
potential = quadpotential.QuadPotentialDiagAdapt(model.ndim, mean, cov, weight)
if njobs == 1:
start = start[0]
elif init == "advi":
approx = pm.fit(
random_seed=random_seed,
n=n_init,
method="advi",
model=model,
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
) # type: pm.MeanField
start = approx.sample(draws=njobs)
start = list(start)
stds = approx.gbij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
potential = quadpotential.QuadPotentialDiag(cov)
if njobs == 1:
start = start[0]
elif init == "advi_map":
start = pm.find_MAP()
approx = pm.MeanField(model=model, start=start)
pm.fit(
random_seed=random_seed,
n=n_init,
method=pm.ADVI.from_mean_field(approx),
callbacks=cb,
progressbar=progressbar,
obj_optimizer=pm.adagrad_window,
)
start = approx.sample(draws=njobs)
start = list(start)
stds = approx.gbij.rmap(approx.std.eval())
cov = model.dict_to_array(stds) ** 2
potential = quadpotential.QuadPotentialDiag(cov)
if njobs == 1:
start = start[0]
elif init == "map":
start = pm.find_MAP()
cov = pm.find_hessian(point=start)
start = [start] * njobs
potential = quadpotential.QuadPotentialFull(cov)
if njobs == 1:
start = start[0]
elif init == "nuts":
init_trace = pm.sample(
draws=n_init, step=pm.NUTS(), tune=n_init // 2, random_seed=random_seed
)
cov = np.atleast_1d(pm.trace_cov(init_trace))
start = list(np.random.choice(init_trace, njobs))
potential = quadpotential.QuadPotentialFull(cov)
if njobs == 1:
start = start[0]
else:
raise NotImplementedError("Initializer {} is not supported.".format(init))
step = pm.NUTS(potential=potential, **kwargs)
return start, step
|
https://github.com/pymc-devs/pymc3/issues/2442
|
Traceback (most recent call last):
File "<ipython-input-10-aea93a5e8087>", line 5, in <module>
pm.sample(init='adapt_diag')
File "/home/laoj/Documents/Github/pymc3/pymc3/sampling.py", line 247, in sample
progressbar=progressbar, **args)
File "/home/laoj/Documents/Github/pymc3/pymc3/sampling.py", line 729, in init_nuts
vals = distribution.draw_values(model.free_RVs)
File "/home/laoj/Documents/Github/pymc3/pymc3/distributions/distribution.py", line 194, in draw_values
values.append(_draw_value(param, point=point, givens=givens.values()))
File "/home/laoj/Documents/Github/pymc3/pymc3/distributions/distribution.py", line 258, in _draw_value
func = _compile_theano_function(param, variables)
File "/home/laoj/Documents/Github/pymc3/pymc3/memoize.py", line 16, in memoizer
cache[key] = obj(*args, **kwargs)
File "/home/laoj/Documents/Github/pymc3/pymc3/distributions/distribution.py", line 220, in _compile_theano_function
allow_input_downcast=True)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function.py", line 326, in function
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/pfunc.py", line 486, in pfunc
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1808, in orig_function
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1446, in __init__
accept_inplace)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 177, in std_fgraph
update_mapping=update_mapping)
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 175, in __init__
self.__import_r__(output, reason="init")
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 356, in __import_r__
raise MissingInputError("Undeclared input", variable=variable)
MissingInputError: Undeclared input
|
MissingInputError
|
def __init__(
self,
n,
initial_mean,
initial_diag=None,
initial_weight=0,
adaptation_window=100,
dtype=None,
):
"""Set up a diagonal mass matrix."""
if initial_diag is not None and initial_diag.ndim != 1:
raise ValueError("Initial diagonal must be one-dimensional.")
if initial_mean.ndim != 1:
raise ValueError("Initial mean must be one-dimensional.")
if initial_diag is not None and len(initial_diag) != n:
raise ValueError(
"Wrong shape for initial_diag: expected %s got %s" % (n, len(initial_diag))
)
if len(initial_mean) != n:
raise ValueError(
"Wrong shape for initial_mean: expected %s got %s" % (n, len(initial_mean))
)
if dtype is None:
dtype = theano.config.floatX
if initial_diag is None:
initial_diag = np.ones(n, dtype=dtype)
initial_weight = 1
self.dtype = dtype
self._n = n
self._var = np.array(initial_diag, dtype=self.dtype, copy=True)
self._var_theano = theano.shared(self._var)
self._stds = np.sqrt(initial_diag)
self._inv_stds = floatX(1.0) / self._stds
self._foreground_var = _WeightedVariance(
self._n, initial_mean, initial_diag, initial_weight, self.dtype
)
self._background_var = _WeightedVariance(self._n, dtype=self.dtype)
self._n_samples = 0
self.adaptation_window = adaptation_window
|
def __init__(
self,
n,
initial_mean,
initial_diag=None,
initial_weight=0,
adaptation_window=100,
dtype=None,
):
"""Set up a diagonal mass matrix."""
if initial_diag is not None and initial_diag.ndim != 1:
raise ValueError("Initial diagonal must be one-dimensional.")
if initial_mean.ndim != 1:
raise ValueError("Initial mean must be one-dimensional.")
if initial_diag is not None and len(initial_diag) != n:
raise ValueError(
"Wrong shape for initial_diag: expected %s got %s" % (n, len(initial_diag))
)
if len(initial_mean) != n:
raise ValueError(
"Wrong shape for initial_mean: expected %s got %s" % (n, len(initial_mean))
)
if initial_diag is None:
initial_diag = np.ones(n, dtype=theano.config.floatX)
initial_weight = 1
if dtype is None:
dtype = theano.config.floatX
self.dtype = dtype
self._n = n
self._var = np.array(initial_diag, dtype=self.dtype, copy=True)
self._var_theano = theano.shared(self._var)
self._stds = np.sqrt(initial_diag)
self._inv_stds = floatX(1.0) / self._stds
self._foreground_var = _WeightedVariance(
self._n, initial_mean, initial_diag, initial_weight, self.dtype
)
self._background_var = _WeightedVariance(self._n, dtype=self.dtype)
self._n_samples = 0
self.adaptation_window = adaptation_window
|
https://github.com/pymc-devs/pymc3/issues/2442
|
Traceback (most recent call last):
File "<ipython-input-10-aea93a5e8087>", line 5, in <module>
pm.sample(init='adapt_diag')
File "/home/laoj/Documents/Github/pymc3/pymc3/sampling.py", line 247, in sample
progressbar=progressbar, **args)
File "/home/laoj/Documents/Github/pymc3/pymc3/sampling.py", line 729, in init_nuts
vals = distribution.draw_values(model.free_RVs)
File "/home/laoj/Documents/Github/pymc3/pymc3/distributions/distribution.py", line 194, in draw_values
values.append(_draw_value(param, point=point, givens=givens.values()))
File "/home/laoj/Documents/Github/pymc3/pymc3/distributions/distribution.py", line 258, in _draw_value
func = _compile_theano_function(param, variables)
File "/home/laoj/Documents/Github/pymc3/pymc3/memoize.py", line 16, in memoizer
cache[key] = obj(*args, **kwargs)
File "/home/laoj/Documents/Github/pymc3/pymc3/distributions/distribution.py", line 220, in _compile_theano_function
allow_input_downcast=True)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function.py", line 326, in function
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/pfunc.py", line 486, in pfunc
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1808, in orig_function
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1446, in __init__
accept_inplace)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 177, in std_fgraph
update_mapping=update_mapping)
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 175, in __init__
self.__import_r__(output, reason="init")
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 356, in __import_r__
raise MissingInputError("Undeclared input", variable=variable)
MissingInputError: Undeclared input
|
MissingInputError
|
def random(self, point=None, size=None, repeat=None):
def random_choice(*args, **kwargs):
w = kwargs.pop("w")
w /= w.sum(axis=-1, keepdims=True)
k = w.shape[-1]
if w.ndim > 1:
return np.row_stack([np.random.choice(k, p=w_) for w_ in w])
else:
return np.random.choice(k, p=w, *args, **kwargs)
w = draw_values([self.w], point=point)[0]
w_samples = generate_samples(
random_choice,
w=w,
broadcast_shape=w.shape[:-1] or (1,),
dist_shape=self.shape,
size=size,
).squeeze()
comp_samples = self._comp_samples(point=point, size=size, repeat=repeat)
if comp_samples.ndim > 1:
return np.squeeze(comp_samples[np.arange(w_samples.size), w_samples])
else:
return np.squeeze(comp_samples[w_samples])
|
def random(self, point=None, size=None, repeat=None):
def random_choice(*args, **kwargs):
w = kwargs.pop("w")
w /= w.sum(axis=-1, keepdims=True)
k = w.shape[-1]
if w.ndim > 1:
return np.row_stack([np.random.choice(k, p=w_) for w_ in w])
else:
return np.random.choice(k, p=w, *args, **kwargs)
w = draw_values([self.w], point=point)
w_samples = generate_samples(
random_choice,
w=w,
broadcast_shape=w.shape[:-1] or (1,),
dist_shape=self.shape,
size=size,
).squeeze()
comp_samples = self._comp_samples(point=point, size=size, repeat=repeat)
if comp_samples.ndim > 1:
return np.squeeze(comp_samples[np.arange(w_samples.size), w_samples])
else:
return np.squeeze(comp_samples[w_samples])
|
https://github.com/pymc-devs/pymc3/issues/2346
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-16-2fd0a2e33b32> in <module>()
5
6 with model:
----> 7 pp_trace = pm.sample_ppc(trace, PP_SAMPLES, random_seed=SEED)
/Users/fonnescj/Repos/pymc3/pymc3/sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
537 param = trace[idx]
538 for var in vars:
--> 539 vals = var.distribution.random(point=param, size=size)
540 ppc[var.name].append(vals)
541 finally:
/Users/fonnescj/Repos/pymc3/pymc3/distributions/mixture.py in random(self, point, size, repeat)
130 w_samples = generate_samples(random_choice,
131 w=w,
--> 132 broadcast_shape=w.shape[:-1] or (1,),
133 dist_shape=self.shape,
134 size=size).squeeze()
AttributeError: 'list' object has no attribute 'shape'
|
AttributeError
|
def __init__(self, dist, transform, *args, **kwargs):
"""
Parameters
----------
dist : Distribution
transform : Transform
args, kwargs
arguments to Distribution"""
forward = transform.forward
testval = forward(dist.default())
forward_val = transform.forward_val
self.dist = dist
self.transform_used = transform
v = forward(FreeRV(name="v", distribution=dist))
self.type = v.type
super(TransformedDistribution, self).__init__(
v.shape.tag.test_value, v.dtype, testval, dist.defaults, *args, **kwargs
)
if transform.name == "stickbreaking":
b = np.hstack(((np.atleast_1d(self.shape) == 1)[:-1], False))
# force the last dim not broadcastable
self.type = tt.TensorType(v.dtype, b)
|
def __init__(self, dist, transform, *args, **kwargs):
"""
Parameters
----------
dist : Distribution
transform : Transform
args, kwargs
arguments to Distribution"""
forward = transform.forward
testval = forward(dist.default())
self.dist = dist
self.transform_used = transform
v = forward(FreeRV(name="v", distribution=dist))
self.type = v.type
super(TransformedDistribution, self).__init__(
v.shape.tag.test_value, v.dtype, testval, dist.defaults, *args, **kwargs
)
if transform.name == "stickbreaking":
b = np.hstack(((np.atleast_1d(self.shape) == 1)[:-1], False))
# force the last dim not broadcastable
self.type = tt.TensorType(v.dtype, b)
|
https://github.com/pymc-devs/pymc3/issues/2258
|
Traceback (most recent call last):
File "<ipython-input-1-e7f2b743f1a1>", line 5, in <module>
pm.sample(1000)
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 273, in sample
return sample_func(**sample_args)[discard:]
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 288, in _sample
for it, strace in enumerate(sampling):
File "/usr/local/lib/python3.5/dist-packages/tqdm/_tqdm.py", line 862, in __iter__
for obj in iterable:
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 367, in _iter_sample
_update_start_vals(start, model.test_point, model)
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 483, in _update_start_vals
b[tname] = transform_func[0].forward(a[name]).eval()
File "/usr/local/lib/python3.5/dist-packages/theano/gof/graph.py", line 516, in eval
self._fn_cache[inputs] = theano.function(inputs, self)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function.py", line 326, in function
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/pfunc.py", line 486, in pfunc
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1807, in orig_function
output_keys=output_keys).create(
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1446, in __init__
accept_inplace)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 177, in std_fgraph
update_mapping=update_mapping)
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 174, in __init__
self.__import_r__(output, reason="init")
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 345, in __import_r__
self.__import__(variable.owner, reason=reason)
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 390, in __import__
raise MissingInputError(error_msg, variable=r)
MissingInputError: Input 0 of the graph (indices start from 0), used to compute sigmoid(a1_interval__), was not provided and not given a value. Use the Theano flag exception_verbosity='high', for more information on this error.
|
MissingInputError
|
def forward(self, x):
a = self.a
return tt.log(x - a)
|
def forward(self, x):
a = self.a
r = tt.log(x - a)
return r
|
https://github.com/pymc-devs/pymc3/issues/2258
|
Traceback (most recent call last):
File "<ipython-input-1-e7f2b743f1a1>", line 5, in <module>
pm.sample(1000)
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 273, in sample
return sample_func(**sample_args)[discard:]
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 288, in _sample
for it, strace in enumerate(sampling):
File "/usr/local/lib/python3.5/dist-packages/tqdm/_tqdm.py", line 862, in __iter__
for obj in iterable:
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 367, in _iter_sample
_update_start_vals(start, model.test_point, model)
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 483, in _update_start_vals
b[tname] = transform_func[0].forward(a[name]).eval()
File "/usr/local/lib/python3.5/dist-packages/theano/gof/graph.py", line 516, in eval
self._fn_cache[inputs] = theano.function(inputs, self)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function.py", line 326, in function
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/pfunc.py", line 486, in pfunc
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1807, in orig_function
output_keys=output_keys).create(
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1446, in __init__
accept_inplace)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 177, in std_fgraph
update_mapping=update_mapping)
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 174, in __init__
self.__import_r__(output, reason="init")
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 345, in __import_r__
self.__import__(variable.owner, reason=reason)
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 390, in __import__
raise MissingInputError(error_msg, variable=r)
MissingInputError: Input 0 of the graph (indices start from 0), used to compute sigmoid(a1_interval__), was not provided and not given a value. Use the Theano flag exception_verbosity='high', for more information on this error.
|
MissingInputError
|
def forward(self, x):
b = self.b
return tt.log(b - x)
|
def forward(self, x):
b = self.b
r = tt.log(b - x)
return r
|
https://github.com/pymc-devs/pymc3/issues/2258
|
Traceback (most recent call last):
File "<ipython-input-1-e7f2b743f1a1>", line 5, in <module>
pm.sample(1000)
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 273, in sample
return sample_func(**sample_args)[discard:]
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 288, in _sample
for it, strace in enumerate(sampling):
File "/usr/local/lib/python3.5/dist-packages/tqdm/_tqdm.py", line 862, in __iter__
for obj in iterable:
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 367, in _iter_sample
_update_start_vals(start, model.test_point, model)
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 483, in _update_start_vals
b[tname] = transform_func[0].forward(a[name]).eval()
File "/usr/local/lib/python3.5/dist-packages/theano/gof/graph.py", line 516, in eval
self._fn_cache[inputs] = theano.function(inputs, self)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function.py", line 326, in function
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/pfunc.py", line 486, in pfunc
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1807, in orig_function
output_keys=output_keys).create(
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1446, in __init__
accept_inplace)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 177, in std_fgraph
update_mapping=update_mapping)
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 174, in __init__
self.__import_r__(output, reason="init")
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 345, in __import_r__
self.__import__(variable.owner, reason=reason)
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 390, in __import__
raise MissingInputError(error_msg, variable=r)
MissingInputError: Input 0 of the graph (indices start from 0), used to compute sigmoid(a1_interval__), was not provided and not given a value. Use the Theano flag exception_verbosity='high', for more information on this error.
|
MissingInputError
|
def _update_start_vals(a, b, model):
"""Update a with b, without overwriting existing keys. Values specified for
transformed variables on the original scale are also transformed and inserted.
"""
if model is not None:
for free_RV in model.free_RVs:
tname = free_RV.name
for name in a:
if is_transformed_name(tname) and get_untransformed_name(tname) == name:
transform_func = [
d.transformation for d in model.deterministics if d.name == name
]
if transform_func:
b[tname] = (
transform_func[0].forward_val(a[name], point=b).eval()
)
a.update({k: v for k, v in b.items() if k not in a})
|
def _update_start_vals(a, b, model):
"""Update a with b, without overwriting existing keys. Values specified for
transformed variables on the original scale are also transformed and inserted.
"""
for name in a:
for tname in b:
if is_transformed_name(tname) and get_untransformed_name(tname) == name:
transform_func = [
d.transformation for d in model.deterministics if d.name == name
]
if transform_func:
b[tname] = transform_func[0].forward(a[name]).eval()
a.update({k: v for k, v in b.items() if k not in a})
|
https://github.com/pymc-devs/pymc3/issues/2258
|
Traceback (most recent call last):
File "<ipython-input-1-e7f2b743f1a1>", line 5, in <module>
pm.sample(1000)
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 273, in sample
return sample_func(**sample_args)[discard:]
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 288, in _sample
for it, strace in enumerate(sampling):
File "/usr/local/lib/python3.5/dist-packages/tqdm/_tqdm.py", line 862, in __iter__
for obj in iterable:
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 367, in _iter_sample
_update_start_vals(start, model.test_point, model)
File "/usr/local/lib/python3.5/dist-packages/pymc3/sampling.py", line 483, in _update_start_vals
b[tname] = transform_func[0].forward(a[name]).eval()
File "/usr/local/lib/python3.5/dist-packages/theano/gof/graph.py", line 516, in eval
self._fn_cache[inputs] = theano.function(inputs, self)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function.py", line 326, in function
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/pfunc.py", line 486, in pfunc
output_keys=output_keys)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1807, in orig_function
output_keys=output_keys).create(
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 1446, in __init__
accept_inplace)
File "/usr/local/lib/python3.5/dist-packages/theano/compile/function_module.py", line 177, in std_fgraph
update_mapping=update_mapping)
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 174, in __init__
self.__import_r__(output, reason="init")
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 345, in __import_r__
self.__import__(variable.owner, reason=reason)
File "/usr/local/lib/python3.5/dist-packages/theano/gof/fg.py", line 390, in __import__
raise MissingInputError(error_msg, variable=r)
MissingInputError: Input 0 of the graph (indices start from 0), used to compute sigmoid(a1_interval__), was not provided and not given a value. Use the Theano flag exception_verbosity='high', for more information on this error.
|
MissingInputError
|
def random(self, point=None, size=None, repeat=None):
sd = draw_values([self.sd], point=point)[0]
return generate_samples(
stats.halfnorm.rvs, loc=0.0, scale=sd, dist_shape=self.shape, size=size
)
|
def random(self, point=None, size=None, repeat=None):
sd = draw_values([self.sd], point=point)
return generate_samples(
stats.halfnorm.rvs, loc=0.0, scale=sd, dist_shape=self.shape, size=size
)
|
https://github.com/pymc-devs/pymc3/issues/2307
|
TypeError Traceback (most recent call last)
in ()
1 ann_input.set_value(X_test)
2 ann_output.set_value(Y_test)
----> 3 ppc = pm.sample_ppc(trace, model=neural_network, samples=500, progressbar=False)
4
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
526 for var in vars:
527 ppc[var.name].append(var.distribution.random(point=param,
--> 528 size=size))
529
530 return {k: np.asarray(v) for k, v in ppc.items()}
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\discrete.py in random(self, point, size, repeat)
152
153 def random(self, point=None, size=None, repeat=None):
--> 154 p = draw_values([self.p], point=point)
155 return generate_samples(stats.bernoulli.rvs, p,
156 dist_shape=self.shape,
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_values(params, point)
183 if not isinstance(node, (tt.sharedvar.TensorSharedVariable,
184 tt.TensorConstant)):
--> 185 givens[name] = (node, draw_value(node, point=point))
186 values = [None for _ in params]
187 for i, param in enumerate(params):
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_value(param, point, givens)
251 except:
252 shape = param.shape
--> 253 if len(shape) == 0 and len(value) == 1:
254 value = value[0]
255 return value
TypeError: object of type 'TensorVariable' has no len()
|
TypeError
|
def random(self, point=None, size=None, repeat=None):
lam = draw_values([self.lam], point=point)[0]
return generate_samples(
np.random.exponential, scale=1.0 / lam, dist_shape=self.shape, size=size
)
|
def random(self, point=None, size=None, repeat=None):
lam = draw_values([self.lam], point=point)
return generate_samples(
np.random.exponential, scale=1.0 / lam, dist_shape=self.shape, size=size
)
|
https://github.com/pymc-devs/pymc3/issues/2307
|
TypeError Traceback (most recent call last)
in ()
1 ann_input.set_value(X_test)
2 ann_output.set_value(Y_test)
----> 3 ppc = pm.sample_ppc(trace, model=neural_network, samples=500, progressbar=False)
4
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
526 for var in vars:
527 ppc[var.name].append(var.distribution.random(point=param,
--> 528 size=size))
529
530 return {k: np.asarray(v) for k, v in ppc.items()}
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\discrete.py in random(self, point, size, repeat)
152
153 def random(self, point=None, size=None, repeat=None):
--> 154 p = draw_values([self.p], point=point)
155 return generate_samples(stats.bernoulli.rvs, p,
156 dist_shape=self.shape,
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_values(params, point)
183 if not isinstance(node, (tt.sharedvar.TensorSharedVariable,
184 tt.TensorConstant)):
--> 185 givens[name] = (node, draw_value(node, point=point))
186 values = [None for _ in params]
187 for i, param in enumerate(params):
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_value(param, point, givens)
251 except:
252 shape = param.shape
--> 253 if len(shape) == 0 and len(value) == 1:
254 value = value[0]
255 return value
TypeError: object of type 'TensorVariable' has no len()
|
TypeError
|
def random(self, point=None, size=None, repeat=None):
beta = draw_values([self.beta], point=point)[0]
return generate_samples(self._random, beta, dist_shape=self.shape, size=size)
|
def random(self, point=None, size=None, repeat=None):
beta = draw_values([self.beta], point=point)
return generate_samples(self._random, beta, dist_shape=self.shape, size=size)
|
https://github.com/pymc-devs/pymc3/issues/2307
|
TypeError Traceback (most recent call last)
in ()
1 ann_input.set_value(X_test)
2 ann_output.set_value(Y_test)
----> 3 ppc = pm.sample_ppc(trace, model=neural_network, samples=500, progressbar=False)
4
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
526 for var in vars:
527 ppc[var.name].append(var.distribution.random(point=param,
--> 528 size=size))
529
530 return {k: np.asarray(v) for k, v in ppc.items()}
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\discrete.py in random(self, point, size, repeat)
152
153 def random(self, point=None, size=None, repeat=None):
--> 154 p = draw_values([self.p], point=point)
155 return generate_samples(stats.bernoulli.rvs, p,
156 dist_shape=self.shape,
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_values(params, point)
183 if not isinstance(node, (tt.sharedvar.TensorSharedVariable,
184 tt.TensorConstant)):
--> 185 givens[name] = (node, draw_value(node, point=point))
186 values = [None for _ in params]
187 for i, param in enumerate(params):
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_value(param, point, givens)
251 except:
252 shape = param.shape
--> 253 if len(shape) == 0 and len(value) == 1:
254 value = value[0]
255 return value
TypeError: object of type 'TensorVariable' has no len()
|
TypeError
|
def random(self, point=None, size=None, repeat=None):
p = draw_values([self.p], point=point)[0]
return generate_samples(stats.bernoulli.rvs, p, dist_shape=self.shape, size=size)
|
def random(self, point=None, size=None, repeat=None):
p = draw_values([self.p], point=point)
return generate_samples(stats.bernoulli.rvs, p, dist_shape=self.shape, size=size)
|
https://github.com/pymc-devs/pymc3/issues/2307
|
TypeError Traceback (most recent call last)
in ()
1 ann_input.set_value(X_test)
2 ann_output.set_value(Y_test)
----> 3 ppc = pm.sample_ppc(trace, model=neural_network, samples=500, progressbar=False)
4
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
526 for var in vars:
527 ppc[var.name].append(var.distribution.random(point=param,
--> 528 size=size))
529
530 return {k: np.asarray(v) for k, v in ppc.items()}
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\discrete.py in random(self, point, size, repeat)
152
153 def random(self, point=None, size=None, repeat=None):
--> 154 p = draw_values([self.p], point=point)
155 return generate_samples(stats.bernoulli.rvs, p,
156 dist_shape=self.shape,
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_values(params, point)
183 if not isinstance(node, (tt.sharedvar.TensorSharedVariable,
184 tt.TensorConstant)):
--> 185 givens[name] = (node, draw_value(node, point=point))
186 values = [None for _ in params]
187 for i, param in enumerate(params):
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_value(param, point, givens)
251 except:
252 shape = param.shape
--> 253 if len(shape) == 0 and len(value) == 1:
254 value = value[0]
255 return value
TypeError: object of type 'TensorVariable' has no len()
|
TypeError
|
def random(self, point=None, size=None, repeat=None):
mu = draw_values([self.mu], point=point)[0]
return generate_samples(stats.poisson.rvs, mu, dist_shape=self.shape, size=size)
|
def random(self, point=None, size=None, repeat=None):
mu = draw_values([self.mu], point=point)
return generate_samples(stats.poisson.rvs, mu, dist_shape=self.shape, size=size)
|
https://github.com/pymc-devs/pymc3/issues/2307
|
TypeError Traceback (most recent call last)
in ()
1 ann_input.set_value(X_test)
2 ann_output.set_value(Y_test)
----> 3 ppc = pm.sample_ppc(trace, model=neural_network, samples=500, progressbar=False)
4
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
526 for var in vars:
527 ppc[var.name].append(var.distribution.random(point=param,
--> 528 size=size))
529
530 return {k: np.asarray(v) for k, v in ppc.items()}
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\discrete.py in random(self, point, size, repeat)
152
153 def random(self, point=None, size=None, repeat=None):
--> 154 p = draw_values([self.p], point=point)
155 return generate_samples(stats.bernoulli.rvs, p,
156 dist_shape=self.shape,
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_values(params, point)
183 if not isinstance(node, (tt.sharedvar.TensorSharedVariable,
184 tt.TensorConstant)):
--> 185 givens[name] = (node, draw_value(node, point=point))
186 values = [None for _ in params]
187 for i, param in enumerate(params):
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_value(param, point, givens)
251 except:
252 shape = param.shape
--> 253 if len(shape) == 0 and len(value) == 1:
254 value = value[0]
255 return value
TypeError: object of type 'TensorVariable' has no len()
|
TypeError
|
def random(self, point=None, size=None, repeat=None):
p = draw_values([self.p], point=point)[0]
return generate_samples(np.random.geometric, p, dist_shape=self.shape, size=size)
|
def random(self, point=None, size=None, repeat=None):
p = draw_values([self.p], point=point)
return generate_samples(np.random.geometric, p, dist_shape=self.shape, size=size)
|
https://github.com/pymc-devs/pymc3/issues/2307
|
TypeError Traceback (most recent call last)
in ()
1 ann_input.set_value(X_test)
2 ann_output.set_value(Y_test)
----> 3 ppc = pm.sample_ppc(trace, model=neural_network, samples=500, progressbar=False)
4
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
526 for var in vars:
527 ppc[var.name].append(var.distribution.random(point=param,
--> 528 size=size))
529
530 return {k: np.asarray(v) for k, v in ppc.items()}
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\discrete.py in random(self, point, size, repeat)
152
153 def random(self, point=None, size=None, repeat=None):
--> 154 p = draw_values([self.p], point=point)
155 return generate_samples(stats.bernoulli.rvs, p,
156 dist_shape=self.shape,
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_values(params, point)
183 if not isinstance(node, (tt.sharedvar.TensorSharedVariable,
184 tt.TensorConstant)):
--> 185 givens[name] = (node, draw_value(node, point=point))
186 values = [None for _ in params]
187 for i, param in enumerate(params):
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_value(param, point, givens)
251 except:
252 shape = param.shape
--> 253 if len(shape) == 0 and len(value) == 1:
254 value = value[0]
255 return value
TypeError: object of type 'TensorVariable' has no len()
|
TypeError
|
def random(self, point=None, size=None, repeat=None):
c = draw_values([self.c], point=point)[0]
dtype = np.array(c).dtype
def _random(c, dtype=dtype, size=None):
return np.full(size, fill_value=c, dtype=dtype)
return generate_samples(_random, c=c, dist_shape=self.shape, size=size).astype(
dtype
)
|
def random(self, point=None, size=None, repeat=None):
c = draw_values([self.c], point=point)
dtype = np.array(c).dtype
def _random(c, dtype=dtype, size=None):
return np.full(size, fill_value=c, dtype=dtype)
return generate_samples(_random, c=c, dist_shape=self.shape, size=size).astype(
dtype
)
|
https://github.com/pymc-devs/pymc3/issues/2307
|
TypeError Traceback (most recent call last)
in ()
1 ann_input.set_value(X_test)
2 ann_output.set_value(Y_test)
----> 3 ppc = pm.sample_ppc(trace, model=neural_network, samples=500, progressbar=False)
4
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
526 for var in vars:
527 ppc[var.name].append(var.distribution.random(point=param,
--> 528 size=size))
529
530 return {k: np.asarray(v) for k, v in ppc.items()}
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\discrete.py in random(self, point, size, repeat)
152
153 def random(self, point=None, size=None, repeat=None):
--> 154 p = draw_values([self.p], point=point)
155 return generate_samples(stats.bernoulli.rvs, p,
156 dist_shape=self.shape,
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_values(params, point)
183 if not isinstance(node, (tt.sharedvar.TensorSharedVariable,
184 tt.TensorConstant)):
--> 185 givens[name] = (node, draw_value(node, point=point))
186 values = [None for _ in params]
187 for i, param in enumerate(params):
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_value(param, point, givens)
251 except:
252 shape = param.shape
--> 253 if len(shape) == 0 and len(value) == 1:
254 value = value[0]
255 return value
TypeError: object of type 'TensorVariable' has no len()
|
TypeError
|
def draw_values(params, point=None):
"""
Draw (fix) parameter values. Handles a number of cases:
1) The parameter is a scalar
2) The parameter is an *RV
a) parameter can be fixed to the value in the point
b) parameter can be fixed by sampling from the *RV
c) parameter can be fixed using tag.test_value (last resort)
3) The parameter is a tensor variable/constant. Can be evaluated using
theano.function, but a variable may contain nodes which
a) are named parameters in the point
b) are *RVs with a random method
"""
# Distribution parameters may be nodes which have named node-inputs
# specified in the point. Need to find the node-inputs to replace them.
givens = {}
for param in params:
if hasattr(param, "name"):
named_nodes = get_named_nodes(param)
if param.name in named_nodes:
named_nodes.pop(param.name)
for name, node in named_nodes.items():
if not isinstance(
node, (tt.sharedvar.SharedVariable, tt.TensorConstant)
):
givens[name] = (node, _draw_value(node, point=point))
values = []
for param in params:
values.append(_draw_value(param, point=point, givens=givens.values()))
return values
|
def draw_values(params, point=None):
"""
Draw (fix) parameter values. Handles a number of cases:
1) The parameter is a scalar
2) The parameter is an *RV
a) parameter can be fixed to the value in the point
b) parameter can be fixed by sampling from the *RV
c) parameter can be fixed using tag.test_value (last resort)
3) The parameter is a tensor variable/constant. Can be evaluated using
theano.function, but a variable may contain nodes which
a) are named parameters in the point
b) are *RVs with a random method
"""
# Distribution parameters may be nodes which have named node-inputs
# specified in the point. Need to find the node-inputs to replace them.
givens = {}
for param in params:
if hasattr(param, "name"):
named_nodes = get_named_nodes(param)
if param.name in named_nodes:
named_nodes.pop(param.name)
for name, node in named_nodes.items():
if not isinstance(
node, (tt.sharedvar.TensorSharedVariable, tt.TensorConstant)
):
givens[name] = (node, draw_value(node, point=point))
values = [None for _ in params]
for i, param in enumerate(params):
# "Homogonise" output
values[i] = np.atleast_1d(
draw_value(param, point=point, givens=givens.values())
)
if len(values) == 1:
return values[0]
else:
return values
|
https://github.com/pymc-devs/pymc3/issues/2307
|
TypeError Traceback (most recent call last)
in ()
1 ann_input.set_value(X_test)
2 ann_output.set_value(Y_test)
----> 3 ppc = pm.sample_ppc(trace, model=neural_network, samples=500, progressbar=False)
4
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
526 for var in vars:
527 ppc[var.name].append(var.distribution.random(point=param,
--> 528 size=size))
529
530 return {k: np.asarray(v) for k, v in ppc.items()}
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\discrete.py in random(self, point, size, repeat)
152
153 def random(self, point=None, size=None, repeat=None):
--> 154 p = draw_values([self.p], point=point)
155 return generate_samples(stats.bernoulli.rvs, p,
156 dist_shape=self.shape,
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_values(params, point)
183 if not isinstance(node, (tt.sharedvar.TensorSharedVariable,
184 tt.TensorConstant)):
--> 185 givens[name] = (node, draw_value(node, point=point))
186 values = [None for _ in params]
187 for i, param in enumerate(params):
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_value(param, point, givens)
251 except:
252 shape = param.shape
--> 253 if len(shape) == 0 and len(value) == 1:
254 value = value[0]
255 return value
TypeError: object of type 'TensorVariable' has no len()
|
TypeError
|
def random(self, point=None, size=None):
a = draw_values([self.a], point=point)[0]
def _random(a, size=None):
return stats.dirichlet.rvs(a, None if size == a.shape else size)
samples = generate_samples(_random, a, dist_shape=self.shape, size=size)
return samples
|
def random(self, point=None, size=None):
a = draw_values([self.a], point=point)
def _random(a, size=None):
return stats.dirichlet.rvs(a, None if size == a.shape else size)
samples = generate_samples(_random, a, dist_shape=self.shape, size=size)
return samples
|
https://github.com/pymc-devs/pymc3/issues/2307
|
TypeError Traceback (most recent call last)
in ()
1 ann_input.set_value(X_test)
2 ann_output.set_value(Y_test)
----> 3 ppc = pm.sample_ppc(trace, model=neural_network, samples=500, progressbar=False)
4
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
526 for var in vars:
527 ppc[var.name].append(var.distribution.random(point=param,
--> 528 size=size))
529
530 return {k: np.asarray(v) for k, v in ppc.items()}
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\discrete.py in random(self, point, size, repeat)
152
153 def random(self, point=None, size=None, repeat=None):
--> 154 p = draw_values([self.p], point=point)
155 return generate_samples(stats.bernoulli.rvs, p,
156 dist_shape=self.shape,
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_values(params, point)
183 if not isinstance(node, (tt.sharedvar.TensorSharedVariable,
184 tt.TensorConstant)):
--> 185 givens[name] = (node, draw_value(node, point=point))
186 values = [None for _ in params]
187 for i, param in enumerate(params):
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_value(param, point, givens)
251 except:
252 shape = param.shape
--> 253 if len(shape) == 0 and len(value) == 1:
254 value = value[0]
255 return value
TypeError: object of type 'TensorVariable' has no len()
|
TypeError
|
def astep(self, q0, logp):
"""q0 : current state
logp : log probability function
"""
# Draw from the normal prior by multiplying the Cholesky decomposition
# of the covariance with draws from a standard normal
chol = draw_values([self.prior_chol])[0]
nu = np.dot(chol, nr.randn(chol.shape[0]))
y = logp(q0) - nr.standard_exponential()
# Draw initial proposal and propose a candidate point
theta = nr.uniform(0, 2 * np.pi)
theta_max = theta
theta_min = theta - 2 * np.pi
q_new = q0 * np.cos(theta) + nu * np.sin(theta)
while logp(q_new) <= y:
# Shrink the bracket and propose a new point
if theta < 0:
theta_min = theta
else:
theta_max = theta
theta = nr.uniform(theta_min, theta_max)
q_new = q0 * np.cos(theta) + nu * np.sin(theta)
return q_new
|
def astep(self, q0, logp):
"""q0 : current state
logp : log probability function
"""
# Draw from the normal prior by multiplying the Cholesky decomposition
# of the covariance with draws from a standard normal
chol = draw_values([self.prior_chol])
nu = np.dot(chol, nr.randn(chol.shape[0]))
y = logp(q0) - nr.standard_exponential()
# Draw initial proposal and propose a candidate point
theta = nr.uniform(0, 2 * np.pi)
theta_max = theta
theta_min = theta - 2 * np.pi
q_new = q0 * np.cos(theta) + nu * np.sin(theta)
while logp(q_new) <= y:
# Shrink the bracket and propose a new point
if theta < 0:
theta_min = theta
else:
theta_max = theta
theta = nr.uniform(theta_min, theta_max)
q_new = q0 * np.cos(theta) + nu * np.sin(theta)
return q_new
|
https://github.com/pymc-devs/pymc3/issues/2307
|
TypeError Traceback (most recent call last)
in ()
1 ann_input.set_value(X_test)
2 ann_output.set_value(Y_test)
----> 3 ppc = pm.sample_ppc(trace, model=neural_network, samples=500, progressbar=False)
4
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\sampling.py in sample_ppc(trace, samples, model, vars, size, random_seed, progressbar)
526 for var in vars:
527 ppc[var.name].append(var.distribution.random(point=param,
--> 528 size=size))
529
530 return {k: np.asarray(v) for k, v in ppc.items()}
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\discrete.py in random(self, point, size, repeat)
152
153 def random(self, point=None, size=None, repeat=None):
--> 154 p = draw_values([self.p], point=point)
155 return generate_samples(stats.bernoulli.rvs, p,
156 dist_shape=self.shape,
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_values(params, point)
183 if not isinstance(node, (tt.sharedvar.TensorSharedVariable,
184 tt.TensorConstant)):
--> 185 givens[name] = (node, draw_value(node, point=point))
186 values = [None for _ in params]
187 for i, param in enumerate(params):
C:\Users\Nikos\Documents\Lasagne\python-3.4.4.amd64\lib\site-packages\pymc3\distributions\distribution.py in draw_value(param, point, givens)
251 except:
252 shape = param.shape
--> 253 if len(shape) == 0 and len(value) == 1:
254 value = value[0]
255 return value
TypeError: object of type 'TensorVariable' has no len()
|
TypeError
|
def _slice(self, idx):
with self.activate_file:
start, stop, step = idx.indices(len(self))
sliced = ndarray.NDArray(model=self.model, vars=self.vars)
sliced.chain = self.chain
sliced.samples = {v: self.samples[v][start:stop:step] for v in self.varnames}
sliced.draw_idx = (stop - start) // step
return sliced
|
def _slice(self, idx):
with self.activate_file:
if idx.start is None:
burn = 0
else:
burn = idx.start
if idx.step is None:
thin = 1
else:
thin = idx.step
sliced = ndarray.NDArray(model=self.model, vars=self.vars)
sliced.chain = self.chain
sliced.samples = {
v: self.get_values(v, burn=burn, thin=thin) for v in self.varnames
}
return sliced
|
https://github.com/pymc-devs/pymc3/issues/1906
|
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import numpy as np\n",
"import pymc3 as pm"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x = np.random.normal(1., 1., size=100)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"with pm.Model() as model:\n",
" mu = pm.Normal('mu', 0., 1e-2)\n",
" x_obs = pm.Normal('x_obs', mu, 1., observed=x)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Auto-assigning NUTS sampler...\n",
"Initializing NUTS using advi...\n",
"Average ELBO = -788.78: 14%|ββ | 28990/200000 [00:01<00:11, 14735.92it/s]Median ELBO converged.\n",
"Finished [100%]: Average ELBO = -193.47\n",
"\n",
"100%|ββββββββββ| 500/500 [00:00<00:00, 2269.32it/s]\n"
]
}
],
"source": [
"with model:\n",
" trace = pm.sample(500)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"\r",
" 0%| | 0/500 [00:00<?, ?it/s]"
]
},
{
"ename": "IndexError",
"evalue": "index 223 is out of bounds for axis 0 with size 100",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-5-6711066867a2>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mpp_trace\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpm\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msample_ppc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrace\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/sampling.py\u001b[0m in \u001b[0;36msample_ppc\u001b[0;34m(trace, samples, model, vars, size, random_seed, progressbar)\u001b[0m\n\u001b[1;32m 412\u001b[0m \u001b[0mppc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdefaultdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlist\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 413\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0midx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mindices\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 414\u001b[0;31m \u001b[0mparam\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrace\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 415\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mvar\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mvars\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 416\u001b[0m ppc[var.name].append(var.distribution.random(point=param,\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/base.py\u001b[0m in \u001b[0;36m__getitem__\u001b[0;34m(self, idx)\u001b[0m\n\u001b[1;32m 266\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 267\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 268\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpoint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 269\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mValueError\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mTypeError\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# Passed variable or variable name.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 270\u001b[0m \u001b[0;32mpass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/base.py\u001b[0m in \u001b[0;36mpoint\u001b[0;34m(self, idx, chain)\u001b[0m\n\u001b[1;32m 420\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mchain\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 421\u001b[0m \u001b[0mchain\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mchains\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 422\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_straces\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mchain\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpoint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 423\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 424\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/ndarray.py\u001b[0m in \u001b[0;36mpoint\u001b[0;34m(self, idx)\u001b[0m\n\u001b[1;32m 173\u001b[0m \u001b[0midx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 174\u001b[0m return {varname: values[idx]\n\u001b[0;32m--> 175\u001b[0;31m for varname, values in self.samples.items()}\n\u001b[0m\u001b[1;32m 176\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 177\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/ndarray.py\u001b[0m in \u001b[0;36m<dictcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 173\u001b[0m \u001b[0midx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 174\u001b[0m return {varname: values[idx]\n\u001b[0;32m--> 175\u001b[0;31m for varname, values in self.samples.items()}\n\u001b[0m\u001b[1;32m 176\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 177\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mIndexError\u001b[0m: index 223 is out of bounds for axis 0 with size 100"
]
}
],
"source": [
"with model:\n",
" pp_trace = pm.sample_ppc(trace[::5])"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"500"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(trace[::5])"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"(100,)"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"trace[::5]['mu'].shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
}
},
"nbformat": 4,
"nbformat_minor": 1
}
|
IndexError
|
def _slice(self, idx):
# Slicing directly instead of using _slice_as_ndarray to
# support stop value in slice (which is needed by
# iter_sample).
# Only the first `draw_idx` value are valid because of preallocation
idx = slice(*idx.indices(len(self)))
sliced = NDArray(model=self.model, vars=self.vars)
sliced.chain = self.chain
sliced.samples = {varname: values[idx] for varname, values in self.samples.items()}
sliced.sampler_vars = self.sampler_vars
sliced.draw_idx = (idx.stop - idx.start) // idx.step
if self._stats is None:
return sliced
sliced._stats = []
for vars in self._stats:
var_sliced = {}
sliced._stats.append(var_sliced)
for key, vals in vars.items():
var_sliced[key] = vals[idx]
return sliced
|
def _slice(self, idx):
# Slicing directly instead of using _slice_as_ndarray to
# support stop value in slice (which is needed by
# iter_sample).
# Only the first `draw_idx` value are valid because of preallocation
idx = slice(*idx.indices(len(self)))
sliced = NDArray(model=self.model, vars=self.vars)
sliced.chain = self.chain
sliced.samples = {varname: values[idx] for varname, values in self.samples.items()}
sliced.sampler_vars = self.sampler_vars
if self._stats is None:
return sliced
sliced._stats = []
for vars in self._stats:
var_sliced = {}
sliced._stats.append(var_sliced)
for key, vals in vars.items():
var_sliced[key] = vals[idx]
sliced.draw_idx = idx.stop - idx.start
return sliced
|
https://github.com/pymc-devs/pymc3/issues/1906
|
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import numpy as np\n",
"import pymc3 as pm"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x = np.random.normal(1., 1., size=100)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"with pm.Model() as model:\n",
" mu = pm.Normal('mu', 0., 1e-2)\n",
" x_obs = pm.Normal('x_obs', mu, 1., observed=x)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Auto-assigning NUTS sampler...\n",
"Initializing NUTS using advi...\n",
"Average ELBO = -788.78: 14%|ββ | 28990/200000 [00:01<00:11, 14735.92it/s]Median ELBO converged.\n",
"Finished [100%]: Average ELBO = -193.47\n",
"\n",
"100%|ββββββββββ| 500/500 [00:00<00:00, 2269.32it/s]\n"
]
}
],
"source": [
"with model:\n",
" trace = pm.sample(500)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"\r",
" 0%| | 0/500 [00:00<?, ?it/s]"
]
},
{
"ename": "IndexError",
"evalue": "index 223 is out of bounds for axis 0 with size 100",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-5-6711066867a2>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mpp_trace\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpm\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msample_ppc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrace\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/sampling.py\u001b[0m in \u001b[0;36msample_ppc\u001b[0;34m(trace, samples, model, vars, size, random_seed, progressbar)\u001b[0m\n\u001b[1;32m 412\u001b[0m \u001b[0mppc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdefaultdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlist\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 413\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0midx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mindices\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 414\u001b[0;31m \u001b[0mparam\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrace\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 415\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mvar\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mvars\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 416\u001b[0m ppc[var.name].append(var.distribution.random(point=param,\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/base.py\u001b[0m in \u001b[0;36m__getitem__\u001b[0;34m(self, idx)\u001b[0m\n\u001b[1;32m 266\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 267\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 268\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpoint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 269\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mValueError\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mTypeError\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# Passed variable or variable name.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 270\u001b[0m \u001b[0;32mpass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/base.py\u001b[0m in \u001b[0;36mpoint\u001b[0;34m(self, idx, chain)\u001b[0m\n\u001b[1;32m 420\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mchain\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 421\u001b[0m \u001b[0mchain\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mchains\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 422\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_straces\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mchain\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpoint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 423\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 424\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/ndarray.py\u001b[0m in \u001b[0;36mpoint\u001b[0;34m(self, idx)\u001b[0m\n\u001b[1;32m 173\u001b[0m \u001b[0midx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 174\u001b[0m return {varname: values[idx]\n\u001b[0;32m--> 175\u001b[0;31m for varname, values in self.samples.items()}\n\u001b[0m\u001b[1;32m 176\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 177\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/ndarray.py\u001b[0m in \u001b[0;36m<dictcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 173\u001b[0m \u001b[0midx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 174\u001b[0m return {varname: values[idx]\n\u001b[0;32m--> 175\u001b[0;31m for varname, values in self.samples.items()}\n\u001b[0m\u001b[1;32m 176\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 177\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mIndexError\u001b[0m: index 223 is out of bounds for axis 0 with size 100"
]
}
],
"source": [
"with model:\n",
" pp_trace = pm.sample_ppc(trace[::5])"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"500"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(trace[::5])"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"(100,)"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"trace[::5]['mu'].shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
}
},
"nbformat": 4,
"nbformat_minor": 1
}
|
IndexError
|
def _slice_as_ndarray(strace, idx):
sliced = NDArray(model=strace.model, vars=strace.vars)
sliced.chain = strace.chain
# Happy path where we do not need to load everything from the trace
if (idx.step is None or idx.step >= 1) and (
idx.stop is None or idx.stop == len(strace)
):
start, stop, step = idx.indices(len(strace))
sliced.samples = {
v: strace.get_values(v, burn=idx.start, thin=idx.step)
for v in strace.varnames
}
sliced.draw_idx = (stop - start) // step
else:
start, stop, step = idx.indices(len(strace))
sliced.samples = {
v: strace.get_values(v)[start:stop:step] for v in strace.varnames
}
sliced.draw_idx = (stop - start) // step
return sliced
|
def _slice_as_ndarray(strace, idx):
if idx.start is None:
burn = 0
else:
burn = idx.start
if idx.step is None:
thin = 1
else:
thin = idx.step
sliced = NDArray(model=strace.model, vars=strace.vars)
sliced.chain = strace.chain
sliced.samples = {
v: strace.get_values(v, burn=burn, thin=thin) for v in strace.varnames
}
return sliced
|
https://github.com/pymc-devs/pymc3/issues/1906
|
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import numpy as np\n",
"import pymc3 as pm"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x = np.random.normal(1., 1., size=100)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"with pm.Model() as model:\n",
" mu = pm.Normal('mu', 0., 1e-2)\n",
" x_obs = pm.Normal('x_obs', mu, 1., observed=x)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Auto-assigning NUTS sampler...\n",
"Initializing NUTS using advi...\n",
"Average ELBO = -788.78: 14%|ββ | 28990/200000 [00:01<00:11, 14735.92it/s]Median ELBO converged.\n",
"Finished [100%]: Average ELBO = -193.47\n",
"\n",
"100%|ββββββββββ| 500/500 [00:00<00:00, 2269.32it/s]\n"
]
}
],
"source": [
"with model:\n",
" trace = pm.sample(500)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"\r",
" 0%| | 0/500 [00:00<?, ?it/s]"
]
},
{
"ename": "IndexError",
"evalue": "index 223 is out of bounds for axis 0 with size 100",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-5-6711066867a2>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mpp_trace\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpm\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msample_ppc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrace\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/sampling.py\u001b[0m in \u001b[0;36msample_ppc\u001b[0;34m(trace, samples, model, vars, size, random_seed, progressbar)\u001b[0m\n\u001b[1;32m 412\u001b[0m \u001b[0mppc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdefaultdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlist\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 413\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0midx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mindices\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 414\u001b[0;31m \u001b[0mparam\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrace\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 415\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mvar\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mvars\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 416\u001b[0m ppc[var.name].append(var.distribution.random(point=param,\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/base.py\u001b[0m in \u001b[0;36m__getitem__\u001b[0;34m(self, idx)\u001b[0m\n\u001b[1;32m 266\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 267\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 268\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpoint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 269\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mValueError\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mTypeError\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# Passed variable or variable name.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 270\u001b[0m \u001b[0;32mpass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/base.py\u001b[0m in \u001b[0;36mpoint\u001b[0;34m(self, idx, chain)\u001b[0m\n\u001b[1;32m 420\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mchain\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 421\u001b[0m \u001b[0mchain\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mchains\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 422\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_straces\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mchain\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpoint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 423\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 424\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/ndarray.py\u001b[0m in \u001b[0;36mpoint\u001b[0;34m(self, idx)\u001b[0m\n\u001b[1;32m 173\u001b[0m \u001b[0midx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 174\u001b[0m return {varname: values[idx]\n\u001b[0;32m--> 175\u001b[0;31m for varname, values in self.samples.items()}\n\u001b[0m\u001b[1;32m 176\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 177\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/ndarray.py\u001b[0m in \u001b[0;36m<dictcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 173\u001b[0m \u001b[0midx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 174\u001b[0m return {varname: values[idx]\n\u001b[0;32m--> 175\u001b[0;31m for varname, values in self.samples.items()}\n\u001b[0m\u001b[1;32m 176\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 177\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mIndexError\u001b[0m: index 223 is out of bounds for axis 0 with size 100"
]
}
],
"source": [
"with model:\n",
" pp_trace = pm.sample_ppc(trace[::5])"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"500"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(trace[::5])"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"(100,)"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"trace[::5]['mu'].shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
}
},
"nbformat": 4,
"nbformat_minor": 1
}
|
IndexError
|
def get_values(self, varname, burn=0, thin=1):
"""Get values from trace.
Parameters
----------
varname : str
burn : int
thin : int
Returns
-------
A NumPy array
"""
if burn is None:
burn = 0
if thin is None:
thin = 1
if burn < 0:
burn = max(0, len(self) + burn)
if thin < 1:
raise ValueError("Only positive thin values are supported in SQLite backend.")
varname = str(varname)
statement_args = {"chain": self.chain}
if burn == 0 and thin == 1:
action = "select"
elif thin == 1:
action = "select_burn"
statement_args["burn"] = burn - 1
elif burn == 0:
action = "select_thin"
statement_args["thin"] = thin
else:
action = "select_burn_thin"
statement_args["burn"] = burn - 1
statement_args["thin"] = thin
self.db.connect()
shape = (-1,) + self.var_shapes[varname]
statement = TEMPLATES[action].format(table=varname)
self.db.cursor.execute(statement, statement_args)
values = _rows_to_ndarray(self.db.cursor)
return values.reshape(shape)
|
def get_values(self, varname, burn=0, thin=1):
"""Get values from trace.
Parameters
----------
varname : str
burn : int
thin : int
Returns
-------
A NumPy array
"""
if burn < 0:
burn = max(0, len(self) + burn)
if thin < 1:
raise ValueError("Only positive thin values are supported in SQLite backend.")
varname = str(varname)
statement_args = {"chain": self.chain}
if burn == 0 and thin == 1:
action = "select"
elif thin == 1:
action = "select_burn"
statement_args["burn"] = burn - 1
elif burn == 0:
action = "select_thin"
statement_args["thin"] = thin
else:
action = "select_burn_thin"
statement_args["burn"] = burn - 1
statement_args["thin"] = thin
self.db.connect()
shape = (-1,) + self.var_shapes[varname]
statement = TEMPLATES[action].format(table=varname)
self.db.cursor.execute(statement, statement_args)
values = _rows_to_ndarray(self.db.cursor)
return values.reshape(shape)
|
https://github.com/pymc-devs/pymc3/issues/1906
|
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"import numpy as np\n",
"import pymc3 as pm"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x = np.random.normal(1., 1., size=100)"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"with pm.Model() as model:\n",
" mu = pm.Normal('mu', 0., 1e-2)\n",
" x_obs = pm.Normal('x_obs', mu, 1., observed=x)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Auto-assigning NUTS sampler...\n",
"Initializing NUTS using advi...\n",
"Average ELBO = -788.78: 14%|ββ | 28990/200000 [00:01<00:11, 14735.92it/s]Median ELBO converged.\n",
"Finished [100%]: Average ELBO = -193.47\n",
"\n",
"100%|ββββββββββ| 500/500 [00:00<00:00, 2269.32it/s]\n"
]
}
],
"source": [
"with model:\n",
" trace = pm.sample(500)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"\r",
" 0%| | 0/500 [00:00<?, ?it/s]"
]
},
{
"ename": "IndexError",
"evalue": "index 223 is out of bounds for axis 0 with size 100",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-5-6711066867a2>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 2\u001b[0;31m \u001b[0mpp_trace\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpm\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msample_ppc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrace\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/sampling.py\u001b[0m in \u001b[0;36msample_ppc\u001b[0;34m(trace, samples, model, vars, size, random_seed, progressbar)\u001b[0m\n\u001b[1;32m 412\u001b[0m \u001b[0mppc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mdefaultdict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlist\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 413\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0midx\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mindices\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 414\u001b[0;31m \u001b[0mparam\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrace\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 415\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mvar\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mvars\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 416\u001b[0m ppc[var.name].append(var.distribution.random(point=param,\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/base.py\u001b[0m in \u001b[0;36m__getitem__\u001b[0;34m(self, idx)\u001b[0m\n\u001b[1;32m 266\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 267\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 268\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpoint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 269\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mValueError\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mTypeError\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0;31m# Passed variable or variable name.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 270\u001b[0m \u001b[0;32mpass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/base.py\u001b[0m in \u001b[0;36mpoint\u001b[0;34m(self, idx, chain)\u001b[0m\n\u001b[1;32m 420\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mchain\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 421\u001b[0m \u001b[0mchain\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mchains\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 422\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_straces\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mchain\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mpoint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 423\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 424\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/ndarray.py\u001b[0m in \u001b[0;36mpoint\u001b[0;34m(self, idx)\u001b[0m\n\u001b[1;32m 173\u001b[0m \u001b[0midx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 174\u001b[0m return {varname: values[idx]\n\u001b[0;32m--> 175\u001b[0;31m for varname, values in self.samples.items()}\n\u001b[0m\u001b[1;32m 176\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 177\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/backends/ndarray.py\u001b[0m in \u001b[0;36m<dictcomp>\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 173\u001b[0m \u001b[0midx\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0midx\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 174\u001b[0m return {varname: values[idx]\n\u001b[0;32m--> 175\u001b[0;31m for varname, values in self.samples.items()}\n\u001b[0m\u001b[1;32m 176\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 177\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mIndexError\u001b[0m: index 223 is out of bounds for axis 0 with size 100"
]
}
],
"source": [
"with model:\n",
" pp_trace = pm.sample_ppc(trace[::5])"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"500"
]
},
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"len(trace[::5])"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"(100,)"
]
},
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"trace[::5]['mu'].shape"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
}
},
"nbformat": 4,
"nbformat_minor": 1
}
|
IndexError
|
def __init__(self, lam, *args, **kwargs):
super(Exponential, self).__init__(*args, **kwargs)
self.lam = lam = tt.as_tensor_variable(lam)
self.mean = 1.0 / self.lam
self.median = self.mean * tt.log(2)
self.mode = tt.zeros_like(self.lam)
self.variance = self.lam**-2
assert_negative_support(lam, "lam", "Exponential")
|
def __init__(self, lam, *args, **kwargs):
super(Exponential, self).__init__(*args, **kwargs)
self.lam = lam = tt.as_tensor_variable(lam)
self.mean = 1.0 / self.lam
self.median = self.mean * tt.log(2)
self.mode = 0
self.variance = self.lam**-2
assert_negative_support(lam, "lam", "Exponential")
|
https://github.com/pymc-devs/pymc3/issues/1882
|
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-2-1724aa75761e> in <module>()
11 #arrival time model
12 t = pm.Lognormal('t', 100, 50, shape=cluster_number)
---> 13 t_obs = pm.Mixture('t_obs', w, pm.Exponential.dist(t), observed=time)
14
/opt/conda/lib/python3.5/site-packages/pymc3/distributions/distribution.py in __new__(cls, name, *args, **kwargs)
28 if isinstance(name, string_types):
29 data = kwargs.pop('observed', None)
---> 30 dist = cls.dist(*args, **kwargs)
31 return model.Var(name, dist, data)
32 else:
/opt/conda/lib/python3.5/site-packages/pymc3/distributions/distribution.py in dist(cls, *args, **kwargs)
39 def dist(cls, *args, **kwargs):
40 dist = object.__new__(cls)
---> 41 dist.__init__(*args, **kwargs)
42 return dist
43
/opt/conda/lib/python3.5/site-packages/pymc3/distributions/mixture.py in __init__(self, w, comp_dists, *args, **kwargs)
63 comp_modes = self._comp_modes()
64 comp_mode_logps = self.logp(comp_modes)
---> 65 self.mode = comp_modes[tt.argmax(w * comp_mode_logps, axis=-1)]
66
67 if 'mode' not in defaults:
/opt/conda/lib/python3.5/site-packages/theano/tensor/var.py in __getitem__(self, args)
530 self, *theano.tensor.subtensor.Subtensor.collapse(
531 args,
--> 532 lambda entry: isinstance(entry, Variable)))
533
534 def take(self, indices, axis=None, mode='raise'):
/opt/conda/lib/python3.5/site-packages/theano/gof/op.py in __call__(self, *inputs, **kwargs)
609 """
610 return_list = kwargs.pop('return_list', False)
--> 611 node = self.make_node(*inputs, **kwargs)
612
613 if config.compute_test_value != 'off':
/opt/conda/lib/python3.5/site-packages/theano/tensor/subtensor.py in make_node(self, x, *inputs)
482 len(idx_list), x.type.ndim))
483 exception.subtensor_invalid = True
--> 484 raise exception
485
486 input_types = Subtensor.collapse(idx_list,
ValueError: The index list is longer (size 1) than the number of dimensions of the tensor(namely 0). You are asking for a dimension of the tensor that does not exist! You might need to use dimshuffle to add extra dimension to your tensor.
|
ValueError
|
def reshape_sampled(sampled, size, dist_shape):
dist_shape = infer_shape(dist_shape)
repeat_shape = infer_shape(size)
if np.size(sampled) == 1 or repeat_shape or dist_shape:
return np.reshape(sampled, repeat_shape + dist_shape)
else:
return sampled
|
def reshape_sampled(sampled, size, dist_shape):
dist_shape = infer_shape(dist_shape)
repeat_shape = infer_shape(size)
return np.reshape(sampled, repeat_shape + dist_shape)
|
https://github.com/pymc-devs/pymc3/issues/1695
|
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from matplotlib import pyplot as plt\n",
"import numpy as np\n",
"import pymc3 as pm\n",
"import seaborn as sns\n",
"from theano import shared, tensor as tt"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"N = 100"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x = np.random.normal(size=N)\n",
"y = x + np.random.normal(scale=0.5, size=N)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x_shared = shared(x)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"with pm.Model() as model:\n",
" a = pm.Normal('a', 0., 100.)\n",
" b = pm.Normal('b', 0., 100.)\n",
" \n",
" log_sigma = pm.Uniform('log_sigma', -5., 5.)\n",
" sigma = pm.Deterministic('sigma', tt.exp(log_sigma))\n",
" \n",
" obs = pm.Normal('obs', a + b * x_shared, sigma, observed=y)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"Auto-assigning NUTS sampler...\n",
"Initializing NUTS using advi...\n",
"Average ELBO = -37,872: 100%|ββββββββββ| 100/100 [00:00<00:00, 5238.03it/s]\n",
"Finished [100%]: Average ELBO = -9,205.5\n",
"100%|ββββββββββ| 1000/1000 [00:00<00:00, 3097.76it/s]\n"
]
}
],
"source": [
"with model:\n",
" trace = pm.sample(1000, n_init=100)"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"x_pred = np.linspace(-3, 3, 200)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"\r",
" 0%| | 0/100 [00:00<?, ?it/s]"
]
},
{
"ename": "ValueError",
"evalue": "total size of new array must be unchanged",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-9-e7b6443dd7be>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mpp_trace\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpm\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msample_ppc\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrace\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;36m100\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/sampling.py\u001b[0m in \u001b[0;36msample_ppc\u001b[0;34m(trace, samples, model, vars, size, random_seed, progressbar)\u001b[0m\n\u001b[1;32m 396\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mvar\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mvars\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 397\u001b[0m ppc[var.name].append(var.distribution.random(point=param,\n\u001b[0;32m--> 398\u001b[0;31m size=size))\n\u001b[0m\u001b[1;32m 399\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 400\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0mk\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masarray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mv\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mk\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mv\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mppc\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mitems\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/distributions/continuous.py\u001b[0m in \u001b[0;36mrandom\u001b[0;34m(self, point, size, repeat)\u001b[0m\n\u001b[1;32m 238\u001b[0m return generate_samples(stats.norm.rvs, loc=mu, scale=tau**-0.5,\n\u001b[1;32m 239\u001b[0m \u001b[0mdist_shape\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 240\u001b[0;31m size=size)\n\u001b[0m\u001b[1;32m 241\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 242\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mlogp\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvalue\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/distributions/distribution.py\u001b[0m in \u001b[0;36mgenerate_samples\u001b[0;34m(generator, *args, **kwargs)\u001b[0m\n\u001b[1;32m 362\u001b[0m \u001b[0mprefix_shape\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 363\u001b[0m *args, **kwargs)\n\u001b[0;32m--> 364\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mreshape_sampled\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msamples\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0msize\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdist_shape\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 365\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 366\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/home/jovyan/pymc3/pymc3/distributions/distribution.py\u001b[0m in \u001b[0;36mreshape_sampled\u001b[0;34m(sampled, size, dist_shape)\u001b[0m\n\u001b[1;32m 280\u001b[0m \u001b[0mdist_shape\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minfer_shape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdist_shape\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 281\u001b[0m \u001b[0mrepeat_shape\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minfer_shape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msize\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 282\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msampled\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mrepeat_shape\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mdist_shape\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 283\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 284\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/opt/conda/lib/python3.5/site-packages/numpy/core/fromnumeric.py\u001b[0m in \u001b[0;36mreshape\u001b[0;34m(a, newshape, order)\u001b[0m\n\u001b[1;32m 222\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mAttributeError\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 223\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0m_wrapit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m'reshape'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mnewshape\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0morder\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0morder\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 224\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mreshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnewshape\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0morder\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0morder\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 225\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 226\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mValueError\u001b[0m: total size of new array must be unchanged"
]
}
],
"source": [
"x_shared.set_value(x_pred)\n",
"\n",
"with model:\n",
" pp_trace = pm.sample_ppc(trace, 100)"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"collapsed": false
},
"outputs": [
{
"data": {
"text/plain": [
"(array([100]), array([200]))"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"obs.shape.eval(), x_shared.shape.eval()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.5.2"
}
},
"nbformat": 4,
"nbformat_minor": 1
}
|
ValueError
|
def find_MAP(
start=None, vars=None, fmin=None, return_raw=False, model=None, *args, **kwargs
):
"""
Sets state to the local maximum a posteriori point given a model.
Current default of fmin_Hessian does not deal well with optimizing close
to sharp edges, especially if they are the minimum.
Parameters
----------
start : `dict` of parameter values (Defaults to `model.test_point`)
vars : list
List of variables to set to MAP point (Defaults to all continuous).
fmin : function
Optimization algorithm (Defaults to `scipy.optimize.fmin_bfgs` unless
discrete variables are specified in `vars`, then
`scipy.optimize.fmin_powell` which will perform better).
return_raw : Bool
Whether to return extra value returned by fmin (Defaults to `False`)
model : Model (optional if in `with` context)
*args, **kwargs
Extra args passed to fmin
"""
model = modelcontext(model)
if start is None:
start = model.test_point
if not set(start.keys()).issubset(model.named_vars.keys()):
extra_keys = ", ".join(set(start.keys()) - set(model.named_vars.keys()))
valid_keys = ", ".join(model.named_vars.keys())
raise KeyError(
"Some start parameters do not appear in the model!\n"
"Valid keys are: {}, but {} was supplied".format(valid_keys, extra_keys)
)
if vars is None:
vars = model.cont_vars
vars = inputvars(vars)
disc_vars = list(typefilter(vars, discrete_types))
try:
model.fastdlogp(vars)
gradient_avail = True
except AttributeError:
gradient_avail = False
if disc_vars or not gradient_avail:
pm._log.warning(
"Warning: gradient not available."
+ "(E.g. vars contains discrete variables). MAP "
+ "estimates may not be accurate for the default "
+ "parameters. Defaulting to non-gradient minimization "
+ "fmin_powell."
)
fmin = optimize.fmin_powell
if fmin is None:
if disc_vars:
fmin = optimize.fmin_powell
else:
fmin = optimize.fmin_bfgs
allinmodel(vars, model)
start = Point(start, model=model)
bij = DictToArrayBijection(ArrayOrdering(vars), start)
logp = bij.mapf(model.fastlogp)
def logp_o(point):
return nan_to_high(-logp(point))
# Check to see if minimization function actually uses the gradient
if "fprime" in getargspec(fmin).args:
dlogp = bij.mapf(model.fastdlogp(vars))
def grad_logp_o(point):
return nan_to_num(-dlogp(point))
r = fmin(logp_o, bij.map(start), fprime=grad_logp_o, *args, **kwargs)
compute_gradient = True
else:
compute_gradient = False
# Check to see if minimization function uses a starting value
if "x0" in getargspec(fmin).args:
r = fmin(logp_o, bij.map(start), *args, **kwargs)
else:
r = fmin(logp_o, *args, **kwargs)
if isinstance(r, tuple):
mx0 = r[0]
else:
mx0 = r
mx = bij.rmap(mx0)
allfinite_mx0 = allfinite(mx0)
allfinite_logp = allfinite(model.logp(mx))
if compute_gradient:
allfinite_dlogp = allfinite(model.dlogp()(mx))
else:
allfinite_dlogp = True
if not allfinite_mx0 or not allfinite_logp or not allfinite_dlogp:
messages = []
for var in vars:
vals = {"value": mx[var.name], "logp": var.logp(mx)}
if compute_gradient:
vals["dlogp"] = var.dlogp()(mx)
def message(name, values):
if np.size(values) < 10:
return name + " bad: " + str(values)
else:
idx = np.nonzero(logical_not(isfinite(values)))
return (
name
+ " bad at idx: "
+ str(idx)
+ " with values: "
+ str(values[idx])
)
messages += [
message(var.name + "." + k, v)
for k, v in vals.items()
if not allfinite(v)
]
specific_errors = "\n".join(messages)
raise ValueError(
"Optimization error: max, logp or dlogp at "
+ "max have non-finite values. Some values may be "
+ "outside of distribution support. max: "
+ repr(mx)
+ " logp: "
+ repr(model.logp(mx))
+ " dlogp: "
+ repr(model.dlogp()(mx))
+ "Check that "
+ "1) you don't have hierarchical parameters, "
+ "these will lead to points with infinite "
+ "density. 2) your distribution logp's are "
+ "properly specified. Specific issues: \n"
+ specific_errors
)
mx = {v.name: mx[v.name].astype(v.dtype) for v in model.vars}
if return_raw:
return mx, r
else:
return mx
|
def find_MAP(
start=None, vars=None, fmin=None, return_raw=False, model=None, *args, **kwargs
):
"""
Sets state to the local maximum a posteriori point given a model.
Current default of fmin_Hessian does not deal well with optimizing close
to sharp edges, especially if they are the minimum.
Parameters
----------
start : `dict` of parameter values (Defaults to `model.test_point`)
vars : list
List of variables to set to MAP point (Defaults to all continuous).
fmin : function
Optimization algorithm (Defaults to `scipy.optimize.fmin_bfgs` unless
discrete variables are specified in `vars`, then
`scipy.optimize.fmin_powell` which will perform better).
return_raw : Bool
Whether to return extra value returned by fmin (Defaults to `False`)
model : Model (optional if in `with` context)
*args, **kwargs
Extra args passed to fmin
"""
model = modelcontext(model)
if start is None:
start = model.test_point
if not set(start.keys()).issubset(model.named_vars.keys()):
extra_keys = ", ".join(set(start.keys()) - set(model.named_vars.keys()))
valid_keys = ", ".join(model.named_vars.keys())
raise KeyError(
"Some start parameters do not appear in the model!\n"
"Valid keys are: {}, but {} was supplied".format(valid_keys, extra_keys)
)
if vars is None:
vars = model.cont_vars
vars = inputvars(vars)
disc_vars = list(typefilter(vars, discrete_types))
if disc_vars:
pm._log.warning(
"Warning: vars contains discrete variables. MAP "
+ "estimates may not be accurate for the default "
+ "parameters. Defaulting to non-gradient minimization "
+ "fmin_powell."
)
fmin = optimize.fmin_powell
if fmin is None:
if disc_vars:
fmin = optimize.fmin_powell
else:
fmin = optimize.fmin_bfgs
allinmodel(vars, model)
start = Point(start, model=model)
bij = DictToArrayBijection(ArrayOrdering(vars), start)
logp = bij.mapf(model.fastlogp)
dlogp = bij.mapf(model.fastdlogp(vars))
def logp_o(point):
return nan_to_high(-logp(point))
def grad_logp_o(point):
return nan_to_num(-dlogp(point))
# Check to see if minimization function actually uses the gradient
if "fprime" in getargspec(fmin).args:
r = fmin(logp_o, bij.map(start), fprime=grad_logp_o, *args, **kwargs)
else:
# Check to see if minimization function uses a starting value
if "x0" in getargspec(fmin).args:
r = fmin(logp_o, bij.map(start), *args, **kwargs)
else:
r = fmin(logp_o, *args, **kwargs)
if isinstance(r, tuple):
mx0 = r[0]
else:
mx0 = r
mx = bij.rmap(mx0)
if (
not allfinite(mx0)
or not allfinite(model.logp(mx))
or not allfinite(model.dlogp()(mx))
):
messages = []
for var in vars:
vals = {
"value": mx[var.name],
"logp": var.logp(mx),
"dlogp": var.dlogp()(mx),
}
def message(name, values):
if np.size(values) < 10:
return name + " bad: " + str(values)
else:
idx = np.nonzero(logical_not(isfinite(values)))
return (
name
+ " bad at idx: "
+ str(idx)
+ " with values: "
+ str(values[idx])
)
messages += [
message(var.name + "." + k, v)
for k, v in vals.items()
if not allfinite(v)
]
specific_errors = "\n".join(messages)
raise ValueError(
"Optimization error: max, logp or dlogp at "
+ "max have non-finite values. Some values may be "
+ "outside of distribution support. max: "
+ repr(mx)
+ " logp: "
+ repr(model.logp(mx))
+ " dlogp: "
+ repr(model.dlogp()(mx))
+ "Check that "
+ "1) you don't have hierarchical parameters, "
+ "these will lead to points with infinite "
+ "density. 2) your distribution logp's are "
+ "properly specified. Specific issues: \n"
+ specific_errors
)
mx = {v.name: mx[v.name].astype(v.dtype) for v in model.vars}
if return_raw:
return mx, r
else:
return mx
|
https://github.com/pymc-devs/pymc3/issues/639
|
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
/mnt/sda1/JoeFiles/Joe_Home/PythonWorkarea/pyMCMCworks/MyModel_3.py in <module>()
87
88 # Inference...
---> 89 start = pm.find_MAP() # Find starting value by optimization
90 # start = {'m': 14., 'a': 11.}
91
/mnt/sda1/JoeFiles/Joe_Home64/Python4Astronomy/lib/python2.7/site-packages/pymc/tuning/starting.pyc in find_MAP(start, vars, fmin, return_raw, disp, model, *args, **kwargs)
67
68 logp = bij.mapf(model.fastlogp)
---> 69 dlogp = bij.mapf(model.fastdlogp(vars))
70
71 def logp_o(point):
/mnt/sda1/JoeFiles/Joe_Home64/Python4Astronomy/lib/python2.7/site-packages/pymc/model.pyc in fastdlogp(self, vars)
71 def fastdlogp(self, vars=None):
72 """Compiled log probability density gradient function"""
---> 73 return self.model.fastfn(gradient(self.logpt, vars))
74
75 def fastd2logp(self, vars=None):
/mnt/sda1/JoeFiles/Joe_Home64/Python4Astronomy/lib/python2.7/site-packages/pymc/memoize.pyc in memoizer(*args, **kwargs)
12
13 if key not in cache:
---> 14 cache[key] = obj(*args, **kwargs)
15
16 return cache[key]
/mnt/sda1/JoeFiles/Joe_Home64/Python4Astronomy/lib/python2.7/site-packages/pymc/theanof.pyc in gradient(f, vars)
49 vars = cont_inputs(f)
50
---> 51 return t.concatenate([gradient1(f, v) for v in vars], axis=0)
52
53
/mnt/sda1/JoeFiles/Joe_Home64/Python4Astronomy/lib/python2.7/site-packages/pymc/theanof.pyc in gradient1(f, v)
41 def gradient1(f, v):
42 """flat gradient of f wrt v"""
---> 43 return t.flatten(t.grad(f, v, disconnected_inputs='warn'))
44
45
/mnt/sda1/JoeFiles/Joe_Home64/Python4Astronomy/lib/python2.7/site-packages/theano/gradient.pyc in grad(cost, wrt, consider_constant, disconnected_inputs, add_names, known_grads, return_disconnected)
527
528 rval = _populate_grad_dict(var_to_app_to_idx,
--> 529 grad_dict, wrt, cost_name)
530
531 for i in xrange(len(rval)):
/mnt/sda1/JoeFiles/Joe_Home64/Python4Astronomy/lib/python2.7/site-packages/theano/gradient.pyc in _populate_grad_dict(var_to_app_to_idx, grad_dict, wrt, cost_name)
1211 return grad_dict[var]
1212
-> 1213 rval = [access_grad_cache(elem) for elem in wrt]
1214
1215 return rval
/mnt/sda1/JoeFiles/Joe_Home64/Python4Astronomy/lib/python2.7/site-packages/theano/gradient.pyc in access_grad_cache(var)
1171 for idx in node_to_idx[node]:
1172
-> 1173 term = access_term_cache(node)[idx]
1174
1175 if not isinstance(term, gof.Variable):
/mnt/sda1/JoeFiles/Joe_Home64/Python4Astronomy/lib/python2.7/site-packages/theano/gradient.pyc in access_term_cache(node)
1032 str(g_shape))
1033
-> 1034 input_grads = node.op.grad(inputs, new_output_grads)
1035
1036 if input_grads is None:
AttributeError: 'FromFunctionOp' object has no attribute 'grad'
|
AttributeError
|
def __getitem__(self, index_value):
"""
Return copy NpTrace with sliced sample values if a slice is passed,
or the array of samples if a varname is passed.
"""
if isinstance(index_value, slice):
sliced_trace = NpTrace(self.vars)
sliced_trace.samples = dict(
(name, vals[index_value]) for (name, vals) in self.samples.items()
)
return sliced_trace
else:
try:
return self.point(index_value)
except (ValueError, TypeError, IndexError):
pass
return self.samples[str(index_value)].value
|
def __getitem__(self, index_value):
"""
Return copy NpTrace with sliced sample values if a slice is passed,
or the array of samples if a varname is passed.
"""
if isinstance(index_value, slice):
sliced_trace = NpTrace(self.vars)
sliced_trace.samples = dict(
(name, vals[index_value]) for (name, vals) in self.samples.items()
)
return sliced_trace
else:
try:
return self.point(index_value)
except ValueError:
pass
except TypeError:
pass
return self.samples[str(index_value)].value
|
https://github.com/pymc-devs/pymc3/issues/488
|
======================================================================
ERROR: pymc.tests.test_plots.test_plots
----------------------------------------------------------------------
Traceback (most recent call last):
File "/Library/Python/2.7/site-packages/nose-1.2.1-py2.7.egg/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/Users/fonnescj/Code/pymc/pymc/tests/test_plots.py", line 20, in test_plots
forestplot(trace)
File "/Users/fonnescj/Code/pymc/pymc/plots.py", line 329, in forestplot
trace_quantiles = quantiles(tr, qlist)
File "/Users/fonnescj/Code/pymc/pymc/stats.py", line 41, in wrapped_f
return {v: f(pymc_obj[v][burn:], *args, **kwargs) for v in vars}
File "/Users/fonnescj/Code/pymc/pymc/stats.py", line 41, in <dictcomp>
return {v: f(pymc_obj[v][burn:], *args, **kwargs) for v in vars}
File "/Users/fonnescj/Code/pymc/pymc/trace.py", line 45, in __getitem__
return self.point(index_value)
File "/Users/fonnescj/Code/pymc/pymc/trace.py", line 57, in point
return dict((k, v.value[index]) for (k, v) in self.samples.items())
File "/Users/fonnescj/Code/pymc/pymc/trace.py", line 57, in <genexpr>
return dict((k, v.value[index]) for (k, v) in self.samples.items())
IndexError: only integers, slices (`:`), ellipsis (`...`), numpy.newaxis (`None`) and integer or boolean arrays are valid indices
-------------------- >> begin captured stdout << ---------------------
[-----------------97%----------------- ] 2930 of 3000 complete in 0.5 sec
[-----------------100%-----------------] 3000 of 3000 complete in 0.5 sec
--------------------- >> end captured stdout << ----------------------
======================================================================
ERROR: pymc.tests.test_plots.test_multichain_plots
----------------------------------------------------------------------
Traceback (most recent call last):
File "/Library/Python/2.7/site-packages/nose-1.2.1-py2.7.egg/nose/case.py", line 197, in runTest
self.test(*self.arg)
File "/Users/fonnescj/Code/pymc/pymc/tests/test_plots.py", line 36, in test_multichain_plots
forestplot(ptrace, vars=['early_mean', 'late_mean'])
File "/Users/fonnescj/Code/pymc/pymc/plots.py", line 290, in forestplot
R = gelman_rubin(trace_obj)
File "/Users/fonnescj/Code/pymc/pymc/diagnostics.py", line 163, in gelman_rubin
x = np.array([mtrace.traces[i][var] for i in range(m)])
File "/Users/fonnescj/Code/pymc/pymc/trace.py", line 45, in __getitem__
return self.point(index_value)
File "/Users/fonnescj/Code/pymc/pymc/trace.py", line 57, in point
return dict((k, v.value[index]) for (k, v) in self.samples.items())
File "/Users/fonnescj/Code/pymc/pymc/trace.py", line 57, in <genexpr>
return dict((k, v.value[index]) for (k, v) in self.samples.items())
IndexError: only integers, slices (`:`), ellipsis (`...`), numpy.newaxis (`None`) and integer or boolean arrays are valid indices
----------------------------------------------------------------------
Ran 2 tests in 44.915s
FAILED (errors=2)
|
IndexError
|
async def purge_history(
self, room_id: str, token: str, delete_local_events: bool
) -> Set[int]:
"""Deletes room history before a certain point.
Note that only a single purge can occur at once, this is guaranteed via
a higher level (in the PaginationHandler).
Args:
room_id:
token: A topological token to delete events before
delete_local_events:
if True, we will delete local events as well as remote ones
(instead of just marking them as outliers and deleting their
state groups).
Returns:
The set of state groups that are referenced by deleted events.
"""
parsed_token = await RoomStreamToken.parse(self, token)
return await self.db_pool.runInteraction(
"purge_history",
self._purge_history_txn,
room_id,
parsed_token,
delete_local_events,
)
|
async def purge_history(
self, room_id: str, token: str, delete_local_events: bool
) -> Set[int]:
"""Deletes room history before a certain point
Args:
room_id:
token: A topological token to delete events before
delete_local_events:
if True, we will delete local events as well as remote ones
(instead of just marking them as outliers and deleting their
state groups).
Returns:
The set of state groups that are referenced by deleted events.
"""
parsed_token = await RoomStreamToken.parse(self, token)
return await self.db_pool.runInteraction(
"purge_history",
self._purge_history_txn,
room_id,
parsed_token,
delete_local_events,
)
|
https://github.com/matrix-org/synapse/issues/9481
|
synapse.http.server: [POST-10040] Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7f57646fa970 method='POST' uri='/_matrix/client/r0/join/%23synapse%3Amatrix.org' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 252, in _async_render_wrapper
callback_return = await self._async_render(request)
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 430, in _async_render
callback_return = await raw_callback_return
File "/usr/lib/python3.9/site-packages/synapse/rest/client/v1/room.py", line 301, in on_POST
await self.room_member_handler.update_membership(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 333, in update_membership
result = await self.update_membership_locked(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 549, in update_membership_locked
remote_join_response = await self._remote_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 1091, in _remote_join
event_id, stream_id = await self.federation_handler.do_invite_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 1400, in do_invite_join
max_stream_id = await self._persist_auth_tree(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2050, in _persist_auth_tree
await self.persist_events_and_notify(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2925, in persist_events_and_notify
events, max_stream_token = await self.storage.persistence.persist_events(
File "/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py", line 262, in persist_events
ret_vals = await make_deferred_yieldable(
twisted.internet.defer.FirstError: FirstError[#0, [Failure instance: Traceback: <class 'psycopg2.errors.UniqueViolation'>: duplicate key value violates unique constraint "event_auth_chains_pkey"
DETAIL: Key (event_id)=($e9U026auDHIgaZPAqlblvPupACjl7jcZDblP970dJPs) already exists.
/usr/lib/python3.9/site-packages/synapse/metrics/background_process_metrics.py:208:run
--- <exception caught here> ---
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:172:handle_queue_loop
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:324:persisting_queue
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:532:_persist_events
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:171:_persist_events_and_state_updates
/usr/lib/python3.9/site-packages/synapse/storage/database.py:661:runInteraction
/usr/lib/python3.9/site-packages/synapse/storage/database.py:744:runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:250:inContext
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:266:<lambda>
/usr/lib64/python3.9/site-packages/twisted/python/context.py:122:callWithContext
/usr/lib64/python3.9/site-packages/twisted/python/context.py:85:callWithContext
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:306:_runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/compat.py:464:reraise
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:297:_runWithConnection
/usr/lib/python3.9/site-packages/synapse/storage/database.py:739:inner_func
/usr/lib/python3.9/site-packages/synapse/storage/database.py:539:new_transaction
/usr/lib/python3.9/site-packages/synapse/logging/utils.py:71:wrapped
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:379:_persist_events_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:472:_persist_event_auth_chain_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:630:_add_chain_cover_index
/usr/lib/python3.9/site-packages/synapse/storage/database.py:896:simple_insert_many_txn
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:execute_batch
/usr/lib/python3.9/site-packages/synapse/storage/database.py:319:_do_execute
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:<lambda>
/usr/lib64/python3.9/site-packages/psycopg2/extras.py:1209:execute_batch
]]
|
twisted.internet.defer.FirstError
|
def _purge_history_txn(
self, txn, room_id: str, token: RoomStreamToken, delete_local_events: bool
) -> Set[int]:
# Tables that should be pruned:
# event_auth
# event_backward_extremities
# event_edges
# event_forward_extremities
# event_json
# event_push_actions
# event_reference_hashes
# event_relations
# event_search
# event_to_state_groups
# events
# rejections
# room_depth
# state_groups
# state_groups_state
# destination_rooms
# we will build a temporary table listing the events so that we don't
# have to keep shovelling the list back and forth across the
# connection. Annoyingly the python sqlite driver commits the
# transaction on CREATE, so let's do this first.
#
# furthermore, we might already have the table from a previous (failed)
# purge attempt, so let's drop the table first.
txn.execute("DROP TABLE IF EXISTS events_to_purge")
txn.execute(
"CREATE TEMPORARY TABLE events_to_purge ("
" event_id TEXT NOT NULL,"
" should_delete BOOLEAN NOT NULL"
")"
)
# First ensure that we're not about to delete all the forward extremeties
txn.execute(
"SELECT e.event_id, e.depth FROM events as e "
"INNER JOIN event_forward_extremities as f "
"ON e.event_id = f.event_id "
"AND e.room_id = f.room_id "
"WHERE f.room_id = ?",
(room_id,),
)
rows = txn.fetchall()
max_depth = max(row[1] for row in rows)
if max_depth < token.topological:
# We need to ensure we don't delete all the events from the database
# otherwise we wouldn't be able to send any events (due to not
# having any backwards extremities)
raise SynapseError(
400, "topological_ordering is greater than forward extremeties"
)
logger.info("[purge] looking for events to delete")
should_delete_expr = "state_key IS NULL"
should_delete_params = () # type: Tuple[Any, ...]
if not delete_local_events:
should_delete_expr += " AND event_id NOT LIKE ?"
# We include the parameter twice since we use the expression twice
should_delete_params += ("%:" + self.hs.hostname, "%:" + self.hs.hostname)
should_delete_params += (room_id, token.topological)
# Note that we insert events that are outliers and aren't going to be
# deleted, as nothing will happen to them.
txn.execute(
"INSERT INTO events_to_purge"
" SELECT event_id, %s"
" FROM events AS e LEFT JOIN state_events USING (event_id)"
" WHERE (NOT outlier OR (%s)) AND e.room_id = ? AND topological_ordering < ?"
% (should_delete_expr, should_delete_expr),
should_delete_params,
)
# We create the indices *after* insertion as that's a lot faster.
# create an index on should_delete because later we'll be looking for
# the should_delete / shouldn't_delete subsets
txn.execute(
"CREATE INDEX events_to_purge_should_delete ON events_to_purge(should_delete)"
)
# We do joins against events_to_purge for e.g. calculating state
# groups to purge, etc., so lets make an index.
txn.execute("CREATE INDEX events_to_purge_id ON events_to_purge(event_id)")
txn.execute("SELECT event_id, should_delete FROM events_to_purge")
event_rows = txn.fetchall()
logger.info(
"[purge] found %i events before cutoff, of which %i can be deleted",
len(event_rows),
sum(1 for e in event_rows if e[1]),
)
logger.info("[purge] Finding new backward extremities")
# We calculate the new entries for the backward extremities by finding
# events to be purged that are pointed to by events we're not going to
# purge.
txn.execute(
"SELECT DISTINCT e.event_id FROM events_to_purge AS e"
" INNER JOIN event_edges AS ed ON e.event_id = ed.prev_event_id"
" LEFT JOIN events_to_purge AS ep2 ON ed.event_id = ep2.event_id"
" WHERE ep2.event_id IS NULL"
)
new_backwards_extrems = txn.fetchall()
logger.info("[purge] replacing backward extremities: %r", new_backwards_extrems)
txn.execute("DELETE FROM event_backward_extremities WHERE room_id = ?", (room_id,))
# Update backward extremeties
txn.execute_batch(
"INSERT INTO event_backward_extremities (room_id, event_id) VALUES (?, ?)",
[(room_id, event_id) for (event_id,) in new_backwards_extrems],
)
logger.info("[purge] finding state groups referenced by deleted events")
# Get all state groups that are referenced by events that are to be
# deleted.
txn.execute(
"""
SELECT DISTINCT state_group FROM events_to_purge
INNER JOIN event_to_state_groups USING (event_id)
"""
)
referenced_state_groups = {sg for (sg,) in txn}
logger.info(
"[purge] found %i referenced state groups", len(referenced_state_groups)
)
logger.info("[purge] removing events from event_to_state_groups")
txn.execute(
"DELETE FROM event_to_state_groups "
"WHERE event_id IN (SELECT event_id from events_to_purge)"
)
for event_id, _ in event_rows:
txn.call_after(self._get_state_group_for_event.invalidate, (event_id,))
# Delete all remote non-state events
for table in (
"events",
"event_json",
"event_auth",
"event_edges",
"event_forward_extremities",
"event_reference_hashes",
"event_relations",
"event_search",
"rejections",
):
logger.info("[purge] removing events from %s", table)
txn.execute(
"DELETE FROM %s WHERE event_id IN ("
" SELECT event_id FROM events_to_purge WHERE should_delete"
")" % (table,)
)
# event_push_actions lacks an index on event_id, and has one on
# (room_id, event_id) instead.
for table in ("event_push_actions",):
logger.info("[purge] removing events from %s", table)
txn.execute(
"DELETE FROM %s WHERE room_id = ? AND event_id IN ("
" SELECT event_id FROM events_to_purge WHERE should_delete"
")" % (table,),
(room_id,),
)
# Mark all state and own events as outliers
logger.info("[purge] marking remaining events as outliers")
txn.execute(
"UPDATE events SET outlier = ?"
" WHERE event_id IN ("
" SELECT event_id FROM events_to_purge "
" WHERE NOT should_delete"
")",
(True,),
)
# synapse tries to take out an exclusive lock on room_depth whenever it
# persists events (because upsert), and once we run this update, we
# will block that for the rest of our transaction.
#
# So, let's stick it at the end so that we don't block event
# persistence.
#
# We do this by calculating the minimum depth of the backwards
# extremities. However, the events in event_backward_extremities
# are ones we don't have yet so we need to look at the events that
# point to it via event_edges table.
txn.execute(
"""
SELECT COALESCE(MIN(depth), 0)
FROM event_backward_extremities AS eb
INNER JOIN event_edges AS eg ON eg.prev_event_id = eb.event_id
INNER JOIN events AS e ON e.event_id = eg.event_id
WHERE eb.room_id = ?
""",
(room_id,),
)
(min_depth,) = txn.fetchone()
logger.info("[purge] updating room_depth to %d", min_depth)
txn.execute(
"UPDATE room_depth SET min_depth = ? WHERE room_id = ?",
(min_depth, room_id),
)
# finally, drop the temp table. this will commit the txn in sqlite,
# so make sure to keep this actually last.
txn.execute("DROP TABLE events_to_purge")
logger.info("[purge] done")
return referenced_state_groups
|
def _purge_history_txn(self, txn, room_id, token, delete_local_events):
# Tables that should be pruned:
# event_auth
# event_backward_extremities
# event_edges
# event_forward_extremities
# event_json
# event_push_actions
# event_reference_hashes
# event_relations
# event_search
# event_to_state_groups
# events
# rejections
# room_depth
# state_groups
# state_groups_state
# destination_rooms
# we will build a temporary table listing the events so that we don't
# have to keep shovelling the list back and forth across the
# connection. Annoyingly the python sqlite driver commits the
# transaction on CREATE, so let's do this first.
#
# furthermore, we might already have the table from a previous (failed)
# purge attempt, so let's drop the table first.
txn.execute("DROP TABLE IF EXISTS events_to_purge")
txn.execute(
"CREATE TEMPORARY TABLE events_to_purge ("
" event_id TEXT NOT NULL,"
" should_delete BOOLEAN NOT NULL"
")"
)
# First ensure that we're not about to delete all the forward extremeties
txn.execute(
"SELECT e.event_id, e.depth FROM events as e "
"INNER JOIN event_forward_extremities as f "
"ON e.event_id = f.event_id "
"AND e.room_id = f.room_id "
"WHERE f.room_id = ?",
(room_id,),
)
rows = txn.fetchall()
max_depth = max(row[1] for row in rows)
if max_depth < token.topological:
# We need to ensure we don't delete all the events from the database
# otherwise we wouldn't be able to send any events (due to not
# having any backwards extremeties)
raise SynapseError(
400, "topological_ordering is greater than forward extremeties"
)
logger.info("[purge] looking for events to delete")
should_delete_expr = "state_key IS NULL"
should_delete_params = () # type: Tuple[Any, ...]
if not delete_local_events:
should_delete_expr += " AND event_id NOT LIKE ?"
# We include the parameter twice since we use the expression twice
should_delete_params += ("%:" + self.hs.hostname, "%:" + self.hs.hostname)
should_delete_params += (room_id, token.topological)
# Note that we insert events that are outliers and aren't going to be
# deleted, as nothing will happen to them.
txn.execute(
"INSERT INTO events_to_purge"
" SELECT event_id, %s"
" FROM events AS e LEFT JOIN state_events USING (event_id)"
" WHERE (NOT outlier OR (%s)) AND e.room_id = ? AND topological_ordering < ?"
% (should_delete_expr, should_delete_expr),
should_delete_params,
)
# We create the indices *after* insertion as that's a lot faster.
# create an index on should_delete because later we'll be looking for
# the should_delete / shouldn't_delete subsets
txn.execute(
"CREATE INDEX events_to_purge_should_delete ON events_to_purge(should_delete)"
)
# We do joins against events_to_purge for e.g. calculating state
# groups to purge, etc., so lets make an index.
txn.execute("CREATE INDEX events_to_purge_id ON events_to_purge(event_id)")
txn.execute("SELECT event_id, should_delete FROM events_to_purge")
event_rows = txn.fetchall()
logger.info(
"[purge] found %i events before cutoff, of which %i can be deleted",
len(event_rows),
sum(1 for e in event_rows if e[1]),
)
logger.info("[purge] Finding new backward extremities")
# We calculate the new entries for the backward extremeties by finding
# events to be purged that are pointed to by events we're not going to
# purge.
txn.execute(
"SELECT DISTINCT e.event_id FROM events_to_purge AS e"
" INNER JOIN event_edges AS ed ON e.event_id = ed.prev_event_id"
" LEFT JOIN events_to_purge AS ep2 ON ed.event_id = ep2.event_id"
" WHERE ep2.event_id IS NULL"
)
new_backwards_extrems = txn.fetchall()
logger.info("[purge] replacing backward extremities: %r", new_backwards_extrems)
txn.execute("DELETE FROM event_backward_extremities WHERE room_id = ?", (room_id,))
# Update backward extremeties
txn.execute_batch(
"INSERT INTO event_backward_extremities (room_id, event_id) VALUES (?, ?)",
[(room_id, event_id) for (event_id,) in new_backwards_extrems],
)
logger.info("[purge] finding state groups referenced by deleted events")
# Get all state groups that are referenced by events that are to be
# deleted.
txn.execute(
"""
SELECT DISTINCT state_group FROM events_to_purge
INNER JOIN event_to_state_groups USING (event_id)
"""
)
referenced_state_groups = {sg for (sg,) in txn}
logger.info(
"[purge] found %i referenced state groups", len(referenced_state_groups)
)
logger.info("[purge] removing events from event_to_state_groups")
txn.execute(
"DELETE FROM event_to_state_groups "
"WHERE event_id IN (SELECT event_id from events_to_purge)"
)
for event_id, _ in event_rows:
txn.call_after(self._get_state_group_for_event.invalidate, (event_id,))
# Delete all remote non-state events
for table in (
"events",
"event_json",
"event_auth",
"event_edges",
"event_forward_extremities",
"event_reference_hashes",
"event_relations",
"event_search",
"rejections",
):
logger.info("[purge] removing events from %s", table)
txn.execute(
"DELETE FROM %s WHERE event_id IN ("
" SELECT event_id FROM events_to_purge WHERE should_delete"
")" % (table,)
)
# event_push_actions lacks an index on event_id, and has one on
# (room_id, event_id) instead.
for table in ("event_push_actions",):
logger.info("[purge] removing events from %s", table)
txn.execute(
"DELETE FROM %s WHERE room_id = ? AND event_id IN ("
" SELECT event_id FROM events_to_purge WHERE should_delete"
")" % (table,),
(room_id,),
)
# Mark all state and own events as outliers
logger.info("[purge] marking remaining events as outliers")
txn.execute(
"UPDATE events SET outlier = ?"
" WHERE event_id IN ("
" SELECT event_id FROM events_to_purge "
" WHERE NOT should_delete"
")",
(True,),
)
# synapse tries to take out an exclusive lock on room_depth whenever it
# persists events (because upsert), and once we run this update, we
# will block that for the rest of our transaction.
#
# So, let's stick it at the end so that we don't block event
# persistence.
#
# We do this by calculating the minimum depth of the backwards
# extremities. However, the events in event_backward_extremities
# are ones we don't have yet so we need to look at the events that
# point to it via event_edges table.
txn.execute(
"""
SELECT COALESCE(MIN(depth), 0)
FROM event_backward_extremities AS eb
INNER JOIN event_edges AS eg ON eg.prev_event_id = eb.event_id
INNER JOIN events AS e ON e.event_id = eg.event_id
WHERE eb.room_id = ?
""",
(room_id,),
)
(min_depth,) = txn.fetchone()
logger.info("[purge] updating room_depth to %d", min_depth)
txn.execute(
"UPDATE room_depth SET min_depth = ? WHERE room_id = ?",
(min_depth, room_id),
)
# finally, drop the temp table. this will commit the txn in sqlite,
# so make sure to keep this actually last.
txn.execute("DROP TABLE events_to_purge")
logger.info("[purge] done")
return referenced_state_groups
|
https://github.com/matrix-org/synapse/issues/9481
|
synapse.http.server: [POST-10040] Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7f57646fa970 method='POST' uri='/_matrix/client/r0/join/%23synapse%3Amatrix.org' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 252, in _async_render_wrapper
callback_return = await self._async_render(request)
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 430, in _async_render
callback_return = await raw_callback_return
File "/usr/lib/python3.9/site-packages/synapse/rest/client/v1/room.py", line 301, in on_POST
await self.room_member_handler.update_membership(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 333, in update_membership
result = await self.update_membership_locked(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 549, in update_membership_locked
remote_join_response = await self._remote_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 1091, in _remote_join
event_id, stream_id = await self.federation_handler.do_invite_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 1400, in do_invite_join
max_stream_id = await self._persist_auth_tree(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2050, in _persist_auth_tree
await self.persist_events_and_notify(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2925, in persist_events_and_notify
events, max_stream_token = await self.storage.persistence.persist_events(
File "/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py", line 262, in persist_events
ret_vals = await make_deferred_yieldable(
twisted.internet.defer.FirstError: FirstError[#0, [Failure instance: Traceback: <class 'psycopg2.errors.UniqueViolation'>: duplicate key value violates unique constraint "event_auth_chains_pkey"
DETAIL: Key (event_id)=($e9U026auDHIgaZPAqlblvPupACjl7jcZDblP970dJPs) already exists.
/usr/lib/python3.9/site-packages/synapse/metrics/background_process_metrics.py:208:run
--- <exception caught here> ---
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:172:handle_queue_loop
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:324:persisting_queue
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:532:_persist_events
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:171:_persist_events_and_state_updates
/usr/lib/python3.9/site-packages/synapse/storage/database.py:661:runInteraction
/usr/lib/python3.9/site-packages/synapse/storage/database.py:744:runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:250:inContext
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:266:<lambda>
/usr/lib64/python3.9/site-packages/twisted/python/context.py:122:callWithContext
/usr/lib64/python3.9/site-packages/twisted/python/context.py:85:callWithContext
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:306:_runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/compat.py:464:reraise
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:297:_runWithConnection
/usr/lib/python3.9/site-packages/synapse/storage/database.py:739:inner_func
/usr/lib/python3.9/site-packages/synapse/storage/database.py:539:new_transaction
/usr/lib/python3.9/site-packages/synapse/logging/utils.py:71:wrapped
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:379:_persist_events_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:472:_persist_event_auth_chain_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:630:_add_chain_cover_index
/usr/lib/python3.9/site-packages/synapse/storage/database.py:896:simple_insert_many_txn
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:execute_batch
/usr/lib/python3.9/site-packages/synapse/storage/database.py:319:_do_execute
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:<lambda>
/usr/lib64/python3.9/site-packages/psycopg2/extras.py:1209:execute_batch
]]
|
twisted.internet.defer.FirstError
|
def _purge_room_txn(self, txn, room_id: str) -> List[int]:
# First we fetch all the state groups that should be deleted, before
# we delete that information.
txn.execute(
"""
SELECT DISTINCT state_group FROM events
INNER JOIN event_to_state_groups USING(event_id)
WHERE events.room_id = ?
""",
(room_id,),
)
state_groups = [row[0] for row in txn]
# Get all the auth chains that are referenced by events that are to be
# deleted.
txn.execute(
"""
SELECT chain_id, sequence_number FROM events
LEFT JOIN event_auth_chains USING (event_id)
WHERE room_id = ?
""",
(room_id,),
)
referenced_chain_id_tuples = list(txn)
logger.info("[purge] removing events from event_auth_chain_links")
txn.executemany(
"""
DELETE FROM event_auth_chain_links WHERE
(origin_chain_id = ? AND origin_sequence_number = ?) OR
(target_chain_id = ? AND target_sequence_number = ?)
""",
(
(chain_id, seq_num, chain_id, seq_num)
for (chain_id, seq_num) in referenced_chain_id_tuples
),
)
# Now we delete tables which lack an index on room_id but have one on event_id
for table in (
"event_auth",
"event_edges",
"event_json",
"event_push_actions_staging",
"event_reference_hashes",
"event_relations",
"event_to_state_groups",
"event_auth_chains",
"event_auth_chain_to_calculate",
"redactions",
"rejections",
"state_events",
):
logger.info("[purge] removing %s from %s", room_id, table)
txn.execute(
"""
DELETE FROM %s WHERE event_id IN (
SELECT event_id FROM events WHERE room_id=?
)
"""
% (table,),
(room_id,),
)
# and finally, the tables with an index on room_id (or no useful index)
for table in (
"current_state_events",
"destination_rooms",
"event_backward_extremities",
"event_forward_extremities",
"event_push_actions",
"event_search",
"events",
"group_rooms",
"public_room_list_stream",
"receipts_graph",
"receipts_linearized",
"room_aliases",
"room_depth",
"room_memberships",
"room_stats_state",
"room_stats_current",
"room_stats_historical",
"room_stats_earliest_token",
"rooms",
"stream_ordering_to_exterm",
"users_in_public_rooms",
"users_who_share_private_rooms",
# no useful index, but let's clear them anyway
"appservice_room_list",
"e2e_room_keys",
"event_push_summary",
"pusher_throttle",
"group_summary_rooms",
"room_account_data",
"room_tags",
"local_current_membership",
):
logger.info("[purge] removing %s from %s", room_id, table)
txn.execute("DELETE FROM %s WHERE room_id=?" % (table,), (room_id,))
# Other tables we do NOT need to clear out:
#
# - blocked_rooms
# This is important, to make sure that we don't accidentally rejoin a blocked
# room after it was purged
#
# - user_directory
# This has a room_id column, but it is unused
#
# Other tables that we might want to consider clearing out include:
#
# - event_reports
# Given that these are intended for abuse management my initial
# inclination is to leave them in place.
#
# - current_state_delta_stream
# - ex_outlier_stream
# - room_tags_revisions
# The problem with these is that they are largeish and there is no room_id
# index on them. In any case we should be clearing out 'stream' tables
# periodically anyway (#5888)
# TODO: we could probably usefully do a bunch of cache invalidation here
logger.info("[purge] done")
return state_groups
|
def _purge_room_txn(self, txn, room_id):
# First we fetch all the state groups that should be deleted, before
# we delete that information.
txn.execute(
"""
SELECT DISTINCT state_group FROM events
INNER JOIN event_to_state_groups USING(event_id)
WHERE events.room_id = ?
""",
(room_id,),
)
state_groups = [row[0] for row in txn]
# Now we delete tables which lack an index on room_id but have one on event_id
for table in (
"event_auth",
"event_edges",
"event_json",
"event_push_actions_staging",
"event_reference_hashes",
"event_relations",
"event_to_state_groups",
"redactions",
"rejections",
"state_events",
):
logger.info("[purge] removing %s from %s", room_id, table)
txn.execute(
"""
DELETE FROM %s WHERE event_id IN (
SELECT event_id FROM events WHERE room_id=?
)
"""
% (table,),
(room_id,),
)
# and finally, the tables with an index on room_id (or no useful index)
for table in (
"current_state_events",
"destination_rooms",
"event_backward_extremities",
"event_forward_extremities",
"event_push_actions",
"event_search",
"events",
"group_rooms",
"public_room_list_stream",
"receipts_graph",
"receipts_linearized",
"room_aliases",
"room_depth",
"room_memberships",
"room_stats_state",
"room_stats_current",
"room_stats_historical",
"room_stats_earliest_token",
"rooms",
"stream_ordering_to_exterm",
"users_in_public_rooms",
"users_who_share_private_rooms",
# no useful index, but let's clear them anyway
"appservice_room_list",
"e2e_room_keys",
"event_push_summary",
"pusher_throttle",
"group_summary_rooms",
"room_account_data",
"room_tags",
"local_current_membership",
):
logger.info("[purge] removing %s from %s", room_id, table)
txn.execute("DELETE FROM %s WHERE room_id=?" % (table,), (room_id,))
# Other tables we do NOT need to clear out:
#
# - blocked_rooms
# This is important, to make sure that we don't accidentally rejoin a blocked
# room after it was purged
#
# - user_directory
# This has a room_id column, but it is unused
#
# Other tables that we might want to consider clearing out include:
#
# - event_reports
# Given that these are intended for abuse management my initial
# inclination is to leave them in place.
#
# - current_state_delta_stream
# - ex_outlier_stream
# - room_tags_revisions
# The problem with these is that they are largeish and there is no room_id
# index on them. In any case we should be clearing out 'stream' tables
# periodically anyway (#5888)
# TODO: we could probably usefully do a bunch of cache invalidation here
logger.info("[purge] done")
return state_groups
|
https://github.com/matrix-org/synapse/issues/9481
|
synapse.http.server: [POST-10040] Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7f57646fa970 method='POST' uri='/_matrix/client/r0/join/%23synapse%3Amatrix.org' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 252, in _async_render_wrapper
callback_return = await self._async_render(request)
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 430, in _async_render
callback_return = await raw_callback_return
File "/usr/lib/python3.9/site-packages/synapse/rest/client/v1/room.py", line 301, in on_POST
await self.room_member_handler.update_membership(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 333, in update_membership
result = await self.update_membership_locked(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 549, in update_membership_locked
remote_join_response = await self._remote_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 1091, in _remote_join
event_id, stream_id = await self.federation_handler.do_invite_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 1400, in do_invite_join
max_stream_id = await self._persist_auth_tree(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2050, in _persist_auth_tree
await self.persist_events_and_notify(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2925, in persist_events_and_notify
events, max_stream_token = await self.storage.persistence.persist_events(
File "/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py", line 262, in persist_events
ret_vals = await make_deferred_yieldable(
twisted.internet.defer.FirstError: FirstError[#0, [Failure instance: Traceback: <class 'psycopg2.errors.UniqueViolation'>: duplicate key value violates unique constraint "event_auth_chains_pkey"
DETAIL: Key (event_id)=($e9U026auDHIgaZPAqlblvPupACjl7jcZDblP970dJPs) already exists.
/usr/lib/python3.9/site-packages/synapse/metrics/background_process_metrics.py:208:run
--- <exception caught here> ---
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:172:handle_queue_loop
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:324:persisting_queue
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:532:_persist_events
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:171:_persist_events_and_state_updates
/usr/lib/python3.9/site-packages/synapse/storage/database.py:661:runInteraction
/usr/lib/python3.9/site-packages/synapse/storage/database.py:744:runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:250:inContext
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:266:<lambda>
/usr/lib64/python3.9/site-packages/twisted/python/context.py:122:callWithContext
/usr/lib64/python3.9/site-packages/twisted/python/context.py:85:callWithContext
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:306:_runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/compat.py:464:reraise
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:297:_runWithConnection
/usr/lib/python3.9/site-packages/synapse/storage/database.py:739:inner_func
/usr/lib/python3.9/site-packages/synapse/storage/database.py:539:new_transaction
/usr/lib/python3.9/site-packages/synapse/logging/utils.py:71:wrapped
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:379:_persist_events_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:472:_persist_event_auth_chain_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:630:_add_chain_cover_index
/usr/lib/python3.9/site-packages/synapse/storage/database.py:896:simple_insert_many_txn
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:execute_batch
/usr/lib/python3.9/site-packages/synapse/storage/database.py:319:_do_execute
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:<lambda>
/usr/lib64/python3.9/site-packages/psycopg2/extras.py:1209:execute_batch
]]
|
twisted.internet.defer.FirstError
|
async def _find_unreferenced_groups(self, state_groups: Set[int]) -> Set[int]:
"""Used when purging history to figure out which state groups can be
deleted.
Args:
state_groups: Set of state groups referenced by events
that are going to be deleted.
Returns:
The set of state groups that can be deleted.
"""
# Set of events that we have found to be referenced by events
referenced_groups = set()
# Set of state groups we've already seen
state_groups_seen = set(state_groups)
# Set of state groups to handle next.
next_to_search = set(state_groups)
while next_to_search:
# We bound size of groups we're looking up at once, to stop the
# SQL query getting too big
if len(next_to_search) < 100:
current_search = next_to_search
next_to_search = set()
else:
current_search = set(itertools.islice(next_to_search, 100))
next_to_search -= current_search
referenced = await self.stores.main.get_referenced_state_groups(current_search)
referenced_groups |= referenced
# We don't continue iterating up the state group graphs for state
# groups that are referenced.
current_search -= referenced
edges = await self.stores.state.get_previous_state_groups(current_search)
prevs = set(edges.values())
# We don't bother re-handling groups we've already seen
prevs -= state_groups_seen
next_to_search |= prevs
state_groups_seen |= prevs
to_delete = state_groups_seen - referenced_groups
return to_delete
|
async def _find_unreferenced_groups(self, state_groups: Set[int]) -> Set[int]:
"""Used when purging history to figure out which state groups can be
deleted.
Args:
state_groups: Set of state groups referenced by events
that are going to be deleted.
Returns:
The set of state groups that can be deleted.
"""
# Graph of state group -> previous group
graph = {}
# Set of events that we have found to be referenced by events
referenced_groups = set()
# Set of state groups we've already seen
state_groups_seen = set(state_groups)
# Set of state groups to handle next.
next_to_search = set(state_groups)
while next_to_search:
# We bound size of groups we're looking up at once, to stop the
# SQL query getting too big
if len(next_to_search) < 100:
current_search = next_to_search
next_to_search = set()
else:
current_search = set(itertools.islice(next_to_search, 100))
next_to_search -= current_search
referenced = await self.stores.main.get_referenced_state_groups(current_search)
referenced_groups |= referenced
# We don't continue iterating up the state group graphs for state
# groups that are referenced.
current_search -= referenced
edges = await self.stores.state.get_previous_state_groups(current_search)
prevs = set(edges.values())
# We don't bother re-handling groups we've already seen
prevs -= state_groups_seen
next_to_search |= prevs
state_groups_seen |= prevs
graph.update(edges)
to_delete = state_groups_seen - referenced_groups
return to_delete
|
https://github.com/matrix-org/synapse/issues/9481
|
synapse.http.server: [POST-10040] Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7f57646fa970 method='POST' uri='/_matrix/client/r0/join/%23synapse%3Amatrix.org' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 252, in _async_render_wrapper
callback_return = await self._async_render(request)
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 430, in _async_render
callback_return = await raw_callback_return
File "/usr/lib/python3.9/site-packages/synapse/rest/client/v1/room.py", line 301, in on_POST
await self.room_member_handler.update_membership(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 333, in update_membership
result = await self.update_membership_locked(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 549, in update_membership_locked
remote_join_response = await self._remote_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 1091, in _remote_join
event_id, stream_id = await self.federation_handler.do_invite_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 1400, in do_invite_join
max_stream_id = await self._persist_auth_tree(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2050, in _persist_auth_tree
await self.persist_events_and_notify(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2925, in persist_events_and_notify
events, max_stream_token = await self.storage.persistence.persist_events(
File "/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py", line 262, in persist_events
ret_vals = await make_deferred_yieldable(
twisted.internet.defer.FirstError: FirstError[#0, [Failure instance: Traceback: <class 'psycopg2.errors.UniqueViolation'>: duplicate key value violates unique constraint "event_auth_chains_pkey"
DETAIL: Key (event_id)=($e9U026auDHIgaZPAqlblvPupACjl7jcZDblP970dJPs) already exists.
/usr/lib/python3.9/site-packages/synapse/metrics/background_process_metrics.py:208:run
--- <exception caught here> ---
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:172:handle_queue_loop
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:324:persisting_queue
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:532:_persist_events
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:171:_persist_events_and_state_updates
/usr/lib/python3.9/site-packages/synapse/storage/database.py:661:runInteraction
/usr/lib/python3.9/site-packages/synapse/storage/database.py:744:runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:250:inContext
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:266:<lambda>
/usr/lib64/python3.9/site-packages/twisted/python/context.py:122:callWithContext
/usr/lib64/python3.9/site-packages/twisted/python/context.py:85:callWithContext
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:306:_runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/compat.py:464:reraise
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:297:_runWithConnection
/usr/lib/python3.9/site-packages/synapse/storage/database.py:739:inner_func
/usr/lib/python3.9/site-packages/synapse/storage/database.py:539:new_transaction
/usr/lib/python3.9/site-packages/synapse/logging/utils.py:71:wrapped
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:379:_persist_events_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:472:_persist_event_auth_chain_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:630:_add_chain_cover_index
/usr/lib/python3.9/site-packages/synapse/storage/database.py:896:simple_insert_many_txn
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:execute_batch
/usr/lib/python3.9/site-packages/synapse/storage/database.py:319:_do_execute
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:<lambda>
/usr/lib64/python3.9/site-packages/psycopg2/extras.py:1209:execute_batch
]]
|
twisted.internet.defer.FirstError
|
def __init__(self, database: DatabasePool, db_conn, hs):
super().__init__(database, db_conn, hs)
self.db_pool.updates.register_background_update_handler(
self.EVENT_ORIGIN_SERVER_TS_NAME, self._background_reindex_origin_server_ts
)
self.db_pool.updates.register_background_update_handler(
self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME,
self._background_reindex_fields_sender,
)
self.db_pool.updates.register_background_index_update(
"event_contains_url_index",
index_name="event_contains_url_index",
table="events",
columns=["room_id", "topological_ordering", "stream_ordering"],
where_clause="contains_url = true AND outlier = false",
)
# an event_id index on event_search is useful for the purge_history
# api. Plus it means we get to enforce some integrity with a UNIQUE
# clause
self.db_pool.updates.register_background_index_update(
"event_search_event_id_idx",
index_name="event_search_event_id_idx",
table="event_search",
columns=["event_id"],
unique=True,
psql_only=True,
)
self.db_pool.updates.register_background_update_handler(
self.DELETE_SOFT_FAILED_EXTREMITIES, self._cleanup_extremities_bg_update
)
self.db_pool.updates.register_background_update_handler(
"redactions_received_ts", self._redactions_received_ts
)
# This index gets deleted in `event_fix_redactions_bytes` update
self.db_pool.updates.register_background_index_update(
"event_fix_redactions_bytes_create_index",
index_name="redactions_censored_redacts",
table="redactions",
columns=["redacts"],
where_clause="have_censored",
)
self.db_pool.updates.register_background_update_handler(
"event_fix_redactions_bytes", self._event_fix_redactions_bytes
)
self.db_pool.updates.register_background_update_handler(
"event_store_labels", self._event_store_labels
)
self.db_pool.updates.register_background_index_update(
"redactions_have_censored_ts_idx",
index_name="redactions_have_censored_ts",
table="redactions",
columns=["received_ts"],
where_clause="NOT have_censored",
)
self.db_pool.updates.register_background_index_update(
"users_have_local_media",
index_name="users_have_local_media",
table="local_media_repository",
columns=["user_id", "created_ts"],
)
self.db_pool.updates.register_background_update_handler(
"rejected_events_metadata",
self._rejected_events_metadata,
)
self.db_pool.updates.register_background_update_handler(
"chain_cover",
self._chain_cover_index,
)
self.db_pool.updates.register_background_update_handler(
"purged_chain_cover",
self._purged_chain_cover_index,
)
|
def __init__(self, database: DatabasePool, db_conn, hs):
super().__init__(database, db_conn, hs)
self.db_pool.updates.register_background_update_handler(
self.EVENT_ORIGIN_SERVER_TS_NAME, self._background_reindex_origin_server_ts
)
self.db_pool.updates.register_background_update_handler(
self.EVENT_FIELDS_SENDER_URL_UPDATE_NAME,
self._background_reindex_fields_sender,
)
self.db_pool.updates.register_background_index_update(
"event_contains_url_index",
index_name="event_contains_url_index",
table="events",
columns=["room_id", "topological_ordering", "stream_ordering"],
where_clause="contains_url = true AND outlier = false",
)
# an event_id index on event_search is useful for the purge_history
# api. Plus it means we get to enforce some integrity with a UNIQUE
# clause
self.db_pool.updates.register_background_index_update(
"event_search_event_id_idx",
index_name="event_search_event_id_idx",
table="event_search",
columns=["event_id"],
unique=True,
psql_only=True,
)
self.db_pool.updates.register_background_update_handler(
self.DELETE_SOFT_FAILED_EXTREMITIES, self._cleanup_extremities_bg_update
)
self.db_pool.updates.register_background_update_handler(
"redactions_received_ts", self._redactions_received_ts
)
# This index gets deleted in `event_fix_redactions_bytes` update
self.db_pool.updates.register_background_index_update(
"event_fix_redactions_bytes_create_index",
index_name="redactions_censored_redacts",
table="redactions",
columns=["redacts"],
where_clause="have_censored",
)
self.db_pool.updates.register_background_update_handler(
"event_fix_redactions_bytes", self._event_fix_redactions_bytes
)
self.db_pool.updates.register_background_update_handler(
"event_store_labels", self._event_store_labels
)
self.db_pool.updates.register_background_index_update(
"redactions_have_censored_ts_idx",
index_name="redactions_have_censored_ts",
table="redactions",
columns=["received_ts"],
where_clause="NOT have_censored",
)
self.db_pool.updates.register_background_index_update(
"users_have_local_media",
index_name="users_have_local_media",
table="local_media_repository",
columns=["user_id", "created_ts"],
)
self.db_pool.updates.register_background_update_handler(
"rejected_events_metadata",
self._rejected_events_metadata,
)
self.db_pool.updates.register_background_update_handler(
"chain_cover",
self._chain_cover_index,
)
|
https://github.com/matrix-org/synapse/issues/9481
|
synapse.http.server: [POST-10040] Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7f57646fa970 method='POST' uri='/_matrix/client/r0/join/%23synapse%3Amatrix.org' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 252, in _async_render_wrapper
callback_return = await self._async_render(request)
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 430, in _async_render
callback_return = await raw_callback_return
File "/usr/lib/python3.9/site-packages/synapse/rest/client/v1/room.py", line 301, in on_POST
await self.room_member_handler.update_membership(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 333, in update_membership
result = await self.update_membership_locked(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 549, in update_membership_locked
remote_join_response = await self._remote_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 1091, in _remote_join
event_id, stream_id = await self.federation_handler.do_invite_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 1400, in do_invite_join
max_stream_id = await self._persist_auth_tree(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2050, in _persist_auth_tree
await self.persist_events_and_notify(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2925, in persist_events_and_notify
events, max_stream_token = await self.storage.persistence.persist_events(
File "/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py", line 262, in persist_events
ret_vals = await make_deferred_yieldable(
twisted.internet.defer.FirstError: FirstError[#0, [Failure instance: Traceback: <class 'psycopg2.errors.UniqueViolation'>: duplicate key value violates unique constraint "event_auth_chains_pkey"
DETAIL: Key (event_id)=($e9U026auDHIgaZPAqlblvPupACjl7jcZDblP970dJPs) already exists.
/usr/lib/python3.9/site-packages/synapse/metrics/background_process_metrics.py:208:run
--- <exception caught here> ---
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:172:handle_queue_loop
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:324:persisting_queue
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:532:_persist_events
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:171:_persist_events_and_state_updates
/usr/lib/python3.9/site-packages/synapse/storage/database.py:661:runInteraction
/usr/lib/python3.9/site-packages/synapse/storage/database.py:744:runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:250:inContext
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:266:<lambda>
/usr/lib64/python3.9/site-packages/twisted/python/context.py:122:callWithContext
/usr/lib64/python3.9/site-packages/twisted/python/context.py:85:callWithContext
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:306:_runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/compat.py:464:reraise
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:297:_runWithConnection
/usr/lib/python3.9/site-packages/synapse/storage/database.py:739:inner_func
/usr/lib/python3.9/site-packages/synapse/storage/database.py:539:new_transaction
/usr/lib/python3.9/site-packages/synapse/logging/utils.py:71:wrapped
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:379:_persist_events_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:472:_persist_event_auth_chain_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:630:_add_chain_cover_index
/usr/lib/python3.9/site-packages/synapse/storage/database.py:896:simple_insert_many_txn
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:execute_batch
/usr/lib/python3.9/site-packages/synapse/storage/database.py:319:_do_execute
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:<lambda>
/usr/lib64/python3.9/site-packages/psycopg2/extras.py:1209:execute_batch
]]
|
twisted.internet.defer.FirstError
|
def _purge_room_txn(self, txn, room_id: str) -> List[int]:
# First we fetch all the state groups that should be deleted, before
# we delete that information.
txn.execute(
"""
SELECT DISTINCT state_group FROM events
INNER JOIN event_to_state_groups USING(event_id)
WHERE events.room_id = ?
""",
(room_id,),
)
state_groups = [row[0] for row in txn]
# Get all the auth chains that are referenced by events that are to be
# deleted.
txn.execute(
"""
SELECT chain_id, sequence_number FROM events
LEFT JOIN event_auth_chains USING (event_id)
WHERE room_id = ?
""",
(room_id,),
)
referenced_chain_id_tuples = list(txn)
logger.info("[purge] removing events from event_auth_chain_links")
txn.executemany(
"""
DELETE FROM event_auth_chain_links WHERE
origin_chain_id = ? AND origin_sequence_number = ?
""",
referenced_chain_id_tuples,
)
# Now we delete tables which lack an index on room_id but have one on event_id
for table in (
"event_auth",
"event_edges",
"event_json",
"event_push_actions_staging",
"event_reference_hashes",
"event_relations",
"event_to_state_groups",
"event_auth_chains",
"event_auth_chain_to_calculate",
"redactions",
"rejections",
"state_events",
):
logger.info("[purge] removing %s from %s", room_id, table)
txn.execute(
"""
DELETE FROM %s WHERE event_id IN (
SELECT event_id FROM events WHERE room_id=?
)
"""
% (table,),
(room_id,),
)
# and finally, the tables with an index on room_id (or no useful index)
for table in (
"current_state_events",
"destination_rooms",
"event_backward_extremities",
"event_forward_extremities",
"event_push_actions",
"event_search",
"events",
"group_rooms",
"public_room_list_stream",
"receipts_graph",
"receipts_linearized",
"room_aliases",
"room_depth",
"room_memberships",
"room_stats_state",
"room_stats_current",
"room_stats_historical",
"room_stats_earliest_token",
"rooms",
"stream_ordering_to_exterm",
"users_in_public_rooms",
"users_who_share_private_rooms",
# no useful index, but let's clear them anyway
"appservice_room_list",
"e2e_room_keys",
"event_push_summary",
"pusher_throttle",
"group_summary_rooms",
"room_account_data",
"room_tags",
"local_current_membership",
):
logger.info("[purge] removing %s from %s", room_id, table)
txn.execute("DELETE FROM %s WHERE room_id=?" % (table,), (room_id,))
# Other tables we do NOT need to clear out:
#
# - blocked_rooms
# This is important, to make sure that we don't accidentally rejoin a blocked
# room after it was purged
#
# - user_directory
# This has a room_id column, but it is unused
#
# Other tables that we might want to consider clearing out include:
#
# - event_reports
# Given that these are intended for abuse management my initial
# inclination is to leave them in place.
#
# - current_state_delta_stream
# - ex_outlier_stream
# - room_tags_revisions
# The problem with these is that they are largeish and there is no room_id
# index on them. In any case we should be clearing out 'stream' tables
# periodically anyway (#5888)
# TODO: we could probably usefully do a bunch of cache invalidation here
logger.info("[purge] done")
return state_groups
|
def _purge_room_txn(self, txn, room_id: str) -> List[int]:
# First we fetch all the state groups that should be deleted, before
# we delete that information.
txn.execute(
"""
SELECT DISTINCT state_group FROM events
INNER JOIN event_to_state_groups USING(event_id)
WHERE events.room_id = ?
""",
(room_id,),
)
state_groups = [row[0] for row in txn]
# Get all the auth chains that are referenced by events that are to be
# deleted.
txn.execute(
"""
SELECT chain_id, sequence_number FROM events
LEFT JOIN event_auth_chains USING (event_id)
WHERE room_id = ?
""",
(room_id,),
)
referenced_chain_id_tuples = list(txn)
logger.info("[purge] removing events from event_auth_chain_links")
txn.executemany(
"""
DELETE FROM event_auth_chain_links WHERE
(origin_chain_id = ? AND origin_sequence_number = ?) OR
(target_chain_id = ? AND target_sequence_number = ?)
""",
(
(chain_id, seq_num, chain_id, seq_num)
for (chain_id, seq_num) in referenced_chain_id_tuples
),
)
# Now we delete tables which lack an index on room_id but have one on event_id
for table in (
"event_auth",
"event_edges",
"event_json",
"event_push_actions_staging",
"event_reference_hashes",
"event_relations",
"event_to_state_groups",
"event_auth_chains",
"event_auth_chain_to_calculate",
"redactions",
"rejections",
"state_events",
):
logger.info("[purge] removing %s from %s", room_id, table)
txn.execute(
"""
DELETE FROM %s WHERE event_id IN (
SELECT event_id FROM events WHERE room_id=?
)
"""
% (table,),
(room_id,),
)
# and finally, the tables with an index on room_id (or no useful index)
for table in (
"current_state_events",
"destination_rooms",
"event_backward_extremities",
"event_forward_extremities",
"event_push_actions",
"event_search",
"events",
"group_rooms",
"public_room_list_stream",
"receipts_graph",
"receipts_linearized",
"room_aliases",
"room_depth",
"room_memberships",
"room_stats_state",
"room_stats_current",
"room_stats_historical",
"room_stats_earliest_token",
"rooms",
"stream_ordering_to_exterm",
"users_in_public_rooms",
"users_who_share_private_rooms",
# no useful index, but let's clear them anyway
"appservice_room_list",
"e2e_room_keys",
"event_push_summary",
"pusher_throttle",
"group_summary_rooms",
"room_account_data",
"room_tags",
"local_current_membership",
):
logger.info("[purge] removing %s from %s", room_id, table)
txn.execute("DELETE FROM %s WHERE room_id=?" % (table,), (room_id,))
# Other tables we do NOT need to clear out:
#
# - blocked_rooms
# This is important, to make sure that we don't accidentally rejoin a blocked
# room after it was purged
#
# - user_directory
# This has a room_id column, but it is unused
#
# Other tables that we might want to consider clearing out include:
#
# - event_reports
# Given that these are intended for abuse management my initial
# inclination is to leave them in place.
#
# - current_state_delta_stream
# - ex_outlier_stream
# - room_tags_revisions
# The problem with these is that they are largeish and there is no room_id
# index on them. In any case we should be clearing out 'stream' tables
# periodically anyway (#5888)
# TODO: we could probably usefully do a bunch of cache invalidation here
logger.info("[purge] done")
return state_groups
|
https://github.com/matrix-org/synapse/issues/9481
|
synapse.http.server: [POST-10040] Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7f57646fa970 method='POST' uri='/_matrix/client/r0/join/%23synapse%3Amatrix.org' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 252, in _async_render_wrapper
callback_return = await self._async_render(request)
File "/usr/lib/python3.9/site-packages/synapse/http/server.py", line 430, in _async_render
callback_return = await raw_callback_return
File "/usr/lib/python3.9/site-packages/synapse/rest/client/v1/room.py", line 301, in on_POST
await self.room_member_handler.update_membership(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 333, in update_membership
result = await self.update_membership_locked(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 549, in update_membership_locked
remote_join_response = await self._remote_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/room_member.py", line 1091, in _remote_join
event_id, stream_id = await self.federation_handler.do_invite_join(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 1400, in do_invite_join
max_stream_id = await self._persist_auth_tree(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2050, in _persist_auth_tree
await self.persist_events_and_notify(
File "/usr/lib/python3.9/site-packages/synapse/handlers/federation.py", line 2925, in persist_events_and_notify
events, max_stream_token = await self.storage.persistence.persist_events(
File "/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py", line 262, in persist_events
ret_vals = await make_deferred_yieldable(
twisted.internet.defer.FirstError: FirstError[#0, [Failure instance: Traceback: <class 'psycopg2.errors.UniqueViolation'>: duplicate key value violates unique constraint "event_auth_chains_pkey"
DETAIL: Key (event_id)=($e9U026auDHIgaZPAqlblvPupACjl7jcZDblP970dJPs) already exists.
/usr/lib/python3.9/site-packages/synapse/metrics/background_process_metrics.py:208:run
--- <exception caught here> ---
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:172:handle_queue_loop
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:324:persisting_queue
/usr/lib/python3.9/site-packages/synapse/storage/persist_events.py:532:_persist_events
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:171:_persist_events_and_state_updates
/usr/lib/python3.9/site-packages/synapse/storage/database.py:661:runInteraction
/usr/lib/python3.9/site-packages/synapse/storage/database.py:744:runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:250:inContext
/usr/lib64/python3.9/site-packages/twisted/python/threadpool.py:266:<lambda>
/usr/lib64/python3.9/site-packages/twisted/python/context.py:122:callWithContext
/usr/lib64/python3.9/site-packages/twisted/python/context.py:85:callWithContext
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:306:_runWithConnection
/usr/lib64/python3.9/site-packages/twisted/python/compat.py:464:reraise
/usr/lib64/python3.9/site-packages/twisted/enterprise/adbapi.py:297:_runWithConnection
/usr/lib/python3.9/site-packages/synapse/storage/database.py:739:inner_func
/usr/lib/python3.9/site-packages/synapse/storage/database.py:539:new_transaction
/usr/lib/python3.9/site-packages/synapse/logging/utils.py:71:wrapped
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:379:_persist_events_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:472:_persist_event_auth_chain_txn
/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py:630:_add_chain_cover_index
/usr/lib/python3.9/site-packages/synapse/storage/database.py:896:simple_insert_many_txn
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:execute_batch
/usr/lib/python3.9/site-packages/synapse/storage/database.py:319:_do_execute
/usr/lib/python3.9/site-packages/synapse/storage/database.py:274:<lambda>
/usr/lib64/python3.9/site-packages/psycopg2/extras.py:1209:execute_batch
]]
|
twisted.internet.defer.FirstError
|
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.hs = hs
self.auth = hs.get_auth()
self.admin_handler = hs.get_admin_handler()
self.state_handler = hs.get_state_handler()
|
def __init__(self, hs: "HomeServer"):
self.hs = hs
self.auth = hs.get_auth()
self.room_member_handler = hs.get_room_member_handler()
self.admin_handler = hs.get_admin_handler()
self.state_handler = hs.get_state_handler()
|
https://github.com/matrix-org/synapse/issues/9505
|
2021-02-26 14:01:23,554 - synapse.http.server - 94 - ERROR - POST-320 - Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7feef12ec358 method='POST' uri='/_synapse/admin/v1/join/%23test%3Aexemple.test.com' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/failure.py", line 512, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/databases/main/event_federation.py", line 634, in get_latest_event_ids_in_room
desc="get_latest_event_ids_in_room",
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1453, in simple_select_onecol
db_autocommit=True,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 676, in runInteraction
**kwargs,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 752, in runWithConnection
self._db_pool.runWithConnection(inner_func, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 746, in inner_func
return func(db_conn, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 540, in new_transaction
r = func(cursor, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1422, in simple_select_onecol_txn
txn.execute(sql, list(keyvalues.values()))
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 295, in execute
self._do_execute(self.txn.execute, sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 321, in _do_execute
return func(sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/psycopg2/extensions.py", line 121, in getquoted
pobjs = [adapt(o) for o in self._seq]
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/types.py", line 220, in __iter__
raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))
ValueError: Attempted to iterate a RoomID
|
ValueError
|
async def on_POST(
self, request: SynapseRequest, room_identifier: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
content = parse_json_object_from_request(request)
assert_params_in_dict(content, ["user_id"])
target_user = UserID.from_string(content["user_id"])
if not self.hs.is_mine(target_user):
raise SynapseError(400, "This endpoint can only be used with local users")
if not await self.admin_handler.get_user(target_user):
raise NotFoundError("User not found")
# Get the room ID from the identifier.
try:
remote_room_hosts = [x.decode("ascii") for x in request.args[b"server_name"]] # type: Optional[List[str]]
except Exception:
remote_room_hosts = None
room_id, remote_room_hosts = await self.resolve_room_id(
room_identifier, remote_room_hosts
)
fake_requester = create_requester(
target_user, authenticated_entity=requester.authenticated_entity
)
# send invite if room has "JoinRules.INVITE"
room_state = await self.state_handler.get_current_state(room_id)
join_rules_event = room_state.get((EventTypes.JoinRules, ""))
if join_rules_event:
if not (join_rules_event.content.get("join_rule") == JoinRules.PUBLIC):
# update_membership with an action of "invite" can raise a
# ShadowBanError. This is not handled since it is assumed that
# an admin isn't going to call this API with a shadow-banned user.
await self.room_member_handler.update_membership(
requester=requester,
target=fake_requester.user,
room_id=room_id,
action="invite",
remote_room_hosts=remote_room_hosts,
ratelimit=False,
)
await self.room_member_handler.update_membership(
requester=fake_requester,
target=fake_requester.user,
room_id=room_id,
action="join",
remote_room_hosts=remote_room_hosts,
ratelimit=False,
)
return 200, {"room_id": room_id}
|
async def on_POST(
self, request: SynapseRequest, room_identifier: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
content = parse_json_object_from_request(request)
assert_params_in_dict(content, ["user_id"])
target_user = UserID.from_string(content["user_id"])
if not self.hs.is_mine(target_user):
raise SynapseError(400, "This endpoint can only be used with local users")
if not await self.admin_handler.get_user(target_user):
raise NotFoundError("User not found")
if RoomID.is_valid(room_identifier):
room_id = room_identifier
try:
remote_room_hosts = [
x.decode("ascii") for x in request.args[b"server_name"]
] # type: Optional[List[str]]
except Exception:
remote_room_hosts = None
elif RoomAlias.is_valid(room_identifier):
handler = self.room_member_handler
room_alias = RoomAlias.from_string(room_identifier)
room_id, remote_room_hosts = await handler.lookup_room_alias(room_alias)
else:
raise SynapseError(
400, "%s was not legal room ID or room alias" % (room_identifier,)
)
fake_requester = create_requester(
target_user, authenticated_entity=requester.authenticated_entity
)
# send invite if room has "JoinRules.INVITE"
room_state = await self.state_handler.get_current_state(room_id)
join_rules_event = room_state.get((EventTypes.JoinRules, ""))
if join_rules_event:
if not (join_rules_event.content.get("join_rule") == JoinRules.PUBLIC):
# update_membership with an action of "invite" can raise a
# ShadowBanError. This is not handled since it is assumed that
# an admin isn't going to call this API with a shadow-banned user.
await self.room_member_handler.update_membership(
requester=requester,
target=fake_requester.user,
room_id=room_id,
action="invite",
remote_room_hosts=remote_room_hosts,
ratelimit=False,
)
await self.room_member_handler.update_membership(
requester=fake_requester,
target=fake_requester.user,
room_id=room_id,
action="join",
remote_room_hosts=remote_room_hosts,
ratelimit=False,
)
return 200, {"room_id": room_id}
|
https://github.com/matrix-org/synapse/issues/9505
|
2021-02-26 14:01:23,554 - synapse.http.server - 94 - ERROR - POST-320 - Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7feef12ec358 method='POST' uri='/_synapse/admin/v1/join/%23test%3Aexemple.test.com' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/failure.py", line 512, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/databases/main/event_federation.py", line 634, in get_latest_event_ids_in_room
desc="get_latest_event_ids_in_room",
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1453, in simple_select_onecol
db_autocommit=True,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 676, in runInteraction
**kwargs,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 752, in runWithConnection
self._db_pool.runWithConnection(inner_func, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 746, in inner_func
return func(db_conn, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 540, in new_transaction
r = func(cursor, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1422, in simple_select_onecol_txn
txn.execute(sql, list(keyvalues.values()))
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 295, in execute
self._do_execute(self.txn.execute, sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 321, in _do_execute
return func(sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/psycopg2/extensions.py", line 121, in getquoted
pobjs = [adapt(o) for o in self._seq]
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/types.py", line 220, in __iter__
raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))
ValueError: Attempted to iterate a RoomID
|
ValueError
|
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.hs = hs
self.auth = hs.get_auth()
self.event_creation_handler = hs.get_event_creation_handler()
self.state_handler = hs.get_state_handler()
self.is_mine_id = hs.is_mine_id
|
def __init__(self, hs: "HomeServer"):
self.hs = hs
self.auth = hs.get_auth()
self.room_member_handler = hs.get_room_member_handler()
self.event_creation_handler = hs.get_event_creation_handler()
self.state_handler = hs.get_state_handler()
self.is_mine_id = hs.is_mine_id
|
https://github.com/matrix-org/synapse/issues/9505
|
2021-02-26 14:01:23,554 - synapse.http.server - 94 - ERROR - POST-320 - Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7feef12ec358 method='POST' uri='/_synapse/admin/v1/join/%23test%3Aexemple.test.com' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/failure.py", line 512, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/databases/main/event_federation.py", line 634, in get_latest_event_ids_in_room
desc="get_latest_event_ids_in_room",
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1453, in simple_select_onecol
db_autocommit=True,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 676, in runInteraction
**kwargs,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 752, in runWithConnection
self._db_pool.runWithConnection(inner_func, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 746, in inner_func
return func(db_conn, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 540, in new_transaction
r = func(cursor, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1422, in simple_select_onecol_txn
txn.execute(sql, list(keyvalues.values()))
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 295, in execute
self._do_execute(self.txn.execute, sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 321, in _do_execute
return func(sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/psycopg2/extensions.py", line 121, in getquoted
pobjs = [adapt(o) for o in self._seq]
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/types.py", line 220, in __iter__
raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))
ValueError: Attempted to iterate a RoomID
|
ValueError
|
async def on_POST(
self, request: SynapseRequest, room_identifier: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
content = parse_json_object_from_request(request, allow_empty_body=True)
room_id, _ = await self.resolve_room_id(room_identifier)
# Which user to grant room admin rights to.
user_to_add = content.get("user_id", requester.user.to_string())
# Figure out which local users currently have power in the room, if any.
room_state = await self.state_handler.get_current_state(room_id)
if not room_state:
raise SynapseError(400, "Server not in room")
create_event = room_state[(EventTypes.Create, "")]
power_levels = room_state.get((EventTypes.PowerLevels, ""))
if power_levels is not None:
# We pick the local user with the highest power.
user_power = power_levels.content.get("users", {})
admin_users = [user_id for user_id in user_power if self.is_mine_id(user_id)]
admin_users.sort(key=lambda user: user_power[user])
if not admin_users:
raise SynapseError(400, "No local admin user in room")
admin_user_id = None
for admin_user in reversed(admin_users):
if room_state.get((EventTypes.Member, admin_user)):
admin_user_id = admin_user
break
if not admin_user_id:
raise SynapseError(
400,
"No local admin user in room",
)
pl_content = power_levels.content
else:
# If there is no power level events then the creator has rights.
pl_content = {}
admin_user_id = create_event.sender
if not self.is_mine_id(admin_user_id):
raise SynapseError(
400,
"No local admin user in room",
)
# Grant the user power equal to the room admin by attempting to send an
# updated power level event.
new_pl_content = dict(pl_content)
new_pl_content["users"] = dict(pl_content.get("users", {}))
new_pl_content["users"][user_to_add] = new_pl_content["users"][admin_user_id]
fake_requester = create_requester(
admin_user_id,
authenticated_entity=requester.authenticated_entity,
)
try:
await self.event_creation_handler.create_and_send_nonmember_event(
fake_requester,
event_dict={
"content": new_pl_content,
"sender": admin_user_id,
"type": EventTypes.PowerLevels,
"state_key": "",
"room_id": room_id,
},
)
except AuthError:
# The admin user we found turned out not to have enough power.
raise SynapseError(
400, "No local admin user in room with power to update power levels."
)
# Now we check if the user we're granting admin rights to is already in
# the room. If not and it's not a public room we invite them.
member_event = room_state.get((EventTypes.Member, user_to_add))
is_joined = False
if member_event:
is_joined = member_event.content["membership"] in (
Membership.JOIN,
Membership.INVITE,
)
if is_joined:
return 200, {}
join_rules = room_state.get((EventTypes.JoinRules, ""))
is_public = False
if join_rules:
is_public = join_rules.content.get("join_rule") == JoinRules.PUBLIC
if is_public:
return 200, {}
await self.room_member_handler.update_membership(
fake_requester,
target=UserID.from_string(user_to_add),
room_id=room_id,
action=Membership.INVITE,
)
return 200, {}
|
async def on_POST(self, request, room_identifier):
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
content = parse_json_object_from_request(request, allow_empty_body=True)
# Resolve to a room ID, if necessary.
if RoomID.is_valid(room_identifier):
room_id = room_identifier
elif RoomAlias.is_valid(room_identifier):
room_alias = RoomAlias.from_string(room_identifier)
room_id, _ = await self.room_member_handler.lookup_room_alias(room_alias)
room_id = room_id.to_string()
else:
raise SynapseError(
400, "%s was not legal room ID or room alias" % (room_identifier,)
)
# Which user to grant room admin rights to.
user_to_add = content.get("user_id", requester.user.to_string())
# Figure out which local users currently have power in the room, if any.
room_state = await self.state_handler.get_current_state(room_id)
if not room_state:
raise SynapseError(400, "Server not in room")
create_event = room_state[(EventTypes.Create, "")]
power_levels = room_state.get((EventTypes.PowerLevels, ""))
if power_levels is not None:
# We pick the local user with the highest power.
user_power = power_levels.content.get("users", {})
admin_users = [user_id for user_id in user_power if self.is_mine_id(user_id)]
admin_users.sort(key=lambda user: user_power[user])
if not admin_users:
raise SynapseError(400, "No local admin user in room")
admin_user_id = None
for admin_user in reversed(admin_users):
if room_state.get((EventTypes.Member, admin_user)):
admin_user_id = admin_user
break
if not admin_user_id:
raise SynapseError(
400,
"No local admin user in room",
)
pl_content = power_levels.content
else:
# If there is no power level events then the creator has rights.
pl_content = {}
admin_user_id = create_event.sender
if not self.is_mine_id(admin_user_id):
raise SynapseError(
400,
"No local admin user in room",
)
# Grant the user power equal to the room admin by attempting to send an
# updated power level event.
new_pl_content = dict(pl_content)
new_pl_content["users"] = dict(pl_content.get("users", {}))
new_pl_content["users"][user_to_add] = new_pl_content["users"][admin_user_id]
fake_requester = create_requester(
admin_user_id,
authenticated_entity=requester.authenticated_entity,
)
try:
await self.event_creation_handler.create_and_send_nonmember_event(
fake_requester,
event_dict={
"content": new_pl_content,
"sender": admin_user_id,
"type": EventTypes.PowerLevels,
"state_key": "",
"room_id": room_id,
},
)
except AuthError:
# The admin user we found turned out not to have enough power.
raise SynapseError(
400, "No local admin user in room with power to update power levels."
)
# Now we check if the user we're granting admin rights to is already in
# the room. If not and it's not a public room we invite them.
member_event = room_state.get((EventTypes.Member, user_to_add))
is_joined = False
if member_event:
is_joined = member_event.content["membership"] in (
Membership.JOIN,
Membership.INVITE,
)
if is_joined:
return 200, {}
join_rules = room_state.get((EventTypes.JoinRules, ""))
is_public = False
if join_rules:
is_public = join_rules.content.get("join_rule") == JoinRules.PUBLIC
if is_public:
return 200, {}
await self.room_member_handler.update_membership(
fake_requester,
target=UserID.from_string(user_to_add),
room_id=room_id,
action=Membership.INVITE,
)
return 200, {}
|
https://github.com/matrix-org/synapse/issues/9505
|
2021-02-26 14:01:23,554 - synapse.http.server - 94 - ERROR - POST-320 - Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7feef12ec358 method='POST' uri='/_synapse/admin/v1/join/%23test%3Aexemple.test.com' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/failure.py", line 512, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/databases/main/event_federation.py", line 634, in get_latest_event_ids_in_room
desc="get_latest_event_ids_in_room",
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1453, in simple_select_onecol
db_autocommit=True,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 676, in runInteraction
**kwargs,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 752, in runWithConnection
self._db_pool.runWithConnection(inner_func, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 746, in inner_func
return func(db_conn, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 540, in new_transaction
r = func(cursor, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1422, in simple_select_onecol_txn
txn.execute(sql, list(keyvalues.values()))
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 295, in execute
self._do_execute(self.txn.execute, sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 321, in _do_execute
return func(sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/psycopg2/extensions.py", line 121, in getquoted
pobjs = [adapt(o) for o in self._seq]
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/types.py", line 220, in __iter__
raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))
ValueError: Attempted to iterate a RoomID
|
ValueError
|
def __init__(self, hs: "HomeServer"):
super().__init__(hs)
self.hs = hs
self.auth = hs.get_auth()
self.store = hs.get_datastore()
|
def __init__(self, hs: "HomeServer"):
self.hs = hs
self.auth = hs.get_auth()
self.room_member_handler = hs.get_room_member_handler()
self.store = hs.get_datastore()
|
https://github.com/matrix-org/synapse/issues/9505
|
2021-02-26 14:01:23,554 - synapse.http.server - 94 - ERROR - POST-320 - Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7feef12ec358 method='POST' uri='/_synapse/admin/v1/join/%23test%3Aexemple.test.com' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/failure.py", line 512, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/databases/main/event_federation.py", line 634, in get_latest_event_ids_in_room
desc="get_latest_event_ids_in_room",
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1453, in simple_select_onecol
db_autocommit=True,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 676, in runInteraction
**kwargs,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 752, in runWithConnection
self._db_pool.runWithConnection(inner_func, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 746, in inner_func
return func(db_conn, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 540, in new_transaction
r = func(cursor, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1422, in simple_select_onecol_txn
txn.execute(sql, list(keyvalues.values()))
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 295, in execute
self._do_execute(self.txn.execute, sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 321, in _do_execute
return func(sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/psycopg2/extensions.py", line 121, in getquoted
pobjs = [adapt(o) for o in self._seq]
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/types.py", line 220, in __iter__
raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))
ValueError: Attempted to iterate a RoomID
|
ValueError
|
async def on_DELETE(
self, request: SynapseRequest, room_identifier: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
room_id, _ = await self.resolve_room_id(room_identifier)
deleted_count = await self.store.delete_forward_extremities_for_room(room_id)
return 200, {"deleted": deleted_count}
|
async def on_DELETE(self, request, room_identifier):
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
room_id = await self.resolve_room_id(room_identifier)
deleted_count = await self.store.delete_forward_extremities_for_room(room_id)
return 200, {"deleted": deleted_count}
|
https://github.com/matrix-org/synapse/issues/9505
|
2021-02-26 14:01:23,554 - synapse.http.server - 94 - ERROR - POST-320 - Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7feef12ec358 method='POST' uri='/_synapse/admin/v1/join/%23test%3Aexemple.test.com' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/failure.py", line 512, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/databases/main/event_federation.py", line 634, in get_latest_event_ids_in_room
desc="get_latest_event_ids_in_room",
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1453, in simple_select_onecol
db_autocommit=True,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 676, in runInteraction
**kwargs,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 752, in runWithConnection
self._db_pool.runWithConnection(inner_func, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 746, in inner_func
return func(db_conn, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 540, in new_transaction
r = func(cursor, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1422, in simple_select_onecol_txn
txn.execute(sql, list(keyvalues.values()))
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 295, in execute
self._do_execute(self.txn.execute, sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 321, in _do_execute
return func(sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/psycopg2/extensions.py", line 121, in getquoted
pobjs = [adapt(o) for o in self._seq]
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/types.py", line 220, in __iter__
raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))
ValueError: Attempted to iterate a RoomID
|
ValueError
|
async def on_GET(
self, request: SynapseRequest, room_identifier: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
room_id, _ = await self.resolve_room_id(room_identifier)
extremities = await self.store.get_forward_extremities_for_room(room_id)
return 200, {"count": len(extremities), "results": extremities}
|
async def on_GET(self, request, room_identifier):
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
room_id = await self.resolve_room_id(room_identifier)
extremities = await self.store.get_forward_extremities_for_room(room_id)
return 200, {"count": len(extremities), "results": extremities}
|
https://github.com/matrix-org/synapse/issues/9505
|
2021-02-26 14:01:23,554 - synapse.http.server - 94 - ERROR - POST-320 - Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7feef12ec358 method='POST' uri='/_synapse/admin/v1/join/%23test%3Aexemple.test.com' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/failure.py", line 512, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/databases/main/event_federation.py", line 634, in get_latest_event_ids_in_room
desc="get_latest_event_ids_in_room",
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1453, in simple_select_onecol
db_autocommit=True,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 676, in runInteraction
**kwargs,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 752, in runWithConnection
self._db_pool.runWithConnection(inner_func, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 746, in inner_func
return func(db_conn, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 540, in new_transaction
r = func(cursor, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1422, in simple_select_onecol_txn
txn.execute(sql, list(keyvalues.values()))
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 295, in execute
self._do_execute(self.txn.execute, sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 321, in _do_execute
return func(sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/psycopg2/extensions.py", line 121, in getquoted
pobjs = [adapt(o) for o in self._seq]
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/types.py", line 220, in __iter__
raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))
ValueError: Attempted to iterate a RoomID
|
ValueError
|
def __init__(self, hs: "HomeServer"):
super().__init__()
self.clock = hs.get_clock()
self.room_context_handler = hs.get_room_context_handler()
self._event_serializer = hs.get_event_client_serializer()
self.auth = hs.get_auth()
|
def __init__(self, hs):
super().__init__()
self.clock = hs.get_clock()
self.room_context_handler = hs.get_room_context_handler()
self._event_serializer = hs.get_event_client_serializer()
self.auth = hs.get_auth()
|
https://github.com/matrix-org/synapse/issues/9505
|
2021-02-26 14:01:23,554 - synapse.http.server - 94 - ERROR - POST-320 - Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7feef12ec358 method='POST' uri='/_synapse/admin/v1/join/%23test%3Aexemple.test.com' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/failure.py", line 512, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/databases/main/event_federation.py", line 634, in get_latest_event_ids_in_room
desc="get_latest_event_ids_in_room",
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1453, in simple_select_onecol
db_autocommit=True,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 676, in runInteraction
**kwargs,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 752, in runWithConnection
self._db_pool.runWithConnection(inner_func, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 746, in inner_func
return func(db_conn, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 540, in new_transaction
r = func(cursor, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1422, in simple_select_onecol_txn
txn.execute(sql, list(keyvalues.values()))
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 295, in execute
self._do_execute(self.txn.execute, sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 321, in _do_execute
return func(sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/psycopg2/extensions.py", line 121, in getquoted
pobjs = [adapt(o) for o in self._seq]
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/types.py", line 220, in __iter__
raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))
ValueError: Attempted to iterate a RoomID
|
ValueError
|
async def on_GET(
self, request: SynapseRequest, room_id: str, event_id: str
) -> Tuple[int, JsonDict]:
requester = await self.auth.get_user_by_req(request, allow_guest=False)
await assert_user_is_admin(self.auth, requester.user)
limit = parse_integer(request, "limit", default=10)
# picking the API shape for symmetry with /messages
filter_str = parse_string(request, b"filter", encoding="utf-8")
if filter_str:
filter_json = urlparse.unquote(filter_str)
event_filter = Filter(json_decoder.decode(filter_json)) # type: Optional[Filter]
else:
event_filter = None
results = await self.room_context_handler.get_event_context(
requester,
room_id,
event_id,
limit,
event_filter,
use_admin_priviledge=True,
)
if not results:
raise SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND)
time_now = self.clock.time_msec()
results["events_before"] = await self._event_serializer.serialize_events(
results["events_before"], time_now
)
results["event"] = await self._event_serializer.serialize_event(
results["event"], time_now
)
results["events_after"] = await self._event_serializer.serialize_events(
results["events_after"], time_now
)
results["state"] = await self._event_serializer.serialize_events(
results["state"], time_now
)
return 200, results
|
async def on_GET(self, request, room_id, event_id):
requester = await self.auth.get_user_by_req(request, allow_guest=False)
await assert_user_is_admin(self.auth, requester.user)
limit = parse_integer(request, "limit", default=10)
# picking the API shape for symmetry with /messages
filter_str = parse_string(request, b"filter", encoding="utf-8")
if filter_str:
filter_json = urlparse.unquote(filter_str)
event_filter = Filter(json_decoder.decode(filter_json)) # type: Optional[Filter]
else:
event_filter = None
results = await self.room_context_handler.get_event_context(
requester,
room_id,
event_id,
limit,
event_filter,
use_admin_priviledge=True,
)
if not results:
raise SynapseError(404, "Event not found.", errcode=Codes.NOT_FOUND)
time_now = self.clock.time_msec()
results["events_before"] = await self._event_serializer.serialize_events(
results["events_before"], time_now
)
results["event"] = await self._event_serializer.serialize_event(
results["event"], time_now
)
results["events_after"] = await self._event_serializer.serialize_events(
results["events_after"], time_now
)
results["state"] = await self._event_serializer.serialize_events(
results["state"], time_now
)
return 200, results
|
https://github.com/matrix-org/synapse/issues/9505
|
2021-02-26 14:01:23,554 - synapse.http.server - 94 - ERROR - POST-320 - Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7feef12ec358 method='POST' uri='/_synapse/admin/v1/join/%23test%3Aexemple.test.com' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/failure.py", line 512, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/databases/main/event_federation.py", line 634, in get_latest_event_ids_in_room
desc="get_latest_event_ids_in_room",
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1453, in simple_select_onecol
db_autocommit=True,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 676, in runInteraction
**kwargs,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 752, in runWithConnection
self._db_pool.runWithConnection(inner_func, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 746, in inner_func
return func(db_conn, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 540, in new_transaction
r = func(cursor, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1422, in simple_select_onecol_txn
txn.execute(sql, list(keyvalues.values()))
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 295, in execute
self._do_execute(self.txn.execute, sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 321, in _do_execute
return func(sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/psycopg2/extensions.py", line 121, in getquoted
pobjs = [adapt(o) for o in self._seq]
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/types.py", line 220, in __iter__
raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))
ValueError: Attempted to iterate a RoomID
|
ValueError
|
async def resolve_room_id(
self, room_identifier: str, remote_room_hosts: Optional[List[str]] = None
) -> Tuple[str, Optional[List[str]]]:
"""
Resolve a room identifier to a room ID, if necessary.
This also performanes checks to ensure the room ID is of the proper form.
Args:
room_identifier: The room ID or alias.
remote_room_hosts: The potential remote room hosts to use.
Returns:
The resolved room ID.
Raises:
SynapseError if the room ID is of the wrong form.
"""
if RoomID.is_valid(room_identifier):
resolved_room_id = room_identifier
elif RoomAlias.is_valid(room_identifier):
room_alias = RoomAlias.from_string(room_identifier)
(
room_id,
remote_room_hosts,
) = await self.room_member_handler.lookup_room_alias(room_alias)
resolved_room_id = room_id.to_string()
else:
raise SynapseError(
400, "%s was not legal room ID or room alias" % (room_identifier,)
)
if not resolved_room_id:
raise SynapseError(400, "Unknown room ID or room alias %s" % room_identifier)
return resolved_room_id, remote_room_hosts
|
async def resolve_room_id(self, room_identifier: str) -> str:
"""Resolve to a room ID, if necessary."""
if RoomID.is_valid(room_identifier):
resolved_room_id = room_identifier
elif RoomAlias.is_valid(room_identifier):
room_alias = RoomAlias.from_string(room_identifier)
room_id, _ = await self.room_member_handler.lookup_room_alias(room_alias)
resolved_room_id = room_id.to_string()
else:
raise SynapseError(
400, "%s was not legal room ID or room alias" % (room_identifier,)
)
if not resolved_room_id:
raise SynapseError(400, "Unknown room ID or room alias %s" % room_identifier)
return resolved_room_id
|
https://github.com/matrix-org/synapse/issues/9505
|
2021-02-26 14:01:23,554 - synapse.http.server - 94 - ERROR - POST-320 - Failed handle request via 'JoinRoomAliasServlet': <XForwardedForRequest at 0x7feef12ec358 method='POST' uri='/_synapse/admin/v1/join/%23test%3Aexemple.test.com' clientproto='HTTP/1.1' site='8008'>
Traceback (most recent call last):
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/failure.py", line 512, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/databases/main/event_federation.py", line 634, in get_latest_event_ids_in_room
desc="get_latest_event_ids_in_room",
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1453, in simple_select_onecol
db_autocommit=True,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 676, in runInteraction
**kwargs,
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 752, in runWithConnection
self._db_pool.runWithConnection(inner_func, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/synapse2/env/lib/python3.6/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 746, in inner_func
return func(db_conn, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 540, in new_transaction
r = func(cursor, *args, **kwargs)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 1422, in simple_select_onecol_txn
txn.execute(sql, list(keyvalues.values()))
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 295, in execute
self._do_execute(self.txn.execute, sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/storage/database.py", line 321, in _do_execute
return func(sql, *args)
File "/opt/synapse2/env/lib/python3.6/site-packages/psycopg2/extensions.py", line 121, in getquoted
pobjs = [adapt(o) for o in self._seq]
File "/opt/synapse2/env/lib/python3.6/site-packages/synapse/types.py", line 220, in __iter__
raise ValueError("Attempted to iterate a %s" % (type(self).__name__,))
ValueError: Attempted to iterate a RoomID
|
ValueError
|
async def _unsafe_process(self) -> None:
# If self.pos is None then means we haven't fetched it from DB
if self.pos is None:
self.pos = await self.store.get_user_directory_stream_pos()
# If still None then the initial background update hasn't happened yet.
if self.pos is None:
return None
# Loop round handling deltas until we're up to date
while True:
with Measure(self.clock, "user_dir_delta"):
room_max_stream_ordering = self.store.get_room_max_stream_ordering()
if self.pos == room_max_stream_ordering:
return
logger.debug(
"Processing user stats %s->%s", self.pos, room_max_stream_ordering
)
max_pos, deltas = await self.store.get_current_state_deltas(
self.pos, room_max_stream_ordering
)
logger.debug("Handling %d state deltas", len(deltas))
await self._handle_deltas(deltas)
self.pos = max_pos
# Expose current event processing position to prometheus
synapse.metrics.event_processing_positions.labels("user_dir").set(max_pos)
await self.store.update_user_directory_stream_pos(max_pos)
|
async def _unsafe_process(self) -> None:
# If self.pos is None then means we haven't fetched it from DB
if self.pos is None:
self.pos = await self.store.get_user_directory_stream_pos()
# Loop round handling deltas until we're up to date
while True:
with Measure(self.clock, "user_dir_delta"):
room_max_stream_ordering = self.store.get_room_max_stream_ordering()
if self.pos == room_max_stream_ordering:
return
logger.debug(
"Processing user stats %s->%s", self.pos, room_max_stream_ordering
)
max_pos, deltas = await self.store.get_current_state_deltas(
self.pos, room_max_stream_ordering
)
logger.debug("Handling %d state deltas", len(deltas))
await self._handle_deltas(deltas)
self.pos = max_pos
# Expose current event processing position to prometheus
synapse.metrics.event_processing_positions.labels("user_dir").set(max_pos)
await self.store.update_user_directory_stream_pos(max_pos)
|
https://github.com/matrix-org/synapse/issues/9420
|
2021-02-16 17:36:09,420 - synapse.metrics.background_process_metrics - 211 - ERROR - user_directory.notify_new_event-9 - Background process 'user_directory.notify_new_event' threw an exception
Traceback (most recent call last):
File "/opt/venvs/matrix-synapse/lib/python3.7/site-packages/synapse/metrics/background_process_metrics.py", line 208, in run
return await maybe_awaitable(func(*args, **kwargs))
File "/opt/venvs/matrix-synapse/lib/python3.7/site-packages/synapse/handlers/user_directory.py", line 110, in process
await self._unsafe_process()
File "/opt/venvs/matrix-synapse/lib/python3.7/site-packages/synapse/handlers/user_directory.py", line 159, in _unsafe_process
self.pos, room_max_stream_ordering
File "/opt/venvs/matrix-synapse/lib/python3.7/site-packages/synapse/storage/databases/main/state_deltas.py", line 51, in get_current_state_deltas
prev_stream_id = int(prev_stream_id)
TypeError: int() argument must be a string, a bytes-like object or a number, not 'NoneType
|
TypeError
|
async def get_user_directory_stream_pos(self) -> Optional[int]:
"""
Get the stream ID of the user directory stream.
Returns:
The stream token or None if the initial background update hasn't happened yet.
"""
return await self.db_pool.simple_select_one_onecol(
table="user_directory_stream_pos",
keyvalues={},
retcol="stream_id",
desc="get_user_directory_stream_pos",
)
|
async def get_user_directory_stream_pos(self) -> int:
return await self.db_pool.simple_select_one_onecol(
table="user_directory_stream_pos",
keyvalues={},
retcol="stream_id",
desc="get_user_directory_stream_pos",
)
|
https://github.com/matrix-org/synapse/issues/9420
|
2021-02-16 17:36:09,420 - synapse.metrics.background_process_metrics - 211 - ERROR - user_directory.notify_new_event-9 - Background process 'user_directory.notify_new_event' threw an exception
Traceback (most recent call last):
File "/opt/venvs/matrix-synapse/lib/python3.7/site-packages/synapse/metrics/background_process_metrics.py", line 208, in run
return await maybe_awaitable(func(*args, **kwargs))
File "/opt/venvs/matrix-synapse/lib/python3.7/site-packages/synapse/handlers/user_directory.py", line 110, in process
await self._unsafe_process()
File "/opt/venvs/matrix-synapse/lib/python3.7/site-packages/synapse/handlers/user_directory.py", line 159, in _unsafe_process
self.pos, room_max_stream_ordering
File "/opt/venvs/matrix-synapse/lib/python3.7/site-packages/synapse/storage/databases/main/state_deltas.py", line 51, in get_current_state_deltas
prev_stream_id = int(prev_stream_id)
TypeError: int() argument must be a string, a bytes-like object or a number, not 'NoneType
|
TypeError
|
async def clone_existing_room(
self,
requester: Requester,
old_room_id: str,
new_room_id: str,
new_room_version: RoomVersion,
tombstone_event_id: str,
) -> None:
"""Populate a new room based on an old room
Args:
requester: the user requesting the upgrade
old_room_id : the id of the room to be replaced
new_room_id: the id to give the new room (should already have been
created with _gemerate_room_id())
new_room_version: the new room version to use
tombstone_event_id: the ID of the tombstone event in the old room.
"""
user_id = requester.user.to_string()
if not await self.spam_checker.user_may_create_room(user_id):
raise SynapseError(403, "You are not permitted to create rooms")
creation_content = {
"room_version": new_room_version.identifier,
"predecessor": {"room_id": old_room_id, "event_id": tombstone_event_id},
} # type: JsonDict
# Check if old room was non-federatable
# Get old room's create event
old_room_create_event = await self.store.get_create_event_for_room(old_room_id)
# Check if the create event specified a non-federatable room
if not old_room_create_event.content.get("m.federate", True):
# If so, mark the new room as non-federatable as well
creation_content["m.federate"] = False
initial_state = {}
# Replicate relevant room events
types_to_copy = (
(EventTypes.JoinRules, ""),
(EventTypes.Name, ""),
(EventTypes.Topic, ""),
(EventTypes.RoomHistoryVisibility, ""),
(EventTypes.GuestAccess, ""),
(EventTypes.RoomAvatar, ""),
(EventTypes.RoomEncryption, ""),
(EventTypes.ServerACL, ""),
(EventTypes.RelatedGroups, ""),
(EventTypes.PowerLevels, ""),
)
old_room_state_ids = await self.store.get_filtered_current_state_ids(
old_room_id, StateFilter.from_types(types_to_copy)
)
# map from event_id to BaseEvent
old_room_state_events = await self.store.get_events(old_room_state_ids.values())
for k, old_event_id in old_room_state_ids.items():
old_event = old_room_state_events.get(old_event_id)
if old_event:
initial_state[k] = old_event.content
# deep-copy the power-levels event before we start modifying it
# note that if frozen_dicts are enabled, `power_levels` will be a frozen
# dict so we can't just copy.deepcopy it.
initial_state[(EventTypes.PowerLevels, "")] = power_levels = (
copy_power_levels_contents(initial_state[(EventTypes.PowerLevels, "")])
)
# Resolve the minimum power level required to send any state event
# We will give the upgrading user this power level temporarily (if necessary) such that
# they are able to copy all of the state events over, then revert them back to their
# original power level afterwards in _update_upgraded_room_pls
# Copy over user power levels now as this will not be possible with >100PL users once
# the room has been created
# Calculate the minimum power level needed to clone the room
event_power_levels = power_levels.get("events", {})
state_default = power_levels.get("state_default", 50)
ban = power_levels.get("ban", 50)
needed_power_level = max(state_default, ban, max(event_power_levels.values()))
# Get the user's current power level, this matches the logic in get_user_power_level,
# but without the entire state map.
user_power_levels = power_levels.setdefault("users", {})
users_default = power_levels.get("users_default", 0)
current_power_level = user_power_levels.get(user_id, users_default)
# Raise the requester's power level in the new room if necessary
if current_power_level < needed_power_level:
user_power_levels[user_id] = needed_power_level
await self._send_events_for_new_room(
requester,
new_room_id,
# we expect to override all the presets with initial_state, so this is
# somewhat arbitrary.
preset_config=RoomCreationPreset.PRIVATE_CHAT,
invite_list=[],
initial_state=initial_state,
creation_content=creation_content,
ratelimit=False,
)
# Transfer membership events
old_room_member_state_ids = await self.store.get_filtered_current_state_ids(
old_room_id, StateFilter.from_types([(EventTypes.Member, None)])
)
# map from event_id to BaseEvent
old_room_member_state_events = await self.store.get_events(
old_room_member_state_ids.values()
)
for old_event in old_room_member_state_events.values():
# Only transfer ban events
if (
"membership" in old_event.content
and old_event.content["membership"] == "ban"
):
await self.room_member_handler.update_membership(
requester,
UserID.from_string(old_event["state_key"]),
new_room_id,
"ban",
ratelimit=False,
content=old_event.content,
)
|
async def clone_existing_room(
self,
requester: Requester,
old_room_id: str,
new_room_id: str,
new_room_version: RoomVersion,
tombstone_event_id: str,
) -> None:
"""Populate a new room based on an old room
Args:
requester: the user requesting the upgrade
old_room_id : the id of the room to be replaced
new_room_id: the id to give the new room (should already have been
created with _gemerate_room_id())
new_room_version: the new room version to use
tombstone_event_id: the ID of the tombstone event in the old room.
"""
user_id = requester.user.to_string()
if not await self.spam_checker.user_may_create_room(user_id):
raise SynapseError(403, "You are not permitted to create rooms")
creation_content = {
"room_version": new_room_version.identifier,
"predecessor": {"room_id": old_room_id, "event_id": tombstone_event_id},
} # type: JsonDict
# Check if old room was non-federatable
# Get old room's create event
old_room_create_event = await self.store.get_create_event_for_room(old_room_id)
# Check if the create event specified a non-federatable room
if not old_room_create_event.content.get("m.federate", True):
# If so, mark the new room as non-federatable as well
creation_content["m.federate"] = False
initial_state = {}
# Replicate relevant room events
types_to_copy = (
(EventTypes.JoinRules, ""),
(EventTypes.Name, ""),
(EventTypes.Topic, ""),
(EventTypes.RoomHistoryVisibility, ""),
(EventTypes.GuestAccess, ""),
(EventTypes.RoomAvatar, ""),
(EventTypes.RoomEncryption, ""),
(EventTypes.ServerACL, ""),
(EventTypes.RelatedGroups, ""),
(EventTypes.PowerLevels, ""),
)
old_room_state_ids = await self.store.get_filtered_current_state_ids(
old_room_id, StateFilter.from_types(types_to_copy)
)
# map from event_id to BaseEvent
old_room_state_events = await self.store.get_events(old_room_state_ids.values())
for k, old_event_id in old_room_state_ids.items():
old_event = old_room_state_events.get(old_event_id)
if old_event:
initial_state[k] = old_event.content
# deep-copy the power-levels event before we start modifying it
# note that if frozen_dicts are enabled, `power_levels` will be a frozen
# dict so we can't just copy.deepcopy it.
initial_state[(EventTypes.PowerLevels, "")] = power_levels = (
copy_power_levels_contents(initial_state[(EventTypes.PowerLevels, "")])
)
# Resolve the minimum power level required to send any state event
# We will give the upgrading user this power level temporarily (if necessary) such that
# they are able to copy all of the state events over, then revert them back to their
# original power level afterwards in _update_upgraded_room_pls
# Copy over user power levels now as this will not be possible with >100PL users once
# the room has been created
# Calculate the minimum power level needed to clone the room
event_power_levels = power_levels.get("events", {})
state_default = power_levels.get("state_default", 0)
ban = power_levels.get("ban")
needed_power_level = max(state_default, ban, max(event_power_levels.values()))
# Raise the requester's power level in the new room if necessary
current_power_level = power_levels["users"][user_id]
if current_power_level < needed_power_level:
power_levels["users"][user_id] = needed_power_level
await self._send_events_for_new_room(
requester,
new_room_id,
# we expect to override all the presets with initial_state, so this is
# somewhat arbitrary.
preset_config=RoomCreationPreset.PRIVATE_CHAT,
invite_list=[],
initial_state=initial_state,
creation_content=creation_content,
ratelimit=False,
)
# Transfer membership events
old_room_member_state_ids = await self.store.get_filtered_current_state_ids(
old_room_id, StateFilter.from_types([(EventTypes.Member, None)])
)
# map from event_id to BaseEvent
old_room_member_state_events = await self.store.get_events(
old_room_member_state_ids.values()
)
for old_event in old_room_member_state_events.values():
# Only transfer ban events
if (
"membership" in old_event.content
and old_event.content["membership"] == "ban"
):
await self.room_member_handler.update_membership(
requester,
UserID.from_string(old_event["state_key"]),
new_room_id,
"ban",
ratelimit=False,
content=old_event.content,
)
|
https://github.com/matrix-org/synapse/issues/9378
|
2021-02-10 21:24:57,160 - synapse.http.server - 91 - ERROR - POST-269- Failed handle request via 'RoomUpgradeRestServlet': <XForwardedForRequest at 0x7ff64c3a6520 method='POST' uri='/_matrix/client/r0/rooms/!GvvSMoCBZYwiTcVaOt%3Aamorgan.xyz/upgrade' clientproto='HTTP/1.0' site='8008'>
Traceback (most recent call last):
File "/opt/synapse/lib/python3.8/site-packages/synapse/http/server.py", line 252, in _async_render_wrapper
callback_return = await self._async_render(request)
File "/opt/synapse/lib/python3.8/site-packages/synapse/http/server.py", line 430, in _async_render
callback_return = await raw_callback_return
File "/opt/synapse/lib/python3.8/site-packages/synapse/rest/client/v2_alpha/room_upgrade_rest_servlet.py", line 76, in on_POST
new_room_id = await self._room_creation_handler.upgrade_room(
File "/opt/synapse/lib/python3.8/site-packages/synapse/handlers/room.py", line 171, in upgrade_room
ret = await self._upgrade_response_cache.wrap(
File "/opt/synapse/lib/python3.8/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
result = g.send(result)
File "/opt/synapse/lib/python3.8/site-packages/synapse/handlers/room.py", line 229, in _upgrade_room
await self.clone_existing_room(
File "/opt/synapse/lib/python3.8/site-packages/synapse/handlers/room.py", line 433, in clone_existing_room
needed_power_level = max(state_default, ban, max(event_power_levels.values()))
TypeError: '>' not supported between instances of 'NoneType' and 'int'
|
TypeError
|
async def delete_pusher_by_app_id_pushkey_user_id(
self, app_id: str, pushkey: str, user_id: str
) -> None:
def delete_pusher_txn(txn, stream_id):
self._invalidate_cache_and_stream( # type: ignore
txn, self.get_if_user_has_pusher, (user_id,)
)
# It is expected that there is exactly one pusher to delete, but
# if it isn't there (or there are multiple) delete them all.
self.db_pool.simple_delete_txn(
txn,
"pushers",
{"app_id": app_id, "pushkey": pushkey, "user_name": user_id},
)
# it's possible for us to end up with duplicate rows for
# (app_id, pushkey, user_id) at different stream_ids, but that
# doesn't really matter.
self.db_pool.simple_insert_txn(
txn,
table="deleted_pushers",
values={
"stream_id": stream_id,
"app_id": app_id,
"pushkey": pushkey,
"user_id": user_id,
},
)
async with self._pushers_id_gen.get_next() as stream_id:
await self.db_pool.runInteraction("delete_pusher", delete_pusher_txn, stream_id)
|
async def delete_pusher_by_app_id_pushkey_user_id(
self, app_id: str, pushkey: str, user_id: str
) -> None:
def delete_pusher_txn(txn, stream_id):
self._invalidate_cache_and_stream( # type: ignore
txn, self.get_if_user_has_pusher, (user_id,)
)
self.db_pool.simple_delete_one_txn(
txn,
"pushers",
{"app_id": app_id, "pushkey": pushkey, "user_name": user_id},
)
# it's possible for us to end up with duplicate rows for
# (app_id, pushkey, user_id) at different stream_ids, but that
# doesn't really matter.
self.db_pool.simple_insert_txn(
txn,
table="deleted_pushers",
values={
"stream_id": stream_id,
"app_id": app_id,
"pushkey": pushkey,
"user_id": user_id,
},
)
async with self._pushers_id_gen.get_next() as stream_id:
await self.db_pool.runInteraction("delete_pusher", delete_pusher_txn, stream_id)
|
https://github.com/matrix-org/synapse/issues/5101
|
2019-04-26 13:17:52,980 - synapse.push.httppusher - 144 - ERROR - httppush.process-34525- Exception processing notifs
Traceback (most recent call last):
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/push/httppusher.py", line 142, in _process
yield self._unsafe_process()
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/failure.py", line 491, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/push/httppusher.py", line 178, in _unsafe_process
self.clock.time_msec()
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/failure.py", line 491, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/pusher.py", line 307, in update_pusher_last_stream_ordering_and_success
desc="update_pusher_last_stream_ordering_and_success",
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/failure.py", line 491, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/_base.py", line 393, in runInteraction
*args, **kwargs
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/failure.py", line 491, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/_base.py", line 442, in runWithConnection
inner_func, *args, **kwargs
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/_base.py", line 438, in inner_func
return func(conn, *args, **kwargs)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/_base.py", line 314, in _new_transaction
r = func(txn, *args, **kwargs)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/_base.py", line 1031, in _simple_update_one_txn
raise StoreError(404, "No row found (%s)" % (table,))
synapse.api.errors.StoreError: 404: No row found (pushers)
|
synapse.api.errors.StoreError
|
def delete_pusher_txn(txn, stream_id):
self._invalidate_cache_and_stream( # type: ignore
txn, self.get_if_user_has_pusher, (user_id,)
)
# It is expected that there is exactly one pusher to delete, but
# if it isn't there (or there are multiple) delete them all.
self.db_pool.simple_delete_txn(
txn,
"pushers",
{"app_id": app_id, "pushkey": pushkey, "user_name": user_id},
)
# it's possible for us to end up with duplicate rows for
# (app_id, pushkey, user_id) at different stream_ids, but that
# doesn't really matter.
self.db_pool.simple_insert_txn(
txn,
table="deleted_pushers",
values={
"stream_id": stream_id,
"app_id": app_id,
"pushkey": pushkey,
"user_id": user_id,
},
)
|
def delete_pusher_txn(txn, stream_id):
self._invalidate_cache_and_stream( # type: ignore
txn, self.get_if_user_has_pusher, (user_id,)
)
self.db_pool.simple_delete_one_txn(
txn,
"pushers",
{"app_id": app_id, "pushkey": pushkey, "user_name": user_id},
)
# it's possible for us to end up with duplicate rows for
# (app_id, pushkey, user_id) at different stream_ids, but that
# doesn't really matter.
self.db_pool.simple_insert_txn(
txn,
table="deleted_pushers",
values={
"stream_id": stream_id,
"app_id": app_id,
"pushkey": pushkey,
"user_id": user_id,
},
)
|
https://github.com/matrix-org/synapse/issues/5101
|
2019-04-26 13:17:52,980 - synapse.push.httppusher - 144 - ERROR - httppush.process-34525- Exception processing notifs
Traceback (most recent call last):
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/push/httppusher.py", line 142, in _process
yield self._unsafe_process()
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/failure.py", line 491, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/push/httppusher.py", line 178, in _unsafe_process
self.clock.time_msec()
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/failure.py", line 491, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/pusher.py", line 307, in update_pusher_last_stream_ordering_and_success
desc="update_pusher_last_stream_ordering_and_success",
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/failure.py", line 491, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/_base.py", line 393, in runInteraction
*args, **kwargs
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/internet/defer.py", line 1416, in _inlineCallbacks
result = result.throwExceptionIntoGenerator(g)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/failure.py", line 491, in throwExceptionIntoGenerator
return g.throw(self.type, self.value, self.tb)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/_base.py", line 442, in runWithConnection
inner_func, *args, **kwargs
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/_base.py", line 438, in inner_func
return func(conn, *args, **kwargs)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/_base.py", line 314, in _new_transaction
r = func(txn, *args, **kwargs)
File "/opt/venvs/matrix-synapse/lib/python3.5/site-packages/synapse/storage/_base.py", line 1031, in _simple_update_one_txn
raise StoreError(404, "No row found (%s)" % (table,))
synapse.api.errors.StoreError: 404: No row found (pushers)
|
synapse.api.errors.StoreError
|
def sorted_topologically(
nodes: Iterable[T],
graph: Mapping[T, Collection[T]],
) -> Generator[T, None, None]:
"""Given a set of nodes and a graph, yield the nodes in toplogical order.
For example `sorted_topologically([1, 2], {1: [2]})` will yield `2, 1`.
"""
# This is implemented by Kahn's algorithm.
degree_map = {node: 0 for node in nodes}
reverse_graph = {} # type: Dict[T, Set[T]]
for node, edges in graph.items():
if node not in degree_map:
continue
for edge in set(edges):
if edge in degree_map:
degree_map[node] += 1
reverse_graph.setdefault(edge, set()).add(node)
reverse_graph.setdefault(node, set())
zero_degree = [node for node, degree in degree_map.items() if degree == 0]
heapq.heapify(zero_degree)
while zero_degree:
node = heapq.heappop(zero_degree)
yield node
for edge in reverse_graph.get(node, []):
if edge in degree_map:
degree_map[edge] -= 1
if degree_map[edge] == 0:
heapq.heappush(zero_degree, edge)
|
def sorted_topologically(
nodes: Iterable[T],
graph: Mapping[T, Collection[T]],
) -> Generator[T, None, None]:
"""Given a set of nodes and a graph, yield the nodes in toplogical order.
For example `sorted_topologically([1, 2], {1: [2]})` will yield `2, 1`.
"""
# This is implemented by Kahn's algorithm.
degree_map = {node: 0 for node in nodes}
reverse_graph = {} # type: Dict[T, Set[T]]
for node, edges in graph.items():
if node not in degree_map:
continue
for edge in edges:
if edge in degree_map:
degree_map[node] += 1
reverse_graph.setdefault(edge, set()).add(node)
reverse_graph.setdefault(node, set())
zero_degree = [node for node, degree in degree_map.items() if degree == 0]
heapq.heapify(zero_degree)
while zero_degree:
node = heapq.heappop(zero_degree)
yield node
for edge in reverse_graph.get(node, []):
if edge in degree_map:
degree_map[edge] -= 1
if degree_map[edge] == 0:
heapq.heappush(zero_degree, edge)
|
https://github.com/matrix-org/synapse/issues/9208
|
ΡΠ½Π² 22 19:05:51 stratofortress.nexus.i.intelfx.name synapse[373164]: synapse.storage.background_updates: [background_updates-0] Starting update batch on background update 'chain_cover'
ΡΠ½Π² 22 19:05:51 stratofortress.nexus.i.intelfx.name synapse[373164]: synapse.storage.background_updates: [background_updates-0] Error doing update
Traceback (most recent call last):
File "/usr/lib/python3.9/site-packages/synapse/storage/background_updates.py", line 116, in run_background_updates
result = await self.do_next_background_update(
File "/usr/lib/python3.9/site-packages/synapse/storage/background_updates.py", line 227, in do_next_background_update
await self._do_background_update(desired_duration_ms)
File "/usr/lib/python3.9/site-packages/synapse/storage/background_updates.py", line 264, in _do_background_update
items_updated = await update_handler(progress, batch_size)
File "/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events_bg_updates.py", line 748, in _chain_cover_index
result = await self.db_pool.runInteraction(
File "/usr/lib/python3.9/site-packages/synapse/storage/database.py", line 656, in runInteraction
result = await self.runWithConnection(
File "/usr/lib/python3.9/site-packages/synapse/storage/database.py", line 739, in runWithConnection
return await make_deferred_yieldable(
File "/usr/lib/python3.9/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/usr/lib/python3.9/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/usr/lib/python3.9/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/usr/lib/python3.9/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/usr/lib/python3.9/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/usr/lib/python3.9/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/usr/lib/python3.9/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/usr/lib/python3.9/site-packages/synapse/storage/database.py", line 734, in inner_func
return func(db_conn, *args, **kwargs)
File "/usr/lib/python3.9/site-packages/synapse/storage/database.py", line 534, in new_transaction
r = func(cursor, *args, **kwargs)
File "/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events_bg_updates.py", line 920, in _calculate_chain_cover_txn
PersistEventsStore._add_chain_cover_index(
File "/usr/lib/python3.9/site-packages/synapse/storage/databases/main/events.py", line 634, in _add_chain_cover_index
existing_chain_id = chain_map[auth_id]
KeyError: '$2PBTqUSs6gQtrZ3jZW8xVUSHvDbUYR3TKpFBoDHHwJk'
|
KeyError
|
async def get_file(
self,
url: str,
output_stream: BinaryIO,
max_size: Optional[int] = None,
headers: Optional[RawHeaders] = None,
) -> Tuple[int, Dict[bytes, List[bytes]], str, int]:
"""GETs a file from a given URL
Args:
url: The URL to GET
output_stream: File to write the response body to.
headers: A map from header name to a list of values for that header
Returns:
A tuple of the file length, dict of the response
headers, absolute URI of the response and HTTP response code.
Raises:
RequestTimedOutError: if there is a timeout before the response headers
are received. Note there is currently no timeout on reading the response
body.
SynapseError: if the response is not a 2xx, the remote file is too large, or
another exception happens during the download.
"""
actual_headers = {b"User-Agent": [self.user_agent]}
if headers:
actual_headers.update(headers) # type: ignore
response = await self.request("GET", url, headers=Headers(actual_headers))
resp_headers = dict(response.headers.getAllRawHeaders())
if (
b"Content-Length" in resp_headers
and max_size
and int(resp_headers[b"Content-Length"][0]) > max_size
):
logger.warning("Requested URL is too large > %r bytes" % (max_size,))
raise SynapseError(
502,
"Requested file is too large > %r bytes" % (max_size,),
Codes.TOO_LARGE,
)
if response.code > 299:
logger.warning("Got %d when downloading %s" % (response.code, url))
raise SynapseError(502, "Got error %d" % (response.code,), Codes.UNKNOWN)
# TODO: if our Content-Type is HTML or something, just read the first
# N bytes into RAM rather than saving it all to disk only to read it
# straight back in again
try:
length = await make_deferred_yieldable(
read_body_with_max_size(response, output_stream, max_size)
)
except BodyExceededMaxSize:
raise SynapseError(
502,
"Requested file is too large > %r bytes" % (max_size,),
Codes.TOO_LARGE,
)
except Exception as e:
raise SynapseError(502, ("Failed to download remote body: %s" % e)) from e
return (
length,
resp_headers,
response.request.absoluteURI.decode("ascii"),
response.code,
)
|
async def get_file(
self,
url: str,
output_stream: BinaryIO,
max_size: Optional[int] = None,
headers: Optional[RawHeaders] = None,
) -> Tuple[int, Dict[bytes, List[bytes]], str, int]:
"""GETs a file from a given URL
Args:
url: The URL to GET
output_stream: File to write the response body to.
headers: A map from header name to a list of values for that header
Returns:
A tuple of the file length, dict of the response
headers, absolute URI of the response and HTTP response code.
Raises:
RequestTimedOutError: if there is a timeout before the response headers
are received. Note there is currently no timeout on reading the response
body.
SynapseError: if the response is not a 2xx, the remote file is too large, or
another exception happens during the download.
"""
actual_headers = {b"User-Agent": [self.user_agent]}
if headers:
actual_headers.update(headers) # type: ignore
response = await self.request("GET", url, headers=Headers(actual_headers))
resp_headers = dict(response.headers.getAllRawHeaders())
if (
b"Content-Length" in resp_headers
and max_size
and int(resp_headers[b"Content-Length"][0]) > max_size
):
logger.warning("Requested URL is too large > %r bytes" % (max_size,))
raise SynapseError(
502,
"Requested file is too large > %r bytes" % (max_size,),
Codes.TOO_LARGE,
)
if response.code > 299:
logger.warning("Got %d when downloading %s" % (response.code, url))
raise SynapseError(502, "Got error %d" % (response.code,), Codes.UNKNOWN)
# TODO: if our Content-Type is HTML or something, just read the first
# N bytes into RAM rather than saving it all to disk only to read it
# straight back in again
try:
length = await make_deferred_yieldable(
read_body_with_max_size(response, output_stream, max_size)
)
except BodyExceededMaxSize:
SynapseError(
502,
"Requested file is too large > %r bytes" % (max_size,),
Codes.TOO_LARGE,
)
except Exception as e:
raise SynapseError(502, ("Failed to download remote body: %s" % e)) from e
return (
length,
resp_headers,
response.request.absoluteURI.decode("ascii"),
response.code,
)
|
https://github.com/matrix-org/synapse/issues/9132
|
2021-01-15 20:32:45,345 - synapse.http.matrixfederationclient - 987 - WARNING - GET-25- {GET-O-1} [matrix.org] Requested file is too large > 10485760 bytes
2021-01-15 20:32:45,345 - synapse.rest.media.v1.media_repository - 417 - ERROR - GET-25- Failed to fetch remote media matrix.org/cPeSAplLYzzcKlpJjLtwlzrT
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/synapse/rest/media/v1/media_repository.py", line 384, in _download_remote_file
"allow_remote": "false"
File "/usr/local/lib/python3.7/site-packages/synapse/http/matrixfederationclient.py", line 1004, in get_file
length,
UnboundLocalError: local variable 'length' referenced before assignment
|
UnboundLocalError
|
async def get_file(
self,
destination: str,
path: str,
output_stream,
args: Optional[QueryArgs] = None,
retry_on_dns_fail: bool = True,
max_size: Optional[int] = None,
ignore_backoff: bool = False,
) -> Tuple[int, Dict[bytes, List[bytes]]]:
"""GETs a file from a given homeserver
Args:
destination: The remote server to send the HTTP request to.
path: The HTTP path to GET.
output_stream: File to write the response body to.
args: Optional dictionary used to create the query string.
ignore_backoff: true to ignore the historical backoff data
and try the request anyway.
Returns:
Resolves with an (int,dict) tuple of
the file length and a dict of the response headers.
Raises:
HttpResponseException: If we get an HTTP response code >= 300
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
"""
request = MatrixFederationRequest(
method="GET", destination=destination, path=path, query=args
)
response = await self._send_request(
request, retry_on_dns_fail=retry_on_dns_fail, ignore_backoff=ignore_backoff
)
headers = dict(response.headers.getAllRawHeaders())
try:
d = read_body_with_max_size(response, output_stream, max_size)
d.addTimeout(self.default_timeout, self.reactor)
length = await make_deferred_yieldable(d)
except BodyExceededMaxSize:
msg = "Requested file is too large > %r bytes" % (max_size,)
logger.warning(
"{%s} [%s] %s",
request.txn_id,
request.destination,
msg,
)
raise SynapseError(502, msg, Codes.TOO_LARGE)
except Exception as e:
logger.warning(
"{%s} [%s] Error reading response: %s",
request.txn_id,
request.destination,
e,
)
raise
logger.info(
"{%s} [%s] Completed: %d %s [%d bytes] %s %s",
request.txn_id,
request.destination,
response.code,
response.phrase.decode("ascii", errors="replace"),
length,
request.method,
request.uri.decode("ascii"),
)
return (length, headers)
|
async def get_file(
self,
destination: str,
path: str,
output_stream,
args: Optional[QueryArgs] = None,
retry_on_dns_fail: bool = True,
max_size: Optional[int] = None,
ignore_backoff: bool = False,
) -> Tuple[int, Dict[bytes, List[bytes]]]:
"""GETs a file from a given homeserver
Args:
destination: The remote server to send the HTTP request to.
path: The HTTP path to GET.
output_stream: File to write the response body to.
args: Optional dictionary used to create the query string.
ignore_backoff: true to ignore the historical backoff data
and try the request anyway.
Returns:
Resolves with an (int,dict) tuple of
the file length and a dict of the response headers.
Raises:
HttpResponseException: If we get an HTTP response code >= 300
(except 429).
NotRetryingDestination: If we are not yet ready to retry this
server.
FederationDeniedError: If this destination is not on our
federation whitelist
RequestSendFailed: If there were problems connecting to the
remote, due to e.g. DNS failures, connection timeouts etc.
"""
request = MatrixFederationRequest(
method="GET", destination=destination, path=path, query=args
)
response = await self._send_request(
request, retry_on_dns_fail=retry_on_dns_fail, ignore_backoff=ignore_backoff
)
headers = dict(response.headers.getAllRawHeaders())
try:
d = read_body_with_max_size(response, output_stream, max_size)
d.addTimeout(self.default_timeout, self.reactor)
length = await make_deferred_yieldable(d)
except BodyExceededMaxSize:
msg = "Requested file is too large > %r bytes" % (max_size,)
logger.warning(
"{%s} [%s] %s",
request.txn_id,
request.destination,
msg,
)
SynapseError(502, msg, Codes.TOO_LARGE)
except Exception as e:
logger.warning(
"{%s} [%s] Error reading response: %s",
request.txn_id,
request.destination,
e,
)
raise
logger.info(
"{%s} [%s] Completed: %d %s [%d bytes] %s %s",
request.txn_id,
request.destination,
response.code,
response.phrase.decode("ascii", errors="replace"),
length,
request.method,
request.uri.decode("ascii"),
)
return (length, headers)
|
https://github.com/matrix-org/synapse/issues/9132
|
2021-01-15 20:32:45,345 - synapse.http.matrixfederationclient - 987 - WARNING - GET-25- {GET-O-1} [matrix.org] Requested file is too large > 10485760 bytes
2021-01-15 20:32:45,345 - synapse.rest.media.v1.media_repository - 417 - ERROR - GET-25- Failed to fetch remote media matrix.org/cPeSAplLYzzcKlpJjLtwlzrT
Traceback (most recent call last):
File "/usr/local/lib/python3.7/site-packages/synapse/rest/media/v1/media_repository.py", line 384, in _download_remote_file
"allow_remote": "false"
File "/usr/local/lib/python3.7/site-packages/synapse/http/matrixfederationclient.py", line 1004, in get_file
length,
UnboundLocalError: local variable 'length' referenced before assignment
|
UnboundLocalError
|
async def on_PUT(self, request, user_id):
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
target_user = UserID.from_string(user_id)
body = parse_json_object_from_request(request)
if not self.hs.is_mine(target_user):
raise SynapseError(400, "This endpoint can only be used with local users")
user = await self.admin_handler.get_user(target_user)
user_id = target_user.to_string()
if user: # modify user
if "displayname" in body:
await self.profile_handler.set_displayname(
target_user, requester, body["displayname"], True
)
if "threepids" in body:
# check for required parameters for each threepid
for threepid in body["threepids"]:
assert_params_in_dict(threepid, ["medium", "address"])
# remove old threepids from user
threepids = await self.store.user_get_threepids(user_id)
for threepid in threepids:
try:
await self.auth_handler.delete_threepid(
user_id, threepid["medium"], threepid["address"], None
)
except Exception:
logger.exception("Failed to remove threepids")
raise SynapseError(500, "Failed to remove threepids")
# add new threepids to user
current_time = self.hs.get_clock().time_msec()
for threepid in body["threepids"]:
await self.auth_handler.add_threepid(
user_id, threepid["medium"], threepid["address"], current_time
)
if "avatar_url" in body and type(body["avatar_url"]) == str:
await self.profile_handler.set_avatar_url(
target_user, requester, body["avatar_url"], True
)
if "admin" in body:
set_admin_to = bool(body["admin"])
if set_admin_to != user["admin"]:
auth_user = requester.user
if target_user == auth_user and not set_admin_to:
raise SynapseError(400, "You may not demote yourself.")
await self.store.set_server_admin(target_user, set_admin_to)
if "password" in body:
if not isinstance(body["password"], str) or len(body["password"]) > 512:
raise SynapseError(400, "Invalid password")
else:
new_password = body["password"]
logout_devices = True
new_password_hash = await self.auth_handler.hash(new_password)
await self.set_password_handler.set_password(
target_user.to_string(),
new_password_hash,
logout_devices,
requester,
)
if "deactivated" in body:
deactivate = body["deactivated"]
if not isinstance(deactivate, bool):
raise SynapseError(
400, "'deactivated' parameter is not of type boolean"
)
if deactivate and not user["deactivated"]:
await self.deactivate_account_handler.deactivate_account(
target_user.to_string(), False
)
elif not deactivate and user["deactivated"]:
if "password" not in body:
raise SynapseError(
400, "Must provide a password to re-activate an account."
)
await self.deactivate_account_handler.activate_account(
target_user.to_string()
)
user = await self.admin_handler.get_user(target_user)
return 200, user
else: # create user
password = body.get("password")
password_hash = None
if password is not None:
if not isinstance(password, str) or len(password) > 512:
raise SynapseError(400, "Invalid password")
password_hash = await self.auth_handler.hash(password)
admin = body.get("admin", None)
user_type = body.get("user_type", None)
displayname = body.get("displayname", None)
if user_type is not None and user_type not in UserTypes.ALL_USER_TYPES:
raise SynapseError(400, "Invalid user type")
user_id = await self.registration_handler.register_user(
localpart=target_user.localpart,
password_hash=password_hash,
admin=bool(admin),
default_display_name=displayname,
user_type=user_type,
by_admin=True,
)
if "threepids" in body:
# check for required parameters for each threepid
for threepid in body["threepids"]:
assert_params_in_dict(threepid, ["medium", "address"])
current_time = self.hs.get_clock().time_msec()
for threepid in body["threepids"]:
await self.auth_handler.add_threepid(
user_id, threepid["medium"], threepid["address"], current_time
)
if (
self.hs.config.email_enable_notifs
and self.hs.config.email_notif_for_new_users
):
await self.pusher_pool.add_pusher(
user_id=user_id,
access_token=None,
kind="email",
app_id="m.email",
app_display_name="Email Notifications",
device_display_name=threepid["address"],
pushkey=threepid["address"],
lang=None, # We don't know a user's language here
data={},
)
if "avatar_url" in body and isinstance(body["avatar_url"], str):
await self.profile_handler.set_avatar_url(
target_user, requester, body["avatar_url"], True
)
ret = await self.admin_handler.get_user(target_user)
return 201, ret
|
async def on_PUT(self, request, user_id):
requester = await self.auth.get_user_by_req(request)
await assert_user_is_admin(self.auth, requester.user)
target_user = UserID.from_string(user_id)
body = parse_json_object_from_request(request)
if not self.hs.is_mine(target_user):
raise SynapseError(400, "This endpoint can only be used with local users")
user = await self.admin_handler.get_user(target_user)
user_id = target_user.to_string()
if user: # modify user
if "displayname" in body:
await self.profile_handler.set_displayname(
target_user, requester, body["displayname"], True
)
if "threepids" in body:
# check for required parameters for each threepid
for threepid in body["threepids"]:
assert_params_in_dict(threepid, ["medium", "address"])
# remove old threepids from user
threepids = await self.store.user_get_threepids(user_id)
for threepid in threepids:
try:
await self.auth_handler.delete_threepid(
user_id, threepid["medium"], threepid["address"], None
)
except Exception:
logger.exception("Failed to remove threepids")
raise SynapseError(500, "Failed to remove threepids")
# add new threepids to user
current_time = self.hs.get_clock().time_msec()
for threepid in body["threepids"]:
await self.auth_handler.add_threepid(
user_id, threepid["medium"], threepid["address"], current_time
)
if "avatar_url" in body and type(body["avatar_url"]) == str:
await self.profile_handler.set_avatar_url(
target_user, requester, body["avatar_url"], True
)
if "admin" in body:
set_admin_to = bool(body["admin"])
if set_admin_to != user["admin"]:
auth_user = requester.user
if target_user == auth_user and not set_admin_to:
raise SynapseError(400, "You may not demote yourself.")
await self.store.set_server_admin(target_user, set_admin_to)
if "password" in body:
if not isinstance(body["password"], str) or len(body["password"]) > 512:
raise SynapseError(400, "Invalid password")
else:
new_password = body["password"]
logout_devices = True
new_password_hash = await self.auth_handler.hash(new_password)
await self.set_password_handler.set_password(
target_user.to_string(),
new_password_hash,
logout_devices,
requester,
)
if "deactivated" in body:
deactivate = body["deactivated"]
if not isinstance(deactivate, bool):
raise SynapseError(
400, "'deactivated' parameter is not of type boolean"
)
if deactivate and not user["deactivated"]:
await self.deactivate_account_handler.deactivate_account(
target_user.to_string(), False
)
elif not deactivate and user["deactivated"]:
if "password" not in body:
raise SynapseError(
400, "Must provide a password to re-activate an account."
)
await self.deactivate_account_handler.activate_account(
target_user.to_string()
)
user = await self.admin_handler.get_user(target_user)
return 200, user
else: # create user
password = body.get("password")
password_hash = None
if password is not None:
if not isinstance(password, str) or len(password) > 512:
raise SynapseError(400, "Invalid password")
password_hash = await self.auth_handler.hash(password)
admin = body.get("admin", None)
user_type = body.get("user_type", None)
displayname = body.get("displayname", None)
if user_type is not None and user_type not in UserTypes.ALL_USER_TYPES:
raise SynapseError(400, "Invalid user type")
user_id = await self.registration_handler.register_user(
localpart=target_user.localpart,
password_hash=password_hash,
admin=bool(admin),
default_display_name=displayname,
user_type=user_type,
by_admin=True,
)
if "threepids" in body:
# check for required parameters for each threepid
for threepid in body["threepids"]:
assert_params_in_dict(threepid, ["medium", "address"])
current_time = self.hs.get_clock().time_msec()
for threepid in body["threepids"]:
await self.auth_handler.add_threepid(
user_id, threepid["medium"], threepid["address"], current_time
)
if (
self.hs.config.email_enable_notifs
and self.hs.config.email_notif_for_new_users
):
await self.pusher_pool.add_pusher(
user_id=user_id,
access_token=None,
kind="email",
app_id="m.email",
app_display_name="Email Notifications",
device_display_name=threepid["address"],
pushkey=threepid["address"],
lang=None, # We don't know a user's language here
data={},
)
if "avatar_url" in body and type(body["avatar_url"]) == str:
await self.profile_handler.set_avatar_url(
user_id, requester, body["avatar_url"], True
)
ret = await self.admin_handler.get_user(target_user)
return 201, ret
|
https://github.com/matrix-org/synapse/issues/8871
|
2020-12-03 17:54:46,740 - synapse.http.server - 79 - ERROR - PUT-4829- Failed handle request via 'UserRestServletV2': <XForwardedForRequest at 0x7fea0361d880 method='PUT' uri='/_synapse/admin/v2/users/%40demo2_fake%3Ahs-mi1-staging.ems.host' clientproto='HTTP/1.1' site=8008>
Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/synapse/http/server.py", line 228, in _async_render_wrapper
callback_return = await self._async_render(request)
File "/usr/local/lib/python3.8/site-packages/synapse/http/server.py", line 405, in _async_render
callback_return = await raw_callback_return
File "/usr/local/lib/python3.8/site-packages/synapse/rest/admin/users.py", line 321, in on_PUT
await self.profile_handler.set_avatar_url(
File "/usr/local/lib/python3.8/site-packages/synapse/handlers/profile.py", line 264, in set_avatar_url
if not self.hs.is_mine(target_user):
File "/usr/local/lib/python3.8/site-packages/synapse/server.py", line 297, in is_mine
return domain_specific_string.domain == self.hostname
AttributeError: 'str' object has no attribute 'domain'
|
AttributeError
|
def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerConfig]):
"""
Start a Synapse server or worker.
Should be called once the reactor is running and (if we're using ACME) the
TLS certificates are in place.
Will start the main HTTP listeners and do some other startup tasks, and then
notify systemd.
Args:
hs: homeserver instance
listeners: Listener configuration ('listeners' in homeserver.yaml)
"""
try:
# Set up the SIGHUP machinery.
if hasattr(signal, "SIGHUP"):
@wrap_as_background_process("sighup")
def handle_sighup(*args, **kwargs):
# Tell systemd our state, if we're using it. This will silently fail if
# we're not using systemd.
sdnotify(b"RELOADING=1")
for i, args, kwargs in _sighup_callbacks:
i(*args, **kwargs)
sdnotify(b"READY=1")
# We defer running the sighup handlers until next reactor tick. This
# is so that we're in a sane state, e.g. flushing the logs may fail
# if the sighup happens in the middle of writing a log entry.
def run_sighup(*args, **kwargs):
hs.get_clock().call_later(0, handle_sighup, *args, **kwargs)
signal.signal(signal.SIGHUP, run_sighup)
register_sighup(refresh_certificate, hs)
# Load the certificate from disk.
refresh_certificate(hs)
# Start the tracer
synapse.logging.opentracing.init_tracer( # type: ignore[attr-defined] # noqa
hs
)
# It is now safe to start your Synapse.
hs.start_listening(listeners)
hs.get_datastore().db_pool.start_profiling()
hs.get_pusherpool().start()
# Log when we start the shut down process.
hs.get_reactor().addSystemEventTrigger(
"before", "shutdown", logger.info, "Shutting down..."
)
setup_sentry(hs)
setup_sdnotify(hs)
# If background tasks are running on the main process, start collecting the
# phone home stats.
if hs.config.run_background_tasks:
start_phone_stats_home(hs)
# We now freeze all allocated objects in the hopes that (almost)
# everything currently allocated are things that will be used for the
# rest of time. Doing so means less work each GC (hopefully).
#
# This only works on Python 3.7
if sys.version_info >= (3, 7):
gc.collect()
gc.freeze()
except Exception:
traceback.print_exc(file=sys.stderr)
reactor = hs.get_reactor()
if reactor.running:
reactor.stop()
sys.exit(1)
|
def start(hs: "synapse.server.HomeServer", listeners: Iterable[ListenerConfig]):
"""
Start a Synapse server or worker.
Should be called once the reactor is running and (if we're using ACME) the
TLS certificates are in place.
Will start the main HTTP listeners and do some other startup tasks, and then
notify systemd.
Args:
hs: homeserver instance
listeners: Listener configuration ('listeners' in homeserver.yaml)
"""
try:
# Set up the SIGHUP machinery.
if hasattr(signal, "SIGHUP"):
def handle_sighup(*args, **kwargs):
# Tell systemd our state, if we're using it. This will silently fail if
# we're not using systemd.
sdnotify(b"RELOADING=1")
for i, args, kwargs in _sighup_callbacks:
i(*args, **kwargs)
sdnotify(b"READY=1")
signal.signal(signal.SIGHUP, handle_sighup)
register_sighup(refresh_certificate, hs)
# Load the certificate from disk.
refresh_certificate(hs)
# Start the tracer
synapse.logging.opentracing.init_tracer( # type: ignore[attr-defined] # noqa
hs
)
# It is now safe to start your Synapse.
hs.start_listening(listeners)
hs.get_datastore().db_pool.start_profiling()
hs.get_pusherpool().start()
# Log when we start the shut down process.
hs.get_reactor().addSystemEventTrigger(
"before", "shutdown", logger.info, "Shutting down..."
)
setup_sentry(hs)
setup_sdnotify(hs)
# If background tasks are running on the main process, start collecting the
# phone home stats.
if hs.config.run_background_tasks:
start_phone_stats_home(hs)
# We now freeze all allocated objects in the hopes that (almost)
# everything currently allocated are things that will be used for the
# rest of time. Doing so means less work each GC (hopefully).
#
# This only works on Python 3.7
if sys.version_info >= (3, 7):
gc.collect()
gc.freeze()
except Exception:
traceback.print_exc(file=sys.stderr)
reactor = hs.get_reactor()
if reactor.running:
reactor.stop()
sys.exit(1)
|
https://github.com/matrix-org/synapse/issues/8769
|
--- Logging error ---
Traceback (most recent call last):
File "/usr/local/lib/python3.7/logging/__init__.py", line 1038, in emit
self.flush()
File "/usr/local/lib/python3.7/logging/__init__.py", line 1018, in flush
self.stream.flush()
File "/home/synapse/src/synapse/app/_base.py", line 253, in handle_sighup
i(*args, **kwargs)
File "/home/synapse/src/synapse/config/logger.py", line 289, in _reload_logging_config
_load_logging_config(log_config_path)
File "/home/synapse/src/synapse/config/logger.py", line 278, in _load_logging_config
logging.config.dictConfig(log_config)
File "/usr/local/lib/python3.7/logging/config.py", line 799, in dictConfig
dictConfigClass(config).configure()
File "/usr/local/lib/python3.7/logging/config.py", line 535, in configure
_clearExistingHandlers()
File "/usr/local/lib/python3.7/logging/config.py", line 272, in _clearExistingHandlers
logging.shutdown(logging._handlerList[:])
File "/usr/local/lib/python3.7/logging/__init__.py", line 2038, in shutdown
h.flush()
File "/usr/local/lib/python3.7/logging/__init__.py", line 1018, in flush
self.stream.flush()
RuntimeError: reentrant call inside <_io.BufferedWriter name='XXX.log'>
Call stack:
File "/usr/local/lib/python3.7/runpy.py", line 193, in _run_module_as_main
"__main__", mod_spec)
File "/usr/local/lib/python3.7/runpy.py", line 85, in _run_code
exec(code, run_globals)
File "/home/synapse/src/synapse/app/federation_sender.py", line 24, in <module>
start(sys.argv[1:])
File "/home/synapse/src/synapse/app/generic_worker.py", line 988, in start
_base.start_worker_reactor("synapse-generic-worker", config)
File "/home/synapse/src/synapse/app/_base.py", line 79, in start_worker_reactor
run_command=run_command,
File "/home/synapse/src/synapse/app/_base.py", line 132, in start_reactor
run()
File "/home/synapse/src/synapse/app/_base.py", line 116, in run
run_command()
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/base.py", line 1283, in run
self.mainLoop()
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/base.py", line 1295, in mainLoop
self.doIteration(t)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/epollreactor.py", line 235, in doPoll
log.callWithLogger(selectable, _drdw, selectable, fd, event)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/python/log.py", line 103, in callWithLogger
return callWithContext({"system": lp}, func, *args, **kw)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/python/log.py", line 86, in callWithContext
return context.call({ILogContext: newCtx}, func, *args, **kw)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/posixbase.py", line 614, in _doReadOrWrite
why = selectable.doRead()
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/tcp.py", line 243, in doRead
return self._dataReceived(data)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/tcp.py", line 249, in _dataReceived
rval = self.protocol.dataReceived(data)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/endpoints.py", line 132, in dataReceived
return self._wrappedProtocol.dataReceived(data)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/protocols/tls.py", line 330, in dataReceived
self._flushReceiveBIO()
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/protocols/tls.py", line 295, in _flushReceiveBIO
ProtocolWrapper.dataReceived(self, bytes)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/protocols/policies.py", line 120, in dataReceived
self.wrappedProtocol.dataReceived(data)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/_newclient.py", line 1693, in dataReceived
self._parser.dataReceived(bytes)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/_newclient.py", line 391, in dataReceived
HTTPParser.dataReceived(self, data)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/protocols/basic.py", line 579, in dataReceived
why = self.rawDataReceived(data)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/_newclient.py", line 304, in rawDataReceived
self.bodyDecoder.dataReceived(data)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/http.py", line 1889, in dataReceived
data = getattr(self, '_dataReceived_%s' % (self.state,))(data)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/http.py", line 1857, in _dataReceived_TRAILER
self.finishCallback(data)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/_newclient.py", line 456, in _finished
self.finisher(rest)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/_newclient.py", line 1050, in dispatcher
return func(*args, **kwargs)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/_newclient.py", line 1647, in _finishResponse_WAITING
self._disconnectParser(reason)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/_newclient.py", line 1673, in _disconnectParser
parser.connectionLost(reason)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/_newclient.py", line 567, in connectionLost
self.response._bodyDataFinished()
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/_newclient.py", line 1050, in dispatcher
return func(*args, **kwargs)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/web/_newclient.py", line 1306, in _bodyDataFinished_CONNECTED
self._bodyProtocol.connectionLost(reason)
File "/home/synapse/env-py37/lib/python3.7/site-packages/treq/content.py", line 39, in connectionLost
self.finished.callback(None)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/defer.py", line 460, in callback
self._startRunCallbacks(result)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/defer.py", line 568, in _startRunCallbacks
self._runCallbacks()
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/defer.py", line 654, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/home/synapse/src/synapse/util/async_helpers.py", line 517, in success_cb
new_d.callback(val)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/defer.py", line 460, in callback
self._startRunCallbacks(result)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/defer.py", line 568, in _startRunCallbacks
self._runCallbacks()
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/defer.py", line 654, in _runCallbacks
current.result = callback(current.result, *args, **kw)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/defer.py", line 1475, in gotResult
_inlineCallbacks(r, g, status)
File "/home/synapse/env-py37/lib/python3.7/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
result = g.send(result)
File "/home/synapse/src/synapse/metrics/background_process_metrics.py", line 212, in run
result = await result
File "/home/synapse/src/synapse/federation/sender/per_destination_queue.py", line 332, in _transaction_transmission_loop
self._destination, pending_pdus, pending_edus
File "/home/synapse/src/synapse/util/metrics.py", line 92, in measured_func
r = await func(self, *args, **kwargs)
File "/home/synapse/src/synapse/federation/sender/transaction_manager.py", line 163, in send_new_transaction
logger.info("TX [%s] {%s} got %d response", destination, txn_id, code)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1383, in info
self._log(INFO, msg, args, **kwargs)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1519, in _log
self.handle(record)
File "/usr/local/lib/python3.7/logging/__init__.py", line 1529, in handle
self.callHandlers(record)
File "/home/synapse/env-py37/lib/python3.7/site-packages/sentry_sdk/integrations/logging.py", line 77, in sentry_patched_callhandlers
return old_callhandlers(self, record)
Message: 'TX [%s] {%s} got %d response'
|
RuntimeError
|
async def get_profile(self, user_id: str) -> JsonDict:
target_user = UserID.from_string(user_id)
if self.hs.is_mine(target_user):
try:
displayname = await self.store.get_profile_displayname(
target_user.localpart
)
avatar_url = await self.store.get_profile_avatar_url(target_user.localpart)
except StoreError as e:
if e.code == 404:
raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND)
raise
return {"displayname": displayname, "avatar_url": avatar_url}
else:
try:
result = await self.federation.make_query(
destination=target_user.domain,
query_type="profile",
args={"user_id": user_id},
ignore_backoff=True,
)
return result
except RequestSendFailed as e:
raise SynapseError(502, "Failed to fetch profile") from e
except HttpResponseException as e:
if e.code < 500 and e.code != 404:
# Other codes are not allowed in c2s API
logger.info("Server replied with wrong response: %s %s", e.code, e.msg)
raise SynapseError(502, "Failed to fetch profile")
raise e.to_synapse_error()
|
async def get_profile(self, user_id: str) -> JsonDict:
target_user = UserID.from_string(user_id)
if self.hs.is_mine(target_user):
try:
displayname = await self.store.get_profile_displayname(
target_user.localpart
)
avatar_url = await self.store.get_profile_avatar_url(target_user.localpart)
except StoreError as e:
if e.code == 404:
raise SynapseError(404, "Profile was not found", Codes.NOT_FOUND)
raise
return {"displayname": displayname, "avatar_url": avatar_url}
else:
try:
result = await self.federation.make_query(
destination=target_user.domain,
query_type="profile",
args={"user_id": user_id},
ignore_backoff=True,
)
return result
except RequestSendFailed as e:
raise SynapseError(502, "Failed to fetch profile") from e
except HttpResponseException as e:
raise e.to_synapse_error()
|
https://github.com/matrix-org/synapse/issues/8520
|
2020-10-11 14:17:11,057 - synapse.crypto.keyring - 624 - INFO - PUT-394911 - Requesting keys dict_items([('conduit.rs', {'ed25519:vNlc2BKa': 1602425831054})]) from notary server matrix.org
2020-10-11 14:17:11,109 - synapse.http.matrixfederationclient - 204 - INFO - PUT-394911 - {POST-O-111774} [matrix.org] Completed request: 200 OK in 0.05 secs - POST matrix://matrix.org/_matrix/key/v2/query
2020-10-11 14:17:11,127 - synapse.federation.transport.server - 409 - INFO - PUT-394911 - Received txn zfdvFZtVGmJDVVAc from conduit.rs. (PDUs: 1, EDUs: 0)
2020-10-11 14:17:11,136 - synapse.handlers.federation - 185 - INFO - PUT-394911-$rv-7dXW7o3VorMNosgraWlnctJ9qmNpgaw2a0kF9Q2s - handling received PDU: <FrozenEventV3 event_id='$rv-7dXW7o3VorMNosgraWlnctJ9qmNpgaw2a0kF9Q2s', type='m.room.message', state_key='None'>
2020-10-11 14:17:11,150 - synapse.handlers.federation - 2383 - INFO - PUT-394911-$rv-7dXW7o3VorMNosgraWlnctJ9qmNpgaw2a0kF9Q2s - auth_events contains unknown events: {'$-_058VsrZyoyhf3gf2i0Vl-psP3vFV6FKFBhuIG9fGU'}
2020-10-11 14:17:15,041 - synapse.http.matrixfederationclient - 505 - INFO - PUT-394911-$rv-7dXW7o3VorMNosgraWlnctJ9qmNpgaw2a0kF9Q2s - {GET-O-111775} [conduit.rs] Got response headers: 404 Not Found
2020-10-11 14:17:15,042 - synapse.http.matrixfederationclient - 581 - WARNING - PUT-394911-$rv-7dXW7o3VorMNosgraWlnctJ9qmNpgaw2a0kF9Q2s - {GET-O-111775} [conduit.rs] Request failed: GET matrix://conduit.rs/_matrix/federation/v1/event_auth/%21xYvNcQPhnkrdUmYczI%3Amatrix.org/%24rv-7dXW7o3VorMNosgraWlnctJ9qmNpgaw2a0kF9Q2s: HttpResponseException('404: Not Found')
2020-10-11 14:17:15,042 - synapse.handlers.federation - 2426 - ERROR - PUT-394911-$rv-7dXW7o3VorMNosgraWlnctJ9qmNpgaw2a0kF9Q2s - Failed to get auth chain
Traceback (most recent call last):
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/handlers/federation.py", line 2387, in _update_auth_events_and_context_for_auth
origin, event.room_id, event.event_id
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/federation/federation_client.py", line 420, in get_event_auth
res = await self.transport_layer.get_event_auth(destination, room_id, event_id)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/federation/transport/client.py", line 403, in get_event_auth
content = await self.client.get_json(destination=destination, path=path)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/http/matrixfederationclient.py", line 842, in get_json
timeout=timeout,
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/http/matrixfederationclient.py", line 292, in _send_request_with_optional_trailing_slash
response = await self._send_request(request, **send_request_args)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/http/matrixfederationclient.py", line 536, in _send_request
raise e
synapse.api.errors.HttpResponseException: 404: Not Found
2020-10-11 14:17:15,051 - synapse.handlers.federation - 2447 - INFO - PUT-394911-$rv-7dXW7o3VorMNosgraWlnctJ9qmNpgaw2a0kF9Q2s - auth_events refers to events which are not in our calculated auth chain: {'$-_058VsrZyoyhf3gf2i0Vl-psP3vFV6FKFBhuIG9fGU'}
2020-10-11 14:17:15,057 - synapse.state - 444 - INFO - PUT-394911-$rv-7dXW7o3VorMNosgraWlnctJ9qmNpgaw2a0kF9Q2s - Resolving state for !xYvNcQPhnkrdUmYczI:matrix.org with 2 groups
2020-10-11 14:17:15,057 - synapse.handlers.federation - 2487 - INFO - PUT-394911-$rv-7dXW7o3VorMNosgraWlnctJ9qmNpgaw2a0kF9Q2s - After state res: updating auth_events with new state {}
2020-10-11 14:17:15,085 - synapse.state - 533 - INFO - persist_events-4654 - Resolving state for !xYvNcQPhnkrdUmYczI:matrix.org with 3 groups
2020-10-11 14:17:15,086 - synapse.state - 556 - INFO - persist_events-4654 - Resolving conflicted state for '!xYvNcQPhnkrdUmYczI:matrix.org'
2020-10-11 14:17:15,292 - synapse.access.http.8008 - 311 - INFO - PUT-394911 - 2001:16b8:632:1300:5c4b:f9ef:8bf1:a45d - 8008 - {conduit.rs} Processed request: 4.238sec/-0.000sec (0.029sec, 0.007sec) (0.011sec/0.038sec/13) 60B 200 "PUT /_matrix/federation/v1/send/zfdvFZtVGmJDVVAc HTTP/1.0" "-" [3 dbevts]
2020-10-11 14:17:15,705 - synapse.http.matrixfederationclient - 973 - INFO - GET-394937 - {GET-O-111776} [conduit.rs] Completed: 200 OK [169573 bytes] GET matrix://conduit.rs/_matrix/media/r0/download/conduit.rs/5X8noVQpyo70KZ1Cqbn3PuU4ApXC0ZcUn7fpdVNkujeTX5bVSPP9mM3gGFgNk4Qn43bU4DW3PT4mET8MmIHx5ji298sd7LXomd0qqYABwOpbhwCCW7U9Yqj7mhjgx8vZQyZZsZ7bV3E3F4e4m6l0of9tW94nvsAvBvJNFIF8YpsXvefkGFyYueNL5kFDWW8ImgmWIOzHSgxiFUQvL4JdDqqmhmQrI1AMVQFj7OkzidaoKVUSK2l7r0jQL0ADTQ6M?allow_remote=false
2020-10-11 14:17:15,706 - synapse.rest.media.v1.media_repository - 407 - INFO - GET-394937 - Stored remote media in file '/home/sous-synapse/install/media_store/remote_content/conduit.rs/mn/HY/wAvZjpxEADcUaGGLkeKV'
2020-10-11 14:17:15,715 - synapse.http.server - 85 - ERROR - GET-394937 - Failed handle request via 'ThumbnailResource': <XForwardedForRequest at 0x7f712a20a278 method='GET' uri='/_matrix/media/r0/thumbnail/conduit.rs/5X8noVQpyo70KZ1Cqbn3PuU4ApXC0ZcUn7fpdVNkujeTX5bVSPP9mM3gGFgNk4Qn43bU4DW3PT4mET8MmIHx5ji298sd7LXomd0qqYABwOpbhwCCW7U9Yqj7mhjgx8vZQyZZsZ7bV3E3F4e4m6l0of9tW94nvsAvBvJNFIF8YpsXvefkGFyYueNL5kFDWW8ImgmWIOzHSgxiFUQvL4JdDqqmhmQrI1AMVQFj7OkzidaoKVUSK2l7r0jQL0ADTQ6M?width=196&height=196&method=scale&allow_remote=true' clientproto='HTTP/1.0' site=8008>
Traceback (most recent call last):
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/http/server.py", line 230, in _async_render_wrapper
callback_return = await self._async_render(request)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/http/server.py", line 258, in _async_render
callback_return = await raw_callback_return
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/rest/media/v1/thumbnail_resource.py", line 70, in _async_render_GET
request, server_name, media_id, width, height, method, m_type
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/rest/media/v1/thumbnail_resource.py", line 246, in _respond_remote_thumbnail
media_info = await self.media_repo.get_remote_media_info(server_name, media_id)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/rest/media/v1/media_repository.py", line 278, in get_remote_media_info
server_name, media_id
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/rest/media/v1/media_repository.py", line 326, in _get_remote_media_impl
media_info = await self._download_remote_file(server_name, media_id, file_id)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/rest/media/v1/media_repository.py", line 416, in _download_remote_file
filesystem_id=file_id,
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/storage/databases/main/media_repository.py", line 237, in store_cached_remote_media
desc="store_cached_remote_media",
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/storage/database.py", line 670, in simple_insert
await self.runInteraction(desc, self.simple_insert_txn, table, values)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/storage/database.py", line 541, in runInteraction
**kwargs
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/storage/database.py", line 590, in runWithConnection
self._db_pool.runWithConnection(inner_func, *args, **kwargs)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/twisted/python/threadpool.py", line 250, in inContext
result = inContext.theWork()
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/twisted/python/threadpool.py", line 266, in <lambda>
inContext.theWork = lambda: context.call(ctx, func, *args, **kw)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/twisted/python/context.py", line 122, in callWithContext
return self.currentContext().callWithContext(ctx, func, *args, **kw)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/twisted/python/context.py", line 85, in callWithContext
return func(*args,**kw)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/twisted/enterprise/adbapi.py", line 306, in _runWithConnection
compat.reraise(excValue, excTraceback)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/twisted/python/compat.py", line 464, in reraise
raise exception.with_traceback(traceback)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/twisted/enterprise/adbapi.py", line 297, in _runWithConnection
result = func(conn, *args, **kw)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/storage/database.py", line 587, in inner_func
return func(conn, *args, **kwargs)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/storage/database.py", line 429, in new_transaction
r = func(cursor, *args, **kwargs)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/storage/database.py", line 691, in simple_insert_txn
txn.execute(sql, vals)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/storage/database.py", line 212, in execute
self._do_execute(self.txn.execute, sql, *args)
File "/home/sous-synapse/install/env/lib/python3.7/site-packages/synapse/storage/database.py", line 238, in _do_execute
return func(sql, *args)
psycopg2.errors.UniqueViolation: duplicate key value violates unique constraint "remote_media_cache_media_origin_media_id_key"
DETAIL: Key (media_origin, media_id)=(conduit.rs, 5X8noVQpyo70KZ1Cqbn3PuU4ApXC0ZcUn7fpdVNkujeTX5bVSPP9mM3gGFgNk4Qn43bU4DW3PT4mET8MmIHx5ji298sd7LXomd0qqYABwOpbhwCCW7U9Yqj7mhjgx8vZQyZZsZ7bV3E3F4e4m6l0of9tW94nvsAvBvJNFIF8YpsXvefkGFyYueNL5kFDWW8ImgmWIOzHSgxiFUQvL4JdDqqmhmQrI1AMVQFj7OkzidaoKVUSK2l7r0jQL0ADTQ6M) already exists.
2020-10-11 14:17:15,725 - synapse.access.http.8008 - 311 - INFO - GET-394937 - Red.act.ed.IP4 - 8008 - {None} Processed request: 0.335sec/-0.000sec (0.010sec, 0.000sec) (0.002sec/0.010sec/2) 55B 500 "GET /_matrix/media/r0/thumbnail/conduit.rs/5X8noVQpyo70KZ1Cqbn3PuU4ApXC0ZcUn7fpdVNkujeTX5bVSPP9mM3gGFgNk4Qn43bU4DW3PT4mET8MmIHx5ji298sd7LXomd0qqYABwOpbhwCCW7U9Yqj7mhjgx8vZQyZZsZ7bV3E3F4e4m6l0of9tW94nvsAvBvJNFIF8YpsXvefkGFyYueNL5kFDWW8ImgmWIOzHSgxiFUQvL4JdDqqmhmQrI1AMVQFj7OkzidaoKVUSK2l7r0jQL0ADTQ6M?width=196&height=196&method=scale&allow_remote=true HTTP/1.0" "Python/3.8 aiohttp/3.6.2" [0 dbevts]
2020-10-11 14:17:15,770 - synapse.access.http.8008 - 311 - INFO - GET-394940 - Red.act.ed.IP4 - 8008 - {None} Processed request: 0.018sec/-0.000sec (0.002sec, 0.001sec) (0.001sec/0.014sec/2) 322B 404 "GET /_matrix/media/r0/thumbnail/conduit.rs/5X8noVQpyo70KZ1Cqbn3PuU4ApXC0ZcUn7fpdVNkujeTX5bVSPP9mM3gGFgNk4Qn43bU4DW3PT4mET8MmIHx5ji298sd7LXomd0qqYABwOpbhwCCW7U9Yqj7mhjgx8vZQyZZsZ7bV3E3F4e4m6l0of9tW94nvsAvBvJNFIF8YpsXvefkGFyYueNL5kFDWW8ImgmWIOzHSgxiFUQvL4JdDqqmhmQrI1AMVQFj7OkzidaoKVUSK2l7r0jQL0ADTQ6M?width=32&height=32&method=scale&allow_remote=true HTTP/1.0" "Python/3.8 aiohttp/3.6.2" [0 dbevts]
2020-10-11 14:17:15,840 - synapse.http.matrixfederationclient - 505 - INFO - GET-394941 - {GET-O-111777} [conduit.rs] Got response headers: 401 Unauthorized
2020-10-11 14:17:15,841 - synapse.http.matrixfederationclient - 581 - WARNING - GET-394941 - {GET-O-111777} [conduit.rs] Request failed: GET matrix://conduit.rs/_matrix/federation/v1/query/profile?user_id=%40timo%3Aconduit.rs&field=displayname: HttpResponseException('401: Unauthorized')
2020-10-11 14:17:15,841 - synapse.http.server - 76 - INFO - GET-394941 - <XForwardedForRequest at 0x7f7134cee7b8 method='GET' uri='/_matrix/client/r0/profile/@timo:conduit.rs' clientproto='HTTP/1.0' site=8008> SynapseError: 401 - Unauthorized
2020-10-11 14:17:15,842 - synapse.access.http.8008 - 311 - INFO - GET-394941 - Red.act.ed.IP4 - 8008 - {None} Processed request: 0.038sec/-0.000sec (0.010sec, 0.000sec) (0.000sec/0.000sec/0) 46B 401 "GET /_matrix/client/r0/profile/@timo:conduit.rs HTTP/1.0" "Python/3.8 aiohttp/3.6.2" [0 dbevts]
2020-10-11 14:17:15,848 - synapse.access.http.8008 - 311 - INFO - GET-394942 - Red.act.ed.IP4 - 8008 - {None} Processed request: 0.004sec/-0.000sec (0.002sec, 0.000sec) (0.001sec/0.002sec/2) 322B 404 "GET /_matrix/media/r0/thumbnail/conduit.rs/5X8noVQpyo70KZ1Cqbn3PuU4ApXC0ZcUn7fpdVNkujeTX5bVSPP9mM3gGFgNk4Qn43bU4DW3PT4mET8MmIHx5ji298sd7LXomd0qqYABwOpbhwCCW7U9Yqj7mhjgx8vZQyZZsZ7bV3E3F4e4m6l0of9tW94nvsAvBvJNFIF8YpsXvefkGFyYueNL5kFDWW8ImgmWIOzHSgxiFUQvL4JdDqqmhmQrI1AMVQFj7OkzidaoKVUSK2l7r0jQL0ADTQ6M?width=196&height=196&method=scale&allow_remote=true HTTP/1.0" "Python/3.8 aiohttp/3.6.2" [0 dbevts]
2020-10-11 14:17:15,899 - synapse.access.http.8008 - 311 - INFO - GET-394944 - Red.act.ed.IP4 - 8008 - {None} Processed request: 0.006sec/-0.000sec (0.002sec, 0.000sec) (0.001sec/0.003sec/2) 322B 404 "GET /_matrix/media/r0/thumbnail/conduit.rs/5X8noVQpyo70KZ1Cqbn3PuU4ApXC0ZcUn7fpdVNkujeTX5bVSPP9mM3gGFgNk4Qn43bU4DW3PT4mET8MmIHx5ji298sd7LXomd0qqYABwOpbhwCCW7U9Yqj7mhjgx8vZQyZZsZ7bV3E3F4e4m6l0of9tW94nvsAvBvJNFIF8YpsXvefkGFyYueNL5kFDWW8ImgmWIOzHSgxiFUQvL4JdDqqmhmQrI1AMVQFj7OkzidaoKVUSK2l7r0jQL0ADTQ6M?width=32&height=32&method=scale&allow_remote=true HTTP/1.0" "Python/3.8 aiohttp/3.6.2" [0 dbevts]
2020-10-11 14:17:16,042 - synapse.access.http.8008 - 311 - INFO - GET-394945 - Red.act.ed.IP4 - 8008 - {None} Processed request: 0.003sec/-0.000sec (0.002sec, 0.000sec) (0.000sec/0.001sec/2) 322B 404 "GET /_matrix/media/r0/thumbnail/conduit.rs/5X8noVQpyo70KZ1Cqbn3PuU4ApXC0ZcUn7fpdVNkujeTX5bVSPP9mM3gGFgNk4Qn43bU4DW3PT4mET8MmIHx5ji298sd7LXomd0qqYABwOpbhwCCW7U9Yqj7mhjgx8vZQyZZsZ7bV3E3F4e4m6l0of9tW94nvsAvBvJNFIF8YpsXvefkGFyYueNL5kFDWW8ImgmWIOzHSgxiFUQvL4JdDqqmhmQrI1AMVQFj7OkzidaoKVUSK2l7r0jQL0ADTQ6M?width=196&height=196&method=scale&allow_remote=true HTTP/1.0" "Python/3.8 aiohttp/3.6.2" [0 dbevts]
2020-10-11 14:17:16,089 - synapse.access.http.8008 - 311 - INFO - GET-394946 - Red.act.ed.IP4 - 8008 - {None} Processed request: 0.003sec/-0.000sec (0.001sec, 0.000sec) (0.001sec/0.001sec/2) 322B 404 "GET /_matrix/media/r0/thumbnail/conduit.rs/5X8noVQpyo70KZ1Cqbn3PuU4ApXC0ZcUn7fpdVNkujeTX5bVSPP9mM3gGFgNk4Qn43bU4DW3PT4mET8MmIHx5ji298sd7LXomd0qqYABwOpbhwCCW7U9Yqj7mhjgx8vZQyZZsZ7bV3E3F4e4m6l0of9tW94nvsAvBvJNFIF8YpsXvefkGFyYueNL5kFDWW8ImgmWIOzHSgxiFUQvL4JdDqqmhmQrI1AMVQFj7OkzidaoKVUSK2l7r0jQL0ADTQ6M?width=32&height=32&method=scale&allow_remote=true HTTP/1.0" "Python/3.8 aiohttp/3.6.2" [0 dbevts]
|
synapse.api.errors.HttpResponseException
|
def respond_with_json_bytes(
request: Request,
code: int,
json_bytes: bytes,
send_cors: bool = False,
):
"""Sends encoded JSON in response to the given request.
Args:
request: The http request to respond to.
code: The HTTP response code.
json_bytes: The json bytes to use as the response body.
send_cors: Whether to send Cross-Origin Resource Sharing headers
https://fetch.spec.whatwg.org/#http-cors-protocol
Returns:
twisted.web.server.NOT_DONE_YET if the request is still active.
"""
if request._disconnected:
logger.warning(
"Not sending response to request %s, already disconnected.", request
)
return
request.setResponseCode(code)
request.setHeader(b"Content-Type", b"application/json")
request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),))
request.setHeader(b"Cache-Control", b"no-cache, no-store, must-revalidate")
if send_cors:
set_cors_headers(request)
# note that this is zero-copy (the bytesio shares a copy-on-write buffer with
# the original `bytes`).
bytes_io = BytesIO(json_bytes)
producer = NoRangeStaticProducer(request, bytes_io)
producer.start()
return NOT_DONE_YET
|
def respond_with_json_bytes(
request: Request,
code: int,
json_bytes: bytes,
send_cors: bool = False,
):
"""Sends encoded JSON in response to the given request.
Args:
request: The http request to respond to.
code: The HTTP response code.
json_bytes: The json bytes to use as the response body.
send_cors: Whether to send Cross-Origin Resource Sharing headers
https://fetch.spec.whatwg.org/#http-cors-protocol
Returns:
twisted.web.server.NOT_DONE_YET if the request is still active.
"""
request.setResponseCode(code)
request.setHeader(b"Content-Type", b"application/json")
request.setHeader(b"Content-Length", b"%d" % (len(json_bytes),))
request.setHeader(b"Cache-Control", b"no-cache, no-store, must-revalidate")
if send_cors:
set_cors_headers(request)
# note that this is zero-copy (the bytesio shares a copy-on-write buffer with
# the original `bytes`).
bytes_io = BytesIO(json_bytes)
producer = NoRangeStaticProducer(request, bytes_io)
producer.start()
return NOT_DONE_YET
|
https://github.com/matrix-org/synapse/issues/5304
|
2019-05-31 11:55:56,270 - synapse.access.http.8008 - 233 - INFO - GET-1116745- 176.14.254.64 - 8008 - Received request: GET /_matrix/media/v1/thumbnail/amorgan.xyz/JpHpuDNOxuALIaPSENEAzZIu?width=800&height=600
2019-05-31 11:55:56,273 - synapse.access.http.8008 - 302 - INFO - GET-1116745- 176.14.254.64 - 8008 - {None} Processed request: 0.003sec/-0.000sec (0.000sec, 0.000sec) (0.000sec/0.001sec/2) 92715B 200 "GET /_matrix/media/v1/thumbnail/amorgan.xyz/JpHpuDNOxuALIaPSENEAzZIu?width=800&height=600 HTTP/1.0" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:67.0) Gecko/20100101 Firefox/67.0" [0 dbevts]
2019-05-31 11:55:56,321 - synapse.access.http.8008 - 233 - INFO - GET-1116746- 176.14.254.64 - 8008 - Received request: GET /_matrix/media/v1/thumbnail/amorgan.xyz/chzgDJfCkiDOFITyulcRWQOn?width=40&height=40&method=crop
2019-05-31 11:55:56,322 - synapse.http.site - 203 - WARNING - GET-1116746- Error processing request <XForwardedForRequest at 0x7feb7a25e860 method='GET' uri='/_matrix/media/v1/thumbnail/amorgan.xyz/chzgDJfCkiDOFITyulcRWQOn?width=40&height=40&method=crop' clientproto='HTTP/1.0' site=8008>: <class 'twisted.internet.error.ConnectionDone'> Connection was closed cleanly.
2019-05-31 11:55:56,325 - synapse.rest.media.v1._base - 192 - WARNING - GET-1116746- Failed to write to consumer: <class 'AttributeError'> 'NoneType' object has no attribute 'registerProducer'
2019-05-31 11:55:56,325 - synapse.http.server - 112 - ERROR - GET-1116746- Failed handle request via 'ThumbnailResource': <XForwardedForRequest at 0x7feb7a25e860 method='GET' uri='/_matrix/media/v1/thumbnail/amorgan.xyz/chzgDJfCkiDOFITyulcRWQOn?width=40&height=40&method=crop' clientproto='HTTP/1.0' site=8008>
Traceback (most recent call last):
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
result = g.send(result)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/storage/_base.py", line 527, in runWithConnection
defer.returnValue(result)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/internet/defer.py", line 1362, in returnValue
raise _DefGen_Return(val)
twisted.internet.defer._DefGen_Return: [{'thumbnail_width': 32, 'thumbnail_type': 'image/png', 'thumbnail_height': 32, 'thumbnail_length': 2341, 'thumbnail_method': 'crop'}, {'thumbnail_width': 239, 'thumbnail_type': 'image/png', 'thumbnail_height': 240, 'thumbnail_length': 84450, 'thumbnail_method': 'scale'}, {'thumbnail_width': 399, 'thumbnail_type': 'image/png', 'thumbnail_height': 400, 'thumbnail_length': 196365, 'thumbnail_method': 'scale'}, {'thumbnail_width': 96, 'thumbnail_type': 'image/png', 'thumbnail_height': 96, 'thumbnail_length': 16151, 'thumbnail_method': 'crop'}]
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
result = g.send(result)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/storage/_base.py", line 487, in runInteraction
defer.returnValue(result)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/internet/defer.py", line 1362, in returnValue
raise _DefGen_Return(val)
twisted.internet.defer._DefGen_Return: [{'thumbnail_width': 32, 'thumbnail_type': 'image/png', 'thumbnail_height': 32, 'thumbnail_length': 2341, 'thumbnail_method': 'crop'}, {'thumbnail_width': 239, 'thumbnail_type': 'image/png', 'thumbnail_height': 240, 'thumbnail_length': 84450, 'thumbnail_method': 'scale'}, {'thumbnail_width': 399, 'thumbnail_type': 'image/png', 'thumbnail_height': 400, 'thumbnail_length': 196365, 'thumbnail_method': 'scale'}, {'thumbnail_width': 96, 'thumbnail_type': 'image/png', 'thumbnail_height': 96, 'thumbnail_length': 16151, 'thumbnail_method': 'crop'}]
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/rest/media/v1/_base.py", line 187, in respond_with_responder
yield responder.write_to_consumer(request)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/rest/media/v1/media_storage.py", line 263, in write_to_consumer
FileSender().beginFileTransfer(self.open_file, consumer)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/protocols/basic.py", line 923, in beginFileTransfer
self.consumer.registerProducer(self, False)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/web/http.py", line 961, in registerProducer
self.channel.registerProducer(producer, streaming)
AttributeError: 'NoneType' object has no attribute 'registerProducer'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/http/server.py", line 81, in wrapped_request_handler
yield h(self, request)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/rest/media/v1/thumbnail_resource.py", line 71, in _async_render_GET
request, media_id, width, height, method, m_type
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/rest/media/v1/thumbnail_resource.py", line 121, in _respond_local_thumbnail
yield respond_with_responder(request, responder, t_type, t_length)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
result = g.send(result)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/rest/media/v1/_base.py", line 196, in respond_with_responder
request.unregisterProducer()
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/web/http.py", line 967, in unregisterProducer
self.channel.unregisterProducer()
AttributeError: 'NoneType' object has no attribute 'unregisterProducer'
2019-05-31 11:55:56,326 - synapse.http.server - 415 - WARNING - GET-1116746- Not sending response to request <XForwardedForRequest at 0x7feb7a25e860 method='GET' uri='/_matrix/media/v1/thumbnail/amorgan.xyz/chzgDJfCkiDOFITyulcRWQOn?width=40&height=40&method=crop' clientproto='HTTP/1.0' site=8008>, already disconnected.
2019-05-31 11:55:56,326 - synapse.access.http.8008 - 302 - INFO - GET-1116746- 176.14.254.64 - 8008 - {None} Processed request: 0.005sec/-0.004sec (0.000sec, 0.000sec) (0.001sec/0.002sec/2) 0B 200! "GET /_matrix/media/v1/thumbnail/amorgan.xyz/chzgDJfCkiDOFITyulcRWQOn?width=40&height=40&method=crop HTTP/1.0" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:67.0) Gecko/20100101 Firefox/67.0" [0 dbevts]
|
AttributeError
|
async def respond_with_responder(
request, responder, media_type, file_size, upload_name=None
):
"""Responds to the request with given responder. If responder is None then
returns 404.
Args:
request (twisted.web.http.Request)
responder (Responder|None)
media_type (str): The media/content type.
file_size (int|None): Size in bytes of the media. If not known it should be None
upload_name (str|None): The name of the requested file, if any.
"""
if request._disconnected:
logger.warning(
"Not sending response to request %s, already disconnected.", request
)
return
if not responder:
respond_404(request)
return
logger.debug("Responding to media request with responder %s", responder)
add_file_headers(request, media_type, file_size, upload_name)
try:
with responder:
await responder.write_to_consumer(request)
except Exception as e:
# The majority of the time this will be due to the client having gone
# away. Unfortunately, Twisted simply throws a generic exception at us
# in that case.
logger.warning("Failed to write to consumer: %s %s", type(e), e)
# Unregister the producer, if it has one, so Twisted doesn't complain
if request.producer:
request.unregisterProducer()
finish_request(request)
|
async def respond_with_responder(
request, responder, media_type, file_size, upload_name=None
):
"""Responds to the request with given responder. If responder is None then
returns 404.
Args:
request (twisted.web.http.Request)
responder (Responder|None)
media_type (str): The media/content type.
file_size (int|None): Size in bytes of the media. If not known it should be None
upload_name (str|None): The name of the requested file, if any.
"""
if not responder:
respond_404(request)
return
logger.debug("Responding to media request with responder %s", responder)
add_file_headers(request, media_type, file_size, upload_name)
try:
with responder:
await responder.write_to_consumer(request)
except Exception as e:
# The majority of the time this will be due to the client having gone
# away. Unfortunately, Twisted simply throws a generic exception at us
# in that case.
logger.warning("Failed to write to consumer: %s %s", type(e), e)
# Unregister the producer, if it has one, so Twisted doesn't complain
if request.producer:
request.unregisterProducer()
finish_request(request)
|
https://github.com/matrix-org/synapse/issues/5304
|
2019-05-31 11:55:56,270 - synapse.access.http.8008 - 233 - INFO - GET-1116745- 176.14.254.64 - 8008 - Received request: GET /_matrix/media/v1/thumbnail/amorgan.xyz/JpHpuDNOxuALIaPSENEAzZIu?width=800&height=600
2019-05-31 11:55:56,273 - synapse.access.http.8008 - 302 - INFO - GET-1116745- 176.14.254.64 - 8008 - {None} Processed request: 0.003sec/-0.000sec (0.000sec, 0.000sec) (0.000sec/0.001sec/2) 92715B 200 "GET /_matrix/media/v1/thumbnail/amorgan.xyz/JpHpuDNOxuALIaPSENEAzZIu?width=800&height=600 HTTP/1.0" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:67.0) Gecko/20100101 Firefox/67.0" [0 dbevts]
2019-05-31 11:55:56,321 - synapse.access.http.8008 - 233 - INFO - GET-1116746- 176.14.254.64 - 8008 - Received request: GET /_matrix/media/v1/thumbnail/amorgan.xyz/chzgDJfCkiDOFITyulcRWQOn?width=40&height=40&method=crop
2019-05-31 11:55:56,322 - synapse.http.site - 203 - WARNING - GET-1116746- Error processing request <XForwardedForRequest at 0x7feb7a25e860 method='GET' uri='/_matrix/media/v1/thumbnail/amorgan.xyz/chzgDJfCkiDOFITyulcRWQOn?width=40&height=40&method=crop' clientproto='HTTP/1.0' site=8008>: <class 'twisted.internet.error.ConnectionDone'> Connection was closed cleanly.
2019-05-31 11:55:56,325 - synapse.rest.media.v1._base - 192 - WARNING - GET-1116746- Failed to write to consumer: <class 'AttributeError'> 'NoneType' object has no attribute 'registerProducer'
2019-05-31 11:55:56,325 - synapse.http.server - 112 - ERROR - GET-1116746- Failed handle request via 'ThumbnailResource': <XForwardedForRequest at 0x7feb7a25e860 method='GET' uri='/_matrix/media/v1/thumbnail/amorgan.xyz/chzgDJfCkiDOFITyulcRWQOn?width=40&height=40&method=crop' clientproto='HTTP/1.0' site=8008>
Traceback (most recent call last):
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
result = g.send(result)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/storage/_base.py", line 527, in runWithConnection
defer.returnValue(result)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/internet/defer.py", line 1362, in returnValue
raise _DefGen_Return(val)
twisted.internet.defer._DefGen_Return: [{'thumbnail_width': 32, 'thumbnail_type': 'image/png', 'thumbnail_height': 32, 'thumbnail_length': 2341, 'thumbnail_method': 'crop'}, {'thumbnail_width': 239, 'thumbnail_type': 'image/png', 'thumbnail_height': 240, 'thumbnail_length': 84450, 'thumbnail_method': 'scale'}, {'thumbnail_width': 399, 'thumbnail_type': 'image/png', 'thumbnail_height': 400, 'thumbnail_length': 196365, 'thumbnail_method': 'scale'}, {'thumbnail_width': 96, 'thumbnail_type': 'image/png', 'thumbnail_height': 96, 'thumbnail_length': 16151, 'thumbnail_method': 'crop'}]
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
result = g.send(result)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/storage/_base.py", line 487, in runInteraction
defer.returnValue(result)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/internet/defer.py", line 1362, in returnValue
raise _DefGen_Return(val)
twisted.internet.defer._DefGen_Return: [{'thumbnail_width': 32, 'thumbnail_type': 'image/png', 'thumbnail_height': 32, 'thumbnail_length': 2341, 'thumbnail_method': 'crop'}, {'thumbnail_width': 239, 'thumbnail_type': 'image/png', 'thumbnail_height': 240, 'thumbnail_length': 84450, 'thumbnail_method': 'scale'}, {'thumbnail_width': 399, 'thumbnail_type': 'image/png', 'thumbnail_height': 400, 'thumbnail_length': 196365, 'thumbnail_method': 'scale'}, {'thumbnail_width': 96, 'thumbnail_type': 'image/png', 'thumbnail_height': 96, 'thumbnail_length': 16151, 'thumbnail_method': 'crop'}]
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/rest/media/v1/_base.py", line 187, in respond_with_responder
yield responder.write_to_consumer(request)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/rest/media/v1/media_storage.py", line 263, in write_to_consumer
FileSender().beginFileTransfer(self.open_file, consumer)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/protocols/basic.py", line 923, in beginFileTransfer
self.consumer.registerProducer(self, False)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/web/http.py", line 961, in registerProducer
self.channel.registerProducer(producer, streaming)
AttributeError: 'NoneType' object has no attribute 'registerProducer'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/http/server.py", line 81, in wrapped_request_handler
yield h(self, request)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/rest/media/v1/thumbnail_resource.py", line 71, in _async_render_GET
request, media_id, width, height, method, m_type
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/rest/media/v1/thumbnail_resource.py", line 121, in _respond_local_thumbnail
yield respond_with_responder(request, responder, t_type, t_length)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/internet/defer.py", line 1418, in _inlineCallbacks
result = g.send(result)
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/synapse/rest/media/v1/_base.py", line 196, in respond_with_responder
request.unregisterProducer()
File "/home/ops/.synapse3/env3/lib/python3.5/site-packages/twisted/web/http.py", line 967, in unregisterProducer
self.channel.unregisterProducer()
AttributeError: 'NoneType' object has no attribute 'unregisterProducer'
2019-05-31 11:55:56,326 - synapse.http.server - 415 - WARNING - GET-1116746- Not sending response to request <XForwardedForRequest at 0x7feb7a25e860 method='GET' uri='/_matrix/media/v1/thumbnail/amorgan.xyz/chzgDJfCkiDOFITyulcRWQOn?width=40&height=40&method=crop' clientproto='HTTP/1.0' site=8008>, already disconnected.
2019-05-31 11:55:56,326 - synapse.access.http.8008 - 302 - INFO - GET-1116746- 176.14.254.64 - 8008 - {None} Processed request: 0.005sec/-0.004sec (0.000sec, 0.000sec) (0.001sec/0.002sec/2) 0B 200! "GET /_matrix/media/v1/thumbnail/amorgan.xyz/chzgDJfCkiDOFITyulcRWQOn?width=40&height=40&method=crop HTTP/1.0" "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:67.0) Gecko/20100101 Firefox/67.0" [0 dbevts]
|
AttributeError
|
def check_redaction(
room_version_obj: RoomVersion,
event: EventBase,
auth_events: StateMap[EventBase],
) -> bool:
"""Check whether the event sender is allowed to redact the target event.
Returns:
True if the the sender is allowed to redact the target event if the
target event was created by them.
False if the sender is allowed to redact the target event with no
further checks.
Raises:
AuthError if the event sender is definitely not allowed to redact
the target event.
"""
user_level = get_user_power_level(event.user_id, auth_events)
redact_level = _get_named_level(auth_events, "redact", 50)
if user_level >= redact_level:
return False
if room_version_obj.event_format == EventFormatVersions.V1:
redacter_domain = get_domain_from_id(event.event_id)
if not isinstance(event.redacts, str):
return False
redactee_domain = get_domain_from_id(event.redacts)
if redacter_domain == redactee_domain:
return True
else:
event.internal_metadata.recheck_redaction = True
return True
raise AuthError(403, "You don't have permission to redact events")
|
def check_redaction(
room_version_obj: RoomVersion,
event: EventBase,
auth_events: StateMap[EventBase],
) -> bool:
"""Check whether the event sender is allowed to redact the target event.
Returns:
True if the the sender is allowed to redact the target event if the
target event was created by them.
False if the sender is allowed to redact the target event with no
further checks.
Raises:
AuthError if the event sender is definitely not allowed to redact
the target event.
"""
user_level = get_user_power_level(event.user_id, auth_events)
redact_level = _get_named_level(auth_events, "redact", 50)
if user_level >= redact_level:
return False
if room_version_obj.event_format == EventFormatVersions.V1:
redacter_domain = get_domain_from_id(event.event_id)
redactee_domain = get_domain_from_id(event.redacts)
if redacter_domain == redactee_domain:
return True
else:
event.internal_metadata.recheck_redaction = True
return True
raise AuthError(403, "You don't have permission to redact events")
|
https://github.com/matrix-org/synapse/issues/8397
|
synapse_1 | 2020-09-24 18:15:54,480 - synapse.handlers.federation - 1146 - ERROR - GET-3753 - Failed to backfill from t2bot.io because FirstError[#0, [Failure instance: Traceback: <class 'AttributeError'>: 'NoneType' object has no attribute 'find'
synapse_1 | /usr/local/lib/python3.7/site-packages/twisted/internet/defer.py:460:callback
synapse_1 | /usr/local/lib/python3.7/site-packages/twisted/internet/defer.py:568:_startRunCallbacks
synapse_1 | /usr/local/lib/python3.7/site-packages/twisted/internet/defer.py:654:_runCallbacks
synapse_1 | /usr/local/lib/python3.7/site-packages/twisted/internet/defer.py:1475:gotResult
synapse_1 | --- <exception caught here> ---
synapse_1 | /usr/local/lib/python3.7/site-packages/twisted/internet/defer.py:1418:_inlineCallbacks
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/handlers/federation.py:1984:prep
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/handlers/federation.py:2134:_prep_event
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/handlers/federation.py:2322:do_auth
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/event_auth.py:190:check
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/event_auth.py:449:check_redaction
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/types.py:152:get_domain_from_id
synapse_1 | ]]
synapse_1 | Traceback (most recent call last):
synapse_1 | File "/usr/local/lib/python3.7/site-packages/synapse/handlers/federation.py", line 1115, in try_backfill
synapse_1 | dom, room_id, limit=100, extremities=extremities
synapse_1 | File "/usr/local/lib/python3.7/site-packages/synapse/handlers/federation.py", line 926, in backfill
synapse_1 | await self._handle_new_events(dest, ev_infos, backfilled=True)
synapse_1 | File "/usr/local/lib/python3.7/site-packages/synapse/handlers/federation.py", line 1991, in _handle_new_events
synapse_1 | consumeErrors=True,
synapse_1 | twisted.internet.defer.FirstError: FirstError[#0, [Failure instance: Traceback: <class 'AttributeError'>: 'NoneType' object has no attribute 'find'
synapse_1 | /usr/local/lib/python3.7/site-packages/twisted/internet/defer.py:460:callback
synapse_1 | /usr/local/lib/python3.7/site-packages/twisted/internet/defer.py:568:_startRunCallbacks
synapse_1 | /usr/local/lib/python3.7/site-packages/twisted/internet/defer.py:654:_runCallbacks
synapse_1 | /usr/local/lib/python3.7/site-packages/twisted/internet/defer.py:1475:gotResult
synapse_1 | --- <exception caught here> ---
synapse_1 | /usr/local/lib/python3.7/site-packages/twisted/internet/defer.py:1418:_inlineCallbacks
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/handlers/federation.py:1984:prep
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/handlers/federation.py:2134:_prep_event
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/handlers/federation.py:2322:do_auth
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/event_auth.py:190:check
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/event_auth.py:449:check_redaction
synapse_1 | /usr/local/lib/python3.7/site-packages/synapse/types.py:152:get_domain_from_id
synapse_1 | ]]
|
FirstError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.