repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
commonsense/metanl | metanl/extprocess.py | ProcessWrapper._get_process | python | def _get_process(self):
command = self._get_command()
return subprocess.Popen(command, bufsize=-1, close_fds=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE) | Create the process by running the specified command. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/extprocess.py#L70-L77 | [
"def _get_command(self):\n \"\"\"\n This method should return the command to run, as a list\n of arguments that can be used by subprocess.Popen.\n \"\"\"\n raise NotImplementedError\n"
] | class ProcessWrapper(object):
"""
A ProcessWrapper uses the `subprocess` module to keep a process open that
we can pipe stuff through to get NLP results.
Instead of every instance immediately opening a process, however, it waits
until the first time it is needed, then starts the process.
Many methods are intended to be implemented by subclasses of ProcessWrapper
that actually know what program they're talking to.
"""
def __del__(self):
"""
Clean up by closing the pipe.
"""
if hasattr(self, '_process'):
self._process.stdin.close()
@property
def process(self):
"""
Store the actual process in _process. If it doesn't exist yet, create
it.
"""
if hasattr(self, '_process'):
return self._process
else:
self._process = self._get_process()
return self._process
def _get_command(self):
"""
This method should return the command to run, as a list
of arguments that can be used by subprocess.Popen.
"""
raise NotImplementedError
def get_record_root(self, record):
"""
Given a *record* (the data that the external process returns for a
given single token), this specifies how to extract its root word
(aka its lemma).
"""
raise NotImplementedError
def get_record_token(self, record):
"""
Given a record, this specifies how to extract the exact word or token
that was processed.
"""
raise NotImplementedError
def analyze(self, text):
"""
Take text as input, run it through the external process, and return a
list of *records* containing the results.
"""
raise NotImplementedError
def send_input(self, data):
self.process.stdin.write(data)
self.process.stdin.flush()
def receive_output_line(self):
line = self.process.stdout.readline()
if not line:
raise ProcessError("reached end of output")
return line
def restart_process(self):
if hasattr(self, '_process'):
self._process.stdin.close()
self._process = self._get_process()
return self._process
def tokenize_list(self, text):
"""
Split a text into separate words.
"""
return [self.get_record_token(record) for record in self.analyze(text)]
def tokenize(self, text):
"""
Yell at people who are still using simplenlp's bad idea of
tokenization.
"""
raise NotImplementedError("tokenize is deprecated. Use tokenize_list.")
def is_stopword_record(self, record, common_words=False):
"""
Given a record, return whether it represents a stopword (a word that
should be discarded in NLP results).
Note that we want very few words to be stopwords. Words that are
meaningful but simply common can be recognized by their very high word
frequency, and handled appropriately. Often, we only want determiners
(such as 'a', 'an', and 'the' in English) to be stopwords.
Takes in a vestigial parameter, `common_words`, and ignores it.
"""
raise NotImplementedError
def is_stopword(self, text):
"""
Determine whether a single word is a stopword, or whether a short
phrase is made entirely of stopwords, disregarding context.
Use of this function should be avoided; it's better to give the text
in context and let the process determine which words are the stopwords.
"""
found_content_word = False
for record in self.analyze(text):
if not self.is_stopword_record(record):
found_content_word = True
break
return not found_content_word
def get_record_pos(self, record):
"""
Given a record, get the word's part of speech.
This default implementation simply distinguishes stopwords from
non-stopwords.
"""
if self.is_stopword_record(record):
return 'STOP'
else:
return 'TERM'
def normalize_list(self, text, cache=None):
"""
Get a canonical list representation of text, with words
separated and reduced to their base forms.
TODO: use the cache.
"""
words = []
analysis = self.analyze(text)
for record in analysis:
if not self.is_stopword_record(record):
words.append(self.get_record_root(record))
if not words:
# Don't discard stopwords if that's all you've got
words = [self.get_record_token(record) for record in analysis]
return words
def normalize(self, text, cache=None):
"""
Get a canonical string representation of this text, like
:meth:`normalize_list` but joined with spaces.
TODO: use the cache.
"""
return ' '.join(self.normalize_list(text, cache))
def tag_and_stem(self, text, cache=None):
"""
Given some text, return a sequence of (stem, pos, text) triples as
appropriate for the reader. `pos` can be as general or specific as
necessary (for example, it might label all parts of speech, or it might
only distinguish function words from others).
Twitter-style hashtags and at-mentions have the stem and pos they would
have without the leading # or @. For instance, if the reader's triple
for "thing" is ('thing', 'NN', 'things'), then "#things" would come out
as ('thing', 'NN', '#things').
"""
analysis = self.analyze(text)
triples = []
for record in analysis:
root = self.get_record_root(record)
token = self.get_record_token(record)
if token:
if unicode_is_punctuation(token):
triples.append((token, '.', token))
else:
pos = self.get_record_pos(record)
triples.append((root, pos, token))
return triples
def extract_phrases(self, text):
"""
Given some text, extract phrases of up to 2 content words,
and map their normalized form to the complete phrase.
"""
analysis = self.analyze(text)
for pos1 in range(len(analysis)):
rec1 = analysis[pos1]
if not self.is_stopword_record(rec1):
yield self.get_record_root(rec1), rec1[0]
for pos2 in range(pos1 + 1, len(analysis)):
rec2 = analysis[pos2]
if not self.is_stopword_record(rec2):
roots = [self.get_record_root(rec1),
self.get_record_root(rec2)]
pieces = [analysis[i][0] for i in range(pos1, pos2+1)]
term = ' '.join(roots)
phrase = ''.join(pieces)
yield term, phrase
break
|
commonsense/metanl | metanl/extprocess.py | ProcessWrapper.tokenize_list | python | def tokenize_list(self, text):
return [self.get_record_token(record) for record in self.analyze(text)] | Split a text into separate words. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/extprocess.py#L117-L121 | [
"def analyze(self, text):\n \"\"\"\n Take text as input, run it through the external process, and return a\n list of *records* containing the results.\n \"\"\"\n raise NotImplementedError\n"
] | class ProcessWrapper(object):
"""
A ProcessWrapper uses the `subprocess` module to keep a process open that
we can pipe stuff through to get NLP results.
Instead of every instance immediately opening a process, however, it waits
until the first time it is needed, then starts the process.
Many methods are intended to be implemented by subclasses of ProcessWrapper
that actually know what program they're talking to.
"""
def __del__(self):
"""
Clean up by closing the pipe.
"""
if hasattr(self, '_process'):
self._process.stdin.close()
@property
def process(self):
"""
Store the actual process in _process. If it doesn't exist yet, create
it.
"""
if hasattr(self, '_process'):
return self._process
else:
self._process = self._get_process()
return self._process
def _get_command(self):
"""
This method should return the command to run, as a list
of arguments that can be used by subprocess.Popen.
"""
raise NotImplementedError
def _get_process(self):
"""
Create the process by running the specified command.
"""
command = self._get_command()
return subprocess.Popen(command, bufsize=-1, close_fds=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE)
def get_record_root(self, record):
"""
Given a *record* (the data that the external process returns for a
given single token), this specifies how to extract its root word
(aka its lemma).
"""
raise NotImplementedError
def get_record_token(self, record):
"""
Given a record, this specifies how to extract the exact word or token
that was processed.
"""
raise NotImplementedError
def analyze(self, text):
"""
Take text as input, run it through the external process, and return a
list of *records* containing the results.
"""
raise NotImplementedError
def send_input(self, data):
self.process.stdin.write(data)
self.process.stdin.flush()
def receive_output_line(self):
line = self.process.stdout.readline()
if not line:
raise ProcessError("reached end of output")
return line
def restart_process(self):
if hasattr(self, '_process'):
self._process.stdin.close()
self._process = self._get_process()
return self._process
def tokenize(self, text):
"""
Yell at people who are still using simplenlp's bad idea of
tokenization.
"""
raise NotImplementedError("tokenize is deprecated. Use tokenize_list.")
def is_stopword_record(self, record, common_words=False):
"""
Given a record, return whether it represents a stopword (a word that
should be discarded in NLP results).
Note that we want very few words to be stopwords. Words that are
meaningful but simply common can be recognized by their very high word
frequency, and handled appropriately. Often, we only want determiners
(such as 'a', 'an', and 'the' in English) to be stopwords.
Takes in a vestigial parameter, `common_words`, and ignores it.
"""
raise NotImplementedError
def is_stopword(self, text):
"""
Determine whether a single word is a stopword, or whether a short
phrase is made entirely of stopwords, disregarding context.
Use of this function should be avoided; it's better to give the text
in context and let the process determine which words are the stopwords.
"""
found_content_word = False
for record in self.analyze(text):
if not self.is_stopword_record(record):
found_content_word = True
break
return not found_content_word
def get_record_pos(self, record):
"""
Given a record, get the word's part of speech.
This default implementation simply distinguishes stopwords from
non-stopwords.
"""
if self.is_stopword_record(record):
return 'STOP'
else:
return 'TERM'
def normalize_list(self, text, cache=None):
"""
Get a canonical list representation of text, with words
separated and reduced to their base forms.
TODO: use the cache.
"""
words = []
analysis = self.analyze(text)
for record in analysis:
if not self.is_stopword_record(record):
words.append(self.get_record_root(record))
if not words:
# Don't discard stopwords if that's all you've got
words = [self.get_record_token(record) for record in analysis]
return words
def normalize(self, text, cache=None):
"""
Get a canonical string representation of this text, like
:meth:`normalize_list` but joined with spaces.
TODO: use the cache.
"""
return ' '.join(self.normalize_list(text, cache))
def tag_and_stem(self, text, cache=None):
"""
Given some text, return a sequence of (stem, pos, text) triples as
appropriate for the reader. `pos` can be as general or specific as
necessary (for example, it might label all parts of speech, or it might
only distinguish function words from others).
Twitter-style hashtags and at-mentions have the stem and pos they would
have without the leading # or @. For instance, if the reader's triple
for "thing" is ('thing', 'NN', 'things'), then "#things" would come out
as ('thing', 'NN', '#things').
"""
analysis = self.analyze(text)
triples = []
for record in analysis:
root = self.get_record_root(record)
token = self.get_record_token(record)
if token:
if unicode_is_punctuation(token):
triples.append((token, '.', token))
else:
pos = self.get_record_pos(record)
triples.append((root, pos, token))
return triples
def extract_phrases(self, text):
"""
Given some text, extract phrases of up to 2 content words,
and map their normalized form to the complete phrase.
"""
analysis = self.analyze(text)
for pos1 in range(len(analysis)):
rec1 = analysis[pos1]
if not self.is_stopword_record(rec1):
yield self.get_record_root(rec1), rec1[0]
for pos2 in range(pos1 + 1, len(analysis)):
rec2 = analysis[pos2]
if not self.is_stopword_record(rec2):
roots = [self.get_record_root(rec1),
self.get_record_root(rec2)]
pieces = [analysis[i][0] for i in range(pos1, pos2+1)]
term = ' '.join(roots)
phrase = ''.join(pieces)
yield term, phrase
break
|
commonsense/metanl | metanl/extprocess.py | ProcessWrapper.is_stopword | python | def is_stopword(self, text):
found_content_word = False
for record in self.analyze(text):
if not self.is_stopword_record(record):
found_content_word = True
break
return not found_content_word | Determine whether a single word is a stopword, or whether a short
phrase is made entirely of stopwords, disregarding context.
Use of this function should be avoided; it's better to give the text
in context and let the process determine which words are the stopwords. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/extprocess.py#L144-L157 | [
"def analyze(self, text):\n \"\"\"\n Take text as input, run it through the external process, and return a\n list of *records* containing the results.\n \"\"\"\n raise NotImplementedError\n",
"def is_stopword_record(self, record, common_words=False):\n \"\"\"\n Given a record, return whether ... | class ProcessWrapper(object):
"""
A ProcessWrapper uses the `subprocess` module to keep a process open that
we can pipe stuff through to get NLP results.
Instead of every instance immediately opening a process, however, it waits
until the first time it is needed, then starts the process.
Many methods are intended to be implemented by subclasses of ProcessWrapper
that actually know what program they're talking to.
"""
def __del__(self):
"""
Clean up by closing the pipe.
"""
if hasattr(self, '_process'):
self._process.stdin.close()
@property
def process(self):
"""
Store the actual process in _process. If it doesn't exist yet, create
it.
"""
if hasattr(self, '_process'):
return self._process
else:
self._process = self._get_process()
return self._process
def _get_command(self):
"""
This method should return the command to run, as a list
of arguments that can be used by subprocess.Popen.
"""
raise NotImplementedError
def _get_process(self):
"""
Create the process by running the specified command.
"""
command = self._get_command()
return subprocess.Popen(command, bufsize=-1, close_fds=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE)
def get_record_root(self, record):
"""
Given a *record* (the data that the external process returns for a
given single token), this specifies how to extract its root word
(aka its lemma).
"""
raise NotImplementedError
def get_record_token(self, record):
"""
Given a record, this specifies how to extract the exact word or token
that was processed.
"""
raise NotImplementedError
def analyze(self, text):
"""
Take text as input, run it through the external process, and return a
list of *records* containing the results.
"""
raise NotImplementedError
def send_input(self, data):
self.process.stdin.write(data)
self.process.stdin.flush()
def receive_output_line(self):
line = self.process.stdout.readline()
if not line:
raise ProcessError("reached end of output")
return line
def restart_process(self):
if hasattr(self, '_process'):
self._process.stdin.close()
self._process = self._get_process()
return self._process
def tokenize_list(self, text):
"""
Split a text into separate words.
"""
return [self.get_record_token(record) for record in self.analyze(text)]
def tokenize(self, text):
"""
Yell at people who are still using simplenlp's bad idea of
tokenization.
"""
raise NotImplementedError("tokenize is deprecated. Use tokenize_list.")
def is_stopword_record(self, record, common_words=False):
"""
Given a record, return whether it represents a stopword (a word that
should be discarded in NLP results).
Note that we want very few words to be stopwords. Words that are
meaningful but simply common can be recognized by their very high word
frequency, and handled appropriately. Often, we only want determiners
(such as 'a', 'an', and 'the' in English) to be stopwords.
Takes in a vestigial parameter, `common_words`, and ignores it.
"""
raise NotImplementedError
def get_record_pos(self, record):
"""
Given a record, get the word's part of speech.
This default implementation simply distinguishes stopwords from
non-stopwords.
"""
if self.is_stopword_record(record):
return 'STOP'
else:
return 'TERM'
def normalize_list(self, text, cache=None):
"""
Get a canonical list representation of text, with words
separated and reduced to their base forms.
TODO: use the cache.
"""
words = []
analysis = self.analyze(text)
for record in analysis:
if not self.is_stopword_record(record):
words.append(self.get_record_root(record))
if not words:
# Don't discard stopwords if that's all you've got
words = [self.get_record_token(record) for record in analysis]
return words
def normalize(self, text, cache=None):
"""
Get a canonical string representation of this text, like
:meth:`normalize_list` but joined with spaces.
TODO: use the cache.
"""
return ' '.join(self.normalize_list(text, cache))
def tag_and_stem(self, text, cache=None):
"""
Given some text, return a sequence of (stem, pos, text) triples as
appropriate for the reader. `pos` can be as general or specific as
necessary (for example, it might label all parts of speech, or it might
only distinguish function words from others).
Twitter-style hashtags and at-mentions have the stem and pos they would
have without the leading # or @. For instance, if the reader's triple
for "thing" is ('thing', 'NN', 'things'), then "#things" would come out
as ('thing', 'NN', '#things').
"""
analysis = self.analyze(text)
triples = []
for record in analysis:
root = self.get_record_root(record)
token = self.get_record_token(record)
if token:
if unicode_is_punctuation(token):
triples.append((token, '.', token))
else:
pos = self.get_record_pos(record)
triples.append((root, pos, token))
return triples
def extract_phrases(self, text):
"""
Given some text, extract phrases of up to 2 content words,
and map their normalized form to the complete phrase.
"""
analysis = self.analyze(text)
for pos1 in range(len(analysis)):
rec1 = analysis[pos1]
if not self.is_stopword_record(rec1):
yield self.get_record_root(rec1), rec1[0]
for pos2 in range(pos1 + 1, len(analysis)):
rec2 = analysis[pos2]
if not self.is_stopword_record(rec2):
roots = [self.get_record_root(rec1),
self.get_record_root(rec2)]
pieces = [analysis[i][0] for i in range(pos1, pos2+1)]
term = ' '.join(roots)
phrase = ''.join(pieces)
yield term, phrase
break
|
commonsense/metanl | metanl/extprocess.py | ProcessWrapper.normalize_list | python | def normalize_list(self, text, cache=None):
words = []
analysis = self.analyze(text)
for record in analysis:
if not self.is_stopword_record(record):
words.append(self.get_record_root(record))
if not words:
# Don't discard stopwords if that's all you've got
words = [self.get_record_token(record) for record in analysis]
return words | Get a canonical list representation of text, with words
separated and reduced to their base forms.
TODO: use the cache. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/extprocess.py#L171-L186 | [
"def analyze(self, text):\n \"\"\"\n Take text as input, run it through the external process, and return a\n list of *records* containing the results.\n \"\"\"\n raise NotImplementedError\n",
"def is_stopword_record(self, record, common_words=False):\n \"\"\"\n Given a record, return whether ... | class ProcessWrapper(object):
"""
A ProcessWrapper uses the `subprocess` module to keep a process open that
we can pipe stuff through to get NLP results.
Instead of every instance immediately opening a process, however, it waits
until the first time it is needed, then starts the process.
Many methods are intended to be implemented by subclasses of ProcessWrapper
that actually know what program they're talking to.
"""
def __del__(self):
"""
Clean up by closing the pipe.
"""
if hasattr(self, '_process'):
self._process.stdin.close()
@property
def process(self):
"""
Store the actual process in _process. If it doesn't exist yet, create
it.
"""
if hasattr(self, '_process'):
return self._process
else:
self._process = self._get_process()
return self._process
def _get_command(self):
"""
This method should return the command to run, as a list
of arguments that can be used by subprocess.Popen.
"""
raise NotImplementedError
def _get_process(self):
"""
Create the process by running the specified command.
"""
command = self._get_command()
return subprocess.Popen(command, bufsize=-1, close_fds=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE)
def get_record_root(self, record):
"""
Given a *record* (the data that the external process returns for a
given single token), this specifies how to extract its root word
(aka its lemma).
"""
raise NotImplementedError
def get_record_token(self, record):
"""
Given a record, this specifies how to extract the exact word or token
that was processed.
"""
raise NotImplementedError
def analyze(self, text):
"""
Take text as input, run it through the external process, and return a
list of *records* containing the results.
"""
raise NotImplementedError
def send_input(self, data):
self.process.stdin.write(data)
self.process.stdin.flush()
def receive_output_line(self):
line = self.process.stdout.readline()
if not line:
raise ProcessError("reached end of output")
return line
def restart_process(self):
if hasattr(self, '_process'):
self._process.stdin.close()
self._process = self._get_process()
return self._process
def tokenize_list(self, text):
"""
Split a text into separate words.
"""
return [self.get_record_token(record) for record in self.analyze(text)]
def tokenize(self, text):
"""
Yell at people who are still using simplenlp's bad idea of
tokenization.
"""
raise NotImplementedError("tokenize is deprecated. Use tokenize_list.")
def is_stopword_record(self, record, common_words=False):
"""
Given a record, return whether it represents a stopword (a word that
should be discarded in NLP results).
Note that we want very few words to be stopwords. Words that are
meaningful but simply common can be recognized by their very high word
frequency, and handled appropriately. Often, we only want determiners
(such as 'a', 'an', and 'the' in English) to be stopwords.
Takes in a vestigial parameter, `common_words`, and ignores it.
"""
raise NotImplementedError
def is_stopword(self, text):
"""
Determine whether a single word is a stopword, or whether a short
phrase is made entirely of stopwords, disregarding context.
Use of this function should be avoided; it's better to give the text
in context and let the process determine which words are the stopwords.
"""
found_content_word = False
for record in self.analyze(text):
if not self.is_stopword_record(record):
found_content_word = True
break
return not found_content_word
def get_record_pos(self, record):
"""
Given a record, get the word's part of speech.
This default implementation simply distinguishes stopwords from
non-stopwords.
"""
if self.is_stopword_record(record):
return 'STOP'
else:
return 'TERM'
def normalize(self, text, cache=None):
"""
Get a canonical string representation of this text, like
:meth:`normalize_list` but joined with spaces.
TODO: use the cache.
"""
return ' '.join(self.normalize_list(text, cache))
def tag_and_stem(self, text, cache=None):
"""
Given some text, return a sequence of (stem, pos, text) triples as
appropriate for the reader. `pos` can be as general or specific as
necessary (for example, it might label all parts of speech, or it might
only distinguish function words from others).
Twitter-style hashtags and at-mentions have the stem and pos they would
have without the leading # or @. For instance, if the reader's triple
for "thing" is ('thing', 'NN', 'things'), then "#things" would come out
as ('thing', 'NN', '#things').
"""
analysis = self.analyze(text)
triples = []
for record in analysis:
root = self.get_record_root(record)
token = self.get_record_token(record)
if token:
if unicode_is_punctuation(token):
triples.append((token, '.', token))
else:
pos = self.get_record_pos(record)
triples.append((root, pos, token))
return triples
def extract_phrases(self, text):
"""
Given some text, extract phrases of up to 2 content words,
and map their normalized form to the complete phrase.
"""
analysis = self.analyze(text)
for pos1 in range(len(analysis)):
rec1 = analysis[pos1]
if not self.is_stopword_record(rec1):
yield self.get_record_root(rec1), rec1[0]
for pos2 in range(pos1 + 1, len(analysis)):
rec2 = analysis[pos2]
if not self.is_stopword_record(rec2):
roots = [self.get_record_root(rec1),
self.get_record_root(rec2)]
pieces = [analysis[i][0] for i in range(pos1, pos2+1)]
term = ' '.join(roots)
phrase = ''.join(pieces)
yield term, phrase
break
|
commonsense/metanl | metanl/extprocess.py | ProcessWrapper.tag_and_stem | python | def tag_and_stem(self, text, cache=None):
analysis = self.analyze(text)
triples = []
for record in analysis:
root = self.get_record_root(record)
token = self.get_record_token(record)
if token:
if unicode_is_punctuation(token):
triples.append((token, '.', token))
else:
pos = self.get_record_pos(record)
triples.append((root, pos, token))
return triples | Given some text, return a sequence of (stem, pos, text) triples as
appropriate for the reader. `pos` can be as general or specific as
necessary (for example, it might label all parts of speech, or it might
only distinguish function words from others).
Twitter-style hashtags and at-mentions have the stem and pos they would
have without the leading # or @. For instance, if the reader's triple
for "thing" is ('thing', 'NN', 'things'), then "#things" would come out
as ('thing', 'NN', '#things'). | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/extprocess.py#L197-L222 | [
"def unicode_is_punctuation(text):\n \"\"\"\n Test if a token is made entirely of Unicode characters of the following\n classes:\n\n - P: punctuation\n - S: symbols\n - Z: separators\n - M: combining marks\n - C: control characters\n\n >>> unicode_is_punctuation('word')\n False\n >>... | class ProcessWrapper(object):
"""
A ProcessWrapper uses the `subprocess` module to keep a process open that
we can pipe stuff through to get NLP results.
Instead of every instance immediately opening a process, however, it waits
until the first time it is needed, then starts the process.
Many methods are intended to be implemented by subclasses of ProcessWrapper
that actually know what program they're talking to.
"""
def __del__(self):
"""
Clean up by closing the pipe.
"""
if hasattr(self, '_process'):
self._process.stdin.close()
@property
def process(self):
"""
Store the actual process in _process. If it doesn't exist yet, create
it.
"""
if hasattr(self, '_process'):
return self._process
else:
self._process = self._get_process()
return self._process
def _get_command(self):
"""
This method should return the command to run, as a list
of arguments that can be used by subprocess.Popen.
"""
raise NotImplementedError
def _get_process(self):
"""
Create the process by running the specified command.
"""
command = self._get_command()
return subprocess.Popen(command, bufsize=-1, close_fds=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE)
def get_record_root(self, record):
"""
Given a *record* (the data that the external process returns for a
given single token), this specifies how to extract its root word
(aka its lemma).
"""
raise NotImplementedError
def get_record_token(self, record):
"""
Given a record, this specifies how to extract the exact word or token
that was processed.
"""
raise NotImplementedError
def analyze(self, text):
"""
Take text as input, run it through the external process, and return a
list of *records* containing the results.
"""
raise NotImplementedError
def send_input(self, data):
self.process.stdin.write(data)
self.process.stdin.flush()
def receive_output_line(self):
line = self.process.stdout.readline()
if not line:
raise ProcessError("reached end of output")
return line
def restart_process(self):
if hasattr(self, '_process'):
self._process.stdin.close()
self._process = self._get_process()
return self._process
def tokenize_list(self, text):
"""
Split a text into separate words.
"""
return [self.get_record_token(record) for record in self.analyze(text)]
def tokenize(self, text):
"""
Yell at people who are still using simplenlp's bad idea of
tokenization.
"""
raise NotImplementedError("tokenize is deprecated. Use tokenize_list.")
def is_stopword_record(self, record, common_words=False):
"""
Given a record, return whether it represents a stopword (a word that
should be discarded in NLP results).
Note that we want very few words to be stopwords. Words that are
meaningful but simply common can be recognized by their very high word
frequency, and handled appropriately. Often, we only want determiners
(such as 'a', 'an', and 'the' in English) to be stopwords.
Takes in a vestigial parameter, `common_words`, and ignores it.
"""
raise NotImplementedError
def is_stopword(self, text):
"""
Determine whether a single word is a stopword, or whether a short
phrase is made entirely of stopwords, disregarding context.
Use of this function should be avoided; it's better to give the text
in context and let the process determine which words are the stopwords.
"""
found_content_word = False
for record in self.analyze(text):
if not self.is_stopword_record(record):
found_content_word = True
break
return not found_content_word
def get_record_pos(self, record):
"""
Given a record, get the word's part of speech.
This default implementation simply distinguishes stopwords from
non-stopwords.
"""
if self.is_stopword_record(record):
return 'STOP'
else:
return 'TERM'
def normalize_list(self, text, cache=None):
"""
Get a canonical list representation of text, with words
separated and reduced to their base forms.
TODO: use the cache.
"""
words = []
analysis = self.analyze(text)
for record in analysis:
if not self.is_stopword_record(record):
words.append(self.get_record_root(record))
if not words:
# Don't discard stopwords if that's all you've got
words = [self.get_record_token(record) for record in analysis]
return words
def normalize(self, text, cache=None):
"""
Get a canonical string representation of this text, like
:meth:`normalize_list` but joined with spaces.
TODO: use the cache.
"""
return ' '.join(self.normalize_list(text, cache))
def extract_phrases(self, text):
"""
Given some text, extract phrases of up to 2 content words,
and map their normalized form to the complete phrase.
"""
analysis = self.analyze(text)
for pos1 in range(len(analysis)):
rec1 = analysis[pos1]
if not self.is_stopword_record(rec1):
yield self.get_record_root(rec1), rec1[0]
for pos2 in range(pos1 + 1, len(analysis)):
rec2 = analysis[pos2]
if not self.is_stopword_record(rec2):
roots = [self.get_record_root(rec1),
self.get_record_root(rec2)]
pieces = [analysis[i][0] for i in range(pos1, pos2+1)]
term = ' '.join(roots)
phrase = ''.join(pieces)
yield term, phrase
break
|
commonsense/metanl | metanl/extprocess.py | ProcessWrapper.extract_phrases | python | def extract_phrases(self, text):
analysis = self.analyze(text)
for pos1 in range(len(analysis)):
rec1 = analysis[pos1]
if not self.is_stopword_record(rec1):
yield self.get_record_root(rec1), rec1[0]
for pos2 in range(pos1 + 1, len(analysis)):
rec2 = analysis[pos2]
if not self.is_stopword_record(rec2):
roots = [self.get_record_root(rec1),
self.get_record_root(rec2)]
pieces = [analysis[i][0] for i in range(pos1, pos2+1)]
term = ' '.join(roots)
phrase = ''.join(pieces)
yield term, phrase
break | Given some text, extract phrases of up to 2 content words,
and map their normalized form to the complete phrase. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/extprocess.py#L224-L243 | [
"def analyze(self, text):\n \"\"\"\n Take text as input, run it through the external process, and return a\n list of *records* containing the results.\n \"\"\"\n raise NotImplementedError\n",
"def is_stopword_record(self, record, common_words=False):\n \"\"\"\n Given a record, return whether ... | class ProcessWrapper(object):
"""
A ProcessWrapper uses the `subprocess` module to keep a process open that
we can pipe stuff through to get NLP results.
Instead of every instance immediately opening a process, however, it waits
until the first time it is needed, then starts the process.
Many methods are intended to be implemented by subclasses of ProcessWrapper
that actually know what program they're talking to.
"""
def __del__(self):
"""
Clean up by closing the pipe.
"""
if hasattr(self, '_process'):
self._process.stdin.close()
@property
def process(self):
"""
Store the actual process in _process. If it doesn't exist yet, create
it.
"""
if hasattr(self, '_process'):
return self._process
else:
self._process = self._get_process()
return self._process
def _get_command(self):
"""
This method should return the command to run, as a list
of arguments that can be used by subprocess.Popen.
"""
raise NotImplementedError
def _get_process(self):
"""
Create the process by running the specified command.
"""
command = self._get_command()
return subprocess.Popen(command, bufsize=-1, close_fds=True,
stdout=subprocess.PIPE,
stdin=subprocess.PIPE)
def get_record_root(self, record):
"""
Given a *record* (the data that the external process returns for a
given single token), this specifies how to extract its root word
(aka its lemma).
"""
raise NotImplementedError
def get_record_token(self, record):
"""
Given a record, this specifies how to extract the exact word or token
that was processed.
"""
raise NotImplementedError
def analyze(self, text):
"""
Take text as input, run it through the external process, and return a
list of *records* containing the results.
"""
raise NotImplementedError
def send_input(self, data):
self.process.stdin.write(data)
self.process.stdin.flush()
def receive_output_line(self):
line = self.process.stdout.readline()
if not line:
raise ProcessError("reached end of output")
return line
def restart_process(self):
if hasattr(self, '_process'):
self._process.stdin.close()
self._process = self._get_process()
return self._process
def tokenize_list(self, text):
"""
Split a text into separate words.
"""
return [self.get_record_token(record) for record in self.analyze(text)]
def tokenize(self, text):
"""
Yell at people who are still using simplenlp's bad idea of
tokenization.
"""
raise NotImplementedError("tokenize is deprecated. Use tokenize_list.")
def is_stopword_record(self, record, common_words=False):
"""
Given a record, return whether it represents a stopword (a word that
should be discarded in NLP results).
Note that we want very few words to be stopwords. Words that are
meaningful but simply common can be recognized by their very high word
frequency, and handled appropriately. Often, we only want determiners
(such as 'a', 'an', and 'the' in English) to be stopwords.
Takes in a vestigial parameter, `common_words`, and ignores it.
"""
raise NotImplementedError
def is_stopword(self, text):
"""
Determine whether a single word is a stopword, or whether a short
phrase is made entirely of stopwords, disregarding context.
Use of this function should be avoided; it's better to give the text
in context and let the process determine which words are the stopwords.
"""
found_content_word = False
for record in self.analyze(text):
if not self.is_stopword_record(record):
found_content_word = True
break
return not found_content_word
def get_record_pos(self, record):
"""
Given a record, get the word's part of speech.
This default implementation simply distinguishes stopwords from
non-stopwords.
"""
if self.is_stopword_record(record):
return 'STOP'
else:
return 'TERM'
def normalize_list(self, text, cache=None):
"""
Get a canonical list representation of text, with words
separated and reduced to their base forms.
TODO: use the cache.
"""
words = []
analysis = self.analyze(text)
for record in analysis:
if not self.is_stopword_record(record):
words.append(self.get_record_root(record))
if not words:
# Don't discard stopwords if that's all you've got
words = [self.get_record_token(record) for record in analysis]
return words
def normalize(self, text, cache=None):
"""
Get a canonical string representation of this text, like
:meth:`normalize_list` but joined with spaces.
TODO: use the cache.
"""
return ' '.join(self.normalize_list(text, cache))
def tag_and_stem(self, text, cache=None):
"""
Given some text, return a sequence of (stem, pos, text) triples as
appropriate for the reader. `pos` can be as general or specific as
necessary (for example, it might label all parts of speech, or it might
only distinguish function words from others).
Twitter-style hashtags and at-mentions have the stem and pos they would
have without the leading # or @. For instance, if the reader's triple
for "thing" is ('thing', 'NN', 'things'), then "#things" would come out
as ('thing', 'NN', '#things').
"""
analysis = self.analyze(text)
triples = []
for record in analysis:
root = self.get_record_root(record)
token = self.get_record_token(record)
if token:
if unicode_is_punctuation(token):
triples.append((token, '.', token))
else:
pos = self.get_record_pos(record)
triples.append((root, pos, token))
return triples
|
commonsense/metanl | metanl/mecab.py | to_kana | python | def to_kana(text):
records = MECAB.analyze(text)
kana = []
for record in records:
if record.pronunciation:
kana.append(record.pronunciation)
elif record.reading:
kana.append(record.reading)
else:
kana.append(record.surface)
return ' '.join(k for k in kana if k) | Use MeCab to turn any text into its phonetic spelling, as katakana
separated by spaces. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/mecab.py#L208-L222 | [
"def analyze(self, text):\n \"\"\"\n Runs a line of text through MeCab, and returns the results as a\n list of lists (\"records\") that contain the MeCab analysis of each\n word.\n \"\"\"\n try:\n self.process # make sure things are loaded\n text = render_safe(text).replace('\\n', '... | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
"""
This module provides some basic Japanese NLP by wrapping the output of MeCab.
It can tokenize and normalize Japanese words, detect and remove stopwords,
and it can even respell words in kana or romaji.
This requires mecab to be installed separately. On Ubuntu:
sudo apt-get install mecab mecab-ipadic-utf8
>>> print(normalize('これはテストです'))
テスト
>>> tag_and_stem('これはテストです。')
[('\u3053\u308c', '~\u540d\u8a5e', '\u3053\u308c'), ('\u306f', '~\u52a9\u8a5e', '\u306f'), ('\u30c6\u30b9\u30c8', '\u540d\u8a5e', '\u30c6\u30b9\u30c8'), ('\u3067\u3059', '~\u52a9\u52d5\u8a5e', '\u3067\u3059'), ('\u3002', '.', '\u3002')]
"""
from metanl.token_utils import string_pieces
from metanl.extprocess import ProcessWrapper, ProcessError, render_safe
from collections import namedtuple
import unicodedata
import re
import sys
if sys.version_info.major == 2:
range = xrange
str_func = unicode
else:
str_func = str
class MeCabError(ProcessError):
pass
MeCabRecord = namedtuple('MeCabRecord',
[
'surface',
'pos',
'subclass1',
'subclass2',
'subclass3',
'conjugation',
'form',
'root',
'reading',
'pronunciation'
]
)
# MeCab outputs the part of speech of its terms. We can simply identify
# particular (coarse or fine) parts of speech as containing stopwords.
STOPWORD_CATEGORIES = set([
'助詞', # coarse: particle
'助動詞', # coarse: auxiliary verb
'接続詞', # coarse: conjunction
'フィラー', # coarse: filler
'記号', # coarse: symbol
'非自立', # fine: 'not independent'
])
# Forms of particular words should also be considered stopwords sometimes.
#
# A thought: Should the rare kanji version of suru not be a stopword?
# I'll need to ask someone who knows more Japanese, but it may be
# that if they're using the kanji it's for particular emphasis.
STOPWORD_ROOTS = set([
'する', # suru: "to do"
'為る', # suru in kanji (very rare)
'くる', # kuru: "to come"
'来る', # kuru in kanji
'いく', # iku: "to go"
'行く', # iku in kanji
'いる', # iru: "to be" (animate)
'居る', # iru in kanji
'ある', # aru: "to exist" or "to have"
'有る', # aru in kanji
'もの', # mono: "thing"
'物', # mono in kanji
'よう', # yō: "way"
'様', # yō in kanji
'れる', # passive suffix
'これ', # kore: "this"
'それ', # sore: "that"
'あれ', # are: "that over there"
'この', # kono: "this"
'その', # sono: "that"
'あの', # ano: "that over there", "yon"
])
class MeCabWrapper(ProcessWrapper):
"""
Handle Japanese text using the command-line version of MeCab.
(mecab-python is convenient, but its installer is too flaky to rely on.)
ja_cabocha gives more sophisticated results, but requires a large number of
additional dependencies. Using this tool for Japanese requires only
MeCab to be installed and accepting UTF-8 text.
"""
def _get_command(self):
return ['mecab']
def _get_process(self):
try:
proc = ProcessWrapper._get_process(self)
except (OSError, ProcessError):
raise MeCabError("MeCab didn't start. See README.txt for details "
"about installing MeCab and other Japanese NLP "
"tools.")
return proc
def get_record_root(self, record):
"""
Given a MeCab record, return the root word.
"""
if record.root == '*':
return record.surface
else:
return record.root
def get_record_token(self, record):
return record.surface
def analyze(self, text):
"""
Runs a line of text through MeCab, and returns the results as a
list of lists ("records") that contain the MeCab analysis of each
word.
"""
try:
self.process # make sure things are loaded
text = render_safe(text).replace('\n', ' ').lower()
results = []
for chunk in string_pieces(text):
self.send_input((chunk + '\n').encode('utf-8'))
while True:
out_line = self.receive_output_line().decode('utf-8')
if out_line == 'EOS\n':
break
word, info = out_line.strip('\n').split('\t')
record_parts = [word] + info.split(',')
# Pad the record out to have 10 parts if it doesn't
record_parts += [None] * (10 - len(record_parts))
record = MeCabRecord(*record_parts)
# special case for detecting nai -> n
if (record.surface == 'ん' and
record.conjugation == '不変化型'):
# rebuild the record so that record.root is 'nai'
record_parts[MeCabRecord._fields.index('root')] = 'ない'
record = MeCabRecord(*record_parts)
results.append(record)
return results
except ProcessError:
self.restart_process()
return self.analyze(text)
def is_stopword_record(self, record):
"""
Determine whether a single MeCab record represents a stopword.
This mostly determines words to strip based on their parts of speech.
If common_words is set to True (default), it will also strip common
verbs and nouns such as くる and よう. If more_stopwords is True, it
will look at the sub-part of speech to remove more categories.
"""
# preserve negations
if record.root == 'ない':
return False
return (
record.pos in STOPWORD_CATEGORIES or
record.subclass1 in STOPWORD_CATEGORIES or
record.root in STOPWORD_ROOTS
)
def get_record_pos(self, record):
"""
Given a record, get the word's part of speech.
Here we're going to return MeCab's part of speech (written in
Japanese), though if it's a stopword we prefix the part of speech
with '~'.
"""
if self.is_stopword_record(record):
return '~' + record.pos
else:
return record.pos
class NoStopwordMeCabWrapper(MeCabWrapper):
"""
This version of the MeCabWrapper doesn't label anything as a stopword. It's
used in building ConceptNet because discarding stopwords based on MeCab
categories loses too much information.
"""
def is_stopword_record(self, record, common_words=False):
return False
# Define the classes of characters we'll be trying to transliterate
NOT_KANA, KANA, NN, SMALL, SMALL_Y, SMALL_TSU, PROLONG = range(7)
def get_kana_info(char):
"""
Return two things about each character:
- Its transliterated value (in Roman characters, if it's a kana)
- A class of characters indicating how it affects the romanization
"""
try:
name = unicodedata.name(char)
except ValueError:
return char, NOT_KANA
# The names we're dealing with will probably look like
# "KATAKANA CHARACTER ZI".
if (name.startswith('HIRAGANA LETTER') or
name.startswith('KATAKANA LETTER') or
name.startswith('KATAKANA-HIRAGANA')):
names = name.split()
syllable = str_func(names[-1].lower())
if name.endswith('SMALL TU'):
# The small tsu (っ) doubles the following consonant.
# It'll show up as 't' on its own.
return 't', SMALL_TSU
elif names[-1] == 'N':
return 'n', NN
elif names[1] == 'PROLONGED':
# The prolongation marker doubles the previous vowel.
# It'll show up as '_' on its own.
return '_', PROLONG
elif names[-2] == 'SMALL':
# Small characters tend to modify the sound of the previous
# kana. If they can't modify anything, they're appended to
# the letter 'x' instead.
if syllable.startswith('y'):
return 'x' + syllable, SMALL_Y
else:
return 'x' + syllable, SMALL
return syllable, KANA
else:
if char in ROMAN_PUNCTUATION_TABLE:
char = ROMAN_PUNCTUATION_TABLE[char]
return char, NOT_KANA
def respell_hepburn(syllable):
while syllable[:2] in HEPBURN_TABLE:
syllable = HEPBURN_TABLE[syllable[:2]] + syllable[2:]
return syllable
def romanize(text, respell=respell_hepburn):
if respell is None:
respell = lambda x: x
kana = to_kana(str_func(text))
pieces = []
prevgroup = NOT_KANA
for char in kana:
roman, group = get_kana_info(char)
if prevgroup == NN:
# When the previous syllable is 'n' and the next syllable would
# make it ambiguous, add an apostrophe.
if group != KANA or roman[0] in 'aeinouy':
if unicodedata.category(roman[0])[0] == 'L':
pieces[-1] += "'"
# Determine how to spell the current character
if group == NOT_KANA:
pieces.append(roman)
elif group == SMALL_TSU or group == NN:
pieces.append(roman)
elif group == SMALL_Y:
if prevgroup == KANA:
# Modify the previous syllable, if that makes sense. For
# example, 'ni' + 'ya' becomes 'nya'.
if not pieces[-1].endswith('i'):
pieces.append(roman)
else:
modifier = roman[1:]
modified = pieces[-1]
pieces[-1] = modified[:-1] + modifier
else:
pieces.append(roman)
elif group == SMALL:
# Don't respell small vowels _yet_. We'll handle that at the end.
# This may be a bit ambiguous, but nobody expects to see "tea"
# spelled "texi".
pieces.append(roman)
elif group == PROLONG:
if prevgroup in (KANA, SMALL_Y, SMALL):
pieces[-1] = pieces[-1][:-1] + respell(pieces[-1][-1] + '_')
else:
pieces.append(roman)
else: # this is a normal kana
if prevgroup == SMALL_TSU:
if roman[0] in 'aeiouy':
# wait, there's no consonant there; cope by respelling the
# previous kana as 't-'
pieces[-1] = 't-'
else:
# Turn the previous 't' into a copy of the first consonant
pieces[-1] = roman[0]
elif prevgroup == NN:
# Let Hepburn respell 'n' as 'm' in words such as 'shimbun'.
try_respell = respell(pieces[-1] + roman[0])
if try_respell[:-1] != pieces[-1]:
pieces[-1] = try_respell[:-1]
pieces.append(roman)
prevgroup = group
romantext = ''.join(respell(piece) for piece in pieces)
romantext = re.sub(r'[aeiou]x([aeiou])', r'\1', romantext)
return romantext
# Hepburn romanization is the most familiar to English speakers. It involves
# respelling certain parts of romanized words to better match their
# pronunciation. For example, the name for Mount Fuji is respelled from
# "huzi-san" to "fuji-san".
HEPBURN_TABLE = {
'si': 'shi',
'sy': 'sh',
'ti': 'chi',
'ty': 'ch',
'tu': 'tsu',
'hu': 'fu',
'zi': 'ji',
'di': 'ji',
'zy': 'j',
'dy': 'j',
'nm': 'mm',
'nb': 'mb',
'np': 'mp',
'a_': 'aa',
'e_': 'ee',
'i_': 'ii',
'o_': 'ou',
'u_': 'uu'
}
ROMAN_PUNCTUATION_TABLE = {
'・': '.',
'。': '.',
'、': ',',
'!': '!',
'「': '``',
'」': "''",
'?': '?',
'〜': '~'
}
# Provide externally available functions.
MECAB = MeCabWrapper()
normalize = MECAB.normalize
normalize_list = MECAB.normalize_list
tokenize = MECAB.tokenize
tokenize_list = MECAB.tokenize_list
analyze = MECAB.analyze
tag_and_stem = MECAB.tag_and_stem
is_stopword = MECAB.is_stopword
|
commonsense/metanl | metanl/mecab.py | get_kana_info | python | def get_kana_info(char):
try:
name = unicodedata.name(char)
except ValueError:
return char, NOT_KANA
# The names we're dealing with will probably look like
# "KATAKANA CHARACTER ZI".
if (name.startswith('HIRAGANA LETTER') or
name.startswith('KATAKANA LETTER') or
name.startswith('KATAKANA-HIRAGANA')):
names = name.split()
syllable = str_func(names[-1].lower())
if name.endswith('SMALL TU'):
# The small tsu (っ) doubles the following consonant.
# It'll show up as 't' on its own.
return 't', SMALL_TSU
elif names[-1] == 'N':
return 'n', NN
elif names[1] == 'PROLONGED':
# The prolongation marker doubles the previous vowel.
# It'll show up as '_' on its own.
return '_', PROLONG
elif names[-2] == 'SMALL':
# Small characters tend to modify the sound of the previous
# kana. If they can't modify anything, they're appended to
# the letter 'x' instead.
if syllable.startswith('y'):
return 'x' + syllable, SMALL_Y
else:
return 'x' + syllable, SMALL
return syllable, KANA
else:
if char in ROMAN_PUNCTUATION_TABLE:
char = ROMAN_PUNCTUATION_TABLE[char]
return char, NOT_KANA | Return two things about each character:
- Its transliterated value (in Roman characters, if it's a kana)
- A class of characters indicating how it affects the romanization | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/mecab.py#L225-L268 | null | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
"""
This module provides some basic Japanese NLP by wrapping the output of MeCab.
It can tokenize and normalize Japanese words, detect and remove stopwords,
and it can even respell words in kana or romaji.
This requires mecab to be installed separately. On Ubuntu:
sudo apt-get install mecab mecab-ipadic-utf8
>>> print(normalize('これはテストです'))
テスト
>>> tag_and_stem('これはテストです。')
[('\u3053\u308c', '~\u540d\u8a5e', '\u3053\u308c'), ('\u306f', '~\u52a9\u8a5e', '\u306f'), ('\u30c6\u30b9\u30c8', '\u540d\u8a5e', '\u30c6\u30b9\u30c8'), ('\u3067\u3059', '~\u52a9\u52d5\u8a5e', '\u3067\u3059'), ('\u3002', '.', '\u3002')]
"""
from metanl.token_utils import string_pieces
from metanl.extprocess import ProcessWrapper, ProcessError, render_safe
from collections import namedtuple
import unicodedata
import re
import sys
if sys.version_info.major == 2:
range = xrange
str_func = unicode
else:
str_func = str
class MeCabError(ProcessError):
pass
MeCabRecord = namedtuple('MeCabRecord',
[
'surface',
'pos',
'subclass1',
'subclass2',
'subclass3',
'conjugation',
'form',
'root',
'reading',
'pronunciation'
]
)
# MeCab outputs the part of speech of its terms. We can simply identify
# particular (coarse or fine) parts of speech as containing stopwords.
STOPWORD_CATEGORIES = set([
'助詞', # coarse: particle
'助動詞', # coarse: auxiliary verb
'接続詞', # coarse: conjunction
'フィラー', # coarse: filler
'記号', # coarse: symbol
'非自立', # fine: 'not independent'
])
# Forms of particular words should also be considered stopwords sometimes.
#
# A thought: Should the rare kanji version of suru not be a stopword?
# I'll need to ask someone who knows more Japanese, but it may be
# that if they're using the kanji it's for particular emphasis.
STOPWORD_ROOTS = set([
'する', # suru: "to do"
'為る', # suru in kanji (very rare)
'くる', # kuru: "to come"
'来る', # kuru in kanji
'いく', # iku: "to go"
'行く', # iku in kanji
'いる', # iru: "to be" (animate)
'居る', # iru in kanji
'ある', # aru: "to exist" or "to have"
'有る', # aru in kanji
'もの', # mono: "thing"
'物', # mono in kanji
'よう', # yō: "way"
'様', # yō in kanji
'れる', # passive suffix
'これ', # kore: "this"
'それ', # sore: "that"
'あれ', # are: "that over there"
'この', # kono: "this"
'その', # sono: "that"
'あの', # ano: "that over there", "yon"
])
class MeCabWrapper(ProcessWrapper):
"""
Handle Japanese text using the command-line version of MeCab.
(mecab-python is convenient, but its installer is too flaky to rely on.)
ja_cabocha gives more sophisticated results, but requires a large number of
additional dependencies. Using this tool for Japanese requires only
MeCab to be installed and accepting UTF-8 text.
"""
def _get_command(self):
return ['mecab']
def _get_process(self):
try:
proc = ProcessWrapper._get_process(self)
except (OSError, ProcessError):
raise MeCabError("MeCab didn't start. See README.txt for details "
"about installing MeCab and other Japanese NLP "
"tools.")
return proc
def get_record_root(self, record):
"""
Given a MeCab record, return the root word.
"""
if record.root == '*':
return record.surface
else:
return record.root
def get_record_token(self, record):
return record.surface
def analyze(self, text):
"""
Runs a line of text through MeCab, and returns the results as a
list of lists ("records") that contain the MeCab analysis of each
word.
"""
try:
self.process # make sure things are loaded
text = render_safe(text).replace('\n', ' ').lower()
results = []
for chunk in string_pieces(text):
self.send_input((chunk + '\n').encode('utf-8'))
while True:
out_line = self.receive_output_line().decode('utf-8')
if out_line == 'EOS\n':
break
word, info = out_line.strip('\n').split('\t')
record_parts = [word] + info.split(',')
# Pad the record out to have 10 parts if it doesn't
record_parts += [None] * (10 - len(record_parts))
record = MeCabRecord(*record_parts)
# special case for detecting nai -> n
if (record.surface == 'ん' and
record.conjugation == '不変化型'):
# rebuild the record so that record.root is 'nai'
record_parts[MeCabRecord._fields.index('root')] = 'ない'
record = MeCabRecord(*record_parts)
results.append(record)
return results
except ProcessError:
self.restart_process()
return self.analyze(text)
def is_stopword_record(self, record):
"""
Determine whether a single MeCab record represents a stopword.
This mostly determines words to strip based on their parts of speech.
If common_words is set to True (default), it will also strip common
verbs and nouns such as くる and よう. If more_stopwords is True, it
will look at the sub-part of speech to remove more categories.
"""
# preserve negations
if record.root == 'ない':
return False
return (
record.pos in STOPWORD_CATEGORIES or
record.subclass1 in STOPWORD_CATEGORIES or
record.root in STOPWORD_ROOTS
)
def get_record_pos(self, record):
"""
Given a record, get the word's part of speech.
Here we're going to return MeCab's part of speech (written in
Japanese), though if it's a stopword we prefix the part of speech
with '~'.
"""
if self.is_stopword_record(record):
return '~' + record.pos
else:
return record.pos
class NoStopwordMeCabWrapper(MeCabWrapper):
"""
This version of the MeCabWrapper doesn't label anything as a stopword. It's
used in building ConceptNet because discarding stopwords based on MeCab
categories loses too much information.
"""
def is_stopword_record(self, record, common_words=False):
return False
# Define the classes of characters we'll be trying to transliterate
NOT_KANA, KANA, NN, SMALL, SMALL_Y, SMALL_TSU, PROLONG = range(7)
def to_kana(text):
"""
Use MeCab to turn any text into its phonetic spelling, as katakana
separated by spaces.
"""
records = MECAB.analyze(text)
kana = []
for record in records:
if record.pronunciation:
kana.append(record.pronunciation)
elif record.reading:
kana.append(record.reading)
else:
kana.append(record.surface)
return ' '.join(k for k in kana if k)
def respell_hepburn(syllable):
while syllable[:2] in HEPBURN_TABLE:
syllable = HEPBURN_TABLE[syllable[:2]] + syllable[2:]
return syllable
def romanize(text, respell=respell_hepburn):
if respell is None:
respell = lambda x: x
kana = to_kana(str_func(text))
pieces = []
prevgroup = NOT_KANA
for char in kana:
roman, group = get_kana_info(char)
if prevgroup == NN:
# When the previous syllable is 'n' and the next syllable would
# make it ambiguous, add an apostrophe.
if group != KANA or roman[0] in 'aeinouy':
if unicodedata.category(roman[0])[0] == 'L':
pieces[-1] += "'"
# Determine how to spell the current character
if group == NOT_KANA:
pieces.append(roman)
elif group == SMALL_TSU or group == NN:
pieces.append(roman)
elif group == SMALL_Y:
if prevgroup == KANA:
# Modify the previous syllable, if that makes sense. For
# example, 'ni' + 'ya' becomes 'nya'.
if not pieces[-1].endswith('i'):
pieces.append(roman)
else:
modifier = roman[1:]
modified = pieces[-1]
pieces[-1] = modified[:-1] + modifier
else:
pieces.append(roman)
elif group == SMALL:
# Don't respell small vowels _yet_. We'll handle that at the end.
# This may be a bit ambiguous, but nobody expects to see "tea"
# spelled "texi".
pieces.append(roman)
elif group == PROLONG:
if prevgroup in (KANA, SMALL_Y, SMALL):
pieces[-1] = pieces[-1][:-1] + respell(pieces[-1][-1] + '_')
else:
pieces.append(roman)
else: # this is a normal kana
if prevgroup == SMALL_TSU:
if roman[0] in 'aeiouy':
# wait, there's no consonant there; cope by respelling the
# previous kana as 't-'
pieces[-1] = 't-'
else:
# Turn the previous 't' into a copy of the first consonant
pieces[-1] = roman[0]
elif prevgroup == NN:
# Let Hepburn respell 'n' as 'm' in words such as 'shimbun'.
try_respell = respell(pieces[-1] + roman[0])
if try_respell[:-1] != pieces[-1]:
pieces[-1] = try_respell[:-1]
pieces.append(roman)
prevgroup = group
romantext = ''.join(respell(piece) for piece in pieces)
romantext = re.sub(r'[aeiou]x([aeiou])', r'\1', romantext)
return romantext
# Hepburn romanization is the most familiar to English speakers. It involves
# respelling certain parts of romanized words to better match their
# pronunciation. For example, the name for Mount Fuji is respelled from
# "huzi-san" to "fuji-san".
HEPBURN_TABLE = {
'si': 'shi',
'sy': 'sh',
'ti': 'chi',
'ty': 'ch',
'tu': 'tsu',
'hu': 'fu',
'zi': 'ji',
'di': 'ji',
'zy': 'j',
'dy': 'j',
'nm': 'mm',
'nb': 'mb',
'np': 'mp',
'a_': 'aa',
'e_': 'ee',
'i_': 'ii',
'o_': 'ou',
'u_': 'uu'
}
ROMAN_PUNCTUATION_TABLE = {
'・': '.',
'。': '.',
'、': ',',
'!': '!',
'「': '``',
'」': "''",
'?': '?',
'〜': '~'
}
# Provide externally available functions.
MECAB = MeCabWrapper()
normalize = MECAB.normalize
normalize_list = MECAB.normalize_list
tokenize = MECAB.tokenize
tokenize_list = MECAB.tokenize_list
analyze = MECAB.analyze
tag_and_stem = MECAB.tag_and_stem
is_stopword = MECAB.is_stopword
|
commonsense/metanl | metanl/mecab.py | MeCabWrapper.analyze | python | def analyze(self, text):
try:
self.process # make sure things are loaded
text = render_safe(text).replace('\n', ' ').lower()
results = []
for chunk in string_pieces(text):
self.send_input((chunk + '\n').encode('utf-8'))
while True:
out_line = self.receive_output_line().decode('utf-8')
if out_line == 'EOS\n':
break
word, info = out_line.strip('\n').split('\t')
record_parts = [word] + info.split(',')
# Pad the record out to have 10 parts if it doesn't
record_parts += [None] * (10 - len(record_parts))
record = MeCabRecord(*record_parts)
# special case for detecting nai -> n
if (record.surface == 'ん' and
record.conjugation == '不変化型'):
# rebuild the record so that record.root is 'nai'
record_parts[MeCabRecord._fields.index('root')] = 'ない'
record = MeCabRecord(*record_parts)
results.append(record)
return results
except ProcessError:
self.restart_process()
return self.analyze(text) | Runs a line of text through MeCab, and returns the results as a
list of lists ("records") that contain the MeCab analysis of each
word. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/mecab.py#L125-L160 | [
"def render_safe(text):\n '''\n Make sure the given text is safe to pass to an external process.\n '''\n return remove_control_chars(remove_unsafe_private_use(text))\n",
"def string_pieces(s, maxlen=1024):\n \"\"\"\n Takes a (unicode) string and yields pieces of it that are at most `maxlen`\n ... | class MeCabWrapper(ProcessWrapper):
"""
Handle Japanese text using the command-line version of MeCab.
(mecab-python is convenient, but its installer is too flaky to rely on.)
ja_cabocha gives more sophisticated results, but requires a large number of
additional dependencies. Using this tool for Japanese requires only
MeCab to be installed and accepting UTF-8 text.
"""
def _get_command(self):
return ['mecab']
def _get_process(self):
try:
proc = ProcessWrapper._get_process(self)
except (OSError, ProcessError):
raise MeCabError("MeCab didn't start. See README.txt for details "
"about installing MeCab and other Japanese NLP "
"tools.")
return proc
def get_record_root(self, record):
"""
Given a MeCab record, return the root word.
"""
if record.root == '*':
return record.surface
else:
return record.root
def get_record_token(self, record):
return record.surface
def is_stopword_record(self, record):
"""
Determine whether a single MeCab record represents a stopword.
This mostly determines words to strip based on their parts of speech.
If common_words is set to True (default), it will also strip common
verbs and nouns such as くる and よう. If more_stopwords is True, it
will look at the sub-part of speech to remove more categories.
"""
# preserve negations
if record.root == 'ない':
return False
return (
record.pos in STOPWORD_CATEGORIES or
record.subclass1 in STOPWORD_CATEGORIES or
record.root in STOPWORD_ROOTS
)
def get_record_pos(self, record):
"""
Given a record, get the word's part of speech.
Here we're going to return MeCab's part of speech (written in
Japanese), though if it's a stopword we prefix the part of speech
with '~'.
"""
if self.is_stopword_record(record):
return '~' + record.pos
else:
return record.pos
|
commonsense/metanl | metanl/mecab.py | MeCabWrapper.is_stopword_record | python | def is_stopword_record(self, record):
# preserve negations
if record.root == 'ない':
return False
return (
record.pos in STOPWORD_CATEGORIES or
record.subclass1 in STOPWORD_CATEGORIES or
record.root in STOPWORD_ROOTS
) | Determine whether a single MeCab record represents a stopword.
This mostly determines words to strip based on their parts of speech.
If common_words is set to True (default), it will also strip common
verbs and nouns such as くる and よう. If more_stopwords is True, it
will look at the sub-part of speech to remove more categories. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/mecab.py#L162-L178 | null | class MeCabWrapper(ProcessWrapper):
"""
Handle Japanese text using the command-line version of MeCab.
(mecab-python is convenient, but its installer is too flaky to rely on.)
ja_cabocha gives more sophisticated results, but requires a large number of
additional dependencies. Using this tool for Japanese requires only
MeCab to be installed and accepting UTF-8 text.
"""
def _get_command(self):
return ['mecab']
def _get_process(self):
try:
proc = ProcessWrapper._get_process(self)
except (OSError, ProcessError):
raise MeCabError("MeCab didn't start. See README.txt for details "
"about installing MeCab and other Japanese NLP "
"tools.")
return proc
def get_record_root(self, record):
"""
Given a MeCab record, return the root word.
"""
if record.root == '*':
return record.surface
else:
return record.root
def get_record_token(self, record):
return record.surface
def analyze(self, text):
"""
Runs a line of text through MeCab, and returns the results as a
list of lists ("records") that contain the MeCab analysis of each
word.
"""
try:
self.process # make sure things are loaded
text = render_safe(text).replace('\n', ' ').lower()
results = []
for chunk in string_pieces(text):
self.send_input((chunk + '\n').encode('utf-8'))
while True:
out_line = self.receive_output_line().decode('utf-8')
if out_line == 'EOS\n':
break
word, info = out_line.strip('\n').split('\t')
record_parts = [word] + info.split(',')
# Pad the record out to have 10 parts if it doesn't
record_parts += [None] * (10 - len(record_parts))
record = MeCabRecord(*record_parts)
# special case for detecting nai -> n
if (record.surface == 'ん' and
record.conjugation == '不変化型'):
# rebuild the record so that record.root is 'nai'
record_parts[MeCabRecord._fields.index('root')] = 'ない'
record = MeCabRecord(*record_parts)
results.append(record)
return results
except ProcessError:
self.restart_process()
return self.analyze(text)
def get_record_pos(self, record):
"""
Given a record, get the word's part of speech.
Here we're going to return MeCab's part of speech (written in
Japanese), though if it's a stopword we prefix the part of speech
with '~'.
"""
if self.is_stopword_record(record):
return '~' + record.pos
else:
return record.pos
|
commonsense/metanl | metanl/mecab.py | MeCabWrapper.get_record_pos | python | def get_record_pos(self, record):
if self.is_stopword_record(record):
return '~' + record.pos
else:
return record.pos | Given a record, get the word's part of speech.
Here we're going to return MeCab's part of speech (written in
Japanese), though if it's a stopword we prefix the part of speech
with '~'. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/mecab.py#L180-L191 | [
"def is_stopword_record(self, record):\n \"\"\"\n Determine whether a single MeCab record represents a stopword.\n\n This mostly determines words to strip based on their parts of speech.\n If common_words is set to True (default), it will also strip common\n verbs and nouns such as くる and よう. If more... | class MeCabWrapper(ProcessWrapper):
"""
Handle Japanese text using the command-line version of MeCab.
(mecab-python is convenient, but its installer is too flaky to rely on.)
ja_cabocha gives more sophisticated results, but requires a large number of
additional dependencies. Using this tool for Japanese requires only
MeCab to be installed and accepting UTF-8 text.
"""
def _get_command(self):
return ['mecab']
def _get_process(self):
try:
proc = ProcessWrapper._get_process(self)
except (OSError, ProcessError):
raise MeCabError("MeCab didn't start. See README.txt for details "
"about installing MeCab and other Japanese NLP "
"tools.")
return proc
def get_record_root(self, record):
"""
Given a MeCab record, return the root word.
"""
if record.root == '*':
return record.surface
else:
return record.root
def get_record_token(self, record):
return record.surface
def analyze(self, text):
"""
Runs a line of text through MeCab, and returns the results as a
list of lists ("records") that contain the MeCab analysis of each
word.
"""
try:
self.process # make sure things are loaded
text = render_safe(text).replace('\n', ' ').lower()
results = []
for chunk in string_pieces(text):
self.send_input((chunk + '\n').encode('utf-8'))
while True:
out_line = self.receive_output_line().decode('utf-8')
if out_line == 'EOS\n':
break
word, info = out_line.strip('\n').split('\t')
record_parts = [word] + info.split(',')
# Pad the record out to have 10 parts if it doesn't
record_parts += [None] * (10 - len(record_parts))
record = MeCabRecord(*record_parts)
# special case for detecting nai -> n
if (record.surface == 'ん' and
record.conjugation == '不変化型'):
# rebuild the record so that record.root is 'nai'
record_parts[MeCabRecord._fields.index('root')] = 'ない'
record = MeCabRecord(*record_parts)
results.append(record)
return results
except ProcessError:
self.restart_process()
return self.analyze(text)
def is_stopword_record(self, record):
"""
Determine whether a single MeCab record represents a stopword.
This mostly determines words to strip based on their parts of speech.
If common_words is set to True (default), it will also strip common
verbs and nouns such as くる and よう. If more_stopwords is True, it
will look at the sub-part of speech to remove more categories.
"""
# preserve negations
if record.root == 'ない':
return False
return (
record.pos in STOPWORD_CATEGORIES or
record.subclass1 in STOPWORD_CATEGORIES or
record.root in STOPWORD_ROOTS
)
|
commonsense/metanl | metanl/freeling.py | FreelingWrapper.analyze | python | def analyze(self, text):
try:
text = render_safe(text).strip()
if not text:
return []
chunks = text.split('\n')
results = []
for chunk_text in chunks:
if chunk_text.strip():
textbytes = (chunk_text + '\n').encode('utf-8')
self.send_input(textbytes)
out_line = ''
while True:
out_line = self.receive_output_line()
out_line = out_line.decode('utf-8')
if out_line == '\n':
break
record = out_line.strip('\n').split(' ')
results.append(record)
return results
except ProcessError:
self.restart_process()
return self.analyze(text) | Run text through the external process, and get a list of lists
("records") that contain the analysis of each word. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/freeling.py#L76-L104 | [
"def render_safe(text):\n '''\n Make sure the given text is safe to pass to an external process.\n '''\n return remove_control_chars(remove_unsafe_private_use(text))\n",
"def send_input(self, data):\n self.process.stdin.write(data)\n self.process.stdin.flush()\n",
"def receive_output_line(self... | class FreelingWrapper(ProcessWrapper):
r"""
Handle English, Spanish, Italian, Portuguese, or Welsh text by calling an
installed copy of FreeLing.
The constructor takes one argument, which is the installed filename of the
language-specific config file, such as 'en.cfg'.
>>> english.tag_and_stem("This is a test.\n\nIt has two paragraphs, and that's okay.")
[('this', 'DT', 'This'), ('be', 'VBZ', 'is'), ('a', 'DT', 'a'), ('test', 'NN', 'test'), ('.', '.', '.'), ('it', 'PRP', 'It'), ('have', 'VBZ', 'has'), ('two', 'DT', 'two'), ('paragraph', 'NNS', 'paragraphs'), (',', '.', ','), ('and', 'CC', 'and'), ('that', 'PRP', 'that'), ('be', 'VBZ', "'s"), ('okay', 'JJ', 'okay'), ('.', '.', '.')]
>>> english.tag_and_stem("this has\ntwo lines")
[('this', 'DT', 'this'), ('have', 'VBZ', 'has'), ('two', 'DT', 'two'), ('line', 'NNS', 'lines')]
"""
def __init__(self, lang):
self.lang = lang
self.configfile = pkg_resources.resource_filename(
__name__, 'data/freeling/%s.cfg' % lang)
self.splitterfile = pkg_resources.resource_filename(
__name__, 'data/freeling/generic_splitter.dat')
def _get_command(self):
"""
Get the command for running the basic FreeLing pipeline in the
specified language.
The options we choose are:
-f data/freeling/<language>.cfg
load our custom configuration for the language
--fsplit data/freeling/generic_splitter.dat
don't do any special handling of ends of sentences
"""
return ['analyze', '-f', self.configfile, '--fsplit',
self.splitterfile]
def get_record_root(self, record):
"""
Given a FreeLing record, return the root word.
"""
return record[1].lower()
def get_record_token(self, record):
"""
The token of a FreeLing record is the first item on the line.
"""
return record[0]
def get_record_pos(self, record):
"""
In English, return the third segment of the record.
In other languages, this segment contains one letter for the part of
speech, plus densely-encoded features that we really have no way to
use. Return just the part-of-speech letter.
"""
if self.lang == 'en':
return record[2]
else:
return record[2][0]
def is_stopword_record(self, record, common_words=False):
"""
Determiners are stopwords. Detect this by checking whether their POS
starts with 'D'.
"""
return (record[2][0] == 'D')
|
commonsense/metanl | metanl/token_utils.py | untokenize | python | def untokenize(words):
text = ' '.join(words)
step1 = text.replace("`` ", '"').replace(" ''", '"').replace('. . .', '...')
step2 = step1.replace(" ( ", " (").replace(" ) ", ") ")
step3 = re.sub(r' ([.,:;?!%]+)([ \'"`])', r"\1\2", step2)
step4 = re.sub(r' ([.,:;?!%]+)$', r"\1", step3)
step5 = step4.replace(" '", "'").replace(" n't", "n't").replace(
"can not", "cannot")
step6 = step5.replace(" ` ", " '")
return step6.strip() | Untokenizing a text undoes the tokenizing operation, restoring
punctuation and spaces to the places that people expect them to be.
Ideally, `untokenize(tokenize(text))` should be identical to `text`,
except for line breaks. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/token_utils.py#L28-L44 | null | # coding: utf-8
from __future__ import unicode_literals
"""
This file contains some generally useful operations you would perform to
separate and join tokens. The tools apply most to English, but should also
be able to do their job in any Western language that uses spaces.
"""
import re
import unicodedata
def tokenize(text):
"""
Split a text into tokens (words, morphemes we can separate such as
"n't", and punctuation).
"""
return list(_tokenize_gen(text))
def _tokenize_gen(text):
import nltk
for sent in nltk.sent_tokenize(text):
for word in nltk.word_tokenize(sent):
yield word
# This expression scans through a reversed string to find segments of
# camel-cased text. Comments show what these mean, forwards, in preference
# order:
CAMEL_RE = re.compile(r"""
^( [A-Z]+ # A string of all caps, such as an acronym
| [^A-Z0-9 _]+[A-Z _] # A single capital letter followed by lowercase
# letters, or lowercase letters on their own
# after a word break
| [^A-Z0-9 _]*[0-9.]+ # A number, possibly followed by lowercase
# letters
| [ _]+ # Extra word breaks (spaces or underscores)
| [^A-Z0-9]*[^A-Z0-9_ ]+ # Miscellaneous symbols, possibly with lowercase
# letters after them
)
""", re.VERBOSE)
def un_camel_case(text):
r"""
Splits apart words that are written in CamelCase.
Bugs:
- Non-ASCII characters are treated as lowercase letters, even if they are
actually capital letters.
Examples:
>>> un_camel_case('1984ZXSpectrumGames')
'1984 ZX Spectrum Games'
>>> un_camel_case('aaAa aaAaA 0aA AAAa!AAA')
'aa Aa aa Aa A 0a A AA Aa! AAA'
>>> un_camel_case('MotörHead')
'Mot\xf6r Head'
>>> un_camel_case('MSWindows3.11ForWorkgroups')
'MS Windows 3.11 For Workgroups'
This should not significantly affect text that is not camel-cased:
>>> un_camel_case('ACM_Computing_Classification_System')
'ACM Computing Classification System'
>>> un_camel_case('Anne_Blunt,_15th_Baroness_Wentworth')
'Anne Blunt, 15th Baroness Wentworth'
>>> un_camel_case('Hindi-Urdu')
'Hindi-Urdu'
"""
revtext = text[::-1]
pieces = []
while revtext:
match = CAMEL_RE.match(revtext)
if match:
pieces.append(match.group(1))
revtext = revtext[match.end():]
else:
pieces.append(revtext)
revtext = ''
revstr = ' '.join(piece.strip(' _') for piece in pieces
if piece.strip(' _'))
return revstr[::-1].replace('- ', '-')
# see http://www.fileformat.info/info/unicode/category/index.htm
BOUNDARY_CATEGORIES = {'Cc', # control characters
'Cf', # format characters
'Cn', # "other, not assigned"
'Pc', # connector punctuation
'Pd', # dash
'Pe', # close-punctuation
'Pf', # final-quote
'Pi', # initial-quote
'Po', # other punctuation
'Zl', # line separator
'Zp', # paragraph separator
'Zs', # space separator
}
def string_pieces(s, maxlen=1024):
"""
Takes a (unicode) string and yields pieces of it that are at most `maxlen`
characters, trying to break it at punctuation/whitespace. This is an
important step before using a tokenizer with a maximum buffer size.
"""
if not s:
return
i = 0
while True:
j = i + maxlen
if j >= len(s):
yield s[i:]
return
# Using "j - 1" keeps boundary characters with the left chunk
while unicodedata.category(s[j - 1]) not in BOUNDARY_CATEGORIES:
j -= 1
if j == i:
# No boundary available; oh well.
j = i + maxlen
break
yield s[i:j]
i = j
|
commonsense/metanl | metanl/token_utils.py | un_camel_case | python | def un_camel_case(text):
r"""
Splits apart words that are written in CamelCase.
Bugs:
- Non-ASCII characters are treated as lowercase letters, even if they are
actually capital letters.
Examples:
>>> un_camel_case('1984ZXSpectrumGames')
'1984 ZX Spectrum Games'
>>> un_camel_case('aaAa aaAaA 0aA AAAa!AAA')
'aa Aa aa Aa A 0a A AA Aa! AAA'
>>> un_camel_case('MotörHead')
'Mot\xf6r Head'
>>> un_camel_case('MSWindows3.11ForWorkgroups')
'MS Windows 3.11 For Workgroups'
This should not significantly affect text that is not camel-cased:
>>> un_camel_case('ACM_Computing_Classification_System')
'ACM Computing Classification System'
>>> un_camel_case('Anne_Blunt,_15th_Baroness_Wentworth')
'Anne Blunt, 15th Baroness Wentworth'
>>> un_camel_case('Hindi-Urdu')
'Hindi-Urdu'
"""
revtext = text[::-1]
pieces = []
while revtext:
match = CAMEL_RE.match(revtext)
if match:
pieces.append(match.group(1))
revtext = revtext[match.end():]
else:
pieces.append(revtext)
revtext = ''
revstr = ' '.join(piece.strip(' _') for piece in pieces
if piece.strip(' _'))
return revstr[::-1].replace('- ', '-') | r"""
Splits apart words that are written in CamelCase.
Bugs:
- Non-ASCII characters are treated as lowercase letters, even if they are
actually capital letters.
Examples:
>>> un_camel_case('1984ZXSpectrumGames')
'1984 ZX Spectrum Games'
>>> un_camel_case('aaAa aaAaA 0aA AAAa!AAA')
'aa Aa aa Aa A 0a A AA Aa! AAA'
>>> un_camel_case('MotörHead')
'Mot\xf6r Head'
>>> un_camel_case('MSWindows3.11ForWorkgroups')
'MS Windows 3.11 For Workgroups'
This should not significantly affect text that is not camel-cased:
>>> un_camel_case('ACM_Computing_Classification_System')
'ACM Computing Classification System'
>>> un_camel_case('Anne_Blunt,_15th_Baroness_Wentworth')
'Anne Blunt, 15th Baroness Wentworth'
>>> un_camel_case('Hindi-Urdu')
'Hindi-Urdu' | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/token_utils.py#L64-L110 | null | # coding: utf-8
from __future__ import unicode_literals
"""
This file contains some generally useful operations you would perform to
separate and join tokens. The tools apply most to English, but should also
be able to do their job in any Western language that uses spaces.
"""
import re
import unicodedata
def tokenize(text):
"""
Split a text into tokens (words, morphemes we can separate such as
"n't", and punctuation).
"""
return list(_tokenize_gen(text))
def _tokenize_gen(text):
import nltk
for sent in nltk.sent_tokenize(text):
for word in nltk.word_tokenize(sent):
yield word
def untokenize(words):
"""
Untokenizing a text undoes the tokenizing operation, restoring
punctuation and spaces to the places that people expect them to be.
Ideally, `untokenize(tokenize(text))` should be identical to `text`,
except for line breaks.
"""
text = ' '.join(words)
step1 = text.replace("`` ", '"').replace(" ''", '"').replace('. . .', '...')
step2 = step1.replace(" ( ", " (").replace(" ) ", ") ")
step3 = re.sub(r' ([.,:;?!%]+)([ \'"`])', r"\1\2", step2)
step4 = re.sub(r' ([.,:;?!%]+)$', r"\1", step3)
step5 = step4.replace(" '", "'").replace(" n't", "n't").replace(
"can not", "cannot")
step6 = step5.replace(" ` ", " '")
return step6.strip()
# This expression scans through a reversed string to find segments of
# camel-cased text. Comments show what these mean, forwards, in preference
# order:
CAMEL_RE = re.compile(r"""
^( [A-Z]+ # A string of all caps, such as an acronym
| [^A-Z0-9 _]+[A-Z _] # A single capital letter followed by lowercase
# letters, or lowercase letters on their own
# after a word break
| [^A-Z0-9 _]*[0-9.]+ # A number, possibly followed by lowercase
# letters
| [ _]+ # Extra word breaks (spaces or underscores)
| [^A-Z0-9]*[^A-Z0-9_ ]+ # Miscellaneous symbols, possibly with lowercase
# letters after them
)
""", re.VERBOSE)
# see http://www.fileformat.info/info/unicode/category/index.htm
BOUNDARY_CATEGORIES = {'Cc', # control characters
'Cf', # format characters
'Cn', # "other, not assigned"
'Pc', # connector punctuation
'Pd', # dash
'Pe', # close-punctuation
'Pf', # final-quote
'Pi', # initial-quote
'Po', # other punctuation
'Zl', # line separator
'Zp', # paragraph separator
'Zs', # space separator
}
def string_pieces(s, maxlen=1024):
"""
Takes a (unicode) string and yields pieces of it that are at most `maxlen`
characters, trying to break it at punctuation/whitespace. This is an
important step before using a tokenizer with a maximum buffer size.
"""
if not s:
return
i = 0
while True:
j = i + maxlen
if j >= len(s):
yield s[i:]
return
# Using "j - 1" keeps boundary characters with the left chunk
while unicodedata.category(s[j - 1]) not in BOUNDARY_CATEGORIES:
j -= 1
if j == i:
# No boundary available; oh well.
j = i + maxlen
break
yield s[i:j]
i = j
|
commonsense/metanl | metanl/token_utils.py | string_pieces | python | def string_pieces(s, maxlen=1024):
if not s:
return
i = 0
while True:
j = i + maxlen
if j >= len(s):
yield s[i:]
return
# Using "j - 1" keeps boundary characters with the left chunk
while unicodedata.category(s[j - 1]) not in BOUNDARY_CATEGORIES:
j -= 1
if j == i:
# No boundary available; oh well.
j = i + maxlen
break
yield s[i:j]
i = j | Takes a (unicode) string and yields pieces of it that are at most `maxlen`
characters, trying to break it at punctuation/whitespace. This is an
important step before using a tokenizer with a maximum buffer size. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/token_utils.py#L128-L150 | null | # coding: utf-8
from __future__ import unicode_literals
"""
This file contains some generally useful operations you would perform to
separate and join tokens. The tools apply most to English, but should also
be able to do their job in any Western language that uses spaces.
"""
import re
import unicodedata
def tokenize(text):
"""
Split a text into tokens (words, morphemes we can separate such as
"n't", and punctuation).
"""
return list(_tokenize_gen(text))
def _tokenize_gen(text):
import nltk
for sent in nltk.sent_tokenize(text):
for word in nltk.word_tokenize(sent):
yield word
def untokenize(words):
"""
Untokenizing a text undoes the tokenizing operation, restoring
punctuation and spaces to the places that people expect them to be.
Ideally, `untokenize(tokenize(text))` should be identical to `text`,
except for line breaks.
"""
text = ' '.join(words)
step1 = text.replace("`` ", '"').replace(" ''", '"').replace('. . .', '...')
step2 = step1.replace(" ( ", " (").replace(" ) ", ") ")
step3 = re.sub(r' ([.,:;?!%]+)([ \'"`])', r"\1\2", step2)
step4 = re.sub(r' ([.,:;?!%]+)$', r"\1", step3)
step5 = step4.replace(" '", "'").replace(" n't", "n't").replace(
"can not", "cannot")
step6 = step5.replace(" ` ", " '")
return step6.strip()
# This expression scans through a reversed string to find segments of
# camel-cased text. Comments show what these mean, forwards, in preference
# order:
CAMEL_RE = re.compile(r"""
^( [A-Z]+ # A string of all caps, such as an acronym
| [^A-Z0-9 _]+[A-Z _] # A single capital letter followed by lowercase
# letters, or lowercase letters on their own
# after a word break
| [^A-Z0-9 _]*[0-9.]+ # A number, possibly followed by lowercase
# letters
| [ _]+ # Extra word breaks (spaces or underscores)
| [^A-Z0-9]*[^A-Z0-9_ ]+ # Miscellaneous symbols, possibly with lowercase
# letters after them
)
""", re.VERBOSE)
def un_camel_case(text):
r"""
Splits apart words that are written in CamelCase.
Bugs:
- Non-ASCII characters are treated as lowercase letters, even if they are
actually capital letters.
Examples:
>>> un_camel_case('1984ZXSpectrumGames')
'1984 ZX Spectrum Games'
>>> un_camel_case('aaAa aaAaA 0aA AAAa!AAA')
'aa Aa aa Aa A 0a A AA Aa! AAA'
>>> un_camel_case('MotörHead')
'Mot\xf6r Head'
>>> un_camel_case('MSWindows3.11ForWorkgroups')
'MS Windows 3.11 For Workgroups'
This should not significantly affect text that is not camel-cased:
>>> un_camel_case('ACM_Computing_Classification_System')
'ACM Computing Classification System'
>>> un_camel_case('Anne_Blunt,_15th_Baroness_Wentworth')
'Anne Blunt, 15th Baroness Wentworth'
>>> un_camel_case('Hindi-Urdu')
'Hindi-Urdu'
"""
revtext = text[::-1]
pieces = []
while revtext:
match = CAMEL_RE.match(revtext)
if match:
pieces.append(match.group(1))
revtext = revtext[match.end():]
else:
pieces.append(revtext)
revtext = ''
revstr = ' '.join(piece.strip(' _') for piece in pieces
if piece.strip(' _'))
return revstr[::-1].replace('- ', '-')
# see http://www.fileformat.info/info/unicode/category/index.htm
BOUNDARY_CATEGORIES = {'Cc', # control characters
'Cf', # format characters
'Cn', # "other, not assigned"
'Pc', # connector punctuation
'Pd', # dash
'Pe', # close-punctuation
'Pf', # final-quote
'Pi', # initial-quote
'Po', # other punctuation
'Zl', # line separator
'Zp', # paragraph separator
'Zs', # space separator
}
|
commonsense/metanl | metanl/nltk_morphy.py | _word_badness | python | def _word_badness(word):
if word.endswith('e'):
return len(word) - 2
elif word.endswith('ess'):
return len(word) - 10
elif word.endswith('ss'):
return len(word) - 4
else:
return len(word) | Assign a heuristic to possible outputs from Morphy. Minimizing this
heuristic avoids incorrect stems. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/nltk_morphy.py#L88-L100 | null | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import nltk
from nltk.corpus import wordnet
from metanl.token_utils import untokenize, tokenize
import re
try:
morphy = wordnet._morphy
except LookupError:
nltk.download('wordnet')
morphy = wordnet._morphy
STOPWORDS = ['the', 'a', 'an']
EXCEPTIONS = {
# Avoid obsolete and obscure roots, the way lexicographers don't.
'wrought': 'wrought', # not 'work'
'media': 'media', # not 'medium'
'installed': 'install', # not 'instal'
'installing': 'install',# not 'instal'
'synapses': 'synapse', # not 'synapsis'
'soles': 'sole', # not 'sol'
'pubes': 'pube', # not 'pubis'
'dui': 'dui', # not 'duo'
'taxis': 'taxi', # not 'taxis'
# Work around errors that Morphy makes.
'alas': 'alas',
'corps': 'corps',
'cos': 'cos',
'enured': 'enure',
'fiver': 'fiver',
'hinder': 'hinder',
'lobed': 'lobe',
'offerer': 'offerer',
'outer': 'outer',
'sang': 'sing',
'singing': 'sing',
'solderer': 'solderer',
'tined': 'tine',
'twiner': 'twiner',
'us': 'us',
# Stem common nouns whose plurals are apparently ambiguous
'teeth': 'tooth',
'things': 'thing',
'people': 'person',
# Tokenization artifacts
'wo': 'will',
'ca': 'can',
"n't": 'not',
}
AMBIGUOUS_EXCEPTIONS = {
# Avoid nouns that shadow more common verbs.
'am': 'be',
'as': 'as',
'are': 'be',
'ate': 'eat',
'bent': 'bend',
'drove': 'drive',
'fell': 'fall',
'felt': 'feel',
'found': 'find',
'has': 'have',
'lit': 'light',
'lost': 'lose',
'sat': 'sit',
'saw': 'see',
'sent': 'send',
'shook': 'shake',
'shot': 'shoot',
'slain': 'slay',
'spoke': 'speak',
'stole': 'steal',
'sung': 'sing',
'thought': 'think',
'tore': 'tear',
'was': 'be',
'won': 'win',
'feed': 'feed',
}
def _morphy_best(word, pos=None):
"""
Get the most likely stem for a word using Morphy, once the input has been
pre-processed by morphy_stem().
"""
results = []
if pos is None:
pos = 'nvar'
for pos_item in pos:
results.extend(morphy(word, pos_item))
if not results:
return None
results.sort(key=lambda x: _word_badness(x))
return results[0]
def morphy_stem(word, pos=None):
"""
Get the most likely stem for a word. If a part of speech is supplied,
the stem will be more accurate.
Valid parts of speech are:
- 'n' or 'NN' for nouns
- 'v' or 'VB' for verbs
- 'a' or 'JJ' for adjectives
- 'r' or 'RB' for adverbs
Any other part of speech will be treated as unknown.
"""
word = word.lower()
if pos is not None:
if pos.startswith('NN'):
pos = 'n'
elif pos.startswith('VB'):
pos = 'v'
elif pos.startswith('JJ'):
pos = 'a'
elif pos.startswith('RB'):
pos = 'r'
if pos is None and word.endswith('ing') or word.endswith('ed'):
pos = 'v'
if pos is not None and pos not in 'nvar':
pos = None
if word in EXCEPTIONS:
return EXCEPTIONS[word]
if pos is None:
if word in AMBIGUOUS_EXCEPTIONS:
return AMBIGUOUS_EXCEPTIONS[word]
return _morphy_best(word, pos) or word
def tag_and_stem(text):
"""
Returns a list of (stem, tag, token) triples:
- stem: the word's uninflected form
- tag: the word's part of speech
- token: the original word, so we can reconstruct it later
"""
tokens = tokenize(text)
tagged = nltk.pos_tag(tokens)
out = []
for token, tag in tagged:
stem = morphy_stem(token, tag)
out.append((stem, tag, token))
return out
def good_lemma(lemma):
return lemma and lemma not in STOPWORDS and lemma[0].isalnum()
def normalize_list(text):
"""
Get a list of word stems that appear in the text. Stopwords and an initial
'to' will be stripped, unless this leaves nothing in the stem.
>>> normalize_list('the dog')
['dog']
>>> normalize_list('big dogs')
['big', 'dog']
>>> normalize_list('the')
['the']
"""
pieces = [morphy_stem(word) for word in tokenize(text)]
pieces = [piece for piece in pieces if good_lemma(piece)]
if not pieces:
return [text]
if pieces[0] == 'to':
pieces = pieces[1:]
return pieces
def normalize(text):
"""
Get a string made from the non-stopword word stems in the text. See
normalize_list().
"""
return untokenize(normalize_list(text))
def normalize_topic(topic):
"""
Get a canonical representation of a Wikipedia topic, which may include
a disambiguation string in parentheses.
Returns (name, disambig), where "name" is the normalized topic name,
and "disambig" is a string corresponding to the disambiguation text or
None.
"""
# find titles of the form Foo (bar)
topic = topic.replace('_', ' ')
match = re.match(r'([^(]+) \(([^)]+)\)', topic)
if not match:
return normalize(topic), None
else:
return normalize(match.group(1)), 'n/' + match.group(2).strip(' _')
def word_frequency(word, default_freq=0):
raise NotImplementedError("Word frequency is now in the wordfreq package.")
def get_wordlist():
raise NotImplementedError("Wordlists are now in the wordfreq package.")
|
commonsense/metanl | metanl/nltk_morphy.py | _morphy_best | python | def _morphy_best(word, pos=None):
results = []
if pos is None:
pos = 'nvar'
for pos_item in pos:
results.extend(morphy(word, pos_item))
if not results:
return None
results.sort(key=lambda x: _word_badness(x))
return results[0] | Get the most likely stem for a word using Morphy, once the input has been
pre-processed by morphy_stem(). | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/nltk_morphy.py#L103-L116 | null | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import nltk
from nltk.corpus import wordnet
from metanl.token_utils import untokenize, tokenize
import re
try:
morphy = wordnet._morphy
except LookupError:
nltk.download('wordnet')
morphy = wordnet._morphy
STOPWORDS = ['the', 'a', 'an']
EXCEPTIONS = {
# Avoid obsolete and obscure roots, the way lexicographers don't.
'wrought': 'wrought', # not 'work'
'media': 'media', # not 'medium'
'installed': 'install', # not 'instal'
'installing': 'install',# not 'instal'
'synapses': 'synapse', # not 'synapsis'
'soles': 'sole', # not 'sol'
'pubes': 'pube', # not 'pubis'
'dui': 'dui', # not 'duo'
'taxis': 'taxi', # not 'taxis'
# Work around errors that Morphy makes.
'alas': 'alas',
'corps': 'corps',
'cos': 'cos',
'enured': 'enure',
'fiver': 'fiver',
'hinder': 'hinder',
'lobed': 'lobe',
'offerer': 'offerer',
'outer': 'outer',
'sang': 'sing',
'singing': 'sing',
'solderer': 'solderer',
'tined': 'tine',
'twiner': 'twiner',
'us': 'us',
# Stem common nouns whose plurals are apparently ambiguous
'teeth': 'tooth',
'things': 'thing',
'people': 'person',
# Tokenization artifacts
'wo': 'will',
'ca': 'can',
"n't": 'not',
}
AMBIGUOUS_EXCEPTIONS = {
# Avoid nouns that shadow more common verbs.
'am': 'be',
'as': 'as',
'are': 'be',
'ate': 'eat',
'bent': 'bend',
'drove': 'drive',
'fell': 'fall',
'felt': 'feel',
'found': 'find',
'has': 'have',
'lit': 'light',
'lost': 'lose',
'sat': 'sit',
'saw': 'see',
'sent': 'send',
'shook': 'shake',
'shot': 'shoot',
'slain': 'slay',
'spoke': 'speak',
'stole': 'steal',
'sung': 'sing',
'thought': 'think',
'tore': 'tear',
'was': 'be',
'won': 'win',
'feed': 'feed',
}
def _word_badness(word):
"""
Assign a heuristic to possible outputs from Morphy. Minimizing this
heuristic avoids incorrect stems.
"""
if word.endswith('e'):
return len(word) - 2
elif word.endswith('ess'):
return len(word) - 10
elif word.endswith('ss'):
return len(word) - 4
else:
return len(word)
def morphy_stem(word, pos=None):
"""
Get the most likely stem for a word. If a part of speech is supplied,
the stem will be more accurate.
Valid parts of speech are:
- 'n' or 'NN' for nouns
- 'v' or 'VB' for verbs
- 'a' or 'JJ' for adjectives
- 'r' or 'RB' for adverbs
Any other part of speech will be treated as unknown.
"""
word = word.lower()
if pos is not None:
if pos.startswith('NN'):
pos = 'n'
elif pos.startswith('VB'):
pos = 'v'
elif pos.startswith('JJ'):
pos = 'a'
elif pos.startswith('RB'):
pos = 'r'
if pos is None and word.endswith('ing') or word.endswith('ed'):
pos = 'v'
if pos is not None and pos not in 'nvar':
pos = None
if word in EXCEPTIONS:
return EXCEPTIONS[word]
if pos is None:
if word in AMBIGUOUS_EXCEPTIONS:
return AMBIGUOUS_EXCEPTIONS[word]
return _morphy_best(word, pos) or word
def tag_and_stem(text):
"""
Returns a list of (stem, tag, token) triples:
- stem: the word's uninflected form
- tag: the word's part of speech
- token: the original word, so we can reconstruct it later
"""
tokens = tokenize(text)
tagged = nltk.pos_tag(tokens)
out = []
for token, tag in tagged:
stem = morphy_stem(token, tag)
out.append((stem, tag, token))
return out
def good_lemma(lemma):
return lemma and lemma not in STOPWORDS and lemma[0].isalnum()
def normalize_list(text):
"""
Get a list of word stems that appear in the text. Stopwords and an initial
'to' will be stripped, unless this leaves nothing in the stem.
>>> normalize_list('the dog')
['dog']
>>> normalize_list('big dogs')
['big', 'dog']
>>> normalize_list('the')
['the']
"""
pieces = [morphy_stem(word) for word in tokenize(text)]
pieces = [piece for piece in pieces if good_lemma(piece)]
if not pieces:
return [text]
if pieces[0] == 'to':
pieces = pieces[1:]
return pieces
def normalize(text):
"""
Get a string made from the non-stopword word stems in the text. See
normalize_list().
"""
return untokenize(normalize_list(text))
def normalize_topic(topic):
"""
Get a canonical representation of a Wikipedia topic, which may include
a disambiguation string in parentheses.
Returns (name, disambig), where "name" is the normalized topic name,
and "disambig" is a string corresponding to the disambiguation text or
None.
"""
# find titles of the form Foo (bar)
topic = topic.replace('_', ' ')
match = re.match(r'([^(]+) \(([^)]+)\)', topic)
if not match:
return normalize(topic), None
else:
return normalize(match.group(1)), 'n/' + match.group(2).strip(' _')
def word_frequency(word, default_freq=0):
raise NotImplementedError("Word frequency is now in the wordfreq package.")
def get_wordlist():
raise NotImplementedError("Wordlists are now in the wordfreq package.")
|
commonsense/metanl | metanl/nltk_morphy.py | morphy_stem | python | def morphy_stem(word, pos=None):
word = word.lower()
if pos is not None:
if pos.startswith('NN'):
pos = 'n'
elif pos.startswith('VB'):
pos = 'v'
elif pos.startswith('JJ'):
pos = 'a'
elif pos.startswith('RB'):
pos = 'r'
if pos is None and word.endswith('ing') or word.endswith('ed'):
pos = 'v'
if pos is not None and pos not in 'nvar':
pos = None
if word in EXCEPTIONS:
return EXCEPTIONS[word]
if pos is None:
if word in AMBIGUOUS_EXCEPTIONS:
return AMBIGUOUS_EXCEPTIONS[word]
return _morphy_best(word, pos) or word | Get the most likely stem for a word. If a part of speech is supplied,
the stem will be more accurate.
Valid parts of speech are:
- 'n' or 'NN' for nouns
- 'v' or 'VB' for verbs
- 'a' or 'JJ' for adjectives
- 'r' or 'RB' for adverbs
Any other part of speech will be treated as unknown. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/nltk_morphy.py#L119-L152 | [
"def _morphy_best(word, pos=None):\n \"\"\"\n Get the most likely stem for a word using Morphy, once the input has been\n pre-processed by morphy_stem().\n \"\"\"\n results = []\n if pos is None:\n pos = 'nvar'\n for pos_item in pos:\n results.extend(morphy(word, pos_item))\n i... | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import nltk
from nltk.corpus import wordnet
from metanl.token_utils import untokenize, tokenize
import re
try:
morphy = wordnet._morphy
except LookupError:
nltk.download('wordnet')
morphy = wordnet._morphy
STOPWORDS = ['the', 'a', 'an']
EXCEPTIONS = {
# Avoid obsolete and obscure roots, the way lexicographers don't.
'wrought': 'wrought', # not 'work'
'media': 'media', # not 'medium'
'installed': 'install', # not 'instal'
'installing': 'install',# not 'instal'
'synapses': 'synapse', # not 'synapsis'
'soles': 'sole', # not 'sol'
'pubes': 'pube', # not 'pubis'
'dui': 'dui', # not 'duo'
'taxis': 'taxi', # not 'taxis'
# Work around errors that Morphy makes.
'alas': 'alas',
'corps': 'corps',
'cos': 'cos',
'enured': 'enure',
'fiver': 'fiver',
'hinder': 'hinder',
'lobed': 'lobe',
'offerer': 'offerer',
'outer': 'outer',
'sang': 'sing',
'singing': 'sing',
'solderer': 'solderer',
'tined': 'tine',
'twiner': 'twiner',
'us': 'us',
# Stem common nouns whose plurals are apparently ambiguous
'teeth': 'tooth',
'things': 'thing',
'people': 'person',
# Tokenization artifacts
'wo': 'will',
'ca': 'can',
"n't": 'not',
}
AMBIGUOUS_EXCEPTIONS = {
# Avoid nouns that shadow more common verbs.
'am': 'be',
'as': 'as',
'are': 'be',
'ate': 'eat',
'bent': 'bend',
'drove': 'drive',
'fell': 'fall',
'felt': 'feel',
'found': 'find',
'has': 'have',
'lit': 'light',
'lost': 'lose',
'sat': 'sit',
'saw': 'see',
'sent': 'send',
'shook': 'shake',
'shot': 'shoot',
'slain': 'slay',
'spoke': 'speak',
'stole': 'steal',
'sung': 'sing',
'thought': 'think',
'tore': 'tear',
'was': 'be',
'won': 'win',
'feed': 'feed',
}
def _word_badness(word):
"""
Assign a heuristic to possible outputs from Morphy. Minimizing this
heuristic avoids incorrect stems.
"""
if word.endswith('e'):
return len(word) - 2
elif word.endswith('ess'):
return len(word) - 10
elif word.endswith('ss'):
return len(word) - 4
else:
return len(word)
def _morphy_best(word, pos=None):
    """
    Return the single most plausible stem that Morphy offers for *word*,
    or None if Morphy has no suggestions.

    If *pos* is None, all of the noun/verb/adjective/adverb codes
    ('nvar') are tried. Candidates are ranked with _word_badness and the
    lowest-scoring one wins (ties resolved by Morphy's original order).
    """
    pos_codes = pos if pos is not None else 'nvar'
    candidates = []
    for code in pos_codes:
        candidates.extend(morphy(word, code))
    if not candidates:
        return None
    # min() returns the first candidate with the minimal score, matching
    # the stable sort + [0] of the previous implementation.
    return min(candidates, key=_word_badness)
def tag_and_stem(text):
    """
    Tokenize and POS-tag *text*, returning one (stem, tag, token) triple
    per token:

    - stem: the token's uninflected form
    - tag: the token's part-of-speech tag
    - token: the original token, so the text can be reconstructed later
    """
    tagged = nltk.pos_tag(tokenize(text))
    return [(morphy_stem(token, tag), tag, token) for token, tag in tagged]
def good_lemma(lemma):
    """True for a non-empty lemma that is not a stopword and starts with
    an alphanumeric character."""
    if not lemma:
        return lemma
    return lemma not in STOPWORDS and lemma[0].isalnum()
def normalize_list(text):
    """
    Get a list of word stems that appear in the text. Stopwords and an initial
    'to' will be stripped, unless this leaves nothing in the stem.

    >>> normalize_list('the dog')
    ['dog']
    >>> normalize_list('big dogs')
    ['big', 'dog']
    >>> normalize_list('the')
    ['the']
    """
    stems = [morphy_stem(tok) for tok in tokenize(text)]
    stems = [s for s in stems if good_lemma(s)]
    # If nothing survived filtering, fall back to the raw text.
    if not stems:
        return [text]
    return stems[1:] if stems[0] == 'to' else stems
def normalize(text):
    """
    Get a string made from the non-stopword word stems in the text. See
    normalize_list().
    """
    stems = normalize_list(text)
    return untokenize(stems)
def normalize_topic(topic):
    """
    Get a canonical representation of a Wikipedia topic, which may
    include a disambiguation string in parentheses.

    Returns (name, disambig): the normalized topic name, plus either a
    string built from the parenthesized disambiguation text or None.
    """
    # Titles may look like "Foo (bar)"; split off the parenthesized part.
    topic = topic.replace('_', ' ')
    match = re.match(r'([^(]+) \(([^)]+)\)', topic)
    if match:
        name = normalize(match.group(1))
        disambig = 'n/' + match.group(2).strip(' _')
        return name, disambig
    return normalize(topic), None
def word_frequency(word, default_freq=0):
    """Removed: word frequency lookups now live in the wordfreq package."""
    raise NotImplementedError("Word frequency is now in the wordfreq package.")
def get_wordlist():
    """Removed: wordlists now live in the wordfreq package."""
    raise NotImplementedError("Wordlists are now in the wordfreq package.")
|
commonsense/metanl | metanl/nltk_morphy.py | tag_and_stem | python | def tag_and_stem(text):
tokens = tokenize(text)
tagged = nltk.pos_tag(tokens)
out = []
for token, tag in tagged:
stem = morphy_stem(token, tag)
out.append((stem, tag, token))
return out | Returns a list of (stem, tag, token) triples:
- stem: the word's uninflected form
- tag: the word's part of speech
- token: the original word, so we can reconstruct it later | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/nltk_morphy.py#L155-L169 | [
"def tokenize(text):\n \"\"\"\n Split a text into tokens (words, morphemes we can separate such as\n \"n't\", and punctuation).\n \"\"\"\n return list(_tokenize_gen(text))\n",
"def morphy_stem(word, pos=None):\n \"\"\"\n Get the most likely stem for a word. If a part of speech is supplied,\n ... | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import nltk
from nltk.corpus import wordnet
from metanl.token_utils import untokenize, tokenize
import re
try:
morphy = wordnet._morphy
except LookupError:
nltk.download('wordnet')
morphy = wordnet._morphy
STOPWORDS = ['the', 'a', 'an']
EXCEPTIONS = {
# Avoid obsolete and obscure roots, the way lexicographers don't.
'wrought': 'wrought', # not 'work'
'media': 'media', # not 'medium'
'installed': 'install', # not 'instal'
'installing': 'install',# not 'instal'
'synapses': 'synapse', # not 'synapsis'
'soles': 'sole', # not 'sol'
'pubes': 'pube', # not 'pubis'
'dui': 'dui', # not 'duo'
'taxis': 'taxi', # not 'taxis'
# Work around errors that Morphy makes.
'alas': 'alas',
'corps': 'corps',
'cos': 'cos',
'enured': 'enure',
'fiver': 'fiver',
'hinder': 'hinder',
'lobed': 'lobe',
'offerer': 'offerer',
'outer': 'outer',
'sang': 'sing',
'singing': 'sing',
'solderer': 'solderer',
'tined': 'tine',
'twiner': 'twiner',
'us': 'us',
# Stem common nouns whose plurals are apparently ambiguous
'teeth': 'tooth',
'things': 'thing',
'people': 'person',
# Tokenization artifacts
'wo': 'will',
'ca': 'can',
"n't": 'not',
}
AMBIGUOUS_EXCEPTIONS = {
# Avoid nouns that shadow more common verbs.
'am': 'be',
'as': 'as',
'are': 'be',
'ate': 'eat',
'bent': 'bend',
'drove': 'drive',
'fell': 'fall',
'felt': 'feel',
'found': 'find',
'has': 'have',
'lit': 'light',
'lost': 'lose',
'sat': 'sit',
'saw': 'see',
'sent': 'send',
'shook': 'shake',
'shot': 'shoot',
'slain': 'slay',
'spoke': 'speak',
'stole': 'steal',
'sung': 'sing',
'thought': 'think',
'tore': 'tear',
'was': 'be',
'won': 'win',
'feed': 'feed',
}
def _word_badness(word):
"""
Assign a heuristic to possible outputs from Morphy. Minimizing this
heuristic avoids incorrect stems.
"""
if word.endswith('e'):
return len(word) - 2
elif word.endswith('ess'):
return len(word) - 10
elif word.endswith('ss'):
return len(word) - 4
else:
return len(word)
def _morphy_best(word, pos=None):
"""
Get the most likely stem for a word using Morphy, once the input has been
pre-processed by morphy_stem().
"""
results = []
if pos is None:
pos = 'nvar'
for pos_item in pos:
results.extend(morphy(word, pos_item))
if not results:
return None
results.sort(key=lambda x: _word_badness(x))
return results[0]
def morphy_stem(word, pos=None):
    """
    Get the most likely stem for a word. If a part of speech is supplied,
    the stem will be more accurate.

    Valid parts of speech are:
    - 'n' or 'NN' for nouns
    - 'v' or 'VB' for verbs
    - 'a' or 'JJ' for adjectives
    - 'r' or 'RB' for adverbs

    Any other part of speech will be treated as unknown.
    """
    word = word.lower()
    # Map Penn Treebank tags down to WordNet's single-letter POS codes.
    if pos is not None:
        if pos.startswith('NN'):
            pos = 'n'
        elif pos.startswith('VB'):
            pos = 'v'
        elif pos.startswith('JJ'):
            pos = 'a'
        elif pos.startswith('RB'):
            pos = 'r'
    # BUG FIX: the original condition was
    #   `pos is None and word.endswith('ing') or word.endswith('ed')`
    # which, because `and` binds tighter than `or`, forced pos='v' for
    # *any* word ending in '-ed', even when the caller supplied a POS.
    # Only guess 'v' from the suffix when no POS was given.
    if pos is None and (word.endswith('ing') or word.endswith('ed')):
        pos = 'v'
    if pos is not None and pos not in 'nvar':
        pos = None
    if word in EXCEPTIONS:
        return EXCEPTIONS[word]
    if pos is None:
        if word in AMBIGUOUS_EXCEPTIONS:
            return AMBIGUOUS_EXCEPTIONS[word]
    return _morphy_best(word, pos) or word
def good_lemma(lemma):
return lemma and lemma not in STOPWORDS and lemma[0].isalnum()
def normalize_list(text):
"""
Get a list of word stems that appear in the text. Stopwords and an initial
'to' will be stripped, unless this leaves nothing in the stem.
>>> normalize_list('the dog')
['dog']
>>> normalize_list('big dogs')
['big', 'dog']
>>> normalize_list('the')
['the']
"""
pieces = [morphy_stem(word) for word in tokenize(text)]
pieces = [piece for piece in pieces if good_lemma(piece)]
if not pieces:
return [text]
if pieces[0] == 'to':
pieces = pieces[1:]
return pieces
def normalize(text):
"""
Get a string made from the non-stopword word stems in the text. See
normalize_list().
"""
return untokenize(normalize_list(text))
def normalize_topic(topic):
"""
Get a canonical representation of a Wikipedia topic, which may include
a disambiguation string in parentheses.
Returns (name, disambig), where "name" is the normalized topic name,
and "disambig" is a string corresponding to the disambiguation text or
None.
"""
# find titles of the form Foo (bar)
topic = topic.replace('_', ' ')
match = re.match(r'([^(]+) \(([^)]+)\)', topic)
if not match:
return normalize(topic), None
else:
return normalize(match.group(1)), 'n/' + match.group(2).strip(' _')
def word_frequency(word, default_freq=0):
raise NotImplementedError("Word frequency is now in the wordfreq package.")
def get_wordlist():
raise NotImplementedError("Wordlists are now in the wordfreq package.")
|
commonsense/metanl | metanl/nltk_morphy.py | normalize_list | python | def normalize_list(text):
pieces = [morphy_stem(word) for word in tokenize(text)]
pieces = [piece for piece in pieces if good_lemma(piece)]
if not pieces:
return [text]
if pieces[0] == 'to':
pieces = pieces[1:]
return pieces | Get a list of word stems that appear in the text. Stopwords and an initial
'to' will be stripped, unless this leaves nothing in the stem.
>>> normalize_list('the dog')
['dog']
>>> normalize_list('big dogs')
['big', 'dog']
>>> normalize_list('the')
['the'] | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/nltk_morphy.py#L176-L194 | [
"def tokenize(text):\n \"\"\"\n Split a text into tokens (words, morphemes we can separate such as\n \"n't\", and punctuation).\n \"\"\"\n return list(_tokenize_gen(text))\n"
] | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import nltk
from nltk.corpus import wordnet
from metanl.token_utils import untokenize, tokenize
import re
try:
morphy = wordnet._morphy
except LookupError:
nltk.download('wordnet')
morphy = wordnet._morphy
STOPWORDS = ['the', 'a', 'an']
EXCEPTIONS = {
# Avoid obsolete and obscure roots, the way lexicographers don't.
'wrought': 'wrought', # not 'work'
'media': 'media', # not 'medium'
'installed': 'install', # not 'instal'
'installing': 'install',# not 'instal'
'synapses': 'synapse', # not 'synapsis'
'soles': 'sole', # not 'sol'
'pubes': 'pube', # not 'pubis'
'dui': 'dui', # not 'duo'
'taxis': 'taxi', # not 'taxis'
# Work around errors that Morphy makes.
'alas': 'alas',
'corps': 'corps',
'cos': 'cos',
'enured': 'enure',
'fiver': 'fiver',
'hinder': 'hinder',
'lobed': 'lobe',
'offerer': 'offerer',
'outer': 'outer',
'sang': 'sing',
'singing': 'sing',
'solderer': 'solderer',
'tined': 'tine',
'twiner': 'twiner',
'us': 'us',
# Stem common nouns whose plurals are apparently ambiguous
'teeth': 'tooth',
'things': 'thing',
'people': 'person',
# Tokenization artifacts
'wo': 'will',
'ca': 'can',
"n't": 'not',
}
AMBIGUOUS_EXCEPTIONS = {
# Avoid nouns that shadow more common verbs.
'am': 'be',
'as': 'as',
'are': 'be',
'ate': 'eat',
'bent': 'bend',
'drove': 'drive',
'fell': 'fall',
'felt': 'feel',
'found': 'find',
'has': 'have',
'lit': 'light',
'lost': 'lose',
'sat': 'sit',
'saw': 'see',
'sent': 'send',
'shook': 'shake',
'shot': 'shoot',
'slain': 'slay',
'spoke': 'speak',
'stole': 'steal',
'sung': 'sing',
'thought': 'think',
'tore': 'tear',
'was': 'be',
'won': 'win',
'feed': 'feed',
}
def _word_badness(word):
"""
Assign a heuristic to possible outputs from Morphy. Minimizing this
heuristic avoids incorrect stems.
"""
if word.endswith('e'):
return len(word) - 2
elif word.endswith('ess'):
return len(word) - 10
elif word.endswith('ss'):
return len(word) - 4
else:
return len(word)
def _morphy_best(word, pos=None):
"""
Get the most likely stem for a word using Morphy, once the input has been
pre-processed by morphy_stem().
"""
results = []
if pos is None:
pos = 'nvar'
for pos_item in pos:
results.extend(morphy(word, pos_item))
if not results:
return None
results.sort(key=lambda x: _word_badness(x))
return results[0]
def morphy_stem(word, pos=None):
    """
    Get the most likely stem for a word. If a part of speech is supplied,
    the stem will be more accurate.

    Valid parts of speech are:
    - 'n' or 'NN' for nouns
    - 'v' or 'VB' for verbs
    - 'a' or 'JJ' for adjectives
    - 'r' or 'RB' for adverbs

    Any other part of speech will be treated as unknown.
    """
    word = word.lower()
    # Map Penn Treebank tags down to WordNet's single-letter POS codes.
    if pos is not None:
        if pos.startswith('NN'):
            pos = 'n'
        elif pos.startswith('VB'):
            pos = 'v'
        elif pos.startswith('JJ'):
            pos = 'a'
        elif pos.startswith('RB'):
            pos = 'r'
    # BUG FIX: the original condition was
    #   `pos is None and word.endswith('ing') or word.endswith('ed')`
    # which, because `and` binds tighter than `or`, forced pos='v' for
    # *any* word ending in '-ed', even when the caller supplied a POS.
    # Only guess 'v' from the suffix when no POS was given.
    if pos is None and (word.endswith('ing') or word.endswith('ed')):
        pos = 'v'
    if pos is not None and pos not in 'nvar':
        pos = None
    if word in EXCEPTIONS:
        return EXCEPTIONS[word]
    if pos is None:
        if word in AMBIGUOUS_EXCEPTIONS:
            return AMBIGUOUS_EXCEPTIONS[word]
    return _morphy_best(word, pos) or word
def tag_and_stem(text):
"""
Returns a list of (stem, tag, token) triples:
- stem: the word's uninflected form
- tag: the word's part of speech
- token: the original word, so we can reconstruct it later
"""
tokens = tokenize(text)
tagged = nltk.pos_tag(tokens)
out = []
for token, tag in tagged:
stem = morphy_stem(token, tag)
out.append((stem, tag, token))
return out
def good_lemma(lemma):
return lemma and lemma not in STOPWORDS and lemma[0].isalnum()
def normalize(text):
"""
Get a string made from the non-stopword word stems in the text. See
normalize_list().
"""
return untokenize(normalize_list(text))
def normalize_topic(topic):
"""
Get a canonical representation of a Wikipedia topic, which may include
a disambiguation string in parentheses.
Returns (name, disambig), where "name" is the normalized topic name,
and "disambig" is a string corresponding to the disambiguation text or
None.
"""
# find titles of the form Foo (bar)
topic = topic.replace('_', ' ')
match = re.match(r'([^(]+) \(([^)]+)\)', topic)
if not match:
return normalize(topic), None
else:
return normalize(match.group(1)), 'n/' + match.group(2).strip(' _')
def word_frequency(word, default_freq=0):
raise NotImplementedError("Word frequency is now in the wordfreq package.")
def get_wordlist():
raise NotImplementedError("Wordlists are now in the wordfreq package.")
|
commonsense/metanl | metanl/nltk_morphy.py | normalize_topic | python | def normalize_topic(topic):
# find titles of the form Foo (bar)
topic = topic.replace('_', ' ')
match = re.match(r'([^(]+) \(([^)]+)\)', topic)
if not match:
return normalize(topic), None
else:
return normalize(match.group(1)), 'n/' + match.group(2).strip(' _') | Get a canonical representation of a Wikipedia topic, which may include
a disambiguation string in parentheses.
Returns (name, disambig), where "name" is the normalized topic name,
and "disambig" is a string corresponding to the disambiguation text or
None. | train | https://github.com/commonsense/metanl/blob/4b9ae8353489cc409bebd7e1fe10ab5b527b078e/metanl/nltk_morphy.py#L205-L220 | [
"def normalize(text):\n \"\"\"\n Get a string made from the non-stopword word stems in the text. See\n normalize_list().\n \"\"\"\n return untokenize(normalize_list(text))\n"
] | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import nltk
from nltk.corpus import wordnet
from metanl.token_utils import untokenize, tokenize
import re
try:
morphy = wordnet._morphy
except LookupError:
nltk.download('wordnet')
morphy = wordnet._morphy
STOPWORDS = ['the', 'a', 'an']
EXCEPTIONS = {
# Avoid obsolete and obscure roots, the way lexicographers don't.
'wrought': 'wrought', # not 'work'
'media': 'media', # not 'medium'
'installed': 'install', # not 'instal'
'installing': 'install',# not 'instal'
'synapses': 'synapse', # not 'synapsis'
'soles': 'sole', # not 'sol'
'pubes': 'pube', # not 'pubis'
'dui': 'dui', # not 'duo'
'taxis': 'taxi', # not 'taxis'
# Work around errors that Morphy makes.
'alas': 'alas',
'corps': 'corps',
'cos': 'cos',
'enured': 'enure',
'fiver': 'fiver',
'hinder': 'hinder',
'lobed': 'lobe',
'offerer': 'offerer',
'outer': 'outer',
'sang': 'sing',
'singing': 'sing',
'solderer': 'solderer',
'tined': 'tine',
'twiner': 'twiner',
'us': 'us',
# Stem common nouns whose plurals are apparently ambiguous
'teeth': 'tooth',
'things': 'thing',
'people': 'person',
# Tokenization artifacts
'wo': 'will',
'ca': 'can',
"n't": 'not',
}
AMBIGUOUS_EXCEPTIONS = {
# Avoid nouns that shadow more common verbs.
'am': 'be',
'as': 'as',
'are': 'be',
'ate': 'eat',
'bent': 'bend',
'drove': 'drive',
'fell': 'fall',
'felt': 'feel',
'found': 'find',
'has': 'have',
'lit': 'light',
'lost': 'lose',
'sat': 'sit',
'saw': 'see',
'sent': 'send',
'shook': 'shake',
'shot': 'shoot',
'slain': 'slay',
'spoke': 'speak',
'stole': 'steal',
'sung': 'sing',
'thought': 'think',
'tore': 'tear',
'was': 'be',
'won': 'win',
'feed': 'feed',
}
def _word_badness(word):
"""
Assign a heuristic to possible outputs from Morphy. Minimizing this
heuristic avoids incorrect stems.
"""
if word.endswith('e'):
return len(word) - 2
elif word.endswith('ess'):
return len(word) - 10
elif word.endswith('ss'):
return len(word) - 4
else:
return len(word)
def _morphy_best(word, pos=None):
"""
Get the most likely stem for a word using Morphy, once the input has been
pre-processed by morphy_stem().
"""
results = []
if pos is None:
pos = 'nvar'
for pos_item in pos:
results.extend(morphy(word, pos_item))
if not results:
return None
results.sort(key=lambda x: _word_badness(x))
return results[0]
def morphy_stem(word, pos=None):
    """
    Get the most likely stem for a word. If a part of speech is supplied,
    the stem will be more accurate.

    Valid parts of speech are:
    - 'n' or 'NN' for nouns
    - 'v' or 'VB' for verbs
    - 'a' or 'JJ' for adjectives
    - 'r' or 'RB' for adverbs

    Any other part of speech will be treated as unknown.
    """
    word = word.lower()
    # Map Penn Treebank tags down to WordNet's single-letter POS codes.
    if pos is not None:
        if pos.startswith('NN'):
            pos = 'n'
        elif pos.startswith('VB'):
            pos = 'v'
        elif pos.startswith('JJ'):
            pos = 'a'
        elif pos.startswith('RB'):
            pos = 'r'
    # BUG FIX: the original condition was
    #   `pos is None and word.endswith('ing') or word.endswith('ed')`
    # which, because `and` binds tighter than `or`, forced pos='v' for
    # *any* word ending in '-ed', even when the caller supplied a POS.
    # Only guess 'v' from the suffix when no POS was given.
    if pos is None and (word.endswith('ing') or word.endswith('ed')):
        pos = 'v'
    if pos is not None and pos not in 'nvar':
        pos = None
    if word in EXCEPTIONS:
        return EXCEPTIONS[word]
    if pos is None:
        if word in AMBIGUOUS_EXCEPTIONS:
            return AMBIGUOUS_EXCEPTIONS[word]
    return _morphy_best(word, pos) or word
def tag_and_stem(text):
"""
Returns a list of (stem, tag, token) triples:
- stem: the word's uninflected form
- tag: the word's part of speech
- token: the original word, so we can reconstruct it later
"""
tokens = tokenize(text)
tagged = nltk.pos_tag(tokens)
out = []
for token, tag in tagged:
stem = morphy_stem(token, tag)
out.append((stem, tag, token))
return out
def good_lemma(lemma):
return lemma and lemma not in STOPWORDS and lemma[0].isalnum()
def normalize_list(text):
"""
Get a list of word stems that appear in the text. Stopwords and an initial
'to' will be stripped, unless this leaves nothing in the stem.
>>> normalize_list('the dog')
['dog']
>>> normalize_list('big dogs')
['big', 'dog']
>>> normalize_list('the')
['the']
"""
pieces = [morphy_stem(word) for word in tokenize(text)]
pieces = [piece for piece in pieces if good_lemma(piece)]
if not pieces:
return [text]
if pieces[0] == 'to':
pieces = pieces[1:]
return pieces
def normalize(text):
"""
Get a string made from the non-stopword word stems in the text. See
normalize_list().
"""
return untokenize(normalize_list(text))
def word_frequency(word, default_freq=0):
raise NotImplementedError("Word frequency is now in the wordfreq package.")
def get_wordlist():
raise NotImplementedError("Wordlists are now in the wordfreq package.")
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | _convert_operator | python | def _convert_operator(op_name, attrs, identity_list=None, convert_map=None):
identity_list = identity_list if identity_list else _identity_list
convert_map = convert_map if convert_map else _convert_map
if op_name in identity_list:
pass
elif op_name in convert_map:
op_name, attrs = convert_map[op_name](attrs)
else:
raise NotImplementedError("Operator {} not implemented.".format(op_name))
op = getattr(mx.sym, op_name, None)
if not op:
raise RuntimeError("Unable to map op_name {} to sym".format(op_name))
return op, attrs | Convert from onnx operator to mxnet operator.
The converter must specify conversions explicitly for incompatible name, and
apply handlers to operator attributes.
Parameters
----------
op_name : str
Operator name, such as Convolution, FullyConnected
attrs : dict
Dict of operator attributes
identity_list : list
List of operators that don't require conversion
convert_map : dict
Dict of name : callable, where name is the op's name that
require conversion to mxnet, callable are functions which
take attrs and return (new_op_name, new_attrs)
Returns
-------
(op_name, attrs)
Converted (op_name, attrs) for mxnet. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L21-L55 | null | # Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
# http://www.apache.org/licenses/LICENSE-2.0
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
# Derived from Apache 2.0 licensed onnx.py file from DMLC NNVM:
# https://github.com/dmlc/nnvm/blob/3da53e46db57c438b05fbebe8aa332ee8c5994d1/python/nnvm/frontend/onnx.py
# coding: utf-8
# pylint: disable=invalid-name,too-many-locals,no-self-use
""" Support import export formats."""
from __future__ import absolute_import as _abs
import mxnet as mx
from onnx_mxnet.import_helper import _identity_list, _convert_map, _pad_sequence_fix
class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
    # symbol name -> mx.sym node built so far
    self._nodes = {}
    # parameter name -> mx.nd.array of pretrained weights
    self._params = {}
    # original onnx name -> renamed 'input_N' / 'param_N' name
    self._renames = {}
    # counters used to generate the sequential renames above
    self._num_input = 0
    self._num_param = 0
def from_onnx(self, graph):
    """Construct symbol from onnx graph.

    The inputs from onnx graph is vague, only providing "1", "2"...
    For convenience, we rename the `real` input names to "input_0",
    "input_1"... And renaming parameters to "param_0", "param_1"...

    Parameters
    ----------
    graph : onnx protobuf object
        The loaded onnx graph

    Returns
    -------
    sym : mx.sym
        The returned mxnet symbol
    params : dict
        A dict of name: mx.nd.array pairs, used as pretrained weights
    """
    # parse network inputs, aka parameters
    for init_tensor in graph.initializer:
        if not init_tensor.name.strip():
            raise ValueError("Tensor's name is required.")
        self._params[init_tensor.name] = self._parse_array(init_tensor)
    # converting GraphProto message: graph.input lists both real inputs
    # and parameters; anything with an initializer is a parameter.
    for i in graph.input:
        if i.name in self._params:
            # i is a param instead of input
            name_param = 'param_{}'.format(self._num_param)
            self._num_param += 1
            self._params[name_param] = self._params.pop(i.name)
            self._nodes[name_param] = mx.sym.Variable(name=name_param,
                                                      shape=self._params[name_param].shape)
            self._renames[i.name] = name_param
        else:
            name_input = 'input_{}'.format(self._num_input)
            self._num_input += 1
            self._nodes[name_input] = mx.sym.Variable(name=name_input)
            self._renames[i.name] = name_input
    # constructing nodes, nodes are stored as directed acyclic graph
    # converting NodeProto message
    for node in graph.node:
        op_name = node.op_type
        node_name = node.name.strip()
        node_name = node_name if node_name else None
        onnx_attr = self._parse_attr(node.attribute)
        new_op, mx_attr = _convert_operator(op_name, onnx_attr)
        inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
        # some workarounds for inconsistencies in onnx and mxnet conventions.
        mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
        mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
        self._fix_bias_shape(node.op_type, node.input, onnx_attr)
        # calling again to get new symbols after some workarounds
        # (_fix_bias_shape may have replaced a node in self._nodes)
        inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
        # onnx's Gemm operator also supports broadcasting C input which
        # mxnet's equivalent linalg_gemm doesn't. So using combination of
        # transpose and FullyConnected operators.
        if op_name == 'Gemm':
            new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
        # onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
        if op_name == 'Slice':
            op = self._fix_slice(inputs, mx_attr)
        elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
                op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
            op = self._fix_pooling(op_name, inputs, onnx_attr)
        elif op_name == 'Squeeze':
            op = self._fix_squeeze(inputs, mx_attr)
        elif op_name == 'Max' or op_name == 'Min':
            op = self._fix_max_min(op_name, inputs)
        elif node_name is None:
            op = new_op(*inputs, **mx_attr)
        else:
            op = new_op(name=node_name, *inputs, **mx_attr)
        # Dropout-like ops expose an extra ONNX output; trim it.
        node_output = self._fix_outputs(op_name, node.output)
        assert len(node_output) == len(op.list_outputs()), (
            "Number of output mismatch {} vs {} in {}.".format(
                len(node_output), len(op.list_outputs()), op_name))
        for k, i in zip(list(node_output), range(len(node_output))):
            self._nodes[k] = op[i]
    # now return the outputs
    out = [self._nodes[i.name] for i in graph.output]
    if len(out) > 1:
        out = mx.sym.Group(out)
    else:
        out = out[0]
    return out, self._params
def _fix_pooling(self, op_name, inputs, new_attr):
    """onnx pooling operator supports asymmetrical padding
    Adding pad operator before pooling in mxnet to work with onnx"""
    pool_type = 'avg' if op_name == 'AveragePool' else 'max'
    stride = new_attr.get('strides')
    kernel = new_attr.get('kernel_shape')
    padding = new_attr.get('pads')
    # Pad explicitly (batch and channel dims get zero padding), then pool
    # without padding, since mxnet Pooling only supports symmetric pads.
    pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
    new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
    new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
                                    stride=stride, kernel=kernel)
    return new_pooling_op
def _fix_slice(self, inputs, new_attr):
    """onnx slice provides slicing on multiple axis. Adding multiple slice_axis operator
    for multiple axes from mxnet"""
    begin = new_attr.get('begin')
    end = new_attr.get('end')
    axes = new_attr.get('axis', tuple(range(len(begin))))
    # BUG FIX: the original looped over *all* axes after already slicing
    # axes[0], so the first axis was sliced twice (the second time with
    # an origin already shifted by begin[0]). Apply each
    # (axis, begin, end) triple exactly once.
    slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
    for i in range(1, len(axes)):
        slice_op = mx.sym.slice_axis(slice_op, axis=axes[i], begin=begin[i], end=end[i])
    return slice_op
def _fix_squeeze(self, inputs, new_attr):
    """
    MXNet doesnt have a squeeze operator.
    Using "split" to perform similar operation.
    "split" can be slower compared to "reshape".
    This can have performance impact.
    TODO: Remove this implementation once mxnet adds the support.
    """
    axes = new_attr.get('axis')
    # split with num_outputs=1 and squeeze_axis=1 removes the given axis.
    op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
    # NOTE(review): each removed axis shifts later axis indices down, but
    # this only subtracts 1 regardless of how many axes were already
    # removed — looks wrong for 3+ squeeze axes. Confirm against callers.
    for i in axes[1:]:
        op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
    return op
def _fix_max_min(self, op_name, inputs):
    """ MXNet maximum/minimum compares only two symbols at a time.
    ONNX can send more than two to compare.
    Breaking into multiple mxnet ops to compare two symbols at a time"""
    # NOTE(review): `op` stays unbound if len(inputs) > 1 and op_name is
    # neither 'Max' nor 'Min'; callers only pass those two names.
    if len(inputs) > 1:
        if op_name == 'Max':
            # Left-fold pairwise maximum over all inputs.
            op = mx.sym.maximum(inputs[0], inputs[1])
            for ip in inputs[2:]:
                op = mx.sym.maximum(op, ip)
        elif op_name == 'Min':
            # Left-fold pairwise minimum over all inputs.
            op = mx.sym.minimum(inputs[0], inputs[1])
            for ip in inputs[2:]:
                op = mx.sym.minimum(op, ip)
    else:
        # Single input: nothing to compare against, pass it through.
        op = inputs[0]
    return op
def _fix_gemm(self, op_name, inputs, old_attr):
    """Using FullyConnected operator in place of linalg_gemm to perform same operation"""
    # ONNX Gemm computes alpha * A' * B' + beta * C (with optional
    # transposes); FullyConnected computes X * W^T + b, so alpha/beta are
    # folded into the inputs and transposes are applied up front.
    op = getattr(mx.sym, op_name, None)
    alpha = float(old_attr.get('alpha', 1.0))
    beta = float(old_attr.get('beta', 1.0))
    transA = int(old_attr.get('transA', 0))
    transB = int(old_attr.get('transB', 0))
    if transA:
        inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
    if not transB:
        # FullyConnected multiplies by the transposed weight, so B must
        # arrive already transposed; transpose it when ONNX did not.
        inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
    new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
    # num_hidden is taken from the length of the bias (C) parameter.
    new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
    return op, new_inputs, new_attr
def _parse_array(self, tensor_proto):
    """Grab data in TensorProto and convert to numpy array."""
    # onnx is imported lazily so the module can load without it installed.
    try:
        from onnx.numpy_helper import to_array
    except ImportError as e:
        raise ImportError("Unable to import onnx which is required {}".format(e))
    # Reshape explicitly to the proto's declared dims before wrapping.
    np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
    return mx.nd.array(np_array)
def _parse_attr(self, attr_proto):
    """Convert a list of AttributeProto to a dict, with names as keys."""
    attrs = {}
    for a in attr_proto:
        # Scalar fields: float, int, string.
        for f in ['f', 'i', 's']:
            if a.HasField(f):
                attrs[a.name] = getattr(a, f)
        # Repeated numeric/string fields become tuples.
        for f in ['floats', 'ints', 'strings']:
            if list(getattr(a, f)):
                assert a.name not in attrs, "Only one type of attr is allowed"
                attrs[a.name] = tuple(getattr(a, f))
        # Singular tensor / graph attributes are stored as-is.
        for f in ['t', 'g']:
            if a.HasField(f):
                attrs[a.name] = getattr(a, f)
        # Repeated tensor/graph attributes have no mxnet equivalent.
        for f in ['tensors', 'graphs']:
            if list(getattr(a, f)):
                raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
        if a.name not in attrs:
            raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
    return attrs
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias(self, op, attrs, num_inputs):
    """A workaround for 'use_bias' attribute since onnx don't provide this attribute,
    we have to check the number of inputs to decide it."""
    # Only these three mxnet ops take an optional bias input.
    if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
        return attrs
    # 3 inputs => data, weight, bias; 2 inputs => data, weight (no bias).
    if num_inputs == 3:
        attrs['no_bias'] = False
    elif num_inputs == 2:
        attrs['no_bias'] = True
    else:
        raise ValueError("Unexpected number of inputs for: {}".format(op))
    return attrs
def _fix_bias_shape(self, op_name, inputs, attrs):
    """A workaround to reshape bias term to (1, num_channel)."""
    # NOTE(review): the code actually reshapes to (1, C, 1, 1) for NCHW
    # broadcasting, not (1, C) as the docstring says — confirm intent.
    if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
            ('broadcast' in attrs and attrs['broadcast'] == 1):
        assert len(list(inputs)) == 2
        bias_name = self._renames.get(inputs[1], inputs[1])
        bias = self._params[bias_name]
        assert len(bias.shape) == 1
        # reshape to (1, n)
        bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
        # broadcast_add expects shape with sym.variable
        self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
        self._params[bias_name] = bias
def _fix_channels(self, op, attrs, inputs):
    """A workaround for getting 'channels' or 'units' since onnx don't provide
    these attributes. We check the shape of weights provided to get the number.
    """
    if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
        return attrs
    # inputs[1] is the (renamed) weight parameter for all three ops.
    weight_name = self._renames[inputs[1]]
    if not weight_name in self._params:
        raise ValueError("Unable to get channels/units attr from onnx graph.")
    else:
        wshape = self._params[weight_name].shape
        assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
        if op in [mx.sym.FullyConnected]:
            attrs['num_hidden'] = wshape[0]
        else:
            if op == mx.sym.Convolution:
                # Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
                # feature maps/hidden and C is number of channels
                attrs['num_filter'] = wshape[0]
            elif op == mx.sym.Deconvolution:
                # Weight shape for DeConv : (C x M x kH x kW) : M is number of
                # feature maps/filters and C is number of channels
                attrs['num_filter'] = wshape[1]
    return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto.from_onnx | python | def from_onnx(self, graph):
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
name_param = 'param_{}'.format(self._num_param)
self._num_param += 1
self._params[name_param] = self._params.pop(i.name)
self._nodes[name_param] = mx.sym.Variable(name=name_param,
shape=self._params[name_param].shape)
self._renames[i.name] = name_param
else:
name_input = 'input_{}'.format(self._num_input)
self._num_input += 1
self._nodes[name_input] = mx.sym.Variable(name=name_input)
self._renames[i.name] = name_input
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
new_op, mx_attr = _convert_operator(op_name, onnx_attr)
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# some workarounds for inconsistencies in onnx and mxnet conventions.
mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
self._fix_bias_shape(node.op_type, node.input, onnx_attr)
# calling again to get new symbols after some workarounds
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# onnx's Gemm operator also supports broadcasting C input which
# mxnet's equivalent linalg_gemm doesn't. So using combination of
# transpose and FullyConnected operators.
if op_name == 'Gemm':
new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
# onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
if op_name == 'Slice':
op = self._fix_slice(inputs, mx_attr)
elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
op = self._fix_pooling(op_name, inputs, onnx_attr)
elif op_name == 'Squeeze':
op = self._fix_squeeze(inputs, mx_attr)
elif op_name == 'Max' or op_name == 'Min':
op = self._fix_max_min(op_name, inputs)
elif node_name is None:
op = new_op(*inputs, **mx_attr)
else:
op = new_op(name=node_name, *inputs, **mx_attr)
node_output = self._fix_outputs(op_name, node.output)
assert len(node_output) == len(op.list_outputs()), (
"Number of output mismatch {} vs {} in {}.".format(
len(node_output), len(op.list_outputs()), op_name))
for k, i in zip(list(node_output), range(len(node_output))):
self._nodes[k] = op[i]
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = mx.sym.Group(out)
else:
out = out[0]
return out, self._params | Construct symbol from onnx graph.
The inputs from onnx graph is vague, only providing "1", "2"...
For convenience, we rename the `real` input names to "input_0",
"input_1"... And renaming parameters to "param_0", "param_1"...
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :mx.sym
The returned mxnet symbol
params : dict
A dict of name: mx.nd.array pairs, used as pretrained weights | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L68-L160 | [
"def _convert_operator(op_name, attrs, identity_list=None, convert_map=None):\n \"\"\"Convert from onnx operator to mxnet operator.\n The converter must specify conversions explicitly for incompatible name, and\n apply handlers to operator attributes.\n\n Parameters\n ----------\n op_name : str\n ... | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
def _fix_pooling(self, op_name, inputs, new_attr):
"""onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx"""
pool_type = 'avg' if op_name == 'AveragePool' else 'max'
stride = new_attr.get('strides')
kernel = new_attr.get('kernel_shape')
padding = new_attr.get('pads')
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
stride=stride, kernel=kernel)
return new_pooling_op
def _fix_slice(self, inputs, new_attr):
"""onnx slice provides slicing on multiple axis. Adding multiple slice_axis operator
for multiple axes from mxnet"""
begin = new_attr.get('begin')
end = new_attr.get('end')
axes = new_attr.get('axis', tuple(range(len(begin))))
slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
if len(axes) > 1:
for i, axis in enumerate(axes):
slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
return slice_op
def _fix_squeeze(self, inputs, new_attr):
"""
MXNet doesnt have a squeeze operator.
Using "split" to perform similar operation.
"split" can be slower compared to "reshape".
This can have performance impact.
TODO: Remove this implementation once mxnet adds the support.
"""
axes = new_attr.get('axis')
op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
for i in axes[1:]:
op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
return op
def _fix_max_min(self, op_name, inputs):
""" MXNet maximum/minimum compares only two symbols at a time.
ONNX can send more than two to compare.
Breaking into multiple mxnet ops to compare two symbols at a time"""
if len(inputs) > 1:
if op_name == 'Max':
op = mx.sym.maximum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.maximum(op, ip)
elif op_name == 'Min':
op = mx.sym.minimum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.minimum(op, ip)
else:
op = inputs[0]
return op
def _fix_gemm(self, op_name, inputs, old_attr):
"""Using FullyConnected operator in place of linalg_gemm to perform same operation"""
op = getattr(mx.sym, op_name, None)
alpha = float(old_attr.get('alpha', 1.0))
beta = float(old_attr.get('beta', 1.0))
transA = int(old_attr.get('transA', 0))
transB = int(old_attr.get('transB', 0))
if transA:
inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
if not transB:
inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
return op, new_inputs, new_attr
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError as e:
raise ImportError("Unable to import onnx which is required {}".format(e))
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return mx.nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias(self, op, attrs, num_inputs):
"""A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it."""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op))
return attrs
def _fix_bias_shape(self, op_name, inputs, attrs):
"""A workaround to reshape bias term to (1, num_channel)."""
if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
('broadcast' in attrs and attrs['broadcast'] == 1):
assert len(list(inputs)) == 2
bias_name = self._renames.get(inputs[1], inputs[1])
bias = self._params[bias_name]
assert len(bias.shape) == 1
# reshape to (1, n)
bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
# broadcast_add expects shape with sym.variable
self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
self._params[bias_name] = bias
def _fix_channels(self, op, attrs, inputs):
"""A workaround for getting 'channels' or 'units' since onnx don't provide
these attributes. We check the shape of weights provided to get the number.
"""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
weight_name = self._renames[inputs[1]]
if not weight_name in self._params:
raise ValueError("Unable to get channels/units attr from onnx graph.")
else:
wshape = self._params[weight_name].shape
assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
if op in [mx.sym.FullyConnected]:
attrs['num_hidden'] = wshape[0]
else:
if op == mx.sym.Convolution:
# Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
# feature maps/hidden and C is number of channels
attrs['num_filter'] = wshape[0]
elif op == mx.sym.Deconvolution:
# Weight shape for DeConv : (C x M x kH x kW) : M is number of
# feature maps/filters and C is number of channels
attrs['num_filter'] = wshape[1]
return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto._fix_pooling | python | def _fix_pooling(self, op_name, inputs, new_attr):
pool_type = 'avg' if op_name == 'AveragePool' else 'max'
stride = new_attr.get('strides')
kernel = new_attr.get('kernel_shape')
padding = new_attr.get('pads')
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
stride=stride, kernel=kernel)
return new_pooling_op | onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L162-L173 | null | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
The inputs from onnx graph is vague, only providing "1", "2"...
For convenience, we rename the `real` input names to "input_0",
"input_1"... And renaming parameters to "param_0", "param_1"...
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :mx.sym
The returned mxnet symbol
params : dict
A dict of name: mx.nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
name_param = 'param_{}'.format(self._num_param)
self._num_param += 1
self._params[name_param] = self._params.pop(i.name)
self._nodes[name_param] = mx.sym.Variable(name=name_param,
shape=self._params[name_param].shape)
self._renames[i.name] = name_param
else:
name_input = 'input_{}'.format(self._num_input)
self._num_input += 1
self._nodes[name_input] = mx.sym.Variable(name=name_input)
self._renames[i.name] = name_input
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
new_op, mx_attr = _convert_operator(op_name, onnx_attr)
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# some workarounds for inconsistencies in onnx and mxnet conventions.
mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
self._fix_bias_shape(node.op_type, node.input, onnx_attr)
# calling again to get new symbols after some workarounds
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# onnx's Gemm operator also supports broadcasting C input which
# mxnet's equivalent linalg_gemm doesn't. So using combination of
# transpose and FullyConnected operators.
if op_name == 'Gemm':
new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
# onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
if op_name == 'Slice':
op = self._fix_slice(inputs, mx_attr)
elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
op = self._fix_pooling(op_name, inputs, onnx_attr)
elif op_name == 'Squeeze':
op = self._fix_squeeze(inputs, mx_attr)
elif op_name == 'Max' or op_name == 'Min':
op = self._fix_max_min(op_name, inputs)
elif node_name is None:
op = new_op(*inputs, **mx_attr)
else:
op = new_op(name=node_name, *inputs, **mx_attr)
node_output = self._fix_outputs(op_name, node.output)
assert len(node_output) == len(op.list_outputs()), (
"Number of output mismatch {} vs {} in {}.".format(
len(node_output), len(op.list_outputs()), op_name))
for k, i in zip(list(node_output), range(len(node_output))):
self._nodes[k] = op[i]
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = mx.sym.Group(out)
else:
out = out[0]
return out, self._params
def _fix_slice(self, inputs, new_attr):
"""onnx slice provides slicing on multiple axis. Adding multiple slice_axis operator
for multiple axes from mxnet"""
begin = new_attr.get('begin')
end = new_attr.get('end')
axes = new_attr.get('axis', tuple(range(len(begin))))
slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
if len(axes) > 1:
for i, axis in enumerate(axes):
slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
return slice_op
def _fix_squeeze(self, inputs, new_attr):
"""
MXNet doesnt have a squeeze operator.
Using "split" to perform similar operation.
"split" can be slower compared to "reshape".
This can have performance impact.
TODO: Remove this implementation once mxnet adds the support.
"""
axes = new_attr.get('axis')
op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
for i in axes[1:]:
op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
return op
def _fix_max_min(self, op_name, inputs):
""" MXNet maximum/minimum compares only two symbols at a time.
ONNX can send more than two to compare.
Breaking into multiple mxnet ops to compare two symbols at a time"""
if len(inputs) > 1:
if op_name == 'Max':
op = mx.sym.maximum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.maximum(op, ip)
elif op_name == 'Min':
op = mx.sym.minimum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.minimum(op, ip)
else:
op = inputs[0]
return op
def _fix_gemm(self, op_name, inputs, old_attr):
"""Using FullyConnected operator in place of linalg_gemm to perform same operation"""
op = getattr(mx.sym, op_name, None)
alpha = float(old_attr.get('alpha', 1.0))
beta = float(old_attr.get('beta', 1.0))
transA = int(old_attr.get('transA', 0))
transB = int(old_attr.get('transB', 0))
if transA:
inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
if not transB:
inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
return op, new_inputs, new_attr
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError as e:
raise ImportError("Unable to import onnx which is required {}".format(e))
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return mx.nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias(self, op, attrs, num_inputs):
"""A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it."""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op))
return attrs
def _fix_bias_shape(self, op_name, inputs, attrs):
"""A workaround to reshape bias term to (1, num_channel)."""
if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
('broadcast' in attrs and attrs['broadcast'] == 1):
assert len(list(inputs)) == 2
bias_name = self._renames.get(inputs[1], inputs[1])
bias = self._params[bias_name]
assert len(bias.shape) == 1
# reshape to (1, n)
bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
# broadcast_add expects shape with sym.variable
self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
self._params[bias_name] = bias
def _fix_channels(self, op, attrs, inputs):
"""A workaround for getting 'channels' or 'units' since onnx don't provide
these attributes. We check the shape of weights provided to get the number.
"""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
weight_name = self._renames[inputs[1]]
if not weight_name in self._params:
raise ValueError("Unable to get channels/units attr from onnx graph.")
else:
wshape = self._params[weight_name].shape
assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
if op in [mx.sym.FullyConnected]:
attrs['num_hidden'] = wshape[0]
else:
if op == mx.sym.Convolution:
# Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
# feature maps/hidden and C is number of channels
attrs['num_filter'] = wshape[0]
elif op == mx.sym.Deconvolution:
# Weight shape for DeConv : (C x M x kH x kW) : M is number of
# feature maps/filters and C is number of channels
attrs['num_filter'] = wshape[1]
return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto._fix_slice | python | def _fix_slice(self, inputs, new_attr):
begin = new_attr.get('begin')
end = new_attr.get('end')
axes = new_attr.get('axis', tuple(range(len(begin))))
slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
if len(axes) > 1:
for i, axis in enumerate(axes):
slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
return slice_op | onnx slice provides slicing on multiple axis. Adding multiple slice_axis operator
for multiple axes from mxnet | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L175-L185 | null | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
The inputs from onnx graph is vague, only providing "1", "2"...
For convenience, we rename the `real` input names to "input_0",
"input_1"... And renaming parameters to "param_0", "param_1"...
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :mx.sym
The returned mxnet symbol
params : dict
A dict of name: mx.nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
name_param = 'param_{}'.format(self._num_param)
self._num_param += 1
self._params[name_param] = self._params.pop(i.name)
self._nodes[name_param] = mx.sym.Variable(name=name_param,
shape=self._params[name_param].shape)
self._renames[i.name] = name_param
else:
name_input = 'input_{}'.format(self._num_input)
self._num_input += 1
self._nodes[name_input] = mx.sym.Variable(name=name_input)
self._renames[i.name] = name_input
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
new_op, mx_attr = _convert_operator(op_name, onnx_attr)
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# some workarounds for inconsistencies in onnx and mxnet conventions.
mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
self._fix_bias_shape(node.op_type, node.input, onnx_attr)
# calling again to get new symbols after some workarounds
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# onnx's Gemm operator also supports broadcasting C input which
# mxnet's equivalent linalg_gemm doesn't. So using combination of
# transpose and FullyConnected operators.
if op_name == 'Gemm':
new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
# onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
if op_name == 'Slice':
op = self._fix_slice(inputs, mx_attr)
elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
op = self._fix_pooling(op_name, inputs, onnx_attr)
elif op_name == 'Squeeze':
op = self._fix_squeeze(inputs, mx_attr)
elif op_name == 'Max' or op_name == 'Min':
op = self._fix_max_min(op_name, inputs)
elif node_name is None:
op = new_op(*inputs, **mx_attr)
else:
op = new_op(name=node_name, *inputs, **mx_attr)
node_output = self._fix_outputs(op_name, node.output)
assert len(node_output) == len(op.list_outputs()), (
"Number of output mismatch {} vs {} in {}.".format(
len(node_output), len(op.list_outputs()), op_name))
for k, i in zip(list(node_output), range(len(node_output))):
self._nodes[k] = op[i]
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = mx.sym.Group(out)
else:
out = out[0]
return out, self._params
def _fix_pooling(self, op_name, inputs, new_attr):
"""onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx"""
pool_type = 'avg' if op_name == 'AveragePool' else 'max'
stride = new_attr.get('strides')
kernel = new_attr.get('kernel_shape')
padding = new_attr.get('pads')
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
stride=stride, kernel=kernel)
return new_pooling_op
def _fix_squeeze(self, inputs, new_attr):
"""
MXNet doesnt have a squeeze operator.
Using "split" to perform similar operation.
"split" can be slower compared to "reshape".
This can have performance impact.
TODO: Remove this implementation once mxnet adds the support.
"""
axes = new_attr.get('axis')
op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
for i in axes[1:]:
op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
return op
def _fix_max_min(self, op_name, inputs):
""" MXNet maximum/minimum compares only two symbols at a time.
ONNX can send more than two to compare.
Breaking into multiple mxnet ops to compare two symbols at a time"""
if len(inputs) > 1:
if op_name == 'Max':
op = mx.sym.maximum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.maximum(op, ip)
elif op_name == 'Min':
op = mx.sym.minimum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.minimum(op, ip)
else:
op = inputs[0]
return op
def _fix_gemm(self, op_name, inputs, old_attr):
"""Using FullyConnected operator in place of linalg_gemm to perform same operation"""
op = getattr(mx.sym, op_name, None)
alpha = float(old_attr.get('alpha', 1.0))
beta = float(old_attr.get('beta', 1.0))
transA = int(old_attr.get('transA', 0))
transB = int(old_attr.get('transB', 0))
if transA:
inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
if not transB:
inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
return op, new_inputs, new_attr
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError as e:
raise ImportError("Unable to import onnx which is required {}".format(e))
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return mx.nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias(self, op, attrs, num_inputs):
"""A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it."""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op))
return attrs
def _fix_bias_shape(self, op_name, inputs, attrs):
"""A workaround to reshape bias term to (1, num_channel)."""
if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
('broadcast' in attrs and attrs['broadcast'] == 1):
assert len(list(inputs)) == 2
bias_name = self._renames.get(inputs[1], inputs[1])
bias = self._params[bias_name]
assert len(bias.shape) == 1
# reshape to (1, n)
bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
# broadcast_add expects shape with sym.variable
self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
self._params[bias_name] = bias
def _fix_channels(self, op, attrs, inputs):
"""A workaround for getting 'channels' or 'units' since onnx don't provide
these attributes. We check the shape of weights provided to get the number.
"""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
weight_name = self._renames[inputs[1]]
if not weight_name in self._params:
raise ValueError("Unable to get channels/units attr from onnx graph.")
else:
wshape = self._params[weight_name].shape
assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
if op in [mx.sym.FullyConnected]:
attrs['num_hidden'] = wshape[0]
else:
if op == mx.sym.Convolution:
# Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
# feature maps/hidden and C is number of channels
attrs['num_filter'] = wshape[0]
elif op == mx.sym.Deconvolution:
# Weight shape for DeConv : (C x M x kH x kW) : M is number of
# feature maps/filters and C is number of channels
attrs['num_filter'] = wshape[1]
return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto._fix_squeeze | python | def _fix_squeeze(self, inputs, new_attr):
axes = new_attr.get('axis')
op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
for i in axes[1:]:
op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
return op | MXNet doesnt have a squeeze operator.
Using "split" to perform similar operation.
"split" can be slower compared to "reshape".
This can have performance impact.
TODO: Remove this implementation once mxnet adds the support. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L187-L199 | null | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
The inputs from onnx graph is vague, only providing "1", "2"...
For convenience, we rename the `real` input names to "input_0",
"input_1"... And renaming parameters to "param_0", "param_1"...
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :mx.sym
The returned mxnet symbol
params : dict
A dict of name: mx.nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
name_param = 'param_{}'.format(self._num_param)
self._num_param += 1
self._params[name_param] = self._params.pop(i.name)
self._nodes[name_param] = mx.sym.Variable(name=name_param,
shape=self._params[name_param].shape)
self._renames[i.name] = name_param
else:
name_input = 'input_{}'.format(self._num_input)
self._num_input += 1
self._nodes[name_input] = mx.sym.Variable(name=name_input)
self._renames[i.name] = name_input
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
new_op, mx_attr = _convert_operator(op_name, onnx_attr)
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# some workarounds for inconsistencies in onnx and mxnet conventions.
mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
self._fix_bias_shape(node.op_type, node.input, onnx_attr)
# calling again to get new symbols after some workarounds
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# onnx's Gemm operator also supports broadcasting C input which
# mxnet's equivalent linalg_gemm doesn't. So using combination of
# transpose and FullyConnected operators.
if op_name == 'Gemm':
new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
# onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
if op_name == 'Slice':
op = self._fix_slice(inputs, mx_attr)
elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
op = self._fix_pooling(op_name, inputs, onnx_attr)
elif op_name == 'Squeeze':
op = self._fix_squeeze(inputs, mx_attr)
elif op_name == 'Max' or op_name == 'Min':
op = self._fix_max_min(op_name, inputs)
elif node_name is None:
op = new_op(*inputs, **mx_attr)
else:
op = new_op(name=node_name, *inputs, **mx_attr)
node_output = self._fix_outputs(op_name, node.output)
assert len(node_output) == len(op.list_outputs()), (
"Number of output mismatch {} vs {} in {}.".format(
len(node_output), len(op.list_outputs()), op_name))
for k, i in zip(list(node_output), range(len(node_output))):
self._nodes[k] = op[i]
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = mx.sym.Group(out)
else:
out = out[0]
return out, self._params
def _fix_pooling(self, op_name, inputs, new_attr):
"""onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx"""
pool_type = 'avg' if op_name == 'AveragePool' else 'max'
stride = new_attr.get('strides')
kernel = new_attr.get('kernel_shape')
padding = new_attr.get('pads')
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
stride=stride, kernel=kernel)
return new_pooling_op
def _fix_slice(self, inputs, new_attr):
"""onnx slice provides slicing on multiple axis. Adding multiple slice_axis operator
for multiple axes from mxnet"""
begin = new_attr.get('begin')
end = new_attr.get('end')
axes = new_attr.get('axis', tuple(range(len(begin))))
slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
if len(axes) > 1:
for i, axis in enumerate(axes):
slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
return slice_op
def _fix_max_min(self, op_name, inputs):
""" MXNet maximum/minimum compares only two symbols at a time.
ONNX can send more than two to compare.
Breaking into multiple mxnet ops to compare two symbols at a time"""
if len(inputs) > 1:
if op_name == 'Max':
op = mx.sym.maximum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.maximum(op, ip)
elif op_name == 'Min':
op = mx.sym.minimum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.minimum(op, ip)
else:
op = inputs[0]
return op
def _fix_gemm(self, op_name, inputs, old_attr):
"""Using FullyConnected operator in place of linalg_gemm to perform same operation"""
op = getattr(mx.sym, op_name, None)
alpha = float(old_attr.get('alpha', 1.0))
beta = float(old_attr.get('beta', 1.0))
transA = int(old_attr.get('transA', 0))
transB = int(old_attr.get('transB', 0))
if transA:
inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
if not transB:
inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
return op, new_inputs, new_attr
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError as e:
raise ImportError("Unable to import onnx which is required {}".format(e))
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return mx.nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias(self, op, attrs, num_inputs):
"""A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it."""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op))
return attrs
def _fix_bias_shape(self, op_name, inputs, attrs):
"""A workaround to reshape bias term to (1, num_channel)."""
if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
('broadcast' in attrs and attrs['broadcast'] == 1):
assert len(list(inputs)) == 2
bias_name = self._renames.get(inputs[1], inputs[1])
bias = self._params[bias_name]
assert len(bias.shape) == 1
# reshape to (1, n)
bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
# broadcast_add expects shape with sym.variable
self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
self._params[bias_name] = bias
def _fix_channels(self, op, attrs, inputs):
"""A workaround for getting 'channels' or 'units' since onnx don't provide
these attributes. We check the shape of weights provided to get the number.
"""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
weight_name = self._renames[inputs[1]]
if not weight_name in self._params:
raise ValueError("Unable to get channels/units attr from onnx graph.")
else:
wshape = self._params[weight_name].shape
assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
if op in [mx.sym.FullyConnected]:
attrs['num_hidden'] = wshape[0]
else:
if op == mx.sym.Convolution:
# Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
# feature maps/hidden and C is number of channels
attrs['num_filter'] = wshape[0]
elif op == mx.sym.Deconvolution:
# Weight shape for DeConv : (C x M x kH x kW) : M is number of
# feature maps/filters and C is number of channels
attrs['num_filter'] = wshape[1]
return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto._fix_max_min | python | def _fix_max_min(self, op_name, inputs):
if len(inputs) > 1:
if op_name == 'Max':
op = mx.sym.maximum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.maximum(op, ip)
elif op_name == 'Min':
op = mx.sym.minimum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.minimum(op, ip)
else:
op = inputs[0]
return op | MXNet maximum/minimum compares only two symbols at a time.
ONNX can send more than two to compare.
Breaking into multiple mxnet ops to compare two symbols at a time | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L201-L216 | null | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
The inputs from onnx graph is vague, only providing "1", "2"...
For convenience, we rename the `real` input names to "input_0",
"input_1"... And renaming parameters to "param_0", "param_1"...
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :mx.sym
The returned mxnet symbol
params : dict
A dict of name: mx.nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
name_param = 'param_{}'.format(self._num_param)
self._num_param += 1
self._params[name_param] = self._params.pop(i.name)
self._nodes[name_param] = mx.sym.Variable(name=name_param,
shape=self._params[name_param].shape)
self._renames[i.name] = name_param
else:
name_input = 'input_{}'.format(self._num_input)
self._num_input += 1
self._nodes[name_input] = mx.sym.Variable(name=name_input)
self._renames[i.name] = name_input
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
new_op, mx_attr = _convert_operator(op_name, onnx_attr)
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# some workarounds for inconsistencies in onnx and mxnet conventions.
mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
self._fix_bias_shape(node.op_type, node.input, onnx_attr)
# calling again to get new symbols after some workarounds
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# onnx's Gemm operator also supports broadcasting C input which
# mxnet's equivalent linalg_gemm doesn't. So using combination of
# transpose and FullyConnected operators.
if op_name == 'Gemm':
new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
# onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
if op_name == 'Slice':
op = self._fix_slice(inputs, mx_attr)
elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
op = self._fix_pooling(op_name, inputs, onnx_attr)
elif op_name == 'Squeeze':
op = self._fix_squeeze(inputs, mx_attr)
elif op_name == 'Max' or op_name == 'Min':
op = self._fix_max_min(op_name, inputs)
elif node_name is None:
op = new_op(*inputs, **mx_attr)
else:
op = new_op(name=node_name, *inputs, **mx_attr)
node_output = self._fix_outputs(op_name, node.output)
assert len(node_output) == len(op.list_outputs()), (
"Number of output mismatch {} vs {} in {}.".format(
len(node_output), len(op.list_outputs()), op_name))
for k, i in zip(list(node_output), range(len(node_output))):
self._nodes[k] = op[i]
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = mx.sym.Group(out)
else:
out = out[0]
return out, self._params
def _fix_pooling(self, op_name, inputs, new_attr):
"""onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx"""
pool_type = 'avg' if op_name == 'AveragePool' else 'max'
stride = new_attr.get('strides')
kernel = new_attr.get('kernel_shape')
padding = new_attr.get('pads')
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
stride=stride, kernel=kernel)
return new_pooling_op
def _fix_slice(self, inputs, new_attr):
"""onnx slice provides slicing on multiple axis. Adding multiple slice_axis operator
for multiple axes from mxnet"""
begin = new_attr.get('begin')
end = new_attr.get('end')
axes = new_attr.get('axis', tuple(range(len(begin))))
slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
if len(axes) > 1:
for i, axis in enumerate(axes):
slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
return slice_op
def _fix_squeeze(self, inputs, new_attr):
"""
MXNet doesnt have a squeeze operator.
Using "split" to perform similar operation.
"split" can be slower compared to "reshape".
This can have performance impact.
TODO: Remove this implementation once mxnet adds the support.
"""
axes = new_attr.get('axis')
op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
for i in axes[1:]:
op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
return op
def _fix_gemm(self, op_name, inputs, old_attr):
"""Using FullyConnected operator in place of linalg_gemm to perform same operation"""
op = getattr(mx.sym, op_name, None)
alpha = float(old_attr.get('alpha', 1.0))
beta = float(old_attr.get('beta', 1.0))
transA = int(old_attr.get('transA', 0))
transB = int(old_attr.get('transB', 0))
if transA:
inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
if not transB:
inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
return op, new_inputs, new_attr
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError as e:
raise ImportError("Unable to import onnx which is required {}".format(e))
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return mx.nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias(self, op, attrs, num_inputs):
"""A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it."""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op))
return attrs
def _fix_bias_shape(self, op_name, inputs, attrs):
"""A workaround to reshape bias term to (1, num_channel)."""
if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
('broadcast' in attrs and attrs['broadcast'] == 1):
assert len(list(inputs)) == 2
bias_name = self._renames.get(inputs[1], inputs[1])
bias = self._params[bias_name]
assert len(bias.shape) == 1
# reshape to (1, n)
bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
# broadcast_add expects shape with sym.variable
self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
self._params[bias_name] = bias
def _fix_channels(self, op, attrs, inputs):
"""A workaround for getting 'channels' or 'units' since onnx don't provide
these attributes. We check the shape of weights provided to get the number.
"""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
weight_name = self._renames[inputs[1]]
if not weight_name in self._params:
raise ValueError("Unable to get channels/units attr from onnx graph.")
else:
wshape = self._params[weight_name].shape
assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
if op in [mx.sym.FullyConnected]:
attrs['num_hidden'] = wshape[0]
else:
if op == mx.sym.Convolution:
# Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
# feature maps/hidden and C is number of channels
attrs['num_filter'] = wshape[0]
elif op == mx.sym.Deconvolution:
# Weight shape for DeConv : (C x M x kH x kW) : M is number of
# feature maps/filters and C is number of channels
attrs['num_filter'] = wshape[1]
return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto._fix_gemm | python | def _fix_gemm(self, op_name, inputs, old_attr):
op = getattr(mx.sym, op_name, None)
alpha = float(old_attr.get('alpha', 1.0))
beta = float(old_attr.get('beta', 1.0))
transA = int(old_attr.get('transA', 0))
transB = int(old_attr.get('transB', 0))
if transA:
inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
if not transB:
inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
return op, new_inputs, new_attr | Using FullyConnected operator in place of linalg_gemm to perform same operation | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L219-L232 | null | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
The inputs from onnx graph is vague, only providing "1", "2"...
For convenience, we rename the `real` input names to "input_0",
"input_1"... And renaming parameters to "param_0", "param_1"...
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :mx.sym
The returned mxnet symbol
params : dict
A dict of name: mx.nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
name_param = 'param_{}'.format(self._num_param)
self._num_param += 1
self._params[name_param] = self._params.pop(i.name)
self._nodes[name_param] = mx.sym.Variable(name=name_param,
shape=self._params[name_param].shape)
self._renames[i.name] = name_param
else:
name_input = 'input_{}'.format(self._num_input)
self._num_input += 1
self._nodes[name_input] = mx.sym.Variable(name=name_input)
self._renames[i.name] = name_input
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
new_op, mx_attr = _convert_operator(op_name, onnx_attr)
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# some workarounds for inconsistencies in onnx and mxnet conventions.
mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
self._fix_bias_shape(node.op_type, node.input, onnx_attr)
# calling again to get new symbols after some workarounds
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# onnx's Gemm operator also supports broadcasting C input which
# mxnet's equivalent linalg_gemm doesn't. So using combination of
# transpose and FullyConnected operators.
if op_name == 'Gemm':
new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
# onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
if op_name == 'Slice':
op = self._fix_slice(inputs, mx_attr)
elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
op = self._fix_pooling(op_name, inputs, onnx_attr)
elif op_name == 'Squeeze':
op = self._fix_squeeze(inputs, mx_attr)
elif op_name == 'Max' or op_name == 'Min':
op = self._fix_max_min(op_name, inputs)
elif node_name is None:
op = new_op(*inputs, **mx_attr)
else:
op = new_op(name=node_name, *inputs, **mx_attr)
node_output = self._fix_outputs(op_name, node.output)
assert len(node_output) == len(op.list_outputs()), (
"Number of output mismatch {} vs {} in {}.".format(
len(node_output), len(op.list_outputs()), op_name))
for k, i in zip(list(node_output), range(len(node_output))):
self._nodes[k] = op[i]
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = mx.sym.Group(out)
else:
out = out[0]
return out, self._params
def _fix_pooling(self, op_name, inputs, new_attr):
"""onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx"""
pool_type = 'avg' if op_name == 'AveragePool' else 'max'
stride = new_attr.get('strides')
kernel = new_attr.get('kernel_shape')
padding = new_attr.get('pads')
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
stride=stride, kernel=kernel)
return new_pooling_op
def _fix_slice(self, inputs, new_attr):
"""onnx slice provides slicing on multiple axis. Adding multiple slice_axis operator
for multiple axes from mxnet"""
begin = new_attr.get('begin')
end = new_attr.get('end')
axes = new_attr.get('axis', tuple(range(len(begin))))
slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
if len(axes) > 1:
for i, axis in enumerate(axes):
slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
return slice_op
def _fix_squeeze(self, inputs, new_attr):
"""
MXNet doesnt have a squeeze operator.
Using "split" to perform similar operation.
"split" can be slower compared to "reshape".
This can have performance impact.
TODO: Remove this implementation once mxnet adds the support.
"""
axes = new_attr.get('axis')
op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
for i in axes[1:]:
op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
return op
def _fix_max_min(self, op_name, inputs):
""" MXNet maximum/minimum compares only two symbols at a time.
ONNX can send more than two to compare.
Breaking into multiple mxnet ops to compare two symbols at a time"""
if len(inputs) > 1:
if op_name == 'Max':
op = mx.sym.maximum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.maximum(op, ip)
elif op_name == 'Min':
op = mx.sym.minimum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.minimum(op, ip)
else:
op = inputs[0]
return op
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError as e:
raise ImportError("Unable to import onnx which is required {}".format(e))
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return mx.nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias(self, op, attrs, num_inputs):
"""A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it."""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op))
return attrs
def _fix_bias_shape(self, op_name, inputs, attrs):
"""A workaround to reshape bias term to (1, num_channel)."""
if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
('broadcast' in attrs and attrs['broadcast'] == 1):
assert len(list(inputs)) == 2
bias_name = self._renames.get(inputs[1], inputs[1])
bias = self._params[bias_name]
assert len(bias.shape) == 1
# reshape to (1, n)
bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
# broadcast_add expects shape with sym.variable
self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
self._params[bias_name] = bias
def _fix_channels(self, op, attrs, inputs):
"""A workaround for getting 'channels' or 'units' since onnx don't provide
these attributes. We check the shape of weights provided to get the number.
"""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
weight_name = self._renames[inputs[1]]
if not weight_name in self._params:
raise ValueError("Unable to get channels/units attr from onnx graph.")
else:
wshape = self._params[weight_name].shape
assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
if op in [mx.sym.FullyConnected]:
attrs['num_hidden'] = wshape[0]
else:
if op == mx.sym.Convolution:
# Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
# feature maps/hidden and C is number of channels
attrs['num_filter'] = wshape[0]
elif op == mx.sym.Deconvolution:
# Weight shape for DeConv : (C x M x kH x kW) : M is number of
# feature maps/filters and C is number of channels
attrs['num_filter'] = wshape[1]
return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto._parse_array | python | def _parse_array(self, tensor_proto):
try:
from onnx.numpy_helper import to_array
except ImportError as e:
raise ImportError("Unable to import onnx which is required {}".format(e))
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return mx.nd.array(np_array) | Grab data in TensorProto and convert to numpy array. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L234-L241 | null | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
The inputs from onnx graph is vague, only providing "1", "2"...
For convenience, we rename the `real` input names to "input_0",
"input_1"... And renaming parameters to "param_0", "param_1"...
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :mx.sym
The returned mxnet symbol
params : dict
A dict of name: mx.nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
name_param = 'param_{}'.format(self._num_param)
self._num_param += 1
self._params[name_param] = self._params.pop(i.name)
self._nodes[name_param] = mx.sym.Variable(name=name_param,
shape=self._params[name_param].shape)
self._renames[i.name] = name_param
else:
name_input = 'input_{}'.format(self._num_input)
self._num_input += 1
self._nodes[name_input] = mx.sym.Variable(name=name_input)
self._renames[i.name] = name_input
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
new_op, mx_attr = _convert_operator(op_name, onnx_attr)
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# some workarounds for inconsistencies in onnx and mxnet conventions.
mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
self._fix_bias_shape(node.op_type, node.input, onnx_attr)
# calling again to get new symbols after some workarounds
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# onnx's Gemm operator also supports broadcasting C input which
# mxnet's equivalent linalg_gemm doesn't. So using combination of
# transpose and FullyConnected operators.
if op_name == 'Gemm':
new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
# onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
if op_name == 'Slice':
op = self._fix_slice(inputs, mx_attr)
elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
op = self._fix_pooling(op_name, inputs, onnx_attr)
elif op_name == 'Squeeze':
op = self._fix_squeeze(inputs, mx_attr)
elif op_name == 'Max' or op_name == 'Min':
op = self._fix_max_min(op_name, inputs)
elif node_name is None:
op = new_op(*inputs, **mx_attr)
else:
op = new_op(name=node_name, *inputs, **mx_attr)
node_output = self._fix_outputs(op_name, node.output)
assert len(node_output) == len(op.list_outputs()), (
"Number of output mismatch {} vs {} in {}.".format(
len(node_output), len(op.list_outputs()), op_name))
for k, i in zip(list(node_output), range(len(node_output))):
self._nodes[k] = op[i]
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = mx.sym.Group(out)
else:
out = out[0]
return out, self._params
def _fix_pooling(self, op_name, inputs, new_attr):
"""onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx"""
pool_type = 'avg' if op_name == 'AveragePool' else 'max'
stride = new_attr.get('strides')
kernel = new_attr.get('kernel_shape')
padding = new_attr.get('pads')
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
stride=stride, kernel=kernel)
return new_pooling_op
def _fix_slice(self, inputs, new_attr):
"""onnx slice provides slicing on multiple axis. Adding multiple slice_axis operator
for multiple axes from mxnet"""
begin = new_attr.get('begin')
end = new_attr.get('end')
axes = new_attr.get('axis', tuple(range(len(begin))))
slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
if len(axes) > 1:
for i, axis in enumerate(axes):
slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
return slice_op
def _fix_squeeze(self, inputs, new_attr):
"""
MXNet doesnt have a squeeze operator.
Using "split" to perform similar operation.
"split" can be slower compared to "reshape".
This can have performance impact.
TODO: Remove this implementation once mxnet adds the support.
"""
axes = new_attr.get('axis')
op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
for i in axes[1:]:
op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
return op
def _fix_max_min(self, op_name, inputs):
""" MXNet maximum/minimum compares only two symbols at a time.
ONNX can send more than two to compare.
Breaking into multiple mxnet ops to compare two symbols at a time"""
if len(inputs) > 1:
if op_name == 'Max':
op = mx.sym.maximum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.maximum(op, ip)
elif op_name == 'Min':
op = mx.sym.minimum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.minimum(op, ip)
else:
op = inputs[0]
return op
def _fix_gemm(self, op_name, inputs, old_attr):
"""Using FullyConnected operator in place of linalg_gemm to perform same operation"""
op = getattr(mx.sym, op_name, None)
alpha = float(old_attr.get('alpha', 1.0))
beta = float(old_attr.get('beta', 1.0))
transA = int(old_attr.get('transA', 0))
transB = int(old_attr.get('transB', 0))
if transA:
inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
if not transB:
inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
return op, new_inputs, new_attr
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias(self, op, attrs, num_inputs):
"""A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it."""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op))
return attrs
def _fix_bias_shape(self, op_name, inputs, attrs):
"""A workaround to reshape bias term to (1, num_channel)."""
if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
('broadcast' in attrs and attrs['broadcast'] == 1):
assert len(list(inputs)) == 2
bias_name = self._renames.get(inputs[1], inputs[1])
bias = self._params[bias_name]
assert len(bias.shape) == 1
# reshape to (1, n)
bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
# broadcast_add expects shape with sym.variable
self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
self._params[bias_name] = bias
def _fix_channels(self, op, attrs, inputs):
    """Fill in 'num_filter'/'num_hidden' from the weight parameter's shape.

    ONNX does not carry a channels/units attribute, so it is recovered
    from the weight tensor (inputs[1]) already stored in self._params.
    Idiom fixes: `weight_name not in` instead of `not weight_name in`,
    and the needless else-after-raise is flattened.
    """
    if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
        return attrs
    weight_name = self._renames[inputs[1]]
    if weight_name not in self._params:
        raise ValueError("Unable to get channels/units attr from onnx graph.")
    wshape = self._params[weight_name].shape
    assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
    if op is mx.sym.FullyConnected:
        attrs['num_hidden'] = wshape[0]
    elif op is mx.sym.Convolution:
        # Conv weight layout is (M, C, kH, kW): M output feature maps.
        attrs['num_filter'] = wshape[0]
    elif op is mx.sym.Deconvolution:
        # Deconv weight layout is (C, M, kH, kW): M is the second axis.
        attrs['num_filter'] = wshape[1]
    return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto._parse_attr | python | def _parse_attr(self, attr_proto):
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs | Convert a list of AttributeProto to a dict, with names as keys. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L243-L262 | null | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
def from_onnx(self, graph):
    """Construct symbol from onnx graph.
    The inputs from onnx graph is vague, only providing "1", "2"...
    For convenience, we rename the `real` input names to "input_0",
    "input_1"... And renaming parameters to "param_0", "param_1"...
    Parameters
    ----------
    graph : onnx protobuf object
        The loaded onnx graph
    Returns
    -------
    sym : mx.sym
        The returned mxnet symbol
    params : dict
        A dict of name: mx.nd.array pairs, used as pretrained weights
    """
    # parse network inputs, aka parameters
    for init_tensor in graph.initializer:
        if not init_tensor.name.strip():
            raise ValueError("Tensor's name is required.")
        self._params[init_tensor.name] = self._parse_array(init_tensor)
    # converting GraphProto message: classify every graph input as either a
    # learned parameter (it appeared in the initializer list) or a real input.
    for i in graph.input:
        if i.name in self._params:
            # i is a param instead of input
            name_param = 'param_{}'.format(self._num_param)
            self._num_param += 1
            self._params[name_param] = self._params.pop(i.name)
            self._nodes[name_param] = mx.sym.Variable(name=name_param,
                                                      shape=self._params[name_param].shape)
            self._renames[i.name] = name_param
        else:
            name_input = 'input_{}'.format(self._num_input)
            self._num_input += 1
            self._nodes[name_input] = mx.sym.Variable(name=name_input)
            self._renames[i.name] = name_input
    # constructing nodes, nodes are stored as directed acyclic graph
    # converting NodeProto message
    for node in graph.node:
        op_name = node.op_type
        node_name = node.name.strip()
        node_name = node_name if node_name else None
        onnx_attr = self._parse_attr(node.attribute)
        new_op, mx_attr = _convert_operator(op_name, onnx_attr)
        inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
        # some workarounds for inconsistencies in onnx and mxnet conventions.
        mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
        mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
        self._fix_bias_shape(node.op_type, node.input, onnx_attr)
        # calling again to get new symbols after some workarounds
        # (the _fix_* helpers above may replace entries in self._nodes).
        inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
        # onnx's Gemm operator also supports broadcasting C input which
        # mxnet's equivalent linalg_gemm doesn't. So using combination of
        # transpose and FullyConnected operators.
        if op_name == 'Gemm':
            new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
        # onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
        if op_name == 'Slice':
            op = self._fix_slice(inputs, mx_attr)
        elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
             op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
            # Pooling with explicit pads needs a constant-pad workaround.
            op = self._fix_pooling(op_name, inputs, onnx_attr)
        elif op_name == 'Squeeze':
            op = self._fix_squeeze(inputs, mx_attr)
        elif op_name == 'Max' or op_name == 'Min':
            op = self._fix_max_min(op_name, inputs)
        elif node_name is None:
            op = new_op(*inputs, **mx_attr)
        else:
            op = new_op(name=node_name, *inputs, **mx_attr)
        # Register every declared output of the node under its ONNX name.
        node_output = self._fix_outputs(op_name, node.output)
        assert len(node_output) == len(op.list_outputs()), (
            "Number of output mismatch {} vs {} in {}.".format(
                len(node_output), len(op.list_outputs()), op_name))
        for k, i in zip(list(node_output), range(len(node_output))):
            self._nodes[k] = op[i]
    # now return the outputs
    out = [self._nodes[i.name] for i in graph.output]
    if len(out) > 1:
        out = mx.sym.Group(out)
    else:
        out = out[0]
    return out, self._params
def _fix_pooling(self, op_name, inputs, new_attr):
    """Emulate ONNX's (possibly asymmetric) pooling padding.

    mx.sym.Pooling cannot express asymmetric pads, so the input is padded
    explicitly with a constant-pad op and the pooling itself runs unpadded.
    """
    kernel = new_attr.get('kernel_shape')
    # Four leading zeros cover the N and C axes of the pad_width spec.
    pad_width = (0, 0, 0, 0) + _pad_sequence_fix(new_attr.get('pads'), len(kernel))
    padded = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
    return mx.sym.Pooling(padded,
                          pool_type='avg' if op_name == 'AveragePool' else 'max',
                          stride=new_attr.get('strides'),
                          kernel=kernel)
def _fix_slice(self, inputs, new_attr):
    """Expand an ONNX multi-axis Slice into chained mx.sym.slice_axis calls.

    MXNet's slice_axis handles one axis at a time, so one op is emitted per
    requested axis. Fixes a bug where axes[0] was sliced twice (once before
    the loop and again at loop index 0), corrupting multi-axis slices.
    """
    begin = new_attr.get('begin')
    end = new_attr.get('end')
    axes = new_attr.get('axis', tuple(range(len(begin))))
    slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
    # Start from the second axis: the first one is already applied above.
    for i, axis in enumerate(axes[1:], start=1):
        slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
    return slice_op
def _fix_squeeze(self, inputs, new_attr):
    """
    MXNet doesn't have a squeeze operator.
    Using "split" to perform similar operation.
    "split" can be slower compared to "reshape".
    This can have performance impact.
    TODO: Remove this implementation once mxnet adds the support.
    """
    axes = new_attr.get('axis')
    # split with num_outputs=1 and squeeze_axis=1 drops the (size-1) axis.
    op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
    for i in axes[1:]:
        # NOTE(review): 'i-1' compensates for the single axis removed above;
        # presumably assumes 'axes' is sorted ascending, and the constant
        # shift of 1 looks correct only for the second squeezed axis --
        # with 3+ axes the shift should grow per removed axis. TODO confirm.
        op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
    return op
def _fix_max_min(self, op_name, inputs):
    """Fold an n-ary ONNX Max/Min into a chain of binary mxnet ops.

    mx.sym.maximum/minimum are binary, so the inputs are reduced pairwise
    left to right; a single input is returned unchanged.
    """
    if len(inputs) <= 1:
        return inputs[0]
    binop = mx.sym.maximum if op_name == 'Max' else mx.sym.minimum
    acc = binop(inputs[0], inputs[1])
    for sym in inputs[2:]:
        acc = binop(acc, sym)
    return acc
def _fix_gemm(self, op_name, inputs, old_attr):
    """Using FullyConnected operator in place of linalg_gemm to perform same operation.

    ONNX Gemm computes alpha * A' * B' + beta * C (with optional transposes);
    the alpha/beta scaling is folded into the input symbols and the product
    is expressed as a FullyConnected layer.
    """
    op = getattr(mx.sym, op_name, None)
    alpha = float(old_attr.get('alpha', 1.0))
    beta = float(old_attr.get('beta', 1.0))
    transA = int(old_attr.get('transA', 0))
    transB = int(old_attr.get('transB', 0))
    if transA:
        inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
    if not transB:
        # NOTE(review): B is flipped when transB is NOT set, presumably
        # because FullyConnected stores weights as (num_hidden, input_dim)
        # and multiplies by W^T -- confirm against mxnet's FC contract.
        inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
    # Fold the scalar coefficients into the data / bias symbols.
    new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
    # num_hidden is taken from the bias parameter's length.
    new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
    return op, new_inputs, new_attr
def _parse_array(self, tensor_proto):
    """Convert an ONNX TensorProto into an mx.nd.array via numpy."""
    try:
        from onnx.numpy_helper import to_array
    except ImportError as e:
        raise ImportError("Unable to import onnx which is required {}".format(e))
    # Reshape defensively to the declared dims before wrapping in mx.
    np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
    return mx.nd.array(np_array)
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias(self, op, attrs, num_inputs):
    """Infer the 'no_bias' flag from the ONNX node's input count.

    ONNX carries no 'use_bias' attribute: a conv/deconv/FC node has a bias
    tensor iff it has three inputs (data, weight, bias). Other ops are
    returned untouched.
    """
    if op in (mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected):
        if num_inputs == 3:
            attrs['no_bias'] = False
        elif num_inputs == 2:
            attrs['no_bias'] = True
        else:
            raise ValueError("Unexpected number of inputs for: {}".format(op))
    return attrs
def _fix_bias_shape(self, op_name, inputs, attrs):
"""A workaround to reshape bias term to (1, num_channel)."""
if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
('broadcast' in attrs and attrs['broadcast'] == 1):
assert len(list(inputs)) == 2
bias_name = self._renames.get(inputs[1], inputs[1])
bias = self._params[bias_name]
assert len(bias.shape) == 1
# reshape to (1, n)
bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
# broadcast_add expects shape with sym.variable
self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
self._params[bias_name] = bias
def _fix_channels(self, op, attrs, inputs):
    """Fill in 'num_filter'/'num_hidden' from the weight parameter's shape.

    ONNX does not carry a channels/units attribute, so it is recovered
    from the weight tensor (inputs[1]) already stored in self._params.
    Idiom fixes: `weight_name not in` instead of `not weight_name in`,
    and the needless else-after-raise is flattened.
    """
    if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
        return attrs
    weight_name = self._renames[inputs[1]]
    if weight_name not in self._params:
        raise ValueError("Unable to get channels/units attr from onnx graph.")
    wshape = self._params[weight_name].shape
    assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
    if op is mx.sym.FullyConnected:
        attrs['num_hidden'] = wshape[0]
    elif op is mx.sym.Convolution:
        # Conv weight layout is (M, C, kH, kW): M output feature maps.
        attrs['num_filter'] = wshape[0]
    elif op is mx.sym.Deconvolution:
        # Deconv weight layout is (C, M, kH, kW): M is the second axis.
        attrs['num_filter'] = wshape[1]
    return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto._fix_outputs | python | def _fix_outputs(self, op, outputs):
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs | A workaround to handle dropout or similar operator that have more than one out
in ONNX. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L264-L271 | null | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
The inputs from onnx graph is vague, only providing "1", "2"...
For convenience, we rename the `real` input names to "input_0",
"input_1"... And renaming parameters to "param_0", "param_1"...
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :mx.sym
The returned mxnet symbol
params : dict
A dict of name: mx.nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
name_param = 'param_{}'.format(self._num_param)
self._num_param += 1
self._params[name_param] = self._params.pop(i.name)
self._nodes[name_param] = mx.sym.Variable(name=name_param,
shape=self._params[name_param].shape)
self._renames[i.name] = name_param
else:
name_input = 'input_{}'.format(self._num_input)
self._num_input += 1
self._nodes[name_input] = mx.sym.Variable(name=name_input)
self._renames[i.name] = name_input
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
new_op, mx_attr = _convert_operator(op_name, onnx_attr)
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# some workarounds for inconsistencies in onnx and mxnet conventions.
mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
self._fix_bias_shape(node.op_type, node.input, onnx_attr)
# calling again to get new symbols after some workarounds
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# onnx's Gemm operator also supports broadcasting C input which
# mxnet's equivalent linalg_gemm doesn't. So using combination of
# transpose and FullyConnected operators.
if op_name == 'Gemm':
new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
# onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
if op_name == 'Slice':
op = self._fix_slice(inputs, mx_attr)
elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
op = self._fix_pooling(op_name, inputs, onnx_attr)
elif op_name == 'Squeeze':
op = self._fix_squeeze(inputs, mx_attr)
elif op_name == 'Max' or op_name == 'Min':
op = self._fix_max_min(op_name, inputs)
elif node_name is None:
op = new_op(*inputs, **mx_attr)
else:
op = new_op(name=node_name, *inputs, **mx_attr)
node_output = self._fix_outputs(op_name, node.output)
assert len(node_output) == len(op.list_outputs()), (
"Number of output mismatch {} vs {} in {}.".format(
len(node_output), len(op.list_outputs()), op_name))
for k, i in zip(list(node_output), range(len(node_output))):
self._nodes[k] = op[i]
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = mx.sym.Group(out)
else:
out = out[0]
return out, self._params
def _fix_pooling(self, op_name, inputs, new_attr):
"""onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx"""
pool_type = 'avg' if op_name == 'AveragePool' else 'max'
stride = new_attr.get('strides')
kernel = new_attr.get('kernel_shape')
padding = new_attr.get('pads')
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
stride=stride, kernel=kernel)
return new_pooling_op
def _fix_slice(self, inputs, new_attr):
    """Expand an ONNX multi-axis Slice into chained mx.sym.slice_axis calls.

    MXNet's slice_axis handles one axis at a time, so one op is emitted per
    requested axis. Fixes a bug where axes[0] was sliced twice (once before
    the loop and again at loop index 0), corrupting multi-axis slices.
    """
    begin = new_attr.get('begin')
    end = new_attr.get('end')
    axes = new_attr.get('axis', tuple(range(len(begin))))
    slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
    # Start from the second axis: the first one is already applied above.
    for i, axis in enumerate(axes[1:], start=1):
        slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
    return slice_op
def _fix_squeeze(self, inputs, new_attr):
"""
MXNet doesnt have a squeeze operator.
Using "split" to perform similar operation.
"split" can be slower compared to "reshape".
This can have performance impact.
TODO: Remove this implementation once mxnet adds the support.
"""
axes = new_attr.get('axis')
op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
for i in axes[1:]:
op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
return op
def _fix_max_min(self, op_name, inputs):
""" MXNet maximum/minimum compares only two symbols at a time.
ONNX can send more than two to compare.
Breaking into multiple mxnet ops to compare two symbols at a time"""
if len(inputs) > 1:
if op_name == 'Max':
op = mx.sym.maximum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.maximum(op, ip)
elif op_name == 'Min':
op = mx.sym.minimum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.minimum(op, ip)
else:
op = inputs[0]
return op
def _fix_gemm(self, op_name, inputs, old_attr):
"""Using FullyConnected operator in place of linalg_gemm to perform same operation"""
op = getattr(mx.sym, op_name, None)
alpha = float(old_attr.get('alpha', 1.0))
beta = float(old_attr.get('beta', 1.0))
transA = int(old_attr.get('transA', 0))
transB = int(old_attr.get('transB', 0))
if transA:
inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
if not transB:
inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
return op, new_inputs, new_attr
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError as e:
raise ImportError("Unable to import onnx which is required {}".format(e))
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return mx.nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
def _fix_bias(self, op, attrs, num_inputs):
"""A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it."""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op))
return attrs
def _fix_bias_shape(self, op_name, inputs, attrs):
"""A workaround to reshape bias term to (1, num_channel)."""
if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
('broadcast' in attrs and attrs['broadcast'] == 1):
assert len(list(inputs)) == 2
bias_name = self._renames.get(inputs[1], inputs[1])
bias = self._params[bias_name]
assert len(bias.shape) == 1
# reshape to (1, n)
bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
# broadcast_add expects shape with sym.variable
self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
self._params[bias_name] = bias
def _fix_channels(self, op, attrs, inputs):
    """Fill in 'num_filter'/'num_hidden' from the weight parameter's shape.

    ONNX does not carry a channels/units attribute, so it is recovered
    from the weight tensor (inputs[1]) already stored in self._params.
    Idiom fixes: `weight_name not in` instead of `not weight_name in`,
    and the needless else-after-raise is flattened.
    """
    if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
        return attrs
    weight_name = self._renames[inputs[1]]
    if weight_name not in self._params:
        raise ValueError("Unable to get channels/units attr from onnx graph.")
    wshape = self._params[weight_name].shape
    assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
    if op is mx.sym.FullyConnected:
        attrs['num_hidden'] = wshape[0]
    elif op is mx.sym.Convolution:
        # Conv weight layout is (M, C, kH, kW): M output feature maps.
        attrs['num_filter'] = wshape[0]
    elif op is mx.sym.Deconvolution:
        # Deconv weight layout is (C, M, kH, kW): M is the second axis.
        attrs['num_filter'] = wshape[1]
    return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto._fix_bias | python | def _fix_bias(self, op, attrs, num_inputs):
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op))
return attrs | A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L273-L284 | null | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
The inputs from onnx graph is vague, only providing "1", "2"...
For convenience, we rename the `real` input names to "input_0",
"input_1"... And renaming parameters to "param_0", "param_1"...
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :mx.sym
The returned mxnet symbol
params : dict
A dict of name: mx.nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
name_param = 'param_{}'.format(self._num_param)
self._num_param += 1
self._params[name_param] = self._params.pop(i.name)
self._nodes[name_param] = mx.sym.Variable(name=name_param,
shape=self._params[name_param].shape)
self._renames[i.name] = name_param
else:
name_input = 'input_{}'.format(self._num_input)
self._num_input += 1
self._nodes[name_input] = mx.sym.Variable(name=name_input)
self._renames[i.name] = name_input
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
new_op, mx_attr = _convert_operator(op_name, onnx_attr)
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# some workarounds for inconsistencies in onnx and mxnet conventions.
mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
self._fix_bias_shape(node.op_type, node.input, onnx_attr)
# calling again to get new symbols after some workarounds
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# onnx's Gemm operator also supports broadcasting C input which
# mxnet's equivalent linalg_gemm doesn't. So using combination of
# transpose and FullyConnected operators.
if op_name == 'Gemm':
new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
# onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
if op_name == 'Slice':
op = self._fix_slice(inputs, mx_attr)
elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
op = self._fix_pooling(op_name, inputs, onnx_attr)
elif op_name == 'Squeeze':
op = self._fix_squeeze(inputs, mx_attr)
elif op_name == 'Max' or op_name == 'Min':
op = self._fix_max_min(op_name, inputs)
elif node_name is None:
op = new_op(*inputs, **mx_attr)
else:
op = new_op(name=node_name, *inputs, **mx_attr)
node_output = self._fix_outputs(op_name, node.output)
assert len(node_output) == len(op.list_outputs()), (
"Number of output mismatch {} vs {} in {}.".format(
len(node_output), len(op.list_outputs()), op_name))
for k, i in zip(list(node_output), range(len(node_output))):
self._nodes[k] = op[i]
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = mx.sym.Group(out)
else:
out = out[0]
return out, self._params
def _fix_pooling(self, op_name, inputs, new_attr):
"""onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx"""
pool_type = 'avg' if op_name == 'AveragePool' else 'max'
stride = new_attr.get('strides')
kernel = new_attr.get('kernel_shape')
padding = new_attr.get('pads')
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
stride=stride, kernel=kernel)
return new_pooling_op
def _fix_slice(self, inputs, new_attr):
    """Expand an ONNX multi-axis Slice into chained mx.sym.slice_axis calls.

    MXNet's slice_axis handles one axis at a time, so one op is emitted per
    requested axis. Fixes a bug where axes[0] was sliced twice (once before
    the loop and again at loop index 0), corrupting multi-axis slices.
    """
    begin = new_attr.get('begin')
    end = new_attr.get('end')
    axes = new_attr.get('axis', tuple(range(len(begin))))
    slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
    # Start from the second axis: the first one is already applied above.
    for i, axis in enumerate(axes[1:], start=1):
        slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
    return slice_op
def _fix_squeeze(self, inputs, new_attr):
"""
MXNet doesnt have a squeeze operator.
Using "split" to perform similar operation.
"split" can be slower compared to "reshape".
This can have performance impact.
TODO: Remove this implementation once mxnet adds the support.
"""
axes = new_attr.get('axis')
op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
for i in axes[1:]:
op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
return op
def _fix_max_min(self, op_name, inputs):
""" MXNet maximum/minimum compares only two symbols at a time.
ONNX can send more than two to compare.
Breaking into multiple mxnet ops to compare two symbols at a time"""
if len(inputs) > 1:
if op_name == 'Max':
op = mx.sym.maximum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.maximum(op, ip)
elif op_name == 'Min':
op = mx.sym.minimum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.minimum(op, ip)
else:
op = inputs[0]
return op
def _fix_gemm(self, op_name, inputs, old_attr):
"""Using FullyConnected operator in place of linalg_gemm to perform same operation"""
op = getattr(mx.sym, op_name, None)
alpha = float(old_attr.get('alpha', 1.0))
beta = float(old_attr.get('beta', 1.0))
transA = int(old_attr.get('transA', 0))
transB = int(old_attr.get('transB', 0))
if transA:
inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
if not transB:
inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
return op, new_inputs, new_attr
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError as e:
raise ImportError("Unable to import onnx which is required {}".format(e))
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return mx.nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias_shape(self, op_name, inputs, attrs):
"""A workaround to reshape bias term to (1, num_channel)."""
if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
('broadcast' in attrs and attrs['broadcast'] == 1):
assert len(list(inputs)) == 2
bias_name = self._renames.get(inputs[1], inputs[1])
bias = self._params[bias_name]
assert len(bias.shape) == 1
# reshape to (1, n)
bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
# broadcast_add expects shape with sym.variable
self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
self._params[bias_name] = bias
def _fix_channels(self, op, attrs, inputs):
    """Fill in 'num_filter'/'num_hidden' from the weight parameter's shape.

    ONNX does not carry a channels/units attribute, so it is recovered
    from the weight tensor (inputs[1]) already stored in self._params.
    Idiom fixes: `weight_name not in` instead of `not weight_name in`,
    and the needless else-after-raise is flattened.
    """
    if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
        return attrs
    weight_name = self._renames[inputs[1]]
    if weight_name not in self._params:
        raise ValueError("Unable to get channels/units attr from onnx graph.")
    wshape = self._params[weight_name].shape
    assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
    if op is mx.sym.FullyConnected:
        attrs['num_hidden'] = wshape[0]
    elif op is mx.sym.Convolution:
        # Conv weight layout is (M, C, kH, kW): M output feature maps.
        attrs['num_filter'] = wshape[0]
    elif op is mx.sym.Deconvolution:
        # Deconv weight layout is (C, M, kH, kW): M is the second axis.
        attrs['num_filter'] = wshape[1]
    return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto._fix_bias_shape | python | def _fix_bias_shape(self, op_name, inputs, attrs):
if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
('broadcast' in attrs and attrs['broadcast'] == 1):
assert len(list(inputs)) == 2
bias_name = self._renames.get(inputs[1], inputs[1])
bias = self._params[bias_name]
assert len(bias.shape) == 1
# reshape to (1, n)
bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
# broadcast_add expects shape with sym.variable
self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
self._params[bias_name] = bias | A workaround to reshape bias term to (1, num_channel). | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L287-L299 | null | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
    def from_onnx(self, graph):
        """Construct an mxnet symbol and parameter dict from an ONNX graph.

        ONNX input names can be opaque (e.g. "1", "2", ...), so real inputs
        are renamed to "input_0", "input_1", ... and parameters
        (initializers) to "param_0", "param_1", ...

        Parameters
        ----------
        graph : onnx protobuf GraphProto
            The loaded onnx graph.

        Returns
        -------
        sym : mx.sym
            The converted mxnet symbol (a Group when the graph declares
            several outputs).
        params : dict
            A dict of name: mx.nd.array pairs, used as pretrained weights.
        """
        # parse network inputs, aka parameters
        for init_tensor in graph.initializer:
            if not init_tensor.name.strip():
                raise ValueError("Tensor's name is required.")
            self._params[init_tensor.name] = self._parse_array(init_tensor)
        # converting GraphProto message: every graph input that also appears
        # in the initializers is a parameter, not a runtime input.
        for i in graph.input:
            if i.name in self._params:
                # i is a param instead of input
                name_param = 'param_{}'.format(self._num_param)
                self._num_param += 1
                self._params[name_param] = self._params.pop(i.name)
                self._nodes[name_param] = mx.sym.Variable(name=name_param,
                                                          shape=self._params[name_param].shape)
                self._renames[i.name] = name_param
            else:
                name_input = 'input_{}'.format(self._num_input)
                self._num_input += 1
                self._nodes[name_input] = mx.sym.Variable(name=name_input)
                self._renames[i.name] = name_input
        # constructing nodes, nodes are stored as directed acyclic graph
        # converting NodeProto message
        for node in graph.node:
            op_name = node.op_type
            # An empty node name means "anonymous": let mxnet pick a name.
            node_name = node.name.strip()
            node_name = node_name if node_name else None
            onnx_attr = self._parse_attr(node.attribute)
            new_op, mx_attr = _convert_operator(op_name, onnx_attr)
            inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
            # some workarounds for inconsistencies in onnx and mxnet conventions.
            mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
            mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
            self._fix_bias_shape(node.op_type, node.input, onnx_attr)
            # calling again to get new symbols after some workarounds
            # (_fix_bias_shape may have replaced a bias Variable in self._nodes)
            inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
            # onnx's Gemm operator also supports broadcasting C input which
            # mxnet's equivalent linalg_gemm doesn't. So using combination of
            # transpose and FullyConnected operators.
            if op_name == 'Gemm':
                new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
            # onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
            if op_name == 'Slice':
                op = self._fix_slice(inputs, mx_attr)
            elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
                    op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
                op = self._fix_pooling(op_name, inputs, onnx_attr)
            elif op_name == 'Squeeze':
                op = self._fix_squeeze(inputs, mx_attr)
            elif op_name == 'Max' or op_name == 'Min':
                op = self._fix_max_min(op_name, inputs)
            elif node_name is None:
                op = new_op(*inputs, **mx_attr)
            else:
                op = new_op(name=node_name, *inputs, **mx_attr)
            # Trim ONNX-only extra outputs (see _fix_outputs) before mapping
            # each remaining output name to the matching mxnet output.
            node_output = self._fix_outputs(op_name, node.output)
            assert len(node_output) == len(op.list_outputs()), (
                "Number of output mismatch {} vs {} in {}.".format(
                    len(node_output), len(op.list_outputs()), op_name))
            for k, i in zip(list(node_output), range(len(node_output))):
                self._nodes[k] = op[i]
        # now return the outputs
        out = [self._nodes[i.name] for i in graph.output]
        if len(out) > 1:
            out = mx.sym.Group(out)
        else:
            out = out[0]
        return out, self._params
def _fix_pooling(self, op_name, inputs, new_attr):
"""onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx"""
pool_type = 'avg' if op_name == 'AveragePool' else 'max'
stride = new_attr.get('strides')
kernel = new_attr.get('kernel_shape')
padding = new_attr.get('pads')
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
stride=stride, kernel=kernel)
return new_pooling_op
def _fix_slice(self, inputs, new_attr):
"""onnx slice provides slicing on multiple axis. Adding multiple slice_axis operator
for multiple axes from mxnet"""
begin = new_attr.get('begin')
end = new_attr.get('end')
axes = new_attr.get('axis', tuple(range(len(begin))))
slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
if len(axes) > 1:
for i, axis in enumerate(axes):
slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
return slice_op
    def _fix_squeeze(self, inputs, new_attr):
        """Emulate ONNX Squeeze with mx.sym.split(squeeze_axis=1).

        MXNet doesn't have a squeeze operator, so "split" with a single
        output and squeeze_axis=1 is used to drop each requested axis.
        "split" can be slower compared to "reshape".
        This can have performance impact.
        TODO: Remove this implementation once mxnet adds the support.
        """
        axes = new_attr.get('axis')
        op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
        for i in axes[1:]:
            # i-1 compensates for the one axis removed by the first split.
            # NOTE(review): with three or more squeeze axes the shift should
            # grow with each removal; this looks off for that case -- confirm.
            op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
        return op
def _fix_max_min(self, op_name, inputs):
""" MXNet maximum/minimum compares only two symbols at a time.
ONNX can send more than two to compare.
Breaking into multiple mxnet ops to compare two symbols at a time"""
if len(inputs) > 1:
if op_name == 'Max':
op = mx.sym.maximum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.maximum(op, ip)
elif op_name == 'Min':
op = mx.sym.minimum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.minimum(op, ip)
else:
op = inputs[0]
return op
    def _fix_gemm(self, op_name, inputs, old_attr):
        """Lower ONNX Gemm (alpha*A*B + beta*C) onto a FullyConnected op.

        alpha/beta are folded into the data and bias symbols; B is
        transposed unless the node already set transB (see below).
        Note: mutates the `inputs` list in place.

        Returns (op, new_inputs, new_attr) for the caller to apply.
        """
        op = getattr(mx.sym, op_name, None)
        alpha = float(old_attr.get('alpha', 1.0))
        beta = float(old_attr.get('beta', 1.0))
        transA = int(old_attr.get('transA', 0))
        transB = int(old_attr.get('transB', 0))
        if transA:
            inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
        if not transB:
            # FullyConnected wants the weight in the transposed layout.
            inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
        new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
        # num_hidden is read off the bias parameter's length; assumes
        # inputs[2] is a named Variable present in self._params -- TODO confirm
        new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
        return op, new_inputs, new_attr
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError as e:
raise ImportError("Unable to import onnx which is required {}".format(e))
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return mx.nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias(self, op, attrs, num_inputs):
"""A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it."""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op))
return attrs
def _fix_channels(self, op, attrs, inputs):
"""A workaround for getting 'channels' or 'units' since onnx don't provide
these attributes. We check the shape of weights provided to get the number.
"""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
weight_name = self._renames[inputs[1]]
if not weight_name in self._params:
raise ValueError("Unable to get channels/units attr from onnx graph.")
else:
wshape = self._params[weight_name].shape
assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
if op in [mx.sym.FullyConnected]:
attrs['num_hidden'] = wshape[0]
else:
if op == mx.sym.Convolution:
# Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
# feature maps/hidden and C is number of channels
attrs['num_filter'] = wshape[0]
elif op == mx.sym.Deconvolution:
# Weight shape for DeConv : (C x M x kH x kW) : M is number of
# feature maps/filters and C is number of channels
attrs['num_filter'] = wshape[1]
return attrs
|
onnx/onnx-mxnet | onnx_mxnet/import_onnx.py | GraphProto._fix_channels | python | def _fix_channels(self, op, attrs, inputs):
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
weight_name = self._renames[inputs[1]]
if not weight_name in self._params:
raise ValueError("Unable to get channels/units attr from onnx graph.")
else:
wshape = self._params[weight_name].shape
assert len(wshape) >= 2, "Weights shape is invalid: {}".format(wshape)
if op in [mx.sym.FullyConnected]:
attrs['num_hidden'] = wshape[0]
else:
if op == mx.sym.Convolution:
# Weight shape for Conv and FC: (M x C x kH x kW) : M is number of
# feature maps/hidden and C is number of channels
attrs['num_filter'] = wshape[0]
elif op == mx.sym.Deconvolution:
# Weight shape for DeConv : (C x M x kH x kW) : M is number of
# feature maps/filters and C is number of channels
attrs['num_filter'] = wshape[1]
return attrs | A workaround for getting 'channels' or 'units' since onnx don't provide
these attributes. We check the shape of weights provided to get the number. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_onnx.py#L302-L326 | null | class GraphProto(object): # pylint: disable=too-few-public-methods
"""A helper class for handling mxnet symbol copying from pb2.GraphProto.
Definition: https://github.com/onnx/onnx/blob/master/onnx/onnx.proto
"""
def __init__(self):
self._nodes = {}
self._params = {}
self._renames = {}
self._num_input = 0
self._num_param = 0
def from_onnx(self, graph):
"""Construct symbol from onnx graph.
The inputs from onnx graph is vague, only providing "1", "2"...
For convenience, we rename the `real` input names to "input_0",
"input_1"... And renaming parameters to "param_0", "param_1"...
Parameters
----------
graph : onnx protobuf object
The loaded onnx graph
Returns
-------
sym :mx.sym
The returned mxnet symbol
params : dict
A dict of name: mx.nd.array pairs, used as pretrained weights
"""
# parse network inputs, aka parameters
for init_tensor in graph.initializer:
if not init_tensor.name.strip():
raise ValueError("Tensor's name is required.")
self._params[init_tensor.name] = self._parse_array(init_tensor)
# converting GraphProto message
for i in graph.input:
if i.name in self._params:
# i is a param instead of input
name_param = 'param_{}'.format(self._num_param)
self._num_param += 1
self._params[name_param] = self._params.pop(i.name)
self._nodes[name_param] = mx.sym.Variable(name=name_param,
shape=self._params[name_param].shape)
self._renames[i.name] = name_param
else:
name_input = 'input_{}'.format(self._num_input)
self._num_input += 1
self._nodes[name_input] = mx.sym.Variable(name=name_input)
self._renames[i.name] = name_input
# constructing nodes, nodes are stored as directed acyclic graph
# converting NodeProto message
for node in graph.node:
op_name = node.op_type
node_name = node.name.strip()
node_name = node_name if node_name else None
onnx_attr = self._parse_attr(node.attribute)
new_op, mx_attr = _convert_operator(op_name, onnx_attr)
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# some workarounds for inconsistencies in onnx and mxnet conventions.
mx_attr = self._fix_bias(new_op, mx_attr, len(inputs))
mx_attr = self._fix_channels(new_op, mx_attr, list(node.input))
self._fix_bias_shape(node.op_type, node.input, onnx_attr)
# calling again to get new symbols after some workarounds
inputs = [self._nodes[self._renames.get(i, i)] for i in node.input]
# onnx's Gemm operator also supports broadcasting C input which
# mxnet's equivalent linalg_gemm doesn't. So using combination of
# transpose and FullyConnected operators.
if op_name == 'Gemm':
new_op, inputs, mx_attr = self._fix_gemm('FullyConnected', inputs, onnx_attr)
# onnx slice works on multiple axes whereas mxnet's slice_axis is for single axis
if op_name == 'Slice':
op = self._fix_slice(inputs, mx_attr)
elif op_name == 'AveragePool' and onnx_attr.get('pads') is not None or \
op_name == 'MaxPool' and onnx_attr.get('pads') is not None:
op = self._fix_pooling(op_name, inputs, onnx_attr)
elif op_name == 'Squeeze':
op = self._fix_squeeze(inputs, mx_attr)
elif op_name == 'Max' or op_name == 'Min':
op = self._fix_max_min(op_name, inputs)
elif node_name is None:
op = new_op(*inputs, **mx_attr)
else:
op = new_op(name=node_name, *inputs, **mx_attr)
node_output = self._fix_outputs(op_name, node.output)
assert len(node_output) == len(op.list_outputs()), (
"Number of output mismatch {} vs {} in {}.".format(
len(node_output), len(op.list_outputs()), op_name))
for k, i in zip(list(node_output), range(len(node_output))):
self._nodes[k] = op[i]
# now return the outputs
out = [self._nodes[i.name] for i in graph.output]
if len(out) > 1:
out = mx.sym.Group(out)
else:
out = out[0]
return out, self._params
def _fix_pooling(self, op_name, inputs, new_attr):
"""onnx pooling operator supports asymmetrical padding
Adding pad operator before pooling in mxnet to work with onnx"""
pool_type = 'avg' if op_name == 'AveragePool' else 'max'
stride = new_attr.get('strides')
kernel = new_attr.get('kernel_shape')
padding = new_attr.get('pads')
pad_width = (0, 0, 0, 0) + _pad_sequence_fix(padding, len(kernel))
new_pad_op = mx.sym.pad(inputs[0], mode='constant', pad_width=pad_width)
new_pooling_op = mx.sym.Pooling(new_pad_op, pool_type=pool_type,
stride=stride, kernel=kernel)
return new_pooling_op
def _fix_slice(self, inputs, new_attr):
"""onnx slice provides slicing on multiple axis. Adding multiple slice_axis operator
for multiple axes from mxnet"""
begin = new_attr.get('begin')
end = new_attr.get('end')
axes = new_attr.get('axis', tuple(range(len(begin))))
slice_op = mx.sym.slice_axis(inputs[0], axis=axes[0], begin=begin[0], end=end[0])
if len(axes) > 1:
for i, axis in enumerate(axes):
slice_op = mx.sym.slice_axis(slice_op, axis=axis, begin=begin[i], end=end[i])
return slice_op
def _fix_squeeze(self, inputs, new_attr):
"""
MXNet doesnt have a squeeze operator.
Using "split" to perform similar operation.
"split" can be slower compared to "reshape".
This can have performance impact.
TODO: Remove this implementation once mxnet adds the support.
"""
axes = new_attr.get('axis')
op = mx.sym.split(inputs[0], axis=axes[0], num_outputs=1, squeeze_axis=1)
for i in axes[1:]:
op = mx.sym.split(op, axis=i-1, num_outputs=1, squeeze_axis=1)
return op
def _fix_max_min(self, op_name, inputs):
""" MXNet maximum/minimum compares only two symbols at a time.
ONNX can send more than two to compare.
Breaking into multiple mxnet ops to compare two symbols at a time"""
if len(inputs) > 1:
if op_name == 'Max':
op = mx.sym.maximum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.maximum(op, ip)
elif op_name == 'Min':
op = mx.sym.minimum(inputs[0], inputs[1])
for ip in inputs[2:]:
op = mx.sym.minimum(op, ip)
else:
op = inputs[0]
return op
def _fix_gemm(self, op_name, inputs, old_attr):
"""Using FullyConnected operator in place of linalg_gemm to perform same operation"""
op = getattr(mx.sym, op_name, None)
alpha = float(old_attr.get('alpha', 1.0))
beta = float(old_attr.get('beta', 1.0))
transA = int(old_attr.get('transA', 0))
transB = int(old_attr.get('transB', 0))
if transA:
inputs[0] = mx.sym.transpose(inputs[0], axes=(1, 0))
if not transB:
inputs[1] = mx.sym.transpose(inputs[1], axes=(1, 0))
new_inputs = [alpha*inputs[0], inputs[1], beta*inputs[2]]
new_attr = {'num_hidden' : self._params[inputs[2].name].shape[0]}
return op, new_inputs, new_attr
def _parse_array(self, tensor_proto):
"""Grab data in TensorProto and convert to numpy array."""
try:
from onnx.numpy_helper import to_array
except ImportError as e:
raise ImportError("Unable to import onnx which is required {}".format(e))
np_array = to_array(tensor_proto).reshape(tuple(tensor_proto.dims))
return mx.nd.array(np_array)
def _parse_attr(self, attr_proto):
"""Convert a list of AttributeProto to a dict, with names as keys."""
attrs = {}
for a in attr_proto:
for f in ['f', 'i', 's']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['floats', 'ints', 'strings']:
if list(getattr(a, f)):
assert a.name not in attrs, "Only one type of attr is allowed"
attrs[a.name] = tuple(getattr(a, f))
for f in ['t', 'g']:
if a.HasField(f):
attrs[a.name] = getattr(a, f)
for f in ['tensors', 'graphs']:
if list(getattr(a, f)):
raise NotImplementedError("Filed {} is not supported in mxnet.".format(f))
if a.name not in attrs:
raise ValueError("Cannot parse attribute: \n{}\n.".format(a))
return attrs
def _fix_outputs(self, op, outputs):
"""A workaround to handle dropout or similar operator that have more than one out
in ONNX.
"""
if op == 'Dropout':
assert len(outputs) == 2, "ONNX have two outputs for dropout layer."
outputs = outputs[:-1]
return outputs
def _fix_bias(self, op, attrs, num_inputs):
"""A workaround for 'use_bias' attribute since onnx don't provide this attribute,
we have to check the number of inputs to decide it."""
if op not in [mx.sym.Convolution, mx.sym.Deconvolution, mx.sym.FullyConnected]:
return attrs
if num_inputs == 3:
attrs['no_bias'] = False
elif num_inputs == 2:
attrs['no_bias'] = True
else:
raise ValueError("Unexpected number of inputs for: {}".format(op))
return attrs
    def _fix_bias_shape(self, op_name, inputs, attrs):
        """Reshape a 1-D bias parameter of a broadcast Add/Mul to rank 4.

        The length-n bias is stored back as shape (1, n, 1, 1) and its
        symbol is re-created with that explicit shape so the subsequent
        broadcast op can line it up along axis 1.
        """
        if (op_name == 'Add' or op_name == 'Mul') and (int(len(self._params)) > 0) and \
            ('broadcast' in attrs and attrs['broadcast'] == 1):
            assert len(list(inputs)) == 2
            bias_name = self._renames.get(inputs[1], inputs[1])
            bias = self._params[bias_name]
            assert len(bias.shape) == 1
            # reshape the length-n bias to (1, n, 1, 1)
            bias = mx.nd.array(bias.asnumpy().reshape((1, -1, 1, 1)))
            # broadcast_add expects shape with sym.variable
            self._nodes[bias_name] = mx.sym.Variable(name=bias_name, shape=bias.shape)
            self._params[bias_name] = bias
|
onnx/onnx-mxnet | onnx_mxnet/backend_rep.py | MXNetBackendRep.run | python | def run(self, inputs, **kwargs):
        """Run inference on `inputs` and return the outputs as numpy arrays.

        Only the first entry of `inputs` is consumed; it is bound to the
        graph input named 'input_0'. kwargs are accepted but ignored.
        """
        input_data = np.asarray(inputs[0], dtype='f')  # 'f' == float32
        # create module, passing cpu context
        if self.device == 'CPU':
            ctx = mx.cpu()
        else:
            raise NotImplementedError("Only CPU context is supported for now")
        mod = mx.mod.Module(symbol=self.symbol, data_names=['input_0'], context=ctx,
                            label_names=None)
        # inference only: bound without training mode or label shapes
        mod.bind(for_training=False, data_shapes=[('input_0', input_data.shape)],
                 label_shapes=None)
        mod.set_params(arg_params=self.params, aux_params=None)
        # run inference
        batch = namedtuple('Batch', ['data'])
        mod.forward(batch([mx.nd.array(input_data)]))
        result = mod.get_outputs()[0].asnumpy()
return [result] | Run model inference and return the result
Parameters
----------
inputs : numpy array
input to run a layer on
Returns
-------
params : numpy array
result obtained after running the inference on mxnet | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/backend_rep.py#L36-L68 | null | class MXNetBackendRep(BackendRep):
"""Running model inference on mxnet engine and return the result
to onnx test infrastructure for comparison."""
    def __init__(self, symbol, params, device):
        """Hold the converted mxnet symbol, its weights and the target device."""
        self.symbol = symbol
        self.params = params
        self.device = device
|
onnx/onnx-mxnet | onnx_mxnet/common.py | AttributeConverter._parse_default | python | def _parse_default(self, target):
if not isinstance(target, (list, tuple)):
k, v, t = target, None, lambda x: x
elif len(target) == 1:
k, v, t = target[0], None, lambda x: x
elif len(target) == 2:
k, v, t = target[0], target[1], lambda x: x
elif len(target) > 2:
k, v, t = target[0], target[1], target[2]
else:
k = None # should raise
if not isinstance(k, string_types):
msg = "{} is not a valid target, (name, default) expected.".format(target)
raise ValueError(msg)
return k, v, t | Helper function to parse default values. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/common.py#L114-L129 | null | class AttributeConverter(object):
"""Common attribute converter. An AttributeConverter instance is a callable:
```
attr_converter = AttributeConverter(op_name, transforms={'a':'b', 'c':('d', 1)})
new_op_name, new_attr = attr_converter(attrs)
```
Parameters
----------
op_name : str or callable
If set as str, returned operator name is the str.
If set as callable, returned operator is the str returned by calling:
`op_name = func(attr)`
transforms : dict of `new_name, or (new_name, default_value, transform function)`
If only a new_name is provided, it's like renaming the attribute name.
If default_value if provided, then the attribute is considered as optional.
If transform function is provided, the original attribute value is handled
by transform function.
excludes : list
A list of excluded attributes that should `NOT` appear.
Raise NotImplementedError if occurred.
disables : list
A list of attributes that is disabled in mxnet. Raise warnings.
ignores : list
A list of attributes that is ignored in mxnet. Silent.
extras : dict
A series of additional attributes should be added anyway to the returned
attribute dict.
custom_check : callable
A custom function takes attribute, and return True/False.
Raise RuntimeError if not bool(True) returned.
"""
def __init__(self, op_name, transforms=None,
excludes=None, disables=None, ignores=None,
extras=None, custom_check=None):
self._op_name = op_name
self._transforms = transforms if transforms else {}
self._excludes = excludes if excludes else []
self._disables = disables if disables else []
self._ignores = ignores if ignores else []
self._extras = extras if extras else {}
self._custom_check = custom_check
def __call__(self, attrs):
# apply custom check
if self._custom_check:
func, msg = self._custom_check
if not func(attrs):
raise RuntimeError("Check failed: {}".format(msg))
# get new op_name
if isinstance(self._op_name, string_types):
op_name = self._op_name
else:
assert callable(self._op_name), "op_name can either be string or callable"
op_name = self._op_name(attrs)
# convert attributes
new_attrs = {}
for k in attrs.keys():
if k in self._excludes:
raise NotImplementedError("Attribute {} not supported yet.".format(k))
elif k in self._ignores:
pass
elif k in self._transforms:
new_name, defaults, transform = self._parse_default(self._transforms[k])
if defaults is None:
new_attr = self._required_attr(attrs, k)
else:
new_attr = attrs.get(k, None)
if new_attr is None:
new_attrs[new_name] = defaults
else:
new_attrs[new_name] = transform(new_attr)
else:
# copy
new_attrs[k] = attrs[k]
# add extras
new_attrs.update(self._extras)
return op_name, new_attrs
def _parse_bool(self, value):
"""Helper function to parse default boolean values."""
if isinstance(value, string_types):
return value.strip().lower() in ['true', '1', 't', 'y', 'yes']
return bool(value)
def _required_attr(self, attr, key):
"""Wrapper for getting required attributes."""
assert isinstance(attr, dict)
if key not in attr:
raise AttributeError("Required attribute {} not found.".format(key))
return attr[key]
|
onnx/onnx-mxnet | onnx_mxnet/common.py | AttributeConverter._parse_bool | python | def _parse_bool(self, value):
if isinstance(value, string_types):
return value.strip().lower() in ['true', '1', 't', 'y', 'yes']
return bool(value) | Helper function to parse default boolean values. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/common.py#L131-L135 | null | class AttributeConverter(object):
"""Common attribute converter. An AttributeConverter instance is a callable:
```
attr_converter = AttributeConverter(op_name, transforms={'a':'b', 'c':('d', 1)})
new_op_name, new_attr = attr_converter(attrs)
```
Parameters
----------
op_name : str or callable
If set as str, returned operator name is the str.
If set as callable, returned operator is the str returned by calling:
`op_name = func(attr)`
transforms : dict of `new_name, or (new_name, default_value, transform function)`
If only a new_name is provided, it's like renaming the attribute name.
If default_value if provided, then the attribute is considered as optional.
If transform function is provided, the original attribute value is handled
by transform function.
excludes : list
A list of excluded attributes that should `NOT` appear.
Raise NotImplementedError if occurred.
disables : list
A list of attributes that is disabled in mxnet. Raise warnings.
ignores : list
A list of attributes that is ignored in mxnet. Silent.
extras : dict
A series of additional attributes should be added anyway to the returned
attribute dict.
custom_check : callable
A custom function takes attribute, and return True/False.
Raise RuntimeError if not bool(True) returned.
"""
def __init__(self, op_name, transforms=None,
excludes=None, disables=None, ignores=None,
extras=None, custom_check=None):
self._op_name = op_name
self._transforms = transforms if transforms else {}
self._excludes = excludes if excludes else []
self._disables = disables if disables else []
self._ignores = ignores if ignores else []
self._extras = extras if extras else {}
self._custom_check = custom_check
    def __call__(self, attrs):
        """Convert ONNX attributes to mxnet form; return (op_name, new_attrs).

        Applies, in order: the optional custom check, op-name resolution
        (literal string or callable of the attrs), per-attribute
        exclusion / ignoring / transformation, and finally the extras.
        """
        # apply custom check
        if self._custom_check:
            func, msg = self._custom_check
            if not func(attrs):
                raise RuntimeError("Check failed: {}".format(msg))
        # get new op_name
        if isinstance(self._op_name, string_types):
            op_name = self._op_name
        else:
            assert callable(self._op_name), "op_name can either be string or callable"
            op_name = self._op_name(attrs)
        # convert attributes
        new_attrs = {}
        for k in attrs.keys():
            if k in self._excludes:
                raise NotImplementedError("Attribute {} not supported yet.".format(k))
            elif k in self._ignores:
                pass
            elif k in self._transforms:
                # (new_name, default, transform); a None default marks a
                # required attribute, fetched via _required_attr.
                new_name, defaults, transform = self._parse_default(self._transforms[k])
                if defaults is None:
                    new_attr = self._required_attr(attrs, k)
                else:
                    new_attr = attrs.get(k, None)
                if new_attr is None:
                    new_attrs[new_name] = defaults
                else:
                    new_attrs[new_name] = transform(new_attr)
            else:
                # copy
                new_attrs[k] = attrs[k]
        # NOTE(review): self._disables is never consulted in this method,
        # although the class docstring promises warnings for it -- confirm.
        # add extras
        new_attrs.update(self._extras)
        return op_name, new_attrs
def _parse_default(self, target):
"""Helper function to parse default values."""
if not isinstance(target, (list, tuple)):
k, v, t = target, None, lambda x: x
elif len(target) == 1:
k, v, t = target[0], None, lambda x: x
elif len(target) == 2:
k, v, t = target[0], target[1], lambda x: x
elif len(target) > 2:
k, v, t = target[0], target[1], target[2]
else:
k = None # should raise
if not isinstance(k, string_types):
msg = "{} is not a valid target, (name, default) expected.".format(target)
raise ValueError(msg)
return k, v, t
def _required_attr(self, attr, key):
"""Wrapper for getting required attributes."""
assert isinstance(attr, dict)
if key not in attr:
raise AttributeError("Required attribute {} not found.".format(key))
return attr[key]
|
onnx/onnx-mxnet | onnx_mxnet/common.py | AttributeConverter._required_attr | python | def _required_attr(self, attr, key):
assert isinstance(attr, dict)
if key not in attr:
raise AttributeError("Required attribute {} not found.".format(key))
return attr[key] | Wrapper for getting required attributes. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/common.py#L137-L142 | null | class AttributeConverter(object):
"""Common attribute converter. An AttributeConverter instance is a callable:
```
attr_converter = AttributeConverter(op_name, transforms={'a':'b', 'c':('d', 1)})
new_op_name, new_attr = attr_converter(attrs)
```
Parameters
----------
op_name : str or callable
If set as str, returned operator name is the str.
If set as callable, returned operator is the str returned by calling:
`op_name = func(attr)`
transforms : dict of `new_name, or (new_name, default_value, transform function)`
If only a new_name is provided, it's like renaming the attribute name.
If default_value if provided, then the attribute is considered as optional.
If transform function is provided, the original attribute value is handled
by transform function.
excludes : list
A list of excluded attributes that should `NOT` appear.
Raise NotImplementedError if occurred.
disables : list
A list of attributes that is disabled in mxnet. Raise warnings.
ignores : list
A list of attributes that is ignored in mxnet. Silent.
extras : dict
A series of additional attributes should be added anyway to the returned
attribute dict.
custom_check : callable
A custom function takes attribute, and return True/False.
Raise RuntimeError if not bool(True) returned.
"""
def __init__(self, op_name, transforms=None,
excludes=None, disables=None, ignores=None,
extras=None, custom_check=None):
self._op_name = op_name
self._transforms = transforms if transforms else {}
self._excludes = excludes if excludes else []
self._disables = disables if disables else []
self._ignores = ignores if ignores else []
self._extras = extras if extras else {}
self._custom_check = custom_check
def __call__(self, attrs):
# apply custom check
if self._custom_check:
func, msg = self._custom_check
if not func(attrs):
raise RuntimeError("Check failed: {}".format(msg))
# get new op_name
if isinstance(self._op_name, string_types):
op_name = self._op_name
else:
assert callable(self._op_name), "op_name can either be string or callable"
op_name = self._op_name(attrs)
# convert attributes
new_attrs = {}
for k in attrs.keys():
if k in self._excludes:
raise NotImplementedError("Attribute {} not supported yet.".format(k))
elif k in self._ignores:
pass
elif k in self._transforms:
new_name, defaults, transform = self._parse_default(self._transforms[k])
if defaults is None:
new_attr = self._required_attr(attrs, k)
else:
new_attr = attrs.get(k, None)
if new_attr is None:
new_attrs[new_name] = defaults
else:
new_attrs[new_name] = transform(new_attr)
else:
# copy
new_attrs[k] = attrs[k]
# add extras
new_attrs.update(self._extras)
return op_name, new_attrs
def _parse_default(self, target):
"""Helper function to parse default values."""
if not isinstance(target, (list, tuple)):
k, v, t = target, None, lambda x: x
elif len(target) == 1:
k, v, t = target[0], None, lambda x: x
elif len(target) == 2:
k, v, t = target[0], target[1], lambda x: x
elif len(target) > 2:
k, v, t = target[0], target[1], target[2]
else:
k = None # should raise
if not isinstance(k, string_types):
msg = "{} is not a valid target, (name, default) expected.".format(target)
raise ValueError(msg)
return k, v, t
def _parse_bool(self, value):
"""Helper function to parse default boolean values."""
if isinstance(value, string_types):
return value.strip().lower() in ['true', '1', 't', 'y', 'yes']
return bool(value)
|
onnx/onnx-mxnet | onnx_mxnet/backend.py | MXNetBackend.make_graph | python | def make_graph(node, inputs):
        """Build an ONNX GraphProto wrapping a single node for testing.

        Value infos are created with a placeholder shape of [1]; an
        initializer is created only for an input literally named 'W'.
        """
        initializer = []
        tensor_input_info = []
        tensor_output_info = []
        # Adding input tensor info.
        for index in range(len(node.input)):
            tensor_input_info.append(
                helper.make_tensor_value_info(str(node.input[index]), TensorProto.FLOAT, [1]))
            # Creating an initializer for Weight params.
            # Assumes that weight params is named as 'W'.
            # TODO: Handle multiple weight params.
            # TODO: Add for "bias" if needed
            if node.input[index] == 'W':
                dim = inputs[index].shape
                param_tensor = helper.make_tensor(
                    name=node.input[index],
                    data_type=TensorProto.FLOAT,
                    dims=dim,
                    vals=inputs[index].flatten())
                initializer.append(param_tensor)
        # Adding output tensor info.
        for index in range(len(node.output)):
            tensor_output_info.append(
                helper.make_tensor_value_info(str(node.output[index]), TensorProto.FLOAT, [1]))
        # creating graph proto object.
        graph_proto = helper.make_graph(
            [node],
            "test",
            tensor_input_info,
            tensor_output_info,
            initializer=initializer)
return graph_proto | Created ONNX GraphProto from node | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/backend.py#L30-L68 | null | class MXNetBackend(Backend):
"""MXNet backend for ONNX"""
@staticmethod
@classmethod
def run_node(cls, node, inputs, device='CPU'): # pylint: disable=arguments-differ
"""Running individual node inference on mxnet engine and
return the result to onnx test infrastructure.
Parameters
----------
node : onnx node object
loaded onnx node (individual layer)
inputs : numpy array
input to run a node on
device : 'CPU'
device to run a node on
Returns
-------
params : numpy array
result obtained after running the operator
"""
graph = GraphProto()
sym, _ = graph.from_onnx(MXNetBackend.make_graph(node, inputs))
data_names = [i for i in sym.get_internals().list_inputs()]
data_shapes = []
reduce_op_types = set(['ReduceMin', 'ReduceMax', 'ReduceMean',
'ReduceProd', 'ReduceSum', 'Slice', 'Pad',
'Squeeze', 'Upsample', 'Reshape', 'Conv', 'ConvTranspose'])
# Adding extra dimension of batch_size 1 if the batch_size is different for multiple inputs.
for idx, input_name in enumerate(data_names):
batch_size = 1
if len(inputs[idx].shape) < 4 and len(inputs) > 1 and \
len(set(x.shape[0] for x in inputs)) != 1:
tuples = ((batch_size,), inputs[idx].shape)
new_shape = sum(tuples, ())
data_shapes.append((input_name, new_shape))
else:
data_shapes.append((input_name, inputs[idx].shape))
# create module, passing cpu context
if device == 'CPU':
ctx = mx.cpu()
else:
raise NotImplementedError("Only CPU context is supported for now")
# create a module
mod = mx.mod.Module(symbol=sym, data_names=data_names, context=ctx, label_names=None)
mod.bind(for_training=False, data_shapes=data_shapes, label_shapes=None)
# initializing parameters for calculating result of each individual node
mod.init_params()
data_forward = []
for idx, input_name in enumerate(data_names):
# slice and pad operator tests needs 1 less dimension in forward pass
# otherwise it will throw an error.
# for squeeze operator, need to retain shape of input as provided
val = inputs[idx]
if node.op_type in reduce_op_types:
data_forward.append(mx.nd.array(val))
else:
data_forward.append(mx.nd.array([val]))
mod.forward(mx.io.DataBatch(data_forward))
result = mod.get_outputs()[0].asnumpy()
if node.op_type in reduce_op_types:
return [result]
return result
@classmethod
def prepare(cls, model, device='CPU', **kwargs):
"""For running end to end model(used for onnx test backend)
Parameters
----------
model : onnx ModelProto object
loaded onnx graph
device : 'CPU'
specifying device to run test on
kwargs :
other arguments
Returns
-------
MXNetBackendRep : object
Returns object of MXNetBackendRep class which will be in turn
used to run inference on the input model and return the result for comparison.
"""
graph = GraphProto()
sym, params = graph.from_onnx(model.graph)
return MXNetBackendRep(sym, params, device)
@classmethod
def supports_device(cls, device):
"""Supports only CPU for testing"""
return device == 'CPU'
|
onnx/onnx-mxnet | onnx_mxnet/backend.py | MXNetBackend.run_node | python | def run_node(cls, node, inputs, device='CPU'): # pylint: disable=arguments-differ
graph = GraphProto()
sym, _ = graph.from_onnx(MXNetBackend.make_graph(node, inputs))
data_names = [i for i in sym.get_internals().list_inputs()]
data_shapes = []
reduce_op_types = set(['ReduceMin', 'ReduceMax', 'ReduceMean',
'ReduceProd', 'ReduceSum', 'Slice', 'Pad',
'Squeeze', 'Upsample', 'Reshape', 'Conv', 'ConvTranspose'])
# Adding extra dimension of batch_size 1 if the batch_size is different for multiple inputs.
for idx, input_name in enumerate(data_names):
batch_size = 1
if len(inputs[idx].shape) < 4 and len(inputs) > 1 and \
len(set(x.shape[0] for x in inputs)) != 1:
tuples = ((batch_size,), inputs[idx].shape)
new_shape = sum(tuples, ())
data_shapes.append((input_name, new_shape))
else:
data_shapes.append((input_name, inputs[idx].shape))
# create module, passing cpu context
if device == 'CPU':
ctx = mx.cpu()
else:
raise NotImplementedError("Only CPU context is supported for now")
# create a module
mod = mx.mod.Module(symbol=sym, data_names=data_names, context=ctx, label_names=None)
mod.bind(for_training=False, data_shapes=data_shapes, label_shapes=None)
# initializing parameters for calculating result of each individual node
mod.init_params()
data_forward = []
for idx, input_name in enumerate(data_names):
# slice and pad operator tests needs 1 less dimension in forward pass
# otherwise it will throw an error.
# for squeeze operator, need to retain shape of input as provided
val = inputs[idx]
if node.op_type in reduce_op_types:
data_forward.append(mx.nd.array(val))
else:
data_forward.append(mx.nd.array([val]))
mod.forward(mx.io.DataBatch(data_forward))
result = mod.get_outputs()[0].asnumpy()
if node.op_type in reduce_op_types:
return [result]
return result | Running individual node inference on mxnet engine and
return the result to onnx test infrastructure.
Parameters
----------
node : onnx node object
loaded onnx node (individual layer)
inputs : numpy array
input to run a node on
device : 'CPU'
device to run a node on
Returns
-------
params : numpy array
result obtained after running the operator | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/backend.py#L72-L137 | [
"def from_onnx(self, graph):\n \"\"\"Construct symbol from onnx graph.\n The inputs from onnx graph is vague, only providing \"1\", \"2\"...\n For convenience, we rename the `real` input names to \"input_0\",\n \"input_1\"... And renaming parameters to \"param_0\", \"param_1\"...\n\n Parameters\n ... | class MXNetBackend(Backend):
"""MXNet backend for ONNX"""
@staticmethod
def make_graph(node, inputs):
""" Created ONNX GraphProto from node"""
initializer = []
tensor_input_info = []
tensor_output_info = []
# Adding input tensor info.
for index in range(len(node.input)):
tensor_input_info.append(
helper.make_tensor_value_info(str(node.input[index]), TensorProto.FLOAT, [1]))
# Creating an initializer for Weight params.
# Assumes that weight params is named as 'W'.
# TODO: Handle multiple weight params.
# TODO: Add for "bias" if needed
if node.input[index] == 'W':
dim = inputs[index].shape
param_tensor = helper.make_tensor(
name=node.input[index],
data_type=TensorProto.FLOAT,
dims=dim,
vals=inputs[index].flatten())
initializer.append(param_tensor)
# Adding output tensor info.
for index in range(len(node.output)):
tensor_output_info.append(
helper.make_tensor_value_info(str(node.output[index]), TensorProto.FLOAT, [1]))
# creating graph proto object.
graph_proto = helper.make_graph(
[node],
"test",
tensor_input_info,
tensor_output_info,
initializer=initializer)
return graph_proto
@classmethod
@classmethod
def prepare(cls, model, device='CPU', **kwargs):
"""For running end to end model(used for onnx test backend)
Parameters
----------
model : onnx ModelProto object
loaded onnx graph
device : 'CPU'
specifying device to run test on
kwargs :
other arguments
Returns
-------
MXNetBackendRep : object
Returns object of MXNetBackendRep class which will be in turn
used to run inference on the input model and return the result for comparison.
"""
graph = GraphProto()
sym, params = graph.from_onnx(model.graph)
return MXNetBackendRep(sym, params, device)
@classmethod
def supports_device(cls, device):
"""Supports only CPU for testing"""
return device == 'CPU'
|
onnx/onnx-mxnet | onnx_mxnet/backend.py | MXNetBackend.prepare | python | def prepare(cls, model, device='CPU', **kwargs):
graph = GraphProto()
sym, params = graph.from_onnx(model.graph)
return MXNetBackendRep(sym, params, device) | For running end to end model(used for onnx test backend)
Parameters
----------
model : onnx ModelProto object
loaded onnx graph
device : 'CPU'
specifying device to run test on
kwargs :
other arguments
Returns
-------
MXNetBackendRep : object
Returns object of MXNetBackendRep class which will be in turn
used to run inference on the input model and return the result for comparison. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/backend.py#L140-L160 | [
"def from_onnx(self, graph):\n \"\"\"Construct symbol from onnx graph.\n The inputs from onnx graph is vague, only providing \"1\", \"2\"...\n For convenience, we rename the `real` input names to \"input_0\",\n \"input_1\"... And renaming parameters to \"param_0\", \"param_1\"...\n\n Parameters\n ... | class MXNetBackend(Backend):
"""MXNet backend for ONNX"""
@staticmethod
def make_graph(node, inputs):
""" Created ONNX GraphProto from node"""
initializer = []
tensor_input_info = []
tensor_output_info = []
# Adding input tensor info.
for index in range(len(node.input)):
tensor_input_info.append(
helper.make_tensor_value_info(str(node.input[index]), TensorProto.FLOAT, [1]))
# Creating an initializer for Weight params.
# Assumes that weight params is named as 'W'.
# TODO: Handle multiple weight params.
# TODO: Add for "bias" if needed
if node.input[index] == 'W':
dim = inputs[index].shape
param_tensor = helper.make_tensor(
name=node.input[index],
data_type=TensorProto.FLOAT,
dims=dim,
vals=inputs[index].flatten())
initializer.append(param_tensor)
# Adding output tensor info.
for index in range(len(node.output)):
tensor_output_info.append(
helper.make_tensor_value_info(str(node.output[index]), TensorProto.FLOAT, [1]))
# creating graph proto object.
graph_proto = helper.make_graph(
[node],
"test",
tensor_input_info,
tensor_output_info,
initializer=initializer)
return graph_proto
@classmethod
def run_node(cls, node, inputs, device='CPU'): # pylint: disable=arguments-differ
"""Running individual node inference on mxnet engine and
return the result to onnx test infrastructure.
Parameters
----------
node : onnx node object
loaded onnx node (individual layer)
inputs : numpy array
input to run a node on
device : 'CPU'
device to run a node on
Returns
-------
params : numpy array
result obtained after running the operator
"""
graph = GraphProto()
sym, _ = graph.from_onnx(MXNetBackend.make_graph(node, inputs))
data_names = [i for i in sym.get_internals().list_inputs()]
data_shapes = []
reduce_op_types = set(['ReduceMin', 'ReduceMax', 'ReduceMean',
'ReduceProd', 'ReduceSum', 'Slice', 'Pad',
'Squeeze', 'Upsample', 'Reshape', 'Conv', 'ConvTranspose'])
# Adding extra dimension of batch_size 1 if the batch_size is different for multiple inputs.
for idx, input_name in enumerate(data_names):
batch_size = 1
if len(inputs[idx].shape) < 4 and len(inputs) > 1 and \
len(set(x.shape[0] for x in inputs)) != 1:
tuples = ((batch_size,), inputs[idx].shape)
new_shape = sum(tuples, ())
data_shapes.append((input_name, new_shape))
else:
data_shapes.append((input_name, inputs[idx].shape))
# create module, passing cpu context
if device == 'CPU':
ctx = mx.cpu()
else:
raise NotImplementedError("Only CPU context is supported for now")
# create a module
mod = mx.mod.Module(symbol=sym, data_names=data_names, context=ctx, label_names=None)
mod.bind(for_training=False, data_shapes=data_shapes, label_shapes=None)
# initializing parameters for calculating result of each individual node
mod.init_params()
data_forward = []
for idx, input_name in enumerate(data_names):
# slice and pad operator tests needs 1 less dimension in forward pass
# otherwise it will throw an error.
# for squeeze operator, need to retain shape of input as provided
val = inputs[idx]
if node.op_type in reduce_op_types:
data_forward.append(mx.nd.array(val))
else:
data_forward.append(mx.nd.array([val]))
mod.forward(mx.io.DataBatch(data_forward))
result = mod.get_outputs()[0].asnumpy()
if node.op_type in reduce_op_types:
return [result]
return result
@classmethod
@classmethod
def supports_device(cls, device):
"""Supports only CPU for testing"""
return device == 'CPU'
|
onnx/onnx-mxnet | onnx_mxnet/import_helper.py | _revert_caffe2_pad | python | def _revert_caffe2_pad(attr):
if len(attr) == 4:
attr = attr[:2]
elif len(attr) == 2:
pass
else:
raise ValueError("Invalid caffe2 type padding: {}".format(attr))
return attr | Removing extra padding from Caffe2. | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_helper.py#L19-L27 | null | # Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
# http://www.apache.org/licenses/LICENSE-2.0
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
# Derived from Apache 2.0 licensed onnx.py file from DMLC NNVM:
# https://github.com/dmlc/nnvm/blob/3da53e46db57c438b05fbebe8aa332ee8c5994d1/python/nnvm/frontend/onnx.py
# coding: utf-8
# pylint: disable=invalid-name
"""Operator attributes conversion"""
from onnx_mxnet.common import Renamer, AttributeConverter as AttrCvt
def _math_name_picker(surfix):
def _impl(attr):
if attr.get('broadcast', 0):
return 'broadcast_' + surfix
return 'elemwise_' + surfix
return _impl
def _broadcast_constraint():
def _broadcast_check(attrs):
if attrs.get('axis', None):
return False
return True
return _broadcast_check, "Specifying broadcast axis not allowed."
def _dimension_constraint():
"""checking dimensions for conv, deconv, pooling operators"""
def _dim_check(attrs):
if len(attrs['kernel_shape']) == 2:
return True
return False
return _dim_check, "Only 2d kernel supported."
def _elemwise(name):
"""converting attributes for add operator"""
return AttrCvt(
op_name=_math_name_picker(name),
disables=['axis'],
ignores=['broadcast'])
def _pooling(name):
"""converting attributes for pooling operator"""
return AttrCvt(
op_name='Pooling',
transforms={
'kernel_shape': 'kernel',
'strides': 'stride',
'pads': 'pad'},
# pooling convention full to match caffe2
extras={'pool_type': name, 'pooling_convention':'valid'},
custom_check=_dimension_constraint())
def _conv():
"""converting attributes for convolution operator"""
return AttrCvt(
op_name='Convolution',
transforms={
'kernel_shape': 'kernel',
'strides': 'stride',
'dilations': ('dilate', (0, 0)),
'pads': ('pad', (0, 0), _revert_caffe2_pad),
'group': ('num_group', 1)},
custom_check=_dimension_constraint())
def _conv_transpose():
"""converting attributes for deconvolution operator"""
return AttrCvt(
op_name='Deconvolution',
transforms={
'kernel_shape': 'kernel',
'strides': 'stride',
'dilations': ('dilate', (0, 0)),
'pads': ('pad', (0, 0), _revert_caffe2_pad),
'group': ('num_group', 1)},
disables=['output_shape'],
custom_check=_dimension_constraint())
def _batch_norm():
"""converting attributes for BatchNorm operator"""
return AttrCvt(
op_name='BatchNorm',
transforms={'epsilon': 'eps'},
extras={'cudnn_off': 1},
ignores=['spatial', 'is_test', 'consumed_inputs'])
def _activation(name):
"""converting attributes for LeakyRelu operator"""
return AttrCvt(
op_name='LeakyReLU',
transforms={
'alpha':'slope'},
extras={'act_type': name})
def _pad_sequence_fix(attr, kernelDim=None):
"""Changing onnx's pads sequence to match with mxnet's pad_width
mxnet: (x1_begin, x1_end, ... , xn_begin, xn_end)
onnx: (x1_begin, x2_begin, ... , xn_end, xn_end)"""
new_attr = ()
if len(attr) % 2 == 0:
for index in range(int(len(attr) / 2)):
new_attr = new_attr + attr[index::int(len(attr) / 2)]
# Making sure pad values are in the attr for all axes.
if kernelDim is not None:
while len(new_attr) < kernelDim*2:
new_attr = new_attr + (0, 0)
return new_attr
def _pad():
"""converting attributes for Pad operator"""
return AttrCvt(
op_name='pad',
transforms={
'pads': ('pad_width', (0, 0, 0, 0, 0, 0, 0, 0), _pad_sequence_fix),
'value': 'constant_value'})
def _global_pooling(name):
"""Requires kernel attribute which is not present in onnx currently.
So for now giving default kernel."""
return AttrCvt(
op_name='Pooling',
extras={'global_pool': True,
'kernel': (1, 1),
'pool_type': name})
def _upsample_scale_fix(attr):
"""Scale attribute conversion from float to int"""
return int(attr)
def _upsample_restrict_mode(attr):
"""Mxnet's current UpSampling operator doesn't work well in bilinear mode.
New operator is coming in this PR https://github.com/apache/incubator-mxnet/pull/9688/
Issue to track this: https://github.com/onnx/onnx-mxnet/issues/33
For now, only nearest mode is enabled."""
if attr.decode() != 'nearest':
raise ValueError("Only nearest mode is supported: {}".format(attr))
return attr.decode()
def _upsample(name):
"""converting attributes for UpSampling operator"""
return AttrCvt(
op_name=name,
transforms={'height_scale': ('scale', 1, _upsample_scale_fix),
'mode': ('sample_type', 'nearest', _upsample_restrict_mode),
'width_scale': ('scale', 1, _upsample_scale_fix)})
# compatible operators that do NOT require any conversion.
_identity_list = []
# _convert_map defines maps of name to converter functor(callable)
_convert_map = {
# defs/experimental
'FC' : AttrCvt('FullyConnected', ignores=['axis', 'axis_w']),
# defs/generator
'Constant': Renamer('identity'),
'RandomUniform' : AttrCvt('random_uniform', ignores=['seed']),
'RandomNormal' : AttrCvt('random_normal', {'mean':'loc'}, ignores=['seed']),
'RandomUniformLike' : AttrCvt('random_uniform', ignores=['seed']),
'RandomNormalLike': AttrCvt('random_normal', {'mean':'loc'}, ignores=['seed']),
# defs/logical
# defs/math
'Add' : _elemwise('add'),
'Sub' : _elemwise('sub'),
'Mul' : _elemwise('mul'),
'Div' : _elemwise('div'),
'Neg' : Renamer('negative'),
'Abs' : Renamer('abs'),
'Reciprocal' : Renamer('reciprocal'),
'Floor' : Renamer('floor'),
'Ceil' : Renamer('ceil'),
'Sqrt' : Renamer('sqrt'),
'Gemm' : AttrCvt('linalg_gemm', {'transA':'transpose_a', 'transB':'transpose_b'},
ignores=['broadcast']),
'Relu' : Renamer('relu'),
'LeakyRelu' : AttrCvt('LeakyReLU', {'alpha': 'slope'}),
# 'Selu'
'Elu' : _activation('elu'),
'Exp' : Renamer('exp'),
'Log' : Renamer('log'),
'Tanh' : Renamer('tanh'),
'Pow' : AttrCvt('pow', {'exponent':'exp'}),
'Dot' : Renamer('dot'),
'MatMul' : Renamer('linalg_gemm2'),
# 'PRelu'
'Sigmoid' : Renamer('sigmoid'),
'Max' : Renamer('maximum'), #elemwise maximum
'Min' : Renamer('minimum'), #elemwise minimum
'Sum' : Renamer('add_n'), #elemwise sum
# softmax default axis is different in onnx
'Softmax' : AttrCvt('softmax', extras={'axis': 1}),
# defs/nn
'AveragePool' : _pooling('avg'),
'MaxPool' : _pooling('max'),
'Conv' : _conv(),
'ConvTranspose' : _conv_transpose(),
'GlobalAveragePool': _global_pooling('avg'),
'GlobalMaxPool' : _global_pooling('max'),
'BatchNormalization': _batch_norm(),
'SpatialBN' : _batch_norm(),
'Dropout' : AttrCvt('Dropout', {'ratio': 'p'}, ignores=['is_test']),
'Flatten' : Renamer('flatten'),
'LRN' : AttrCvt('LRN', {'bias': 'knorm', 'size' : 'nsize'}),
# defs/reduction
'ReduceMax' : AttrCvt('max', {'axes': 'axis'}),
'ReduceMin' : AttrCvt('min', {'axes': 'axis'}),
'ReduceSum' : AttrCvt('sum', {'axes': 'axis'}),
'ReduceMean' : AttrCvt('mean', {'axes': 'axis'}),
'ReduceProd' : AttrCvt('prod', {'axes': 'axis'}),
# 'ReduceLogSumExp'
'ArgMax' : Renamer('argmax'),
'ArgMin' : Renamer('argmin'),
# defs/tensor
'Cast' : AttrCvt('cast', {'to': 'dtype'}),
'Reshape' : Renamer('reshape'),
'Concat' : AttrCvt('concat', {'axis': 'dim'}),
'Split' : AttrCvt('split', {'split': 'num_outputs'}),
'Pad' : _pad(),
'Slice' : AttrCvt('slice_axis', {'axes': 'axis', 'ends': 'end', 'starts': 'begin'}),
'Transpose' : AttrCvt('transpose', {'perm': 'axes'}),
'Squeeze' : AttrCvt('split', {'axes': 'axis'}),
# 'Gather'
'Upsample' : _upsample('UpSampling')
}
|
onnx/onnx-mxnet | onnx_mxnet/import_helper.py | _pad_sequence_fix | python | def _pad_sequence_fix(attr, kernelDim=None):
new_attr = ()
if len(attr) % 2 == 0:
for index in range(int(len(attr) / 2)):
new_attr = new_attr + attr[index::int(len(attr) / 2)]
# Making sure pad values are in the attr for all axes.
if kernelDim is not None:
while len(new_attr) < kernelDim*2:
new_attr = new_attr + (0, 0)
return new_attr | Changing onnx's pads sequence to match with mxnet's pad_width
mxnet: (x1_begin, x1_end, ... , xn_begin, xn_end)
onnx: (x1_begin, x2_begin, ... , xn_end, xn_end) | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/import_helper.py#L111-L124 | null | # Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
# http://www.apache.org/licenses/LICENSE-2.0
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
# Derived from Apache 2.0 licensed onnx.py file from DMLC NNVM:
# https://github.com/dmlc/nnvm/blob/3da53e46db57c438b05fbebe8aa332ee8c5994d1/python/nnvm/frontend/onnx.py
# coding: utf-8
# pylint: disable=invalid-name
"""Operator attributes conversion"""
from onnx_mxnet.common import Renamer, AttributeConverter as AttrCvt
def _revert_caffe2_pad(attr):
"""Removing extra padding from Caffe2."""
if len(attr) == 4:
attr = attr[:2]
elif len(attr) == 2:
pass
else:
raise ValueError("Invalid caffe2 type padding: {}".format(attr))
return attr
def _math_name_picker(surfix):
def _impl(attr):
if attr.get('broadcast', 0):
return 'broadcast_' + surfix
return 'elemwise_' + surfix
return _impl
def _broadcast_constraint():
def _broadcast_check(attrs):
if attrs.get('axis', None):
return False
return True
return _broadcast_check, "Specifying broadcast axis not allowed."
def _dimension_constraint():
"""checking dimensions for conv, deconv, pooling operators"""
def _dim_check(attrs):
if len(attrs['kernel_shape']) == 2:
return True
return False
return _dim_check, "Only 2d kernel supported."
def _elemwise(name):
"""converting attributes for add operator"""
return AttrCvt(
op_name=_math_name_picker(name),
disables=['axis'],
ignores=['broadcast'])
def _pooling(name):
"""converting attributes for pooling operator"""
return AttrCvt(
op_name='Pooling',
transforms={
'kernel_shape': 'kernel',
'strides': 'stride',
'pads': 'pad'},
# pooling convention full to match caffe2
extras={'pool_type': name, 'pooling_convention':'valid'},
custom_check=_dimension_constraint())
def _conv():
"""converting attributes for convolution operator"""
return AttrCvt(
op_name='Convolution',
transforms={
'kernel_shape': 'kernel',
'strides': 'stride',
'dilations': ('dilate', (0, 0)),
'pads': ('pad', (0, 0), _revert_caffe2_pad),
'group': ('num_group', 1)},
custom_check=_dimension_constraint())
def _conv_transpose():
"""converting attributes for deconvolution operator"""
return AttrCvt(
op_name='Deconvolution',
transforms={
'kernel_shape': 'kernel',
'strides': 'stride',
'dilations': ('dilate', (0, 0)),
'pads': ('pad', (0, 0), _revert_caffe2_pad),
'group': ('num_group', 1)},
disables=['output_shape'],
custom_check=_dimension_constraint())
def _batch_norm():
"""converting attributes for BatchNorm operator"""
return AttrCvt(
op_name='BatchNorm',
transforms={'epsilon': 'eps'},
extras={'cudnn_off': 1},
ignores=['spatial', 'is_test', 'consumed_inputs'])
def _activation(name):
"""converting attributes for LeakyRelu operator"""
return AttrCvt(
op_name='LeakyReLU',
transforms={
'alpha':'slope'},
extras={'act_type': name})
def _pad():
"""converting attributes for Pad operator"""
return AttrCvt(
op_name='pad',
transforms={
'pads': ('pad_width', (0, 0, 0, 0, 0, 0, 0, 0), _pad_sequence_fix),
'value': 'constant_value'})
def _global_pooling(name):
"""Requires kernel attribute which is not present in onnx currently.
So for now giving default kernel."""
return AttrCvt(
op_name='Pooling',
extras={'global_pool': True,
'kernel': (1, 1),
'pool_type': name})
def _upsample_scale_fix(attr):
"""Scale attribute conversion from float to int"""
return int(attr)
def _upsample_restrict_mode(attr):
"""Mxnet's current UpSampling operator doesn't work well in bilinear mode.
New operator is coming in this PR https://github.com/apache/incubator-mxnet/pull/9688/
Issue to track this: https://github.com/onnx/onnx-mxnet/issues/33
For now, only nearest mode is enabled."""
if attr.decode() != 'nearest':
raise ValueError("Only nearest mode is supported: {}".format(attr))
return attr.decode()
def _upsample(name):
"""converting attributes for UpSampling operator"""
return AttrCvt(
op_name=name,
transforms={'height_scale': ('scale', 1, _upsample_scale_fix),
'mode': ('sample_type', 'nearest', _upsample_restrict_mode),
'width_scale': ('scale', 1, _upsample_scale_fix)})
# compatible operators that do NOT require any conversion.
_identity_list = []
# _convert_map defines maps of name to converter functor(callable)
_convert_map = {
# defs/experimental
'FC' : AttrCvt('FullyConnected', ignores=['axis', 'axis_w']),
# defs/generator
'Constant': Renamer('identity'),
'RandomUniform' : AttrCvt('random_uniform', ignores=['seed']),
'RandomNormal' : AttrCvt('random_normal', {'mean':'loc'}, ignores=['seed']),
'RandomUniformLike' : AttrCvt('random_uniform', ignores=['seed']),
'RandomNormalLike': AttrCvt('random_normal', {'mean':'loc'}, ignores=['seed']),
# defs/logical
# defs/math
'Add' : _elemwise('add'),
'Sub' : _elemwise('sub'),
'Mul' : _elemwise('mul'),
'Div' : _elemwise('div'),
'Neg' : Renamer('negative'),
'Abs' : Renamer('abs'),
'Reciprocal' : Renamer('reciprocal'),
'Floor' : Renamer('floor'),
'Ceil' : Renamer('ceil'),
'Sqrt' : Renamer('sqrt'),
'Gemm' : AttrCvt('linalg_gemm', {'transA':'transpose_a', 'transB':'transpose_b'},
ignores=['broadcast']),
'Relu' : Renamer('relu'),
'LeakyRelu' : AttrCvt('LeakyReLU', {'alpha': 'slope'}),
# 'Selu'
'Elu' : _activation('elu'),
'Exp' : Renamer('exp'),
'Log' : Renamer('log'),
'Tanh' : Renamer('tanh'),
'Pow' : AttrCvt('pow', {'exponent':'exp'}),
'Dot' : Renamer('dot'),
'MatMul' : Renamer('linalg_gemm2'),
# 'PRelu'
'Sigmoid' : Renamer('sigmoid'),
'Max' : Renamer('maximum'), #elemwise maximum
'Min' : Renamer('minimum'), #elemwise minimum
'Sum' : Renamer('add_n'), #elemwise sum
# softmax default axis is different in onnx
'Softmax' : AttrCvt('softmax', extras={'axis': 1}),
# defs/nn
'AveragePool' : _pooling('avg'),
'MaxPool' : _pooling('max'),
'Conv' : _conv(),
'ConvTranspose' : _conv_transpose(),
'GlobalAveragePool': _global_pooling('avg'),
'GlobalMaxPool' : _global_pooling('max'),
'BatchNormalization': _batch_norm(),
'SpatialBN' : _batch_norm(),
'Dropout' : AttrCvt('Dropout', {'ratio': 'p'}, ignores=['is_test']),
'Flatten' : Renamer('flatten'),
'LRN' : AttrCvt('LRN', {'bias': 'knorm', 'size' : 'nsize'}),
# defs/reduction
'ReduceMax' : AttrCvt('max', {'axes': 'axis'}),
'ReduceMin' : AttrCvt('min', {'axes': 'axis'}),
'ReduceSum' : AttrCvt('sum', {'axes': 'axis'}),
'ReduceMean' : AttrCvt('mean', {'axes': 'axis'}),
'ReduceProd' : AttrCvt('prod', {'axes': 'axis'}),
# 'ReduceLogSumExp'
'ArgMax' : Renamer('argmax'),
'ArgMin' : Renamer('argmin'),
# defs/tensor
'Cast' : AttrCvt('cast', {'to': 'dtype'}),
'Reshape' : Renamer('reshape'),
'Concat' : AttrCvt('concat', {'axis': 'dim'}),
'Split' : AttrCvt('split', {'split': 'num_outputs'}),
'Pad' : _pad(),
'Slice' : AttrCvt('slice_axis', {'axes': 'axis', 'ends': 'end', 'starts': 'begin'}),
'Transpose' : AttrCvt('transpose', {'perm': 'axes'}),
'Squeeze' : AttrCvt('split', {'axes': 'axis'}),
# 'Gather'
'Upsample' : _upsample('UpSampling')
}
|
onnx/onnx-mxnet | onnx_mxnet/__init__.py | import_model | python | def import_model(model_file):
graph = GraphProto()
# loads model file and returns ONNX protobuf object
model_proto = onnx.load(model_file)
sym, params = graph.from_onnx(model_proto.graph)
return sym, params | Imports the supplied ONNX model file into MXNet symbol and parameters.
Parameters
----------
model_file : ONNX model file name
Returns
-------
sym : mx.symbol
Compatible mxnet symbol
params : dict of str to mx.ndarray
Dict of converted parameters stored in mx.ndarray format | train | https://github.com/onnx/onnx-mxnet/blob/b602d75c5a01f5ed8f68b11150a06374f058a86b/onnx_mxnet/__init__.py#L16-L36 | [
"def from_onnx(self, graph):\n \"\"\"Construct symbol from onnx graph.\n The inputs from onnx graph is vague, only providing \"1\", \"2\"...\n For convenience, we rename the `real` input names to \"input_0\",\n \"input_1\"... And renaming parameters to \"param_0\", \"param_1\"...\n\n Parameters\n ... | # Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
# http://www.apache.org/licenses/LICENSE-2.0
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
# coding: utf-8
"""import function"""
import onnx
from .import_onnx import GraphProto
|
timknip/pycsg | csg/geom.py | Vector.plus | python | def plus(self, a):
return Vector(self.x+a.x, self.y+a.y, self.z+a.z) | Add. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L50-L52 | null | class Vector(object):
"""
class Vector
Represents a 3D vector.
Example usage:
Vector(1, 2, 3);
Vector([1, 2, 3]);
Vector({ 'x': 1, 'y': 2, 'z': 3 });
"""
def __init__(self, *args):
self.x, self.y, self.z = 0., 0., 0.
if len(args) == 3:
self.x = args[0]
self.y = args[1]
self.z = args[2]
elif len(args) == 1:
a = args[0]
if isinstance(a, dict):
self.x = a.get('x', 0.0)
self.y = a.get('y', 0.0)
self.z = a.get('z', 0.0)
elif a is not None and len(a) == 3:
self.x = a[0]
self.y = a[1]
self.z = a[2]
def __repr__(self):
return '({0}, {1}, {2})'.format(self.x, self.y, self.z)
def clone(self):
""" Clone. """
return Vector(self.x, self.y, self.z)
def negated(self):
""" Negated. """
return Vector(-self.x, -self.y, -self.z)
def __neg__(self):
return self.negated()
def __add__(self, a):
return self.plus(a)
def minus(self, a):
""" Subtract. """
return Vector(self.x-a.x, self.y-a.y, self.z-a.z)
def __sub__(self, a):
return self.minus(a)
def times(self, a):
""" Multiply. """
return Vector(self.x*a, self.y*a, self.z*a)
def __mul__(self, a):
return self.times(a)
def dividedBy(self, a):
""" Divide. """
return Vector(self.x/a, self.y/a, self.z/a)
def __truediv__(self, a):
return self.dividedBy(float(a))
def __div__(self, a):
return self.dividedBy(float(a))
def dot(self, a):
""" Dot. """
return self.x*a.x + self.y*a.y + self.z*a.z
def lerp(self, a, t):
""" Lerp. Linear interpolation from self to a"""
return self.plus(a.minus(self).times(t));
def length(self):
""" Length. """
return math.sqrt(self.dot(self))
def unit(self):
""" Normalize. """
return self.dividedBy(self.length())
def cross(self, a):
""" Cross. """
return Vector(
self.y * a.z - self.z * a.y,
self.z * a.x - self.x * a.z,
self.x * a.y - self.y * a.x)
def __getitem__(self, key):
return (self.x, self.y, self.z)[key]
def __setitem__(self, key, value):
l = [self.x, self.y, self.z]
l[key] = value
self.x, self.y, self.z = l
def __len__(self):
return 3
def __iter__(self):
return iter((self.x, self.y, self.z))
def __repr__(self):
return 'Vector(%.2f, %.2f, %0.2f)' % (self.x, self.y, self.z)
|
timknip/pycsg | csg/geom.py | Vector.minus | python | def minus(self, a):
return Vector(self.x-a.x, self.y-a.y, self.z-a.z) | Subtract. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L57-L59 | null | class Vector(object):
"""
class Vector
Represents a 3D vector.
Example usage:
Vector(1, 2, 3);
Vector([1, 2, 3]);
Vector({ 'x': 1, 'y': 2, 'z': 3 });
"""
def __init__(self, *args):
self.x, self.y, self.z = 0., 0., 0.
if len(args) == 3:
self.x = args[0]
self.y = args[1]
self.z = args[2]
elif len(args) == 1:
a = args[0]
if isinstance(a, dict):
self.x = a.get('x', 0.0)
self.y = a.get('y', 0.0)
self.z = a.get('z', 0.0)
elif a is not None and len(a) == 3:
self.x = a[0]
self.y = a[1]
self.z = a[2]
def __repr__(self):
return '({0}, {1}, {2})'.format(self.x, self.y, self.z)
def clone(self):
""" Clone. """
return Vector(self.x, self.y, self.z)
def negated(self):
""" Negated. """
return Vector(-self.x, -self.y, -self.z)
def __neg__(self):
return self.negated()
def plus(self, a):
""" Add. """
return Vector(self.x+a.x, self.y+a.y, self.z+a.z)
def __add__(self, a):
return self.plus(a)
def __sub__(self, a):
return self.minus(a)
def times(self, a):
""" Multiply. """
return Vector(self.x*a, self.y*a, self.z*a)
def __mul__(self, a):
return self.times(a)
def dividedBy(self, a):
""" Divide. """
return Vector(self.x/a, self.y/a, self.z/a)
def __truediv__(self, a):
return self.dividedBy(float(a))
def __div__(self, a):
return self.dividedBy(float(a))
def dot(self, a):
""" Dot. """
return self.x*a.x + self.y*a.y + self.z*a.z
def lerp(self, a, t):
""" Lerp. Linear interpolation from self to a"""
return self.plus(a.minus(self).times(t));
def length(self):
""" Length. """
return math.sqrt(self.dot(self))
def unit(self):
""" Normalize. """
return self.dividedBy(self.length())
def cross(self, a):
""" Cross. """
return Vector(
self.y * a.z - self.z * a.y,
self.z * a.x - self.x * a.z,
self.x * a.y - self.y * a.x)
def __getitem__(self, key):
return (self.x, self.y, self.z)[key]
def __setitem__(self, key, value):
l = [self.x, self.y, self.z]
l[key] = value
self.x, self.y, self.z = l
def __len__(self):
return 3
def __iter__(self):
return iter((self.x, self.y, self.z))
def __repr__(self):
return 'Vector(%.2f, %.2f, %0.2f)' % (self.x, self.y, self.z)
|
timknip/pycsg | csg/geom.py | Vector.times | python | def times(self, a):
return Vector(self.x*a, self.y*a, self.z*a) | Multiply. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L64-L66 | null | class Vector(object):
"""
class Vector
Represents a 3D vector.
Example usage:
Vector(1, 2, 3);
Vector([1, 2, 3]);
Vector({ 'x': 1, 'y': 2, 'z': 3 });
"""
def __init__(self, *args):
self.x, self.y, self.z = 0., 0., 0.
if len(args) == 3:
self.x = args[0]
self.y = args[1]
self.z = args[2]
elif len(args) == 1:
a = args[0]
if isinstance(a, dict):
self.x = a.get('x', 0.0)
self.y = a.get('y', 0.0)
self.z = a.get('z', 0.0)
elif a is not None and len(a) == 3:
self.x = a[0]
self.y = a[1]
self.z = a[2]
def __repr__(self):
return '({0}, {1}, {2})'.format(self.x, self.y, self.z)
def clone(self):
""" Clone. """
return Vector(self.x, self.y, self.z)
def negated(self):
""" Negated. """
return Vector(-self.x, -self.y, -self.z)
def __neg__(self):
return self.negated()
def plus(self, a):
""" Add. """
return Vector(self.x+a.x, self.y+a.y, self.z+a.z)
def __add__(self, a):
return self.plus(a)
def minus(self, a):
""" Subtract. """
return Vector(self.x-a.x, self.y-a.y, self.z-a.z)
def __sub__(self, a):
return self.minus(a)
def __mul__(self, a):
return self.times(a)
def dividedBy(self, a):
""" Divide. """
return Vector(self.x/a, self.y/a, self.z/a)
def __truediv__(self, a):
return self.dividedBy(float(a))
def __div__(self, a):
return self.dividedBy(float(a))
def dot(self, a):
""" Dot. """
return self.x*a.x + self.y*a.y + self.z*a.z
def lerp(self, a, t):
""" Lerp. Linear interpolation from self to a"""
return self.plus(a.minus(self).times(t));
def length(self):
""" Length. """
return math.sqrt(self.dot(self))
def unit(self):
""" Normalize. """
return self.dividedBy(self.length())
def cross(self, a):
""" Cross. """
return Vector(
self.y * a.z - self.z * a.y,
self.z * a.x - self.x * a.z,
self.x * a.y - self.y * a.x)
def __getitem__(self, key):
return (self.x, self.y, self.z)[key]
def __setitem__(self, key, value):
l = [self.x, self.y, self.z]
l[key] = value
self.x, self.y, self.z = l
def __len__(self):
return 3
def __iter__(self):
return iter((self.x, self.y, self.z))
def __repr__(self):
return 'Vector(%.2f, %.2f, %0.2f)' % (self.x, self.y, self.z)
|
timknip/pycsg | csg/geom.py | Vector.dividedBy | python | def dividedBy(self, a):
return Vector(self.x/a, self.y/a, self.z/a) | Divide. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L71-L73 | null | class Vector(object):
"""
class Vector
Represents a 3D vector.
Example usage:
Vector(1, 2, 3);
Vector([1, 2, 3]);
Vector({ 'x': 1, 'y': 2, 'z': 3 });
"""
def __init__(self, *args):
self.x, self.y, self.z = 0., 0., 0.
if len(args) == 3:
self.x = args[0]
self.y = args[1]
self.z = args[2]
elif len(args) == 1:
a = args[0]
if isinstance(a, dict):
self.x = a.get('x', 0.0)
self.y = a.get('y', 0.0)
self.z = a.get('z', 0.0)
elif a is not None and len(a) == 3:
self.x = a[0]
self.y = a[1]
self.z = a[2]
def __repr__(self):
return '({0}, {1}, {2})'.format(self.x, self.y, self.z)
def clone(self):
""" Clone. """
return Vector(self.x, self.y, self.z)
def negated(self):
""" Negated. """
return Vector(-self.x, -self.y, -self.z)
def __neg__(self):
return self.negated()
def plus(self, a):
""" Add. """
return Vector(self.x+a.x, self.y+a.y, self.z+a.z)
def __add__(self, a):
return self.plus(a)
def minus(self, a):
""" Subtract. """
return Vector(self.x-a.x, self.y-a.y, self.z-a.z)
def __sub__(self, a):
return self.minus(a)
def times(self, a):
""" Multiply. """
return Vector(self.x*a, self.y*a, self.z*a)
def __mul__(self, a):
return self.times(a)
def __truediv__(self, a):
return self.dividedBy(float(a))
def __div__(self, a):
return self.dividedBy(float(a))
def dot(self, a):
""" Dot. """
return self.x*a.x + self.y*a.y + self.z*a.z
def lerp(self, a, t):
""" Lerp. Linear interpolation from self to a"""
return self.plus(a.minus(self).times(t));
def length(self):
""" Length. """
return math.sqrt(self.dot(self))
def unit(self):
""" Normalize. """
return self.dividedBy(self.length())
def cross(self, a):
""" Cross. """
return Vector(
self.y * a.z - self.z * a.y,
self.z * a.x - self.x * a.z,
self.x * a.y - self.y * a.x)
def __getitem__(self, key):
return (self.x, self.y, self.z)[key]
def __setitem__(self, key, value):
l = [self.x, self.y, self.z]
l[key] = value
self.x, self.y, self.z = l
def __len__(self):
return 3
def __iter__(self):
return iter((self.x, self.y, self.z))
def __repr__(self):
return 'Vector(%.2f, %.2f, %0.2f)' % (self.x, self.y, self.z)
|
timknip/pycsg | csg/geom.py | Vector.lerp | python | def lerp(self, a, t):
return self.plus(a.minus(self).times(t)); | Lerp. Linear interpolation from self to a | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L85-L87 | [
"def plus(self, a):\n \"\"\" Add. \"\"\"\n return Vector(self.x+a.x, self.y+a.y, self.z+a.z)\n"
] | class Vector(object):
"""
class Vector
Represents a 3D vector.
Example usage:
Vector(1, 2, 3);
Vector([1, 2, 3]);
Vector({ 'x': 1, 'y': 2, 'z': 3 });
"""
def __init__(self, *args):
self.x, self.y, self.z = 0., 0., 0.
if len(args) == 3:
self.x = args[0]
self.y = args[1]
self.z = args[2]
elif len(args) == 1:
a = args[0]
if isinstance(a, dict):
self.x = a.get('x', 0.0)
self.y = a.get('y', 0.0)
self.z = a.get('z', 0.0)
elif a is not None and len(a) == 3:
self.x = a[0]
self.y = a[1]
self.z = a[2]
def __repr__(self):
return '({0}, {1}, {2})'.format(self.x, self.y, self.z)
def clone(self):
""" Clone. """
return Vector(self.x, self.y, self.z)
def negated(self):
""" Negated. """
return Vector(-self.x, -self.y, -self.z)
def __neg__(self):
return self.negated()
def plus(self, a):
""" Add. """
return Vector(self.x+a.x, self.y+a.y, self.z+a.z)
def __add__(self, a):
return self.plus(a)
def minus(self, a):
""" Subtract. """
return Vector(self.x-a.x, self.y-a.y, self.z-a.z)
def __sub__(self, a):
return self.minus(a)
def times(self, a):
""" Multiply. """
return Vector(self.x*a, self.y*a, self.z*a)
def __mul__(self, a):
return self.times(a)
def dividedBy(self, a):
""" Divide. """
return Vector(self.x/a, self.y/a, self.z/a)
def __truediv__(self, a):
return self.dividedBy(float(a))
def __div__(self, a):
return self.dividedBy(float(a))
def dot(self, a):
""" Dot. """
return self.x*a.x + self.y*a.y + self.z*a.z
def length(self):
""" Length. """
return math.sqrt(self.dot(self))
def unit(self):
""" Normalize. """
return self.dividedBy(self.length())
def cross(self, a):
""" Cross. """
return Vector(
self.y * a.z - self.z * a.y,
self.z * a.x - self.x * a.z,
self.x * a.y - self.y * a.x)
def __getitem__(self, key):
return (self.x, self.y, self.z)[key]
def __setitem__(self, key, value):
l = [self.x, self.y, self.z]
l[key] = value
self.x, self.y, self.z = l
def __len__(self):
return 3
def __iter__(self):
return iter((self.x, self.y, self.z))
def __repr__(self):
return 'Vector(%.2f, %.2f, %0.2f)' % (self.x, self.y, self.z)
|
timknip/pycsg | csg/geom.py | Vertex.interpolate | python | def interpolate(self, other, t):
return Vertex(self.pos.lerp(other.pos, t),
self.normal.lerp(other.normal, t)) | Create a new vertex between this vertex and `other` by linearly
interpolating all properties using a parameter of `t`. Subclasses should
override this to interpolate additional properties. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L147-L154 | null | class Vertex(object):
"""
Class Vertex
Represents a vertex of a polygon. Use your own vertex class instead of this
one to provide additional features like texture coordinates and vertex
colors. Custom vertex classes need to provide a `pos` property and `clone()`,
`flip()`, and `interpolate()` methods that behave analogous to the ones
defined by `Vertex`. This class provides `normal` so convenience
functions like `CSG.sphere()` can return a smooth vertex normal, but `normal`
is not used anywhere else.
"""
def __init__(self, pos, normal=None):
self.pos = Vector(pos)
self.normal = Vector(normal)
def clone(self):
return Vertex(self.pos.clone(), self.normal.clone())
def flip(self):
"""
Invert all orientation-specific data (e.g. vertex normal). Called when the
orientation of a polygon is flipped.
"""
self.normal = self.normal.negated()
def __repr__(self):
return repr(self.pos)
|
timknip/pycsg | csg/geom.py | Plane.splitPolygon | python | def splitPolygon(self, polygon, coplanarFront, coplanarBack, front, back):
COPLANAR = 0 # all the vertices are within EPSILON distance from plane
FRONT = 1 # all the vertices are in front of the plane
BACK = 2 # all the vertices are at the back of the plane
SPANNING = 3 # some vertices are in front, some in the back
# Classify each point as well as the entire polygon into one of the above
# four classes.
polygonType = 0
vertexLocs = []
numVertices = len(polygon.vertices)
for i in range(numVertices):
t = self.normal.dot(polygon.vertices[i].pos) - self.w
loc = -1
if t < -Plane.EPSILON:
loc = BACK
elif t > Plane.EPSILON:
loc = FRONT
else:
loc = COPLANAR
polygonType |= loc
vertexLocs.append(loc)
# Put the polygon in the correct list, splitting it when necessary.
if polygonType == COPLANAR:
normalDotPlaneNormal = self.normal.dot(polygon.plane.normal)
if normalDotPlaneNormal > 0:
coplanarFront.append(polygon)
else:
coplanarBack.append(polygon)
elif polygonType == FRONT:
front.append(polygon)
elif polygonType == BACK:
back.append(polygon)
elif polygonType == SPANNING:
f = []
b = []
for i in range(numVertices):
j = (i+1) % numVertices
ti = vertexLocs[i]
tj = vertexLocs[j]
vi = polygon.vertices[i]
vj = polygon.vertices[j]
if ti != BACK:
f.append(vi)
if ti != FRONT:
if ti != BACK:
b.append(vi.clone())
else:
b.append(vi)
if (ti | tj) == SPANNING:
# interpolation weight at the intersection point
t = (self.w - self.normal.dot(vi.pos)) / self.normal.dot(vj.pos.minus(vi.pos))
# intersection point on the plane
v = vi.interpolate(vj, t)
f.append(v)
b.append(v.clone())
if len(f) >= 3:
front.append(Polygon(f, polygon.shared))
if len(b) >= 3:
back.append(Polygon(b, polygon.shared)) | Split `polygon` by this plane if needed, then put the polygon or polygon
fragments in the appropriate lists. Coplanar polygons go into either
`coplanarFront` or `coplanarBack` depending on their orientation with
respect to this plane. Polygons in front or in back of this plane go into
either `front` or `back` | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L192-L260 | null | class Plane(object):
"""
class Plane
Represents a plane in 3D space.
"""
"""
`Plane.EPSILON` is the tolerance used by `splitPolygon()` to decide if a
point is on the plane.
"""
EPSILON = 1.e-5
def __init__(self, normal, w):
self.normal = normal
# w is the (perpendicular) distance of the plane from (0, 0, 0)
self.w = w
@classmethod
def fromPoints(cls, a, b, c):
n = b.minus(a).cross(c.minus(a)).unit()
return Plane(n, n.dot(a))
def clone(self):
return Plane(self.normal.clone(), self.w)
def flip(self):
self.normal = self.normal.negated()
self.w = -self.w
def __repr__(self):
return 'normal: {0} w: {1}'.format(self.normal, self.w)
def splitPolygon(self, polygon, coplanarFront, coplanarBack, front, back):
"""
Split `polygon` by this plane if needed, then put the polygon or polygon
fragments in the appropriate lists. Coplanar polygons go into either
`coplanarFront` or `coplanarBack` depending on their orientation with
respect to this plane. Polygons in front or in back of this plane go into
either `front` or `back`
"""
COPLANAR = 0 # all the vertices are within EPSILON distance from plane
FRONT = 1 # all the vertices are in front of the plane
BACK = 2 # all the vertices are at the back of the plane
SPANNING = 3 # some vertices are in front, some in the back
# Classify each point as well as the entire polygon into one of the above
# four classes.
polygonType = 0
vertexLocs = []
numVertices = len(polygon.vertices)
for i in range(numVertices):
t = self.normal.dot(polygon.vertices[i].pos) - self.w
loc = -1
if t < -Plane.EPSILON:
loc = BACK
elif t > Plane.EPSILON:
loc = FRONT
else:
loc = COPLANAR
polygonType |= loc
vertexLocs.append(loc)
# Put the polygon in the correct list, splitting it when necessary.
if polygonType == COPLANAR:
normalDotPlaneNormal = self.normal.dot(polygon.plane.normal)
if normalDotPlaneNormal > 0:
coplanarFront.append(polygon)
else:
coplanarBack.append(polygon)
elif polygonType == FRONT:
front.append(polygon)
elif polygonType == BACK:
back.append(polygon)
elif polygonType == SPANNING:
f = []
b = []
for i in range(numVertices):
j = (i+1) % numVertices
ti = vertexLocs[i]
tj = vertexLocs[j]
vi = polygon.vertices[i]
vj = polygon.vertices[j]
if ti != BACK:
f.append(vi)
if ti != FRONT:
if ti != BACK:
b.append(vi.clone())
else:
b.append(vi)
if (ti | tj) == SPANNING:
# interpolation weight at the intersection point
t = (self.w - self.normal.dot(vi.pos)) / self.normal.dot(vj.pos.minus(vi.pos))
# intersection point on the plane
v = vi.interpolate(vj, t)
f.append(v)
b.append(v.clone())
if len(f) >= 3:
front.append(Polygon(f, polygon.shared))
if len(b) >= 3:
back.append(Polygon(b, polygon.shared))
|
timknip/pycsg | csg/geom.py | BSPNode.invert | python | def invert(self):
for poly in self.polygons:
poly.flip()
self.plane.flip()
if self.front:
self.front.invert()
if self.back:
self.back.invert()
temp = self.front
self.front = self.back
self.back = temp | Convert solid space to empty space and empty space to solid space. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L323-L336 | null | class BSPNode(object):
"""
class BSPNode
Holds a node in a BSP tree. A BSP tree is built from a collection of polygons
by picking a polygon to split along. That polygon (and all other coplanar
polygons) are added directly to that node and the other polygons are added to
the front and/or back subtrees. This is not a leafy BSP tree since there is
no distinction between internal and leaf nodes.
"""
def __init__(self, polygons=None):
self.plane = None # Plane instance
self.front = None # BSPNode
self.back = None # BSPNode
self.polygons = []
if polygons:
self.build(polygons)
def clone(self):
node = BSPNode()
if self.plane:
node.plane = self.plane.clone()
if self.front:
node.front = self.front.clone()
if self.back:
node.back = self.back.clone()
node.polygons = list(map(lambda p: p.clone(), self.polygons))
return node
def clipPolygons(self, polygons):
"""
Recursively remove all polygons in `polygons` that are inside this BSP
tree.
"""
if not self.plane:
return polygons[:]
front = []
back = []
for poly in polygons:
self.plane.splitPolygon(poly, front, back, front, back)
if self.front:
front = self.front.clipPolygons(front)
if self.back:
back = self.back.clipPolygons(back)
else:
back = []
front.extend(back)
return front
def clipTo(self, bsp):
"""
Remove all polygons in this BSP tree that are inside the other BSP tree
`bsp`.
"""
self.polygons = bsp.clipPolygons(self.polygons)
if self.front:
self.front.clipTo(bsp)
if self.back:
self.back.clipTo(bsp)
def allPolygons(self):
"""
Return a list of all polygons in this BSP tree.
"""
polygons = self.polygons[:]
if self.front:
polygons.extend(self.front.allPolygons())
if self.back:
polygons.extend(self.back.allPolygons())
return polygons
def build(self, polygons):
"""
Build a BSP tree out of `polygons`. When called on an existing tree, the
new polygons are filtered down to the bottom of the tree and become new
nodes there. Each set of polygons is partitioned using the first polygon
(no heuristic is used to pick a good split).
"""
if len(polygons) == 0:
return
if not self.plane:
self.plane = polygons[0].plane.clone()
# add polygon to this node
self.polygons.append(polygons[0])
front = []
back = []
# split all other polygons using the first polygon's plane
for poly in polygons[1:]:
# coplanar front and back polygons go into self.polygons
self.plane.splitPolygon(poly, self.polygons, self.polygons,
front, back)
# recursively build the BSP tree
if len(front) > 0:
if not self.front:
self.front = BSPNode()
self.front.build(front)
if len(back) > 0:
if not self.back:
self.back = BSPNode()
self.back.build(back)
|
timknip/pycsg | csg/geom.py | BSPNode.clipPolygons | python | def clipPolygons(self, polygons):
if not self.plane:
return polygons[:]
front = []
back = []
for poly in polygons:
self.plane.splitPolygon(poly, front, back, front, back)
if self.front:
front = self.front.clipPolygons(front)
if self.back:
back = self.back.clipPolygons(back)
else:
back = []
front.extend(back)
return front | Recursively remove all polygons in `polygons` that are inside this BSP
tree. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L338-L360 | null | class BSPNode(object):
"""
class BSPNode
Holds a node in a BSP tree. A BSP tree is built from a collection of polygons
by picking a polygon to split along. That polygon (and all other coplanar
polygons) are added directly to that node and the other polygons are added to
the front and/or back subtrees. This is not a leafy BSP tree since there is
no distinction between internal and leaf nodes.
"""
def __init__(self, polygons=None):
self.plane = None # Plane instance
self.front = None # BSPNode
self.back = None # BSPNode
self.polygons = []
if polygons:
self.build(polygons)
def clone(self):
node = BSPNode()
if self.plane:
node.plane = self.plane.clone()
if self.front:
node.front = self.front.clone()
if self.back:
node.back = self.back.clone()
node.polygons = list(map(lambda p: p.clone(), self.polygons))
return node
def invert(self):
"""
Convert solid space to empty space and empty space to solid space.
"""
for poly in self.polygons:
poly.flip()
self.plane.flip()
if self.front:
self.front.invert()
if self.back:
self.back.invert()
temp = self.front
self.front = self.back
self.back = temp
def clipTo(self, bsp):
"""
Remove all polygons in this BSP tree that are inside the other BSP tree
`bsp`.
"""
self.polygons = bsp.clipPolygons(self.polygons)
if self.front:
self.front.clipTo(bsp)
if self.back:
self.back.clipTo(bsp)
def allPolygons(self):
"""
Return a list of all polygons in this BSP tree.
"""
polygons = self.polygons[:]
if self.front:
polygons.extend(self.front.allPolygons())
if self.back:
polygons.extend(self.back.allPolygons())
return polygons
def build(self, polygons):
"""
Build a BSP tree out of `polygons`. When called on an existing tree, the
new polygons are filtered down to the bottom of the tree and become new
nodes there. Each set of polygons is partitioned using the first polygon
(no heuristic is used to pick a good split).
"""
if len(polygons) == 0:
return
if not self.plane:
self.plane = polygons[0].plane.clone()
# add polygon to this node
self.polygons.append(polygons[0])
front = []
back = []
# split all other polygons using the first polygon's plane
for poly in polygons[1:]:
# coplanar front and back polygons go into self.polygons
self.plane.splitPolygon(poly, self.polygons, self.polygons,
front, back)
# recursively build the BSP tree
if len(front) > 0:
if not self.front:
self.front = BSPNode()
self.front.build(front)
if len(back) > 0:
if not self.back:
self.back = BSPNode()
self.back.build(back)
|
timknip/pycsg | csg/geom.py | BSPNode.clipTo | python | def clipTo(self, bsp):
self.polygons = bsp.clipPolygons(self.polygons)
if self.front:
self.front.clipTo(bsp)
if self.back:
self.back.clipTo(bsp) | Remove all polygons in this BSP tree that are inside the other BSP tree
`bsp`. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L362-L371 | [
"def clipPolygons(self, polygons):\n \"\"\" \n Recursively remove all polygons in `polygons` that are inside this BSP\n tree.\n \"\"\"\n if not self.plane: \n return polygons[:]\n\n front = []\n back = []\n for poly in polygons:\n self.plane.splitPolygon(poly, front, back, fron... | class BSPNode(object):
"""
class BSPNode
Holds a node in a BSP tree. A BSP tree is built from a collection of polygons
by picking a polygon to split along. That polygon (and all other coplanar
polygons) are added directly to that node and the other polygons are added to
the front and/or back subtrees. This is not a leafy BSP tree since there is
no distinction between internal and leaf nodes.
"""
def __init__(self, polygons=None):
self.plane = None # Plane instance
self.front = None # BSPNode
self.back = None # BSPNode
self.polygons = []
if polygons:
self.build(polygons)
def clone(self):
node = BSPNode()
if self.plane:
node.plane = self.plane.clone()
if self.front:
node.front = self.front.clone()
if self.back:
node.back = self.back.clone()
node.polygons = list(map(lambda p: p.clone(), self.polygons))
return node
def invert(self):
"""
Convert solid space to empty space and empty space to solid space.
"""
for poly in self.polygons:
poly.flip()
self.plane.flip()
if self.front:
self.front.invert()
if self.back:
self.back.invert()
temp = self.front
self.front = self.back
self.back = temp
def clipPolygons(self, polygons):
"""
Recursively remove all polygons in `polygons` that are inside this BSP
tree.
"""
if not self.plane:
return polygons[:]
front = []
back = []
for poly in polygons:
self.plane.splitPolygon(poly, front, back, front, back)
if self.front:
front = self.front.clipPolygons(front)
if self.back:
back = self.back.clipPolygons(back)
else:
back = []
front.extend(back)
return front
def allPolygons(self):
"""
Return a list of all polygons in this BSP tree.
"""
polygons = self.polygons[:]
if self.front:
polygons.extend(self.front.allPolygons())
if self.back:
polygons.extend(self.back.allPolygons())
return polygons
def build(self, polygons):
"""
Build a BSP tree out of `polygons`. When called on an existing tree, the
new polygons are filtered down to the bottom of the tree and become new
nodes there. Each set of polygons is partitioned using the first polygon
(no heuristic is used to pick a good split).
"""
if len(polygons) == 0:
return
if not self.plane:
self.plane = polygons[0].plane.clone()
# add polygon to this node
self.polygons.append(polygons[0])
front = []
back = []
# split all other polygons using the first polygon's plane
for poly in polygons[1:]:
# coplanar front and back polygons go into self.polygons
self.plane.splitPolygon(poly, self.polygons, self.polygons,
front, back)
# recursively build the BSP tree
if len(front) > 0:
if not self.front:
self.front = BSPNode()
self.front.build(front)
if len(back) > 0:
if not self.back:
self.back = BSPNode()
self.back.build(back)
|
timknip/pycsg | csg/geom.py | BSPNode.allPolygons | python | def allPolygons(self):
polygons = self.polygons[:]
if self.front:
polygons.extend(self.front.allPolygons())
if self.back:
polygons.extend(self.back.allPolygons())
return polygons | Return a list of all polygons in this BSP tree. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L373-L382 | null | class BSPNode(object):
"""
class BSPNode
Holds a node in a BSP tree. A BSP tree is built from a collection of polygons
by picking a polygon to split along. That polygon (and all other coplanar
polygons) are added directly to that node and the other polygons are added to
the front and/or back subtrees. This is not a leafy BSP tree since there is
no distinction between internal and leaf nodes.
"""
def __init__(self, polygons=None):
self.plane = None # Plane instance
self.front = None # BSPNode
self.back = None # BSPNode
self.polygons = []
if polygons:
self.build(polygons)
def clone(self):
node = BSPNode()
if self.plane:
node.plane = self.plane.clone()
if self.front:
node.front = self.front.clone()
if self.back:
node.back = self.back.clone()
node.polygons = list(map(lambda p: p.clone(), self.polygons))
return node
def invert(self):
"""
Convert solid space to empty space and empty space to solid space.
"""
for poly in self.polygons:
poly.flip()
self.plane.flip()
if self.front:
self.front.invert()
if self.back:
self.back.invert()
temp = self.front
self.front = self.back
self.back = temp
def clipPolygons(self, polygons):
"""
Recursively remove all polygons in `polygons` that are inside this BSP
tree.
"""
if not self.plane:
return polygons[:]
front = []
back = []
for poly in polygons:
self.plane.splitPolygon(poly, front, back, front, back)
if self.front:
front = self.front.clipPolygons(front)
if self.back:
back = self.back.clipPolygons(back)
else:
back = []
front.extend(back)
return front
def clipTo(self, bsp):
"""
Remove all polygons in this BSP tree that are inside the other BSP tree
`bsp`.
"""
self.polygons = bsp.clipPolygons(self.polygons)
if self.front:
self.front.clipTo(bsp)
if self.back:
self.back.clipTo(bsp)
def build(self, polygons):
"""
Build a BSP tree out of `polygons`. When called on an existing tree, the
new polygons are filtered down to the bottom of the tree and become new
nodes there. Each set of polygons is partitioned using the first polygon
(no heuristic is used to pick a good split).
"""
if len(polygons) == 0:
return
if not self.plane:
self.plane = polygons[0].plane.clone()
# add polygon to this node
self.polygons.append(polygons[0])
front = []
back = []
# split all other polygons using the first polygon's plane
for poly in polygons[1:]:
# coplanar front and back polygons go into self.polygons
self.plane.splitPolygon(poly, self.polygons, self.polygons,
front, back)
# recursively build the BSP tree
if len(front) > 0:
if not self.front:
self.front = BSPNode()
self.front.build(front)
if len(back) > 0:
if not self.back:
self.back = BSPNode()
self.back.build(back)
|
timknip/pycsg | csg/geom.py | BSPNode.build | python | def build(self, polygons):
if len(polygons) == 0:
return
if not self.plane:
self.plane = polygons[0].plane.clone()
# add polygon to this node
self.polygons.append(polygons[0])
front = []
back = []
# split all other polygons using the first polygon's plane
for poly in polygons[1:]:
# coplanar front and back polygons go into self.polygons
self.plane.splitPolygon(poly, self.polygons, self.polygons,
front, back)
# recursively build the BSP tree
if len(front) > 0:
if not self.front:
self.front = BSPNode()
self.front.build(front)
if len(back) > 0:
if not self.back:
self.back = BSPNode()
self.back.build(back) | Build a BSP tree out of `polygons`. When called on an existing tree, the
new polygons are filtered down to the bottom of the tree and become new
nodes there. Each set of polygons is partitioned using the first polygon
(no heuristic is used to pick a good split). | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/geom.py#L384-L412 | [
"def build(self, polygons):\n \"\"\"\n Build a BSP tree out of `polygons`. When called on an existing tree, the\n new polygons are filtered down to the bottom of the tree and become new\n nodes there. Each set of polygons is partitioned using the first polygon\n (no heuristic is used to pick a good s... | class BSPNode(object):
"""
class BSPNode
Holds a node in a BSP tree. A BSP tree is built from a collection of polygons
by picking a polygon to split along. That polygon (and all other coplanar
polygons) are added directly to that node and the other polygons are added to
the front and/or back subtrees. This is not a leafy BSP tree since there is
no distinction between internal and leaf nodes.
"""
def __init__(self, polygons=None):
self.plane = None # Plane instance
self.front = None # BSPNode
self.back = None # BSPNode
self.polygons = []
if polygons:
self.build(polygons)
def clone(self):
node = BSPNode()
if self.plane:
node.plane = self.plane.clone()
if self.front:
node.front = self.front.clone()
if self.back:
node.back = self.back.clone()
node.polygons = list(map(lambda p: p.clone(), self.polygons))
return node
def invert(self):
"""
Convert solid space to empty space and empty space to solid space.
"""
for poly in self.polygons:
poly.flip()
self.plane.flip()
if self.front:
self.front.invert()
if self.back:
self.back.invert()
temp = self.front
self.front = self.back
self.back = temp
def clipPolygons(self, polygons):
"""
Recursively remove all polygons in `polygons` that are inside this BSP
tree.
"""
if not self.plane:
return polygons[:]
front = []
back = []
for poly in polygons:
self.plane.splitPolygon(poly, front, back, front, back)
if self.front:
front = self.front.clipPolygons(front)
if self.back:
back = self.back.clipPolygons(back)
else:
back = []
front.extend(back)
return front
def clipTo(self, bsp):
"""
Remove all polygons in this BSP tree that are inside the other BSP tree
`bsp`.
"""
self.polygons = bsp.clipPolygons(self.polygons)
if self.front:
self.front.clipTo(bsp)
if self.back:
self.back.clipTo(bsp)
def allPolygons(self):
"""
Return a list of all polygons in this BSP tree.
"""
polygons = self.polygons[:]
if self.front:
polygons.extend(self.front.allPolygons())
if self.back:
polygons.extend(self.back.allPolygons())
return polygons
|
timknip/pycsg | csg/core.py | CSG.refine | python | def refine(self):
newCSG = CSG()
for poly in self.polygons:
verts = poly.vertices
numVerts = len(verts)
if numVerts == 0:
continue
midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
midNormal = None
if verts[0].normal is not None:
midNormal = poly.plane.normal
midVert = Vertex(midPos, midNormal)
newVerts = verts + \
[verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
[midVert]
i = 0
vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
newPoly = Polygon(vs, poly.shared)
newPoly.shared = poly.shared
newPoly.plane = poly.plane
newCSG.polygons.append(newPoly)
for i in range(1, numVerts):
vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
newPoly = Polygon(vs, poly.shared)
newCSG.polygons.append(newPoly)
return newCSG | Return a refined CSG. To each polygon, a middle point is added to each edge and to the center
of the polygon | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L75-L111 | null | class CSG(object):
"""
Constructive Solid Geometry (CSG) is a modeling technique that uses Boolean
operations like union and intersection to combine 3D solids. This library
implements CSG operations on meshes elegantly and concisely using BSP trees,
and is meant to serve as an easily understandable implementation of the
algorithm. All edge cases involving overlapping coplanar polygons in both
solids are correctly handled.
Example usage::
from csg.core import CSG
cube = CSG.cube();
    sphere = CSG.sphere(radius=1.3);
polygons = cube.subtract(sphere).toPolygons();
## Implementation Details
All CSG operations are implemented in terms of two functions, `clipTo()` and
`invert()`, which remove parts of a BSP tree inside another BSP tree and swap
solid and empty space, respectively. To find the union of `a` and `b`, we
want to remove everything in `a` inside `b` and everything in `b` inside `a`,
then combine polygons from `a` and `b` into one solid::
a.clipTo(b);
b.clipTo(a);
a.build(b.allPolygons());
The only tricky part is handling overlapping coplanar polygons in both trees.
The code above keeps both copies, but we need to keep them in one tree and
remove them in the other tree. To remove them from `b` we can clip the
inverse of `b` against `a`. The code for union now looks like this::
a.clipTo(b);
b.clipTo(a);
b.invert();
b.clipTo(a);
b.invert();
a.build(b.allPolygons());
Subtraction and intersection naturally follow from set operations. If
union is `A | B`, subtraction is `A - B = ~(~A | B)` and intersection is
`A & B = ~(~A | ~B)` where `~` is the complement operator.
## License
Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
Additions by Alex Pletzer (Pennsylvania State University)
"""
    def __init__(self):
        """Create an empty solid with no polygons."""
        self.polygons = []
@classmethod
def fromPolygons(cls, polygons):
csg = CSG()
csg.polygons = polygons
return csg
def clone(self):
csg = CSG()
csg.polygons = list(map(lambda p: p.clone(), self.polygons))
return csg
    def toPolygons(self):
        """Return the solid's polygon list (the live list, not a copy)."""
        return self.polygons
    def refine(self):
        """
        Return a refined CSG: each polygon is subdivided into quads by adding
        a midpoint on every edge and one vertex at the polygon's centre.
        """
        newCSG = CSG()
        for poly in self.polygons:
            verts = poly.vertices
            numVerts = len(verts)
            if numVerts == 0:
                continue
            # Centroid of the polygon's vertex positions.
            # NOTE(review): relies on `reduce` and `operator` being in scope
            # (on Python 3 `reduce` lives in functools) and on vertex
            # positions supporting `+` and `/` -- confirm against csg.geom.
            midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
            midNormal = None
            if verts[0].normal is not None:
                midNormal = poly.plane.normal
            midVert = Vertex(midPos, midNormal)
            # Layout of newVerts:
            #   [0, numVerts)            original vertices
            #   [numVerts, 2*numVerts)   midpoint of edge (i, i+1)
            #   [2*numVerts]             the centre vertex
            newVerts = verts + \
                       [verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
                       [midVert]
            i = 0
            # Quad for vertex 0: original vertex, midpoint of its edge, the
            # centre, and the midpoint of the *previous* edge -- which for
            # i == 0 wraps around to index 2*numVerts-1.
            vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
            newPoly = Polygon(vs, poly.shared)
            newPoly.shared = poly.shared
            newPoly.plane = poly.plane
            newCSG.polygons.append(newPoly)
            # Quads for the remaining vertices follow the same pattern
            # without the wrap-around.
            for i in range(1, numVerts):
                vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
                newPoly = Polygon(vs, poly.shared)
                newCSG.polygons.append(newPoly)
        return newCSG
def translate(self, disp):
"""
Translate Geometry.
disp: displacement (array of floats)
"""
d = Vector(disp[0], disp[1], disp[2])
for poly in self.polygons:
for v in poly.vertices:
v.pos = v.pos.plus(d)
# no change to the normals
    def rotate(self, axis, angleDeg):
        """
        Rotate the geometry in place about `axis` by `angleDeg` degrees.
        axis: axis of rotation (sequence of three floats; need not be unit)
        angleDeg: rotation angle in degrees
        """
        ax = Vector(axis[0], axis[1], axis[2]).unit()
        cosAngle = math.cos(math.pi * angleDeg / 180.)
        sinAngle = math.sin(math.pi * angleDeg / 180.)
        def newVector(v):
            # Rotate v about `ax` by decomposing it into a component parallel
            # to the axis (left unchanged) and a perpendicular component
            # (rotated in the plane spanned by u1 and u2).
            vA = v.dot(ax)
            vPerp = v.minus(ax.times(vA))
            vPerpLen = vPerp.length()
            if vPerpLen == 0:
                # vector is parallel to axis, no need to rotate
                return v
            u1 = vPerp.unit()
            u2 = u1.cross(ax)
            vCosA = vPerpLen*cosAngle
            vSinA = vPerpLen*sinAngle
            return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA)))
        for poly in self.polygons:
            for vert in poly.vertices:
                vert.pos = newVector(vert.pos)
                # NOTE(review): normal.length() is called unconditionally, so
                # this assumes every vertex carries a vector normal (a None
                # normal would raise AttributeError) -- confirm against
                # csg.geom.Vertex.
                normal = vert.normal
                if normal.length() > 0:
                    vert.normal = newVector(vert.normal)
def toVerticesAndPolygons(self):
"""
Return list of vertices, polygons (cells), and the total
number of vertex indices in the polygon connectivity list
(count).
"""
offset = 1.234567890
verts = []
polys = []
vertexIndexMap = {}
count = 0
for poly in self.polygons:
verts = poly.vertices
cell = []
for v in poly.vertices:
p = v.pos
# use string key to remove degeneracy associated
# very close points. The format %.10e ensures that
# points differing in the 11 digits and higher are
# treated as the same. For instance 1.2e-10 and
# 1.3e-10 are essentially the same.
vKey = '%.10e,%.10e,%.10e' % (p[0] + offset,
p[1] + offset,
p[2] + offset)
if not vKey in vertexIndexMap:
vertexIndexMap[vKey] = len(vertexIndexMap)
index = vertexIndexMap[vKey]
cell.append(index)
count += 1
polys.append(cell)
# sort by index
sortedVertexIndex = sorted(vertexIndexMap.items(),
key=operator.itemgetter(1))
verts = []
for v, i in sortedVertexIndex:
p = []
for c in v.split(','):
p.append(float(c) - offset)
verts.append(tuple(p))
return verts, polys, count
def saveVTK(self, filename):
"""
Save polygons in VTK file.
"""
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n')
def union(self, csg):
"""
Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons())
    def __add__(self, csg):
        """Operator alias for union(): ``a + b``."""
        return self.union(csg)
    def subtract(self, csg):
        """
        Return a new CSG solid representing space in this solid but not in
        `csg` (i.e. ``A - B``). Neither operand is modified.
        """
        a = BSPNode(self.clone().polygons)
        b = BSPNode(csg.clone().polygons)
        # A - B == ~(~A | B): invert A, take the union with B, then invert
        # the result (see the class docstring for the identity).
        a.invert()
        a.clipTo(b)
        b.clipTo(a)
        # Drop overlapping coplanar polygons from B by clipping its inverse
        # against A.
        b.invert()
        b.clipTo(a)
        b.invert()
        a.build(b.allPolygons())
        a.invert()
        return CSG.fromPolygons(a.allPolygons())
    def __sub__(self, csg):
        """Operator alias for subtract(): ``a - b``."""
        return self.subtract(csg)
def intersect(self, csg):
"""
Return a new CSG solid representing space both this solid and in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.intersect(B)
+-------+
| |
| A |
| +--+----+ = +--+
+----+--+ | +--+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
b.clipTo(a)
b.invert()
a.clipTo(b)
b.clipTo(a)
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
    def __mul__(self, csg):
        """Operator alias for intersect(): ``a * b``."""
        return self.intersect(csg)
def inverse(self):
"""
Return a new CSG solid with solid and empty space switched. This solid is
not modified.
"""
csg = self.clone()
map(lambda p: p.flip(), csg.polygons)
return csg
    @classmethod
    def cube(cls, center=[0,0,0], radius=[1,1,1]):
        """
        Construct an axis-aligned solid cuboid. Optional parameters are
        `center` and `radius`, defaulting to `[0, 0, 0]` and `[1, 1, 1]`.
        `radius` may be a single number or a list of three numbers, one per
        axis. (The mutable default lists are never mutated here.)
        Example code::
            cube = CSG.cube(
                center=[0, 0, 0],
                radius=1
            )
        """
        c = Vector(0, 0, 0)
        r = [1, 1, 1]
        # NOTE(review): assumes Vector accepts a single sequence argument
        # here (elsewhere this file uses Vector(*s)) -- confirm against
        # csg.geom.Vector.
        if isinstance(center, list): c = Vector(center)
        if isinstance(radius, list): r = radius
        else: r = [radius, radius, radius]
        # Corner index i is a 3-bit code: bit 0 selects the +x side, bit 1
        # the +y side, bit 2 the +z side; (2 * bool(..) - 1) maps each bit
        # to -1 or +1. The second entry of each table row looks like a face
        # normal but is never read (only v[0] is used); vertices are created
        # with normal=None.
        polygons = list(map(
            lambda v: Polygon(
                list(map(lambda i:
                    Vertex(
                        Vector(
                            c.x + r[0] * (2 * bool(i & 1) - 1),
                            c.y + r[1] * (2 * bool(i & 2) - 1),
                            c.z + r[2] * (2 * bool(i & 4) - 1)
                        ),
                        None
                    ), v[0]))),
                [
                    [[0, 4, 6, 2], [-1, 0, 0]],
                    [[1, 3, 7, 5], [+1, 0, 0]],
                    [[0, 1, 5, 4], [0, -1, 0]],
                    [[2, 6, 7, 3], [0, +1, 0]],
                    [[0, 2, 3, 1], [0, 0, -1]],
                    [[4, 5, 7, 6], [0, 0, +1]]
                ]))
        return CSG.fromPolygons(polygons)
    @classmethod
    def sphere(cls, **kwargs):
        """Return a sphere built from latitude/longitude patches.
        Kwargs:
            center (list): Center of sphere, default [0, 0, 0].
            radius (float): Radius of sphere, default 1.0.
            slices (int): Number of slices (longitude steps), default 16.
            stacks (int): Number of stacks (latitude steps), default 8.
        """
        center = kwargs.get('center', [0.0, 0.0, 0.0])
        if isinstance(center, float):
            center = [center, center, center]
        # NOTE(review): assumes Vector accepts a single sequence argument --
        # confirm against csg.geom.Vector.
        c = Vector(center)
        r = kwargs.get('radius', 1.0)
        # A list radius is collapsed to its first component (spheres are
        # isotropic here).
        if isinstance(r, list) and len(r) > 2:
            r = r[0]
        slices = kwargs.get('slices', 16)
        stacks = kwargs.get('stacks', 8)
        polygons = []
        def appendVertex(vertices, theta, phi):
            # Unit direction for (longitude theta, colatitude phi); doubles
            # as the vertex normal.
            d = Vector(
                math.cos(theta) * math.sin(phi),
                math.cos(phi),
                math.sin(theta) * math.sin(phi))
            vertices.append(Vertex(c.plus(d.times(r)), d))
        dTheta = math.pi * 2.0 / float(slices)
        dPhi = math.pi / float(stacks)
        # Top cap: a fan of triangles between the pole (j0 = 0) and the
        # first stack.
        j0 = 0
        j1 = j0 + 1
        for i0 in range(0, slices):
            i1 = i0 + 1
            # +--+
            # | /
            # |/
            # +
            vertices = []
            appendVertex(vertices, i0 * dTheta, j0 * dPhi)
            appendVertex(vertices, i1 * dTheta, j1 * dPhi)
            appendVertex(vertices, i0 * dTheta, j1 * dPhi)
            polygons.append(Polygon(vertices))
        # Bottom cap: a fan of triangles between the last stack and the
        # opposite pole.
        j0 = stacks - 1
        j1 = j0 + 1
        for i0 in range(0, slices):
            i1 = i0 + 1
            # +
            # |\
            # | \
            # +--+
            vertices = []
            appendVertex(vertices, i0 * dTheta, j0 * dPhi)
            appendVertex(vertices, i1 * dTheta, j0 * dPhi)
            appendVertex(vertices, i0 * dTheta, j1 * dPhi)
            polygons.append(Polygon(vertices))
        # Middle band: each quad cell is split into four triangles (N/S/W/E)
        # that share the cell's centre point at fractional indices
        # (i0 + 0.5, j0 + 0.5).
        for j0 in range(1, stacks - 1):
            j1 = j0 + 0.5
            j2 = j0 + 1
            for i0 in range(0, slices):
                i1 = i0 + 0.5
                i2 = i0 + 1
                # +---+
                # |\ /|
                # | x |
                # |/ \|
                # +---+
                verticesN = []
                appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
                appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
                appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
                polygons.append(Polygon(verticesN))
                verticesS = []
                appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
                appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
                appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
                polygons.append(Polygon(verticesS))
                verticesW = []
                appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
                appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
                appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
                polygons.append(Polygon(verticesW))
                verticesE = []
                appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
                appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
                appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
                polygons.append(Polygon(verticesE))
        return CSG.fromPolygons(polygons)
    @classmethod
    def cylinder(cls, **kwargs):
        """Return a cylinder between `start` and `end`.
        Kwargs:
            start (list): Start of cylinder, default [0, -1, 0].
            end (list): End of cylinder, default [0, 1, 0].
            radius (float): Radius of cylinder, default 1.0.
            slices (int): Number of slices, default 16.
        """
        s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
        e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
        if isinstance(s, list):
            s = Vector(*s)
        if isinstance(e, list):
            e = Vector(*e)
        r = kwargs.get('radius', 1.0)
        slices = kwargs.get('slices', 16)
        ray = e.minus(s)
        # Build an orthonormal frame (axisX, axisY, axisZ) around the
        # cylinder axis; isY picks a helper axis guaranteed not to be
        # parallel to axisZ.
        axisZ = ray.unit()
        isY = (math.fabs(axisZ.y) > 0.5)
        axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
        axisY = axisX.cross(axisZ).unit()
        start = Vertex(s, axisZ.negated())
        end = Vertex(e, axisZ.unit())
        polygons = []
        def point(stack, angle, normalBlend):
            # stack: 0 at the start cap, 1 at the end cap.
            # normalBlend: -1/+1 gives a pure cap normal, 0 a pure side
            # normal; intermediate values blend the two.
            out = axisX.times(math.cos(angle)).plus(
                axisY.times(math.sin(angle)))
            pos = s.plus(ray.times(stack)).plus(out.times(r))
            normal = out.times(1.0 - math.fabs(normalBlend)).plus(
                axisZ.times(normalBlend))
            return Vertex(pos, normal)
        dt = math.pi * 2.0 / float(slices)
        # Each slice contributes a start-cap triangle, a side quad and an
        # end-cap triangle.
        for i in range(0, slices):
            t0 = i * dt
            i1 = (i + 1) % slices
            t1 = i1 * dt
            polygons.append(Polygon([start.clone(),
                                     point(0., t0, -1.),
                                     point(0., t1, -1.)]))
            polygons.append(Polygon([point(0., t1, 0.),
                                     point(0., t0, 0.),
                                     point(1., t0, 0.),
                                     point(1., t1, 0.)]))
            polygons.append(Polygon([end.clone(),
                                     point(1., t1, 1.),
                                     point(1., t0, 1.)]))
        return CSG.fromPolygons(polygons)
    @classmethod
    def cone(cls, **kwargs):
        """Return a cone with its base disk at `start` and its tip at `end`.
        Kwargs:
            start (list): Start of cone, default [0, -1, 0].
            end (list): End of cone (the tip), default [0, 1, 0].
            radius (float): Maximum radius of cone at start, default 1.0.
            slices (int): Number of slices, default 16.
        """
        s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
        e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
        if isinstance(s, list):
            s = Vector(*s)
        if isinstance(e, list):
            e = Vector(*e)
        r = kwargs.get('radius', 1.0)
        slices = kwargs.get('slices', 16)
        ray = e.minus(s)
        # Orthonormal frame around the cone axis (same construction as
        # cylinder()).
        axisZ = ray.unit()
        isY = (math.fabs(axisZ.y) > 0.5)
        axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
        axisY = axisX.cross(axisZ).unit()
        startNormal = axisZ.negated()
        start = Vertex(s, startNormal)
        polygons = []
        # The side normal is tilted by the taper angle, set by the
        # radius-to-height ratio.
        taperAngle = math.atan2(r, ray.length())
        sinTaperAngle = math.sin(taperAngle)
        cosTaperAngle = math.cos(taperAngle)
        def point(angle):
            # radial direction pointing out
            out = axisX.times(math.cos(angle)).plus(
                axisY.times(math.sin(angle)))
            pos = s.plus(out.times(r))
            # normal taking into account the tapering of the cone
            normal = out.times(cosTaperAngle).plus(axisZ.times(sinTaperAngle))
            return pos, normal
        dt = math.pi * 2.0 / float(slices)
        for i in range(0, slices):
            t0 = i * dt
            i1 = (i + 1) % slices
            t1 = i1 * dt
            # coordinates and associated normal pointing outwards of the
            # cone's side
            p0, n0 = point(t0)
            p1, n1 = point(t1)
            # average normal for the tip
            nAvg = n0.plus(n1).times(0.5)
            # polygon on the low side (disk sector)
            polyStart = Polygon([start.clone(),
                                 Vertex(p0, startNormal),
                                 Vertex(p1, startNormal)])
            polygons.append(polyStart)
            # polygon extending from the low side to the tip
            polySide = Polygon([Vertex(p0, n0), Vertex(e, nAvg), Vertex(p1, n1)])
            polygons.append(polySide)
        return CSG.fromPolygons(polygons)
|
timknip/pycsg | csg/core.py | CSG.translate | python | def translate(self, disp):
d = Vector(disp[0], disp[1], disp[2])
for poly in self.polygons:
for v in poly.vertices:
v.pos = v.pos.plus(d) | Translate Geometry.
disp: displacement (array of floats) | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L113-L121 | null | class CSG(object):
"""
Constructive Solid Geometry (CSG) is a modeling technique that uses Boolean
operations like union and intersection to combine 3D solids. This library
implements CSG operations on meshes elegantly and concisely using BSP trees,
and is meant to serve as an easily understandable implementation of the
algorithm. All edge cases involving overlapping coplanar polygons in both
solids are correctly handled.
Example usage::
from csg.core import CSG
cube = CSG.cube();
    sphere = CSG.sphere(radius=1.3);
polygons = cube.subtract(sphere).toPolygons();
## Implementation Details
All CSG operations are implemented in terms of two functions, `clipTo()` and
`invert()`, which remove parts of a BSP tree inside another BSP tree and swap
solid and empty space, respectively. To find the union of `a` and `b`, we
want to remove everything in `a` inside `b` and everything in `b` inside `a`,
then combine polygons from `a` and `b` into one solid::
a.clipTo(b);
b.clipTo(a);
a.build(b.allPolygons());
The only tricky part is handling overlapping coplanar polygons in both trees.
The code above keeps both copies, but we need to keep them in one tree and
remove them in the other tree. To remove them from `b` we can clip the
inverse of `b` against `a`. The code for union now looks like this::
a.clipTo(b);
b.clipTo(a);
b.invert();
b.clipTo(a);
b.invert();
a.build(b.allPolygons());
Subtraction and intersection naturally follow from set operations. If
union is `A | B`, subtraction is `A - B = ~(~A | B)` and intersection is
`A & B = ~(~A | ~B)` where `~` is the complement operator.
## License
Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
Additions by Alex Pletzer (Pennsylvania State University)
"""
def __init__(self):
self.polygons = []
@classmethod
def fromPolygons(cls, polygons):
csg = CSG()
csg.polygons = polygons
return csg
def clone(self):
csg = CSG()
csg.polygons = list(map(lambda p: p.clone(), self.polygons))
return csg
def toPolygons(self):
return self.polygons
def refine(self):
"""
Return a refined CSG. To each polygon, a middle point is added to each edge and to the center
of the polygon
"""
newCSG = CSG()
for poly in self.polygons:
verts = poly.vertices
numVerts = len(verts)
if numVerts == 0:
continue
midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
midNormal = None
if verts[0].normal is not None:
midNormal = poly.plane.normal
midVert = Vertex(midPos, midNormal)
newVerts = verts + \
[verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
[midVert]
i = 0
vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
newPoly = Polygon(vs, poly.shared)
newPoly.shared = poly.shared
newPoly.plane = poly.plane
newCSG.polygons.append(newPoly)
for i in range(1, numVerts):
vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
newPoly = Polygon(vs, poly.shared)
newCSG.polygons.append(newPoly)
return newCSG
# no change to the normals
def rotate(self, axis, angleDeg):
"""
Rotate geometry.
axis: axis of rotation (array of floats)
angleDeg: rotation angle in degrees
"""
ax = Vector(axis[0], axis[1], axis[2]).unit()
cosAngle = math.cos(math.pi * angleDeg / 180.)
sinAngle = math.sin(math.pi * angleDeg / 180.)
def newVector(v):
vA = v.dot(ax)
vPerp = v.minus(ax.times(vA))
vPerpLen = vPerp.length()
if vPerpLen == 0:
# vector is parallel to axis, no need to rotate
return v
u1 = vPerp.unit()
u2 = u1.cross(ax)
vCosA = vPerpLen*cosAngle
vSinA = vPerpLen*sinAngle
return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA)))
for poly in self.polygons:
for vert in poly.vertices:
vert.pos = newVector(vert.pos)
normal = vert.normal
if normal.length() > 0:
vert.normal = newVector(vert.normal)
def toVerticesAndPolygons(self):
"""
Return list of vertices, polygons (cells), and the total
number of vertex indices in the polygon connectivity list
(count).
"""
offset = 1.234567890
verts = []
polys = []
vertexIndexMap = {}
count = 0
for poly in self.polygons:
verts = poly.vertices
cell = []
for v in poly.vertices:
p = v.pos
# use string key to remove degeneracy associated
# very close points. The format %.10e ensures that
# points differing in the 11 digits and higher are
# treated as the same. For instance 1.2e-10 and
# 1.3e-10 are essentially the same.
vKey = '%.10e,%.10e,%.10e' % (p[0] + offset,
p[1] + offset,
p[2] + offset)
if not vKey in vertexIndexMap:
vertexIndexMap[vKey] = len(vertexIndexMap)
index = vertexIndexMap[vKey]
cell.append(index)
count += 1
polys.append(cell)
# sort by index
sortedVertexIndex = sorted(vertexIndexMap.items(),
key=operator.itemgetter(1))
verts = []
for v, i in sortedVertexIndex:
p = []
for c in v.split(','):
p.append(float(c) - offset)
verts.append(tuple(p))
return verts, polys, count
def saveVTK(self, filename):
"""
Save polygons in VTK file.
"""
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n')
def union(self, csg):
"""
Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons())
def __add__(self, csg):
return self.union(csg)
def subtract(self, csg):
"""
Return a new CSG solid representing space in this solid but not in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.subtract(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +--+
+----+--+ | +----+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __sub__(self, csg):
return self.subtract(csg)
def intersect(self, csg):
"""
Return a new CSG solid representing space both this solid and in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.intersect(B)
+-------+
| |
| A |
| +--+----+ = +--+
+----+--+ | +--+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
b.clipTo(a)
b.invert()
a.clipTo(b)
b.clipTo(a)
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __mul__(self, csg):
return self.intersect(csg)
def inverse(self):
"""
Return a new CSG solid with solid and empty space switched. This solid is
not modified.
"""
csg = self.clone()
map(lambda p: p.flip(), csg.polygons)
return csg
@classmethod
def cube(cls, center=[0,0,0], radius=[1,1,1]):
"""
Construct an axis-aligned solid cuboid. Optional parameters are `center` and
`radius`, which default to `[0, 0, 0]` and `[1, 1, 1]`. The radius can be
specified using a single number or a list of three numbers, one for each axis.
Example code::
cube = CSG.cube(
center=[0, 0, 0],
radius=1
)
"""
c = Vector(0, 0, 0)
r = [1, 1, 1]
if isinstance(center, list): c = Vector(center)
if isinstance(radius, list): r = radius
else: r = [radius, radius, radius]
polygons = list(map(
lambda v: Polygon(
list(map(lambda i:
Vertex(
Vector(
c.x + r[0] * (2 * bool(i & 1) - 1),
c.y + r[1] * (2 * bool(i & 2) - 1),
c.z + r[2] * (2 * bool(i & 4) - 1)
),
None
), v[0]))),
[
[[0, 4, 6, 2], [-1, 0, 0]],
[[1, 3, 7, 5], [+1, 0, 0]],
[[0, 1, 5, 4], [0, -1, 0]],
[[2, 6, 7, 3], [0, +1, 0]],
[[0, 2, 3, 1], [0, 0, -1]],
[[4, 5, 7, 6], [0, 0, +1]]
]))
return CSG.fromPolygons(polygons)
@classmethod
def sphere(cls, **kwargs):
""" Returns a sphere.
Kwargs:
center (list): Center of sphere, default [0, 0, 0].
radius (float): Radius of sphere, default 1.0.
slices (int): Number of slices, default 16.
stacks (int): Number of stacks, default 8.
"""
center = kwargs.get('center', [0.0, 0.0, 0.0])
if isinstance(center, float):
center = [center, center, center]
c = Vector(center)
r = kwargs.get('radius', 1.0)
if isinstance(r, list) and len(r) > 2:
r = r[0]
slices = kwargs.get('slices', 16)
stacks = kwargs.get('stacks', 8)
polygons = []
def appendVertex(vertices, theta, phi):
d = Vector(
math.cos(theta) * math.sin(phi),
math.cos(phi),
math.sin(theta) * math.sin(phi))
vertices.append(Vertex(c.plus(d.times(r)), d))
dTheta = math.pi * 2.0 / float(slices)
dPhi = math.pi / float(stacks)
j0 = 0
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +--+
# | /
# |/
# +
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j1 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
j0 = stacks - 1
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +
# |\
# | \
# +--+
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j0 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
for j0 in range(1, stacks - 1):
j1 = j0 + 0.5
j2 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 0.5
i2 = i0 + 1
# +---+
# |\ /|
# | x |
# |/ \|
# +---+
verticesN = []
appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesN))
verticesS = []
appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesS))
verticesW = []
appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesW))
verticesE = []
appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesE))
return CSG.fromPolygons(polygons)
@classmethod
def cylinder(cls, **kwargs):
""" Returns a cylinder.
Kwargs:
start (list): Start of cylinder, default [0, -1, 0].
end (list): End of cylinder, default [0, 1, 0].
radius (float): Radius of cylinder, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
start = Vertex(s, axisZ.negated())
end = Vertex(e, axisZ.unit())
polygons = []
def point(stack, angle, normalBlend):
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(ray.times(stack)).plus(out.times(r))
normal = out.times(1.0 - math.fabs(normalBlend)).plus(
axisZ.times(normalBlend))
return Vertex(pos, normal)
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
polygons.append(Polygon([start.clone(),
point(0., t0, -1.),
point(0., t1, -1.)]))
polygons.append(Polygon([point(0., t1, 0.),
point(0., t0, 0.),
point(1., t0, 0.),
point(1., t1, 0.)]))
polygons.append(Polygon([end.clone(),
point(1., t1, 1.),
point(1., t0, 1.)]))
return CSG.fromPolygons(polygons)
@classmethod
def cone(cls, **kwargs):
""" Returns a cone.
Kwargs:
start (list): Start of cone, default [0, -1, 0].
end (list): End of cone, default [0, 1, 0].
radius (float): Maximum radius of cone at start, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
startNormal = axisZ.negated()
start = Vertex(s, startNormal)
polygons = []
taperAngle = math.atan2(r, ray.length())
sinTaperAngle = math.sin(taperAngle)
cosTaperAngle = math.cos(taperAngle)
def point(angle):
# radial direction pointing out
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(out.times(r))
# normal taking into account the tapering of the cone
normal = out.times(cosTaperAngle).plus(axisZ.times(sinTaperAngle))
return pos, normal
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
# coordinates and associated normal pointing outwards of the cone's
# side
p0, n0 = point(t0)
p1, n1 = point(t1)
# average normal for the tip
nAvg = n0.plus(n1).times(0.5)
# polygon on the low side (disk sector)
polyStart = Polygon([start.clone(),
Vertex(p0, startNormal),
Vertex(p1, startNormal)])
polygons.append(polyStart)
# polygon extending from the low side to the tip
polySide = Polygon([Vertex(p0, n0), Vertex(e, nAvg), Vertex(p1, n1)])
polygons.append(polySide)
return CSG.fromPolygons(polygons)
|
timknip/pycsg | csg/core.py | CSG.rotate | python | def rotate(self, axis, angleDeg):
ax = Vector(axis[0], axis[1], axis[2]).unit()
cosAngle = math.cos(math.pi * angleDeg / 180.)
sinAngle = math.sin(math.pi * angleDeg / 180.)
def newVector(v):
vA = v.dot(ax)
vPerp = v.minus(ax.times(vA))
vPerpLen = vPerp.length()
if vPerpLen == 0:
# vector is parallel to axis, no need to rotate
return v
u1 = vPerp.unit()
u2 = u1.cross(ax)
vCosA = vPerpLen*cosAngle
vSinA = vPerpLen*sinAngle
return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA)))
for poly in self.polygons:
for vert in poly.vertices:
vert.pos = newVector(vert.pos)
normal = vert.normal
if normal.length() > 0:
vert.normal = newVector(vert.normal) | Rotate geometry.
axis: axis of rotation (array of floats)
angleDeg: rotation angle in degrees | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L124-L152 | [
"def unit(self):\n \"\"\" Normalize. \"\"\"\n return self.dividedBy(self.length())\n",
"def newVector(v):\n vA = v.dot(ax)\n vPerp = v.minus(ax.times(vA))\n vPerpLen = vPerp.length()\n if vPerpLen == 0:\n # vector is parallel to axis, no need to rotate\n return v\n u1 = vPerp.un... | class CSG(object):
"""
Constructive Solid Geometry (CSG) is a modeling technique that uses Boolean
operations like union and intersection to combine 3D solids. This library
implements CSG operations on meshes elegantly and concisely using BSP trees,
and is meant to serve as an easily understandable implementation of the
algorithm. All edge cases involving overlapping coplanar polygons in both
solids are correctly handled.
Example usage::
from csg.core import CSG
cube = CSG.cube();
    sphere = CSG.sphere(radius=1.3);
polygons = cube.subtract(sphere).toPolygons();
## Implementation Details
All CSG operations are implemented in terms of two functions, `clipTo()` and
`invert()`, which remove parts of a BSP tree inside another BSP tree and swap
solid and empty space, respectively. To find the union of `a` and `b`, we
want to remove everything in `a` inside `b` and everything in `b` inside `a`,
then combine polygons from `a` and `b` into one solid::
a.clipTo(b);
b.clipTo(a);
a.build(b.allPolygons());
The only tricky part is handling overlapping coplanar polygons in both trees.
The code above keeps both copies, but we need to keep them in one tree and
remove them in the other tree. To remove them from `b` we can clip the
inverse of `b` against `a`. The code for union now looks like this::
a.clipTo(b);
b.clipTo(a);
b.invert();
b.clipTo(a);
b.invert();
a.build(b.allPolygons());
Subtraction and intersection naturally follow from set operations. If
union is `A | B`, subtraction is `A - B = ~(~A | B)` and intersection is
`A & B = ~(~A | ~B)` where `~` is the complement operator.
## License
Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
Additions by Alex Pletzer (Pennsylvania State University)
"""
def __init__(self):
self.polygons = []
@classmethod
def fromPolygons(cls, polygons):
csg = CSG()
csg.polygons = polygons
return csg
def clone(self):
csg = CSG()
csg.polygons = list(map(lambda p: p.clone(), self.polygons))
return csg
def toPolygons(self):
return self.polygons
def refine(self):
"""
Return a refined CSG. To each polygon, a middle point is added to each edge and to the center
of the polygon
"""
newCSG = CSG()
for poly in self.polygons:
verts = poly.vertices
numVerts = len(verts)
if numVerts == 0:
continue
midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
midNormal = None
if verts[0].normal is not None:
midNormal = poly.plane.normal
midVert = Vertex(midPos, midNormal)
newVerts = verts + \
[verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
[midVert]
i = 0
vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
newPoly = Polygon(vs, poly.shared)
newPoly.shared = poly.shared
newPoly.plane = poly.plane
newCSG.polygons.append(newPoly)
for i in range(1, numVerts):
vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
newPoly = Polygon(vs, poly.shared)
newCSG.polygons.append(newPoly)
return newCSG
def translate(self, disp):
"""
Translate Geometry.
disp: displacement (array of floats)
"""
d = Vector(disp[0], disp[1], disp[2])
for poly in self.polygons:
for v in poly.vertices:
v.pos = v.pos.plus(d)
# no change to the normals
def toVerticesAndPolygons(self):
"""
Return list of vertices, polygons (cells), and the total
number of vertex indices in the polygon connectivity list
(count).
"""
offset = 1.234567890
verts = []
polys = []
vertexIndexMap = {}
count = 0
for poly in self.polygons:
verts = poly.vertices
cell = []
for v in poly.vertices:
p = v.pos
# use string key to remove degeneracy associated
# very close points. The format %.10e ensures that
# points differing in the 11 digits and higher are
# treated as the same. For instance 1.2e-10 and
# 1.3e-10 are essentially the same.
vKey = '%.10e,%.10e,%.10e' % (p[0] + offset,
p[1] + offset,
p[2] + offset)
if not vKey in vertexIndexMap:
vertexIndexMap[vKey] = len(vertexIndexMap)
index = vertexIndexMap[vKey]
cell.append(index)
count += 1
polys.append(cell)
# sort by index
sortedVertexIndex = sorted(vertexIndexMap.items(),
key=operator.itemgetter(1))
verts = []
for v, i in sortedVertexIndex:
p = []
for c in v.split(','):
p.append(float(c) - offset)
verts.append(tuple(p))
return verts, polys, count
def saveVTK(self, filename):
"""
Save polygons in VTK file.
"""
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n')
def union(self, csg):
"""
Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons())
def __add__(self, csg):
return self.union(csg)
def subtract(self, csg):
"""
Return a new CSG solid representing space in this solid but not in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.subtract(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +--+
+----+--+ | +----+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __sub__(self, csg):
return self.subtract(csg)
def intersect(self, csg):
"""
Return a new CSG solid representing space both this solid and in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.intersect(B)
+-------+
| |
| A |
| +--+----+ = +--+
+----+--+ | +--+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
b.clipTo(a)
b.invert()
a.clipTo(b)
b.clipTo(a)
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __mul__(self, csg):
return self.intersect(csg)
def inverse(self):
"""
Return a new CSG solid with solid and empty space switched. This solid is
not modified.
"""
csg = self.clone()
map(lambda p: p.flip(), csg.polygons)
return csg
@classmethod
def cube(cls, center=[0,0,0], radius=[1,1,1]):
"""
Construct an axis-aligned solid cuboid. Optional parameters are `center` and
`radius`, which default to `[0, 0, 0]` and `[1, 1, 1]`. The radius can be
specified using a single number or a list of three numbers, one for each axis.
Example code::
cube = CSG.cube(
center=[0, 0, 0],
radius=1
)
"""
c = Vector(0, 0, 0)
r = [1, 1, 1]
if isinstance(center, list): c = Vector(center)
if isinstance(radius, list): r = radius
else: r = [radius, radius, radius]
polygons = list(map(
lambda v: Polygon(
list(map(lambda i:
Vertex(
Vector(
c.x + r[0] * (2 * bool(i & 1) - 1),
c.y + r[1] * (2 * bool(i & 2) - 1),
c.z + r[2] * (2 * bool(i & 4) - 1)
),
None
), v[0]))),
[
[[0, 4, 6, 2], [-1, 0, 0]],
[[1, 3, 7, 5], [+1, 0, 0]],
[[0, 1, 5, 4], [0, -1, 0]],
[[2, 6, 7, 3], [0, +1, 0]],
[[0, 2, 3, 1], [0, 0, -1]],
[[4, 5, 7, 6], [0, 0, +1]]
]))
return CSG.fromPolygons(polygons)
@classmethod
def sphere(cls, **kwargs):
""" Returns a sphere.
Kwargs:
center (list): Center of sphere, default [0, 0, 0].
radius (float): Radius of sphere, default 1.0.
slices (int): Number of slices, default 16.
stacks (int): Number of stacks, default 8.
"""
center = kwargs.get('center', [0.0, 0.0, 0.0])
if isinstance(center, float):
center = [center, center, center]
c = Vector(center)
r = kwargs.get('radius', 1.0)
if isinstance(r, list) and len(r) > 2:
r = r[0]
slices = kwargs.get('slices', 16)
stacks = kwargs.get('stacks', 8)
polygons = []
def appendVertex(vertices, theta, phi):
d = Vector(
math.cos(theta) * math.sin(phi),
math.cos(phi),
math.sin(theta) * math.sin(phi))
vertices.append(Vertex(c.plus(d.times(r)), d))
dTheta = math.pi * 2.0 / float(slices)
dPhi = math.pi / float(stacks)
j0 = 0
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +--+
# | /
# |/
# +
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j1 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
j0 = stacks - 1
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +
# |\
# | \
# +--+
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j0 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
for j0 in range(1, stacks - 1):
j1 = j0 + 0.5
j2 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 0.5
i2 = i0 + 1
# +---+
# |\ /|
# | x |
# |/ \|
# +---+
verticesN = []
appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesN))
verticesS = []
appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesS))
verticesW = []
appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesW))
verticesE = []
appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesE))
return CSG.fromPolygons(polygons)
@classmethod
def cylinder(cls, **kwargs):
""" Returns a cylinder.
Kwargs:
start (list): Start of cylinder, default [0, -1, 0].
end (list): End of cylinder, default [0, 1, 0].
radius (float): Radius of cylinder, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
start = Vertex(s, axisZ.negated())
end = Vertex(e, axisZ.unit())
polygons = []
def point(stack, angle, normalBlend):
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(ray.times(stack)).plus(out.times(r))
normal = out.times(1.0 - math.fabs(normalBlend)).plus(
axisZ.times(normalBlend))
return Vertex(pos, normal)
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
polygons.append(Polygon([start.clone(),
point(0., t0, -1.),
point(0., t1, -1.)]))
polygons.append(Polygon([point(0., t1, 0.),
point(0., t0, 0.),
point(1., t0, 0.),
point(1., t1, 0.)]))
polygons.append(Polygon([end.clone(),
point(1., t1, 1.),
point(1., t0, 1.)]))
return CSG.fromPolygons(polygons)
@classmethod
def cone(cls, **kwargs):
""" Returns a cone.
Kwargs:
start (list): Start of cone, default [0, -1, 0].
end (list): End of cone, default [0, 1, 0].
radius (float): Maximum radius of cone at start, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
startNormal = axisZ.negated()
start = Vertex(s, startNormal)
polygons = []
taperAngle = math.atan2(r, ray.length())
sinTaperAngle = math.sin(taperAngle)
cosTaperAngle = math.cos(taperAngle)
def point(angle):
# radial direction pointing out
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(out.times(r))
# normal taking into account the tapering of the cone
normal = out.times(cosTaperAngle).plus(axisZ.times(sinTaperAngle))
return pos, normal
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
# coordinates and associated normal pointing outwards of the cone's
# side
p0, n0 = point(t0)
p1, n1 = point(t1)
# average normal for the tip
nAvg = n0.plus(n1).times(0.5)
# polygon on the low side (disk sector)
polyStart = Polygon([start.clone(),
Vertex(p0, startNormal),
Vertex(p1, startNormal)])
polygons.append(polyStart)
# polygon extending from the low side to the tip
polySide = Polygon([Vertex(p0, n0), Vertex(e, nAvg), Vertex(p1, n1)])
polygons.append(polySide)
return CSG.fromPolygons(polygons)
|
timknip/pycsg | csg/core.py | CSG.toVerticesAndPolygons | python | def toVerticesAndPolygons(self):
offset = 1.234567890
verts = []
polys = []
vertexIndexMap = {}
count = 0
for poly in self.polygons:
verts = poly.vertices
cell = []
for v in poly.vertices:
p = v.pos
# use string key to remove degeneracy associated
# very close points. The format %.10e ensures that
# points differing in the 11 digits and higher are
# treated as the same. For instance 1.2e-10 and
# 1.3e-10 are essentially the same.
vKey = '%.10e,%.10e,%.10e' % (p[0] + offset,
p[1] + offset,
p[2] + offset)
if not vKey in vertexIndexMap:
vertexIndexMap[vKey] = len(vertexIndexMap)
index = vertexIndexMap[vKey]
cell.append(index)
count += 1
polys.append(cell)
# sort by index
sortedVertexIndex = sorted(vertexIndexMap.items(),
key=operator.itemgetter(1))
verts = []
for v, i in sortedVertexIndex:
p = []
for c in v.split(','):
p.append(float(c) - offset)
verts.append(tuple(p))
return verts, polys, count | Return list of vertices, polygons (cells), and the total
number of vertex indices in the polygon connectivity list
(count). | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L154-L193 | null | class CSG(object):
"""
Constructive Solid Geometry (CSG) is a modeling technique that uses Boolean
operations like union and intersection to combine 3D solids. This library
implements CSG operations on meshes elegantly and concisely using BSP trees,
and is meant to serve as an easily understandable implementation of the
algorithm. All edge cases involving overlapping coplanar polygons in both
solids are correctly handled.
Example usage::
from csg.core import CSG
cube = CSG.cube();
sphere = CSG.sphere({'radius': 1.3});
polygons = cube.subtract(sphere).toPolygons();
## Implementation Details
All CSG operations are implemented in terms of two functions, `clipTo()` and
`invert()`, which remove parts of a BSP tree inside another BSP tree and swap
solid and empty space, respectively. To find the union of `a` and `b`, we
want to remove everything in `a` inside `b` and everything in `b` inside `a`,
then combine polygons from `a` and `b` into one solid::
a.clipTo(b);
b.clipTo(a);
a.build(b.allPolygons());
The only tricky part is handling overlapping coplanar polygons in both trees.
The code above keeps both copies, but we need to keep them in one tree and
remove them in the other tree. To remove them from `b` we can clip the
inverse of `b` against `a`. The code for union now looks like this::
a.clipTo(b);
b.clipTo(a);
b.invert();
b.clipTo(a);
b.invert();
a.build(b.allPolygons());
Subtraction and intersection naturally follow from set operations. If
union is `A | B`, subtraction is `A - B = ~(~A | B)` and intersection is
`A & B = ~(~A | ~B)` where `~` is the complement operator.
## License
Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
Additions by Alex Pletzer (Pennsylvania State University)
"""
def __init__(self):
self.polygons = []
@classmethod
def fromPolygons(cls, polygons):
csg = CSG()
csg.polygons = polygons
return csg
def clone(self):
csg = CSG()
csg.polygons = list(map(lambda p: p.clone(), self.polygons))
return csg
def toPolygons(self):
return self.polygons
def refine(self):
"""
Return a refined CSG. To each polygon, a middle point is added to each edge and to the center
of the polygon
"""
newCSG = CSG()
for poly in self.polygons:
verts = poly.vertices
numVerts = len(verts)
if numVerts == 0:
continue
midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
midNormal = None
if verts[0].normal is not None:
midNormal = poly.plane.normal
midVert = Vertex(midPos, midNormal)
newVerts = verts + \
[verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
[midVert]
i = 0
vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
newPoly = Polygon(vs, poly.shared)
newPoly.shared = poly.shared
newPoly.plane = poly.plane
newCSG.polygons.append(newPoly)
for i in range(1, numVerts):
vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
newPoly = Polygon(vs, poly.shared)
newCSG.polygons.append(newPoly)
return newCSG
def translate(self, disp):
"""
Translate Geometry.
disp: displacement (array of floats)
"""
d = Vector(disp[0], disp[1], disp[2])
for poly in self.polygons:
for v in poly.vertices:
v.pos = v.pos.plus(d)
# no change to the normals
def rotate(self, axis, angleDeg):
"""
Rotate geometry.
axis: axis of rotation (array of floats)
angleDeg: rotation angle in degrees
"""
ax = Vector(axis[0], axis[1], axis[2]).unit()
cosAngle = math.cos(math.pi * angleDeg / 180.)
sinAngle = math.sin(math.pi * angleDeg / 180.)
def newVector(v):
vA = v.dot(ax)
vPerp = v.minus(ax.times(vA))
vPerpLen = vPerp.length()
if vPerpLen == 0:
# vector is parallel to axis, no need to rotate
return v
u1 = vPerp.unit()
u2 = u1.cross(ax)
vCosA = vPerpLen*cosAngle
vSinA = vPerpLen*sinAngle
return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA)))
for poly in self.polygons:
for vert in poly.vertices:
vert.pos = newVector(vert.pos)
normal = vert.normal
if normal.length() > 0:
vert.normal = newVector(vert.normal)
def saveVTK(self, filename):
"""
Save polygons in VTK file.
"""
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n')
def union(self, csg):
"""
Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons())
def __add__(self, csg):
return self.union(csg)
def subtract(self, csg):
"""
Return a new CSG solid representing space in this solid but not in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.subtract(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +--+
+----+--+ | +----+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __sub__(self, csg):
return self.subtract(csg)
def intersect(self, csg):
"""
Return a new CSG solid representing space both this solid and in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.intersect(B)
+-------+
| |
| A |
| +--+----+ = +--+
+----+--+ | +--+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
b.clipTo(a)
b.invert()
a.clipTo(b)
b.clipTo(a)
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __mul__(self, csg):
return self.intersect(csg)
def inverse(self):
"""
Return a new CSG solid with solid and empty space switched. This solid is
not modified.
"""
csg = self.clone()
map(lambda p: p.flip(), csg.polygons)
return csg
@classmethod
def cube(cls, center=[0,0,0], radius=[1,1,1]):
"""
Construct an axis-aligned solid cuboid. Optional parameters are `center` and
`radius`, which default to `[0, 0, 0]` and `[1, 1, 1]`. The radius can be
specified using a single number or a list of three numbers, one for each axis.
Example code::
cube = CSG.cube(
center=[0, 0, 0],
radius=1
)
"""
c = Vector(0, 0, 0)
r = [1, 1, 1]
if isinstance(center, list): c = Vector(center)
if isinstance(radius, list): r = radius
else: r = [radius, radius, radius]
polygons = list(map(
lambda v: Polygon(
list(map(lambda i:
Vertex(
Vector(
c.x + r[0] * (2 * bool(i & 1) - 1),
c.y + r[1] * (2 * bool(i & 2) - 1),
c.z + r[2] * (2 * bool(i & 4) - 1)
),
None
), v[0]))),
[
[[0, 4, 6, 2], [-1, 0, 0]],
[[1, 3, 7, 5], [+1, 0, 0]],
[[0, 1, 5, 4], [0, -1, 0]],
[[2, 6, 7, 3], [0, +1, 0]],
[[0, 2, 3, 1], [0, 0, -1]],
[[4, 5, 7, 6], [0, 0, +1]]
]))
return CSG.fromPolygons(polygons)
@classmethod
def sphere(cls, **kwargs):
""" Returns a sphere.
Kwargs:
center (list): Center of sphere, default [0, 0, 0].
radius (float): Radius of sphere, default 1.0.
slices (int): Number of slices, default 16.
stacks (int): Number of stacks, default 8.
"""
center = kwargs.get('center', [0.0, 0.0, 0.0])
if isinstance(center, float):
center = [center, center, center]
c = Vector(center)
r = kwargs.get('radius', 1.0)
if isinstance(r, list) and len(r) > 2:
r = r[0]
slices = kwargs.get('slices', 16)
stacks = kwargs.get('stacks', 8)
polygons = []
def appendVertex(vertices, theta, phi):
d = Vector(
math.cos(theta) * math.sin(phi),
math.cos(phi),
math.sin(theta) * math.sin(phi))
vertices.append(Vertex(c.plus(d.times(r)), d))
dTheta = math.pi * 2.0 / float(slices)
dPhi = math.pi / float(stacks)
j0 = 0
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +--+
# | /
# |/
# +
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j1 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
j0 = stacks - 1
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +
# |\
# | \
# +--+
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j0 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
for j0 in range(1, stacks - 1):
j1 = j0 + 0.5
j2 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 0.5
i2 = i0 + 1
# +---+
# |\ /|
# | x |
# |/ \|
# +---+
verticesN = []
appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesN))
verticesS = []
appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesS))
verticesW = []
appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesW))
verticesE = []
appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesE))
return CSG.fromPolygons(polygons)
@classmethod
def cylinder(cls, **kwargs):
""" Returns a cylinder.
Kwargs:
start (list): Start of cylinder, default [0, -1, 0].
end (list): End of cylinder, default [0, 1, 0].
radius (float): Radius of cylinder, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
start = Vertex(s, axisZ.negated())
end = Vertex(e, axisZ.unit())
polygons = []
def point(stack, angle, normalBlend):
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(ray.times(stack)).plus(out.times(r))
normal = out.times(1.0 - math.fabs(normalBlend)).plus(
axisZ.times(normalBlend))
return Vertex(pos, normal)
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
polygons.append(Polygon([start.clone(),
point(0., t0, -1.),
point(0., t1, -1.)]))
polygons.append(Polygon([point(0., t1, 0.),
point(0., t0, 0.),
point(1., t0, 0.),
point(1., t1, 0.)]))
polygons.append(Polygon([end.clone(),
point(1., t1, 1.),
point(1., t0, 1.)]))
return CSG.fromPolygons(polygons)
@classmethod
def cone(cls, **kwargs):
""" Returns a cone.
Kwargs:
start (list): Start of cone, default [0, -1, 0].
end (list): End of cone, default [0, 1, 0].
radius (float): Maximum radius of cone at start, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
startNormal = axisZ.negated()
start = Vertex(s, startNormal)
polygons = []
taperAngle = math.atan2(r, ray.length())
sinTaperAngle = math.sin(taperAngle)
cosTaperAngle = math.cos(taperAngle)
def point(angle):
# radial direction pointing out
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(out.times(r))
# normal taking into account the tapering of the cone
normal = out.times(cosTaperAngle).plus(axisZ.times(sinTaperAngle))
return pos, normal
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
# coordinates and associated normal pointing outwards of the cone's
# side
p0, n0 = point(t0)
p1, n1 = point(t1)
# average normal for the tip
nAvg = n0.plus(n1).times(0.5)
# polygon on the low side (disk sector)
polyStart = Polygon([start.clone(),
Vertex(p0, startNormal),
Vertex(p1, startNormal)])
polygons.append(polyStart)
# polygon extending from the low side to the tip
polySide = Polygon([Vertex(p0, n0), Vertex(e, nAvg), Vertex(p1, n1)])
polygons.append(polySide)
return CSG.fromPolygons(polygons)
|
timknip/pycsg | csg/core.py | CSG.saveVTK | python | def saveVTK(self, filename):
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n') | Save polygons in VTK file. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L195-L216 | [
"def toVerticesAndPolygons(self):\n \"\"\"\n Return list of vertices, polygons (cells), and the total\n number of vertex indices in the polygon connectivity list\n (count).\n \"\"\"\n offset = 1.234567890\n verts = []\n polys = []\n vertexIndexMap = {}\n count = 0\n for poly in self... | class CSG(object):
"""
Constructive Solid Geometry (CSG) is a modeling technique that uses Boolean
operations like union and intersection to combine 3D solids. This library
implements CSG operations on meshes elegantly and concisely using BSP trees,
and is meant to serve as an easily understandable implementation of the
algorithm. All edge cases involving overlapping coplanar polygons in both
solids are correctly handled.
Example usage::
from csg.core import CSG
cube = CSG.cube();
sphere = CSG.sphere({'radius': 1.3});
polygons = cube.subtract(sphere).toPolygons();
## Implementation Details
All CSG operations are implemented in terms of two functions, `clipTo()` and
`invert()`, which remove parts of a BSP tree inside another BSP tree and swap
solid and empty space, respectively. To find the union of `a` and `b`, we
want to remove everything in `a` inside `b` and everything in `b` inside `a`,
then combine polygons from `a` and `b` into one solid::
a.clipTo(b);
b.clipTo(a);
a.build(b.allPolygons());
The only tricky part is handling overlapping coplanar polygons in both trees.
The code above keeps both copies, but we need to keep them in one tree and
remove them in the other tree. To remove them from `b` we can clip the
inverse of `b` against `a`. The code for union now looks like this::
a.clipTo(b);
b.clipTo(a);
b.invert();
b.clipTo(a);
b.invert();
a.build(b.allPolygons());
Subtraction and intersection naturally follow from set operations. If
union is `A | B`, subtraction is `A - B = ~(~A | B)` and intersection is
`A & B = ~(~A | ~B)` where `~` is the complement operator.
## License
Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
Additions by Alex Pletzer (Pennsylvania State University)
"""
def __init__(self):
self.polygons = []
@classmethod
def fromPolygons(cls, polygons):
csg = CSG()
csg.polygons = polygons
return csg
def clone(self):
csg = CSG()
csg.polygons = list(map(lambda p: p.clone(), self.polygons))
return csg
def toPolygons(self):
return self.polygons
def refine(self):
"""
Return a refined CSG. To each polygon, a middle point is added to each edge and to the center
of the polygon
"""
newCSG = CSG()
for poly in self.polygons:
verts = poly.vertices
numVerts = len(verts)
if numVerts == 0:
continue
midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
midNormal = None
if verts[0].normal is not None:
midNormal = poly.plane.normal
midVert = Vertex(midPos, midNormal)
newVerts = verts + \
[verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
[midVert]
i = 0
vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
newPoly = Polygon(vs, poly.shared)
newPoly.shared = poly.shared
newPoly.plane = poly.plane
newCSG.polygons.append(newPoly)
for i in range(1, numVerts):
vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
newPoly = Polygon(vs, poly.shared)
newCSG.polygons.append(newPoly)
return newCSG
def translate(self, disp):
"""
Translate Geometry.
disp: displacement (array of floats)
"""
d = Vector(disp[0], disp[1], disp[2])
for poly in self.polygons:
for v in poly.vertices:
v.pos = v.pos.plus(d)
# no change to the normals
def rotate(self, axis, angleDeg):
"""
Rotate geometry.
axis: axis of rotation (array of floats)
angleDeg: rotation angle in degrees
"""
ax = Vector(axis[0], axis[1], axis[2]).unit()
cosAngle = math.cos(math.pi * angleDeg / 180.)
sinAngle = math.sin(math.pi * angleDeg / 180.)
def newVector(v):
vA = v.dot(ax)
vPerp = v.minus(ax.times(vA))
vPerpLen = vPerp.length()
if vPerpLen == 0:
# vector is parallel to axis, no need to rotate
return v
u1 = vPerp.unit()
u2 = u1.cross(ax)
vCosA = vPerpLen*cosAngle
vSinA = vPerpLen*sinAngle
return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA)))
for poly in self.polygons:
for vert in poly.vertices:
vert.pos = newVector(vert.pos)
normal = vert.normal
if normal.length() > 0:
vert.normal = newVector(vert.normal)
def toVerticesAndPolygons(self):
"""
Return list of vertices, polygons (cells), and the total
number of vertex indices in the polygon connectivity list
(count).
"""
offset = 1.234567890
verts = []
polys = []
vertexIndexMap = {}
count = 0
for poly in self.polygons:
verts = poly.vertices
cell = []
for v in poly.vertices:
p = v.pos
# use string key to remove degeneracy associated
# very close points. The format %.10e ensures that
# points differing in the 11 digits and higher are
# treated as the same. For instance 1.2e-10 and
# 1.3e-10 are essentially the same.
vKey = '%.10e,%.10e,%.10e' % (p[0] + offset,
p[1] + offset,
p[2] + offset)
if not vKey in vertexIndexMap:
vertexIndexMap[vKey] = len(vertexIndexMap)
index = vertexIndexMap[vKey]
cell.append(index)
count += 1
polys.append(cell)
# sort by index
sortedVertexIndex = sorted(vertexIndexMap.items(),
key=operator.itemgetter(1))
verts = []
for v, i in sortedVertexIndex:
p = []
for c in v.split(','):
p.append(float(c) - offset)
verts.append(tuple(p))
return verts, polys, count
def saveVTK(self, filename):
"""
Save polygons in VTK file.
"""
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n')
def union(self, csg):
"""
Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons())
def __add__(self, csg):
return self.union(csg)
def subtract(self, csg):
"""
Return a new CSG solid representing space in this solid but not in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.subtract(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +--+
+----+--+ | +----+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __sub__(self, csg):
return self.subtract(csg)
def intersect(self, csg):
"""
Return a new CSG solid representing space both this solid and in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.intersect(B)
+-------+
| |
| A |
| +--+----+ = +--+
+----+--+ | +--+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
b.clipTo(a)
b.invert()
a.clipTo(b)
b.clipTo(a)
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __mul__(self, csg):
    # Operator alias: ``a * b`` is equivalent to ``a.intersect(b)``.
    return self.intersect(csg)
def inverse(self):
"""
Return a new CSG solid with solid and empty space switched. This solid is
not modified.
"""
csg = self.clone()
map(lambda p: p.flip(), csg.polygons)
return csg
@classmethod
def cube(cls, center=[0,0,0], radius=[1,1,1]):
    """
    Construct an axis-aligned solid cuboid.

    center: centre of the cuboid as a three-element list or tuple
        (default origin).
    radius: half-extent per axis, either a single number or a
        three-element list or tuple.

    Example code::

        cube = CSG.cube(
            center=[0, 0, 0],
            radius=1
        )
    """
    # NOTE: the mutable default arguments are kept for API compatibility;
    # they are read but never mutated, so sharing is harmless.
    c = Vector(0, 0, 0)
    r = [1, 1, 1]
    # Generalized to accept tuples as well as lists for both arguments.
    if isinstance(center, (list, tuple)):
        c = Vector(center)
    if isinstance(radius, (list, tuple)):
        r = list(radius)
    else:
        r = [radius, radius, radius]
    # Corner indices per face; bit k of each index selects the sign of the
    # offset along axis k. The winding order makes the face normals point
    # outwards.
    faces = [
        [0, 4, 6, 2],  # -x
        [1, 3, 7, 5],  # +x
        [0, 1, 5, 4],  # -y
        [2, 6, 7, 3],  # +y
        [0, 2, 3, 1],  # -z
        [4, 5, 7, 6],  # +z
    ]
    polygons = []
    for indices in faces:
        vertices = []
        for i in indices:
            pos = Vector(
                c.x + r[0] * (2 * bool(i & 1) - 1),
                c.y + r[1] * (2 * bool(i & 2) - 1),
                c.z + r[2] * (2 * bool(i & 4) - 1),
            )
            # Normals are left as None, matching the original behaviour.
            vertices.append(Vertex(pos, None))
        polygons.append(Polygon(vertices))
    return CSG.fromPolygons(polygons)
@classmethod
def sphere(cls, **kwargs):
    """ Returns a sphere.

    Kwargs:
        center (list): Center of sphere, default [0, 0, 0].
        radius (float): Radius of sphere, default 1.0.
        slices (int): Number of slices (longitude divisions), default 16.
        stacks (int): Number of stacks (latitude divisions), default 8.
    """
    center = kwargs.get('center', [0.0, 0.0, 0.0])
    if isinstance(center, float):
        center = [center, center, center]
    c = Vector(center)
    r = kwargs.get('radius', 1.0)
    # NOTE(review): a list radius of length 1 or 2 falls through
    # unchanged and would likely break the scaling below -- confirm
    # whether short lists should also collapse to r[0].
    if isinstance(r, list) and len(r) > 2:
        r = r[0]
    slices = kwargs.get('slices', 16)
    stacks = kwargs.get('stacks', 8)
    polygons = []

    def appendVertex(vertices, theta, phi):
        # Unit direction for the spherical angles (theta = longitude,
        # phi = angle from the +y pole); it doubles as the vertex normal
        # because the sphere is centred at `c`.
        d = Vector(
            math.cos(theta) * math.sin(phi),
            math.cos(phi),
            math.sin(theta) * math.sin(phi))
        vertices.append(Vertex(c.plus(d.times(r)), d))

    dTheta = math.pi * 2.0 / float(slices)
    dPhi = math.pi / float(stacks)

    # Top cap: a fan of triangles around the pole at phi = 0.
    j0 = 0
    j1 = j0 + 1
    for i0 in range(0, slices):
        i1 = i0 + 1
        #  +--+
        #  | /
        #  |/
        #  +
        vertices = []
        appendVertex(vertices, i0 * dTheta, j0 * dPhi)
        appendVertex(vertices, i1 * dTheta, j1 * dPhi)
        appendVertex(vertices, i0 * dTheta, j1 * dPhi)
        polygons.append(Polygon(vertices))

    # Bottom cap: a fan of triangles around the pole at phi = pi.
    j0 = stacks - 1
    j1 = j0 + 1
    for i0 in range(0, slices):
        i1 = i0 + 1
        #  +
        #  |\
        #  | \
        #  +--+
        vertices = []
        appendVertex(vertices, i0 * dTheta, j0 * dPhi)
        appendVertex(vertices, i1 * dTheta, j0 * dPhi)
        appendVertex(vertices, i0 * dTheta, j1 * dPhi)
        polygons.append(Polygon(vertices))

    # Middle bands: each quad cell is split into four triangles (N, S, W,
    # E) that share the cell centre, reached via the half-step indices
    # i0 + 0.5 / j0 + 0.5 -- presumably to avoid the bias of a single
    # diagonal split. The vertex order of each triangle fixes its
    # winding; do not reorder these appends.
    for j0 in range(1, stacks - 1):
        j1 = j0 + 0.5
        j2 = j0 + 1
        for i0 in range(0, slices):
            i1 = i0 + 0.5
            i2 = i0 + 1
            #  +---+
            #  |\ /|
            #  | x |
            #  |/ \|
            #  +---+
            verticesN = []
            appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
            appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
            appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
            polygons.append(Polygon(verticesN))
            verticesS = []
            appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
            appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
            appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
            polygons.append(Polygon(verticesS))
            verticesW = []
            appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
            appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
            appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
            polygons.append(Polygon(verticesW))
            verticesE = []
            appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
            appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
            appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
            polygons.append(Polygon(verticesE))
    return CSG.fromPolygons(polygons)
@classmethod
def cylinder(cls, **kwargs):
    """ Returns a cylinder.

    Kwargs:
        start (list): Start of cylinder, default [0, -1, 0].
        end (list): End of cylinder, default [0, 1, 0].
        radius (float): Radius of cylinder, default 1.0.
        slices (int): Number of slices, default 16.
    """
    bottom = kwargs.get('start', Vector(0.0, -1.0, 0.0))
    top = kwargs.get('end', Vector(0.0, 1.0, 0.0))
    if isinstance(bottom, list):
        bottom = Vector(*bottom)
    if isinstance(top, list):
        top = Vector(*top)
    radius = kwargs.get('radius', 1.0)
    slices = kwargs.get('slices', 16)
    axis = top.minus(bottom)
    # Orthonormal frame around the cylinder axis; the seed vector is
    # chosen to avoid a near-parallel cross product.
    unit_z = axis.unit()
    prefer_y = (math.fabs(unit_z.y) > 0.5)
    unit_x = Vector(float(prefer_y), float(not prefer_y), 0).cross(unit_z).unit()
    unit_y = unit_x.cross(unit_z).unit()
    cap_bottom = Vertex(bottom, unit_z.negated())
    cap_top = Vertex(top, unit_z.unit())
    polygons = []

    def make_vertex(stack, angle, blend):
        # Vertex on the ring at fraction `stack` along the axis; `blend`
        # mixes the radial direction with the axis direction so that cap
        # vertices get axial normals and side vertices get radial ones.
        radial = unit_x.times(math.cos(angle)).plus(
            unit_y.times(math.sin(angle)))
        position = bottom.plus(axis.times(stack)).plus(radial.times(radius))
        normal = radial.times(1.0 - math.fabs(blend)).plus(
            unit_z.times(blend))
        return Vertex(position, normal)

    step = math.pi * 2.0 / float(slices)
    for i in range(slices):
        angle0 = i * step
        angle1 = ((i + 1) % slices) * step
        # Bottom cap triangle, side quad, then top cap triangle.
        polygons.append(Polygon([cap_bottom.clone(),
                                 make_vertex(0., angle0, -1.),
                                 make_vertex(0., angle1, -1.)]))
        polygons.append(Polygon([make_vertex(0., angle1, 0.),
                                 make_vertex(0., angle0, 0.),
                                 make_vertex(1., angle0, 0.),
                                 make_vertex(1., angle1, 0.)]))
        polygons.append(Polygon([cap_top.clone(),
                                 make_vertex(1., angle1, 1.),
                                 make_vertex(1., angle0, 1.)]))
    return CSG.fromPolygons(polygons)
@classmethod
def cone(cls, **kwargs):
    """ Returns a cone.

    Kwargs:
        start (list): Start (base centre) of cone, default [0, -1, 0].
        end (list): End (apex) of cone, default [0, 1, 0].
        radius (float): Maximum radius of cone at start, default 1.0.
        slices (int): Number of slices, default 16.
    """
    s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
    e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
    # Plain three-element lists are accepted as well as Vector instances.
    if isinstance(s, list):
        s = Vector(*s)
    if isinstance(e, list):
        e = Vector(*e)
    r = kwargs.get('radius', 1.0)
    slices = kwargs.get('slices', 16)
    ray = e.minus(s)
    # Orthonormal frame around the cone axis; the seed vector is chosen
    # to avoid a near-parallel cross product.
    axisZ = ray.unit()
    isY = (math.fabs(axisZ.y) > 0.5)
    axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
    axisY = axisX.cross(axisZ).unit()
    startNormal = axisZ.negated()
    start = Vertex(s, startNormal)
    polygons = []
    # Side normals are tilted by the taper angle so they are
    # perpendicular to the slanted surface rather than to the axis.
    taperAngle = math.atan2(r, ray.length())
    sinTaperAngle = math.sin(taperAngle)
    cosTaperAngle = math.cos(taperAngle)

    def point(angle):
        # radial direction pointing out
        out = axisX.times(math.cos(angle)).plus(
            axisY.times(math.sin(angle)))
        pos = s.plus(out.times(r))
        # normal taking into account the tapering of the cone
        normal = out.times(cosTaperAngle).plus(axisZ.times(sinTaperAngle))
        return pos, normal

    dt = math.pi * 2.0 / float(slices)
    for i in range(0, slices):
        t0 = i * dt
        i1 = (i + 1) % slices
        t1 = i1 * dt
        # coordinates and associated normal pointing outwards of the cone's
        # side
        p0, n0 = point(t0)
        p1, n1 = point(t1)
        # average normal for the tip
        nAvg = n0.plus(n1).times(0.5)
        # polygon on the low side (disk sector)
        polyStart = Polygon([start.clone(),
                             Vertex(p0, startNormal),
                             Vertex(p1, startNormal)])
        polygons.append(polyStart)
        # polygon extending from the low side to the tip
        polySide = Polygon([Vertex(p0, n0), Vertex(e, nAvg), Vertex(p1, n1)])
        polygons.append(polySide)
    return CSG.fromPolygons(polygons)
|
timknip/pycsg | csg/core.py | CSG.union | python | def union(self, csg):
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons()) | Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+ | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L218-L242 | [
"def fromPolygons(cls, polygons):\n csg = CSG()\n csg.polygons = polygons\n return csg\n",
"def clone(self):\n csg = CSG()\n csg.polygons = list(map(lambda p: p.clone(), self.polygons))\n return csg\n",
"def invert(self):\n \"\"\" \n Convert solid space to empty space and empty space to ... | class CSG(object):
"""
Constructive Solid Geometry (CSG) is a modeling technique that uses Boolean
operations like union and intersection to combine 3D solids. This library
implements CSG operations on meshes elegantly and concisely using BSP trees,
and is meant to serve as an easily understandable implementation of the
algorithm. All edge cases involving overlapping coplanar polygons in both
solids are correctly handled.
Example usage::
from csg.core import CSG
cube = CSG.cube();
sphere = CSG.sphere({'radius': 1.3});
polygons = cube.subtract(sphere).toPolygons();
## Implementation Details
All CSG operations are implemented in terms of two functions, `clipTo()` and
`invert()`, which remove parts of a BSP tree inside another BSP tree and swap
solid and empty space, respectively. To find the union of `a` and `b`, we
want to remove everything in `a` inside `b` and everything in `b` inside `a`,
then combine polygons from `a` and `b` into one solid::
a.clipTo(b);
b.clipTo(a);
a.build(b.allPolygons());
The only tricky part is handling overlapping coplanar polygons in both trees.
The code above keeps both copies, but we need to keep them in one tree and
remove them in the other tree. To remove them from `b` we can clip the
inverse of `b` against `a`. The code for union now looks like this::
a.clipTo(b);
b.clipTo(a);
b.invert();
b.clipTo(a);
b.invert();
a.build(b.allPolygons());
Subtraction and intersection naturally follow from set operations. If
union is `A | B`, subtraction is `A - B = ~(~A | B)` and intersection is
`A & B = ~(~A | ~B)` where `~` is the complement operator.
## License
Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
Additions by Alex Pletzer (Pennsylvania State University)
"""
def __init__(self):
self.polygons = []
@classmethod
def fromPolygons(cls, polygons):
csg = CSG()
csg.polygons = polygons
return csg
def clone(self):
csg = CSG()
csg.polygons = list(map(lambda p: p.clone(), self.polygons))
return csg
def toPolygons(self):
return self.polygons
def refine(self):
"""
Return a refined CSG. To each polygon, a middle point is added to each edge and to the center
of the polygon
"""
newCSG = CSG()
for poly in self.polygons:
verts = poly.vertices
numVerts = len(verts)
if numVerts == 0:
continue
midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
midNormal = None
if verts[0].normal is not None:
midNormal = poly.plane.normal
midVert = Vertex(midPos, midNormal)
newVerts = verts + \
[verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
[midVert]
i = 0
vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
newPoly = Polygon(vs, poly.shared)
newPoly.shared = poly.shared
newPoly.plane = poly.plane
newCSG.polygons.append(newPoly)
for i in range(1, numVerts):
vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
newPoly = Polygon(vs, poly.shared)
newCSG.polygons.append(newPoly)
return newCSG
def translate(self, disp):
"""
Translate Geometry.
disp: displacement (array of floats)
"""
d = Vector(disp[0], disp[1], disp[2])
for poly in self.polygons:
for v in poly.vertices:
v.pos = v.pos.plus(d)
# no change to the normals
def rotate(self, axis, angleDeg):
"""
Rotate geometry.
axis: axis of rotation (array of floats)
angleDeg: rotation angle in degrees
"""
ax = Vector(axis[0], axis[1], axis[2]).unit()
cosAngle = math.cos(math.pi * angleDeg / 180.)
sinAngle = math.sin(math.pi * angleDeg / 180.)
def newVector(v):
vA = v.dot(ax)
vPerp = v.minus(ax.times(vA))
vPerpLen = vPerp.length()
if vPerpLen == 0:
# vector is parallel to axis, no need to rotate
return v
u1 = vPerp.unit()
u2 = u1.cross(ax)
vCosA = vPerpLen*cosAngle
vSinA = vPerpLen*sinAngle
return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA)))
for poly in self.polygons:
for vert in poly.vertices:
vert.pos = newVector(vert.pos)
normal = vert.normal
if normal.length() > 0:
vert.normal = newVector(vert.normal)
def toVerticesAndPolygons(self):
"""
Return list of vertices, polygons (cells), and the total
number of vertex indices in the polygon connectivity list
(count).
"""
offset = 1.234567890
verts = []
polys = []
vertexIndexMap = {}
count = 0
for poly in self.polygons:
verts = poly.vertices
cell = []
for v in poly.vertices:
p = v.pos
# use string key to remove degeneracy associated
# very close points. The format %.10e ensures that
# points differing in the 11 digits and higher are
# treated as the same. For instance 1.2e-10 and
# 1.3e-10 are essentially the same.
vKey = '%.10e,%.10e,%.10e' % (p[0] + offset,
p[1] + offset,
p[2] + offset)
if not vKey in vertexIndexMap:
vertexIndexMap[vKey] = len(vertexIndexMap)
index = vertexIndexMap[vKey]
cell.append(index)
count += 1
polys.append(cell)
# sort by index
sortedVertexIndex = sorted(vertexIndexMap.items(),
key=operator.itemgetter(1))
verts = []
for v, i in sortedVertexIndex:
p = []
for c in v.split(','):
p.append(float(c) - offset)
verts.append(tuple(p))
return verts, polys, count
def saveVTK(self, filename):
"""
Save polygons in VTK file.
"""
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n')
def union(self, csg):
"""
Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons())
def __add__(self, csg):
return self.union(csg)
def subtract(self, csg):
"""
Return a new CSG solid representing space in this solid but not in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.subtract(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +--+
+----+--+ | +----+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __sub__(self, csg):
return self.subtract(csg)
def intersect(self, csg):
"""
Return a new CSG solid representing space both this solid and in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.intersect(B)
+-------+
| |
| A |
| +--+----+ = +--+
+----+--+ | +--+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
b.clipTo(a)
b.invert()
a.clipTo(b)
b.clipTo(a)
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __mul__(self, csg):
return self.intersect(csg)
def inverse(self):
"""
Return a new CSG solid with solid and empty space switched. This solid is
not modified.
"""
csg = self.clone()
map(lambda p: p.flip(), csg.polygons)
return csg
@classmethod
def cube(cls, center=[0,0,0], radius=[1,1,1]):
"""
Construct an axis-aligned solid cuboid. Optional parameters are `center` and
`radius`, which default to `[0, 0, 0]` and `[1, 1, 1]`. The radius can be
specified using a single number or a list of three numbers, one for each axis.
Example code::
cube = CSG.cube(
center=[0, 0, 0],
radius=1
)
"""
c = Vector(0, 0, 0)
r = [1, 1, 1]
if isinstance(center, list): c = Vector(center)
if isinstance(radius, list): r = radius
else: r = [radius, radius, radius]
polygons = list(map(
lambda v: Polygon(
list(map(lambda i:
Vertex(
Vector(
c.x + r[0] * (2 * bool(i & 1) - 1),
c.y + r[1] * (2 * bool(i & 2) - 1),
c.z + r[2] * (2 * bool(i & 4) - 1)
),
None
), v[0]))),
[
[[0, 4, 6, 2], [-1, 0, 0]],
[[1, 3, 7, 5], [+1, 0, 0]],
[[0, 1, 5, 4], [0, -1, 0]],
[[2, 6, 7, 3], [0, +1, 0]],
[[0, 2, 3, 1], [0, 0, -1]],
[[4, 5, 7, 6], [0, 0, +1]]
]))
return CSG.fromPolygons(polygons)
@classmethod
def sphere(cls, **kwargs):
""" Returns a sphere.
Kwargs:
center (list): Center of sphere, default [0, 0, 0].
radius (float): Radius of sphere, default 1.0.
slices (int): Number of slices, default 16.
stacks (int): Number of stacks, default 8.
"""
center = kwargs.get('center', [0.0, 0.0, 0.0])
if isinstance(center, float):
center = [center, center, center]
c = Vector(center)
r = kwargs.get('radius', 1.0)
if isinstance(r, list) and len(r) > 2:
r = r[0]
slices = kwargs.get('slices', 16)
stacks = kwargs.get('stacks', 8)
polygons = []
def appendVertex(vertices, theta, phi):
d = Vector(
math.cos(theta) * math.sin(phi),
math.cos(phi),
math.sin(theta) * math.sin(phi))
vertices.append(Vertex(c.plus(d.times(r)), d))
dTheta = math.pi * 2.0 / float(slices)
dPhi = math.pi / float(stacks)
j0 = 0
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +--+
# | /
# |/
# +
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j1 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
j0 = stacks - 1
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +
# |\
# | \
# +--+
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j0 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
for j0 in range(1, stacks - 1):
j1 = j0 + 0.5
j2 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 0.5
i2 = i0 + 1
# +---+
# |\ /|
# | x |
# |/ \|
# +---+
verticesN = []
appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesN))
verticesS = []
appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesS))
verticesW = []
appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesW))
verticesE = []
appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesE))
return CSG.fromPolygons(polygons)
@classmethod
def cylinder(cls, **kwargs):
""" Returns a cylinder.
Kwargs:
start (list): Start of cylinder, default [0, -1, 0].
end (list): End of cylinder, default [0, 1, 0].
radius (float): Radius of cylinder, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
start = Vertex(s, axisZ.negated())
end = Vertex(e, axisZ.unit())
polygons = []
def point(stack, angle, normalBlend):
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(ray.times(stack)).plus(out.times(r))
normal = out.times(1.0 - math.fabs(normalBlend)).plus(
axisZ.times(normalBlend))
return Vertex(pos, normal)
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
polygons.append(Polygon([start.clone(),
point(0., t0, -1.),
point(0., t1, -1.)]))
polygons.append(Polygon([point(0., t1, 0.),
point(0., t0, 0.),
point(1., t0, 0.),
point(1., t1, 0.)]))
polygons.append(Polygon([end.clone(),
point(1., t1, 1.),
point(1., t0, 1.)]))
return CSG.fromPolygons(polygons)
@classmethod
def cone(cls, **kwargs):
""" Returns a cone.
Kwargs:
start (list): Start of cone, default [0, -1, 0].
end (list): End of cone, default [0, 1, 0].
radius (float): Maximum radius of cone at start, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
startNormal = axisZ.negated()
start = Vertex(s, startNormal)
polygons = []
taperAngle = math.atan2(r, ray.length())
sinTaperAngle = math.sin(taperAngle)
cosTaperAngle = math.cos(taperAngle)
def point(angle):
# radial direction pointing out
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(out.times(r))
# normal taking into account the tapering of the cone
normal = out.times(cosTaperAngle).plus(axisZ.times(sinTaperAngle))
return pos, normal
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
# coordinates and associated normal pointing outwards of the cone's
# side
p0, n0 = point(t0)
p1, n1 = point(t1)
# average normal for the tip
nAvg = n0.plus(n1).times(0.5)
# polygon on the low side (disk sector)
polyStart = Polygon([start.clone(),
Vertex(p0, startNormal),
Vertex(p1, startNormal)])
polygons.append(polyStart)
# polygon extending from the low side to the tip
polySide = Polygon([Vertex(p0, n0), Vertex(e, nAvg), Vertex(p1, n1)])
polygons.append(polySide)
return CSG.fromPolygons(polygons)
|
timknip/pycsg | csg/core.py | CSG.inverse | python | def inverse(self):
csg = self.clone()
map(lambda p: p.flip(), csg.polygons)
return csg | Return a new CSG solid with solid and empty space switched. This solid is
not modified. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L308-L315 | [
"def clone(self):\n csg = CSG()\n csg.polygons = list(map(lambda p: p.clone(), self.polygons))\n return csg\n"
] | class CSG(object):
"""
Constructive Solid Geometry (CSG) is a modeling technique that uses Boolean
operations like union and intersection to combine 3D solids. This library
implements CSG operations on meshes elegantly and concisely using BSP trees,
and is meant to serve as an easily understandable implementation of the
algorithm. All edge cases involving overlapping coplanar polygons in both
solids are correctly handled.
Example usage::
from csg.core import CSG
cube = CSG.cube();
sphere = CSG.sphere({'radius': 1.3});
polygons = cube.subtract(sphere).toPolygons();
## Implementation Details
All CSG operations are implemented in terms of two functions, `clipTo()` and
`invert()`, which remove parts of a BSP tree inside another BSP tree and swap
solid and empty space, respectively. To find the union of `a` and `b`, we
want to remove everything in `a` inside `b` and everything in `b` inside `a`,
then combine polygons from `a` and `b` into one solid::
a.clipTo(b);
b.clipTo(a);
a.build(b.allPolygons());
The only tricky part is handling overlapping coplanar polygons in both trees.
The code above keeps both copies, but we need to keep them in one tree and
remove them in the other tree. To remove them from `b` we can clip the
inverse of `b` against `a`. The code for union now looks like this::
a.clipTo(b);
b.clipTo(a);
b.invert();
b.clipTo(a);
b.invert();
a.build(b.allPolygons());
Subtraction and intersection naturally follow from set operations. If
union is `A | B`, subtraction is `A - B = ~(~A | B)` and intersection is
`A & B = ~(~A | ~B)` where `~` is the complement operator.
## License
Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
Additions by Alex Pletzer (Pennsylvania State University)
"""
def __init__(self):
self.polygons = []
@classmethod
def fromPolygons(cls, polygons):
csg = CSG()
csg.polygons = polygons
return csg
def clone(self):
csg = CSG()
csg.polygons = list(map(lambda p: p.clone(), self.polygons))
return csg
def toPolygons(self):
return self.polygons
def refine(self):
"""
Return a refined CSG. To each polygon, a middle point is added to each edge and to the center
of the polygon
"""
newCSG = CSG()
for poly in self.polygons:
verts = poly.vertices
numVerts = len(verts)
if numVerts == 0:
continue
midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
midNormal = None
if verts[0].normal is not None:
midNormal = poly.plane.normal
midVert = Vertex(midPos, midNormal)
newVerts = verts + \
[verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
[midVert]
i = 0
vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
newPoly = Polygon(vs, poly.shared)
newPoly.shared = poly.shared
newPoly.plane = poly.plane
newCSG.polygons.append(newPoly)
for i in range(1, numVerts):
vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
newPoly = Polygon(vs, poly.shared)
newCSG.polygons.append(newPoly)
return newCSG
def translate(self, disp):
"""
Translate Geometry.
disp: displacement (array of floats)
"""
d = Vector(disp[0], disp[1], disp[2])
for poly in self.polygons:
for v in poly.vertices:
v.pos = v.pos.plus(d)
# no change to the normals
def rotate(self, axis, angleDeg):
"""
Rotate geometry.
axis: axis of rotation (array of floats)
angleDeg: rotation angle in degrees
"""
ax = Vector(axis[0], axis[1], axis[2]).unit()
cosAngle = math.cos(math.pi * angleDeg / 180.)
sinAngle = math.sin(math.pi * angleDeg / 180.)
def newVector(v):
vA = v.dot(ax)
vPerp = v.minus(ax.times(vA))
vPerpLen = vPerp.length()
if vPerpLen == 0:
# vector is parallel to axis, no need to rotate
return v
u1 = vPerp.unit()
u2 = u1.cross(ax)
vCosA = vPerpLen*cosAngle
vSinA = vPerpLen*sinAngle
return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA)))
for poly in self.polygons:
for vert in poly.vertices:
vert.pos = newVector(vert.pos)
normal = vert.normal
if normal.length() > 0:
vert.normal = newVector(vert.normal)
def toVerticesAndPolygons(self):
"""
Return list of vertices, polygons (cells), and the total
number of vertex indices in the polygon connectivity list
(count).
"""
offset = 1.234567890
verts = []
polys = []
vertexIndexMap = {}
count = 0
for poly in self.polygons:
verts = poly.vertices
cell = []
for v in poly.vertices:
p = v.pos
# use string key to remove degeneracy associated
# very close points. The format %.10e ensures that
# points differing in the 11 digits and higher are
# treated as the same. For instance 1.2e-10 and
# 1.3e-10 are essentially the same.
vKey = '%.10e,%.10e,%.10e' % (p[0] + offset,
p[1] + offset,
p[2] + offset)
if not vKey in vertexIndexMap:
vertexIndexMap[vKey] = len(vertexIndexMap)
index = vertexIndexMap[vKey]
cell.append(index)
count += 1
polys.append(cell)
# sort by index
sortedVertexIndex = sorted(vertexIndexMap.items(),
key=operator.itemgetter(1))
verts = []
for v, i in sortedVertexIndex:
p = []
for c in v.split(','):
p.append(float(c) - offset)
verts.append(tuple(p))
return verts, polys, count
def saveVTK(self, filename):
"""
Save polygons in VTK file.
"""
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n')
def union(self, csg):
"""
Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons())
def __add__(self, csg):
return self.union(csg)
def subtract(self, csg):
"""
Return a new CSG solid representing space in this solid but not in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.subtract(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +--+
+----+--+ | +----+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __sub__(self, csg):
return self.subtract(csg)
def intersect(self, csg):
"""
Return a new CSG solid representing space both this solid and in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.intersect(B)
+-------+
| |
| A |
| +--+----+ = +--+
+----+--+ | +--+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
b.clipTo(a)
b.invert()
a.clipTo(b)
b.clipTo(a)
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __mul__(self, csg):
return self.intersect(csg)
@classmethod
def cube(cls, center=[0,0,0], radius=[1,1,1]):
"""
Construct an axis-aligned solid cuboid. Optional parameters are `center` and
`radius`, which default to `[0, 0, 0]` and `[1, 1, 1]`. The radius can be
specified using a single number or a list of three numbers, one for each axis.
Example code::
cube = CSG.cube(
center=[0, 0, 0],
radius=1
)
"""
c = Vector(0, 0, 0)
r = [1, 1, 1]
if isinstance(center, list): c = Vector(center)
if isinstance(radius, list): r = radius
else: r = [radius, radius, radius]
polygons = list(map(
lambda v: Polygon(
list(map(lambda i:
Vertex(
Vector(
c.x + r[0] * (2 * bool(i & 1) - 1),
c.y + r[1] * (2 * bool(i & 2) - 1),
c.z + r[2] * (2 * bool(i & 4) - 1)
),
None
), v[0]))),
[
[[0, 4, 6, 2], [-1, 0, 0]],
[[1, 3, 7, 5], [+1, 0, 0]],
[[0, 1, 5, 4], [0, -1, 0]],
[[2, 6, 7, 3], [0, +1, 0]],
[[0, 2, 3, 1], [0, 0, -1]],
[[4, 5, 7, 6], [0, 0, +1]]
]))
return CSG.fromPolygons(polygons)
@classmethod
def sphere(cls, **kwargs):
""" Returns a sphere.
Kwargs:
center (list): Center of sphere, default [0, 0, 0].
radius (float): Radius of sphere, default 1.0.
slices (int): Number of slices, default 16.
stacks (int): Number of stacks, default 8.
"""
center = kwargs.get('center', [0.0, 0.0, 0.0])
if isinstance(center, float):
center = [center, center, center]
c = Vector(center)
r = kwargs.get('radius', 1.0)
if isinstance(r, list) and len(r) > 2:
r = r[0]
slices = kwargs.get('slices', 16)
stacks = kwargs.get('stacks', 8)
polygons = []
def appendVertex(vertices, theta, phi):
d = Vector(
math.cos(theta) * math.sin(phi),
math.cos(phi),
math.sin(theta) * math.sin(phi))
vertices.append(Vertex(c.plus(d.times(r)), d))
dTheta = math.pi * 2.0 / float(slices)
dPhi = math.pi / float(stacks)
j0 = 0
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +--+
# | /
# |/
# +
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j1 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
j0 = stacks - 1
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +
# |\
# | \
# +--+
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j0 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
for j0 in range(1, stacks - 1):
j1 = j0 + 0.5
j2 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 0.5
i2 = i0 + 1
# +---+
# |\ /|
# | x |
# |/ \|
# +---+
verticesN = []
appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesN))
verticesS = []
appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesS))
verticesW = []
appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesW))
verticesE = []
appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesE))
return CSG.fromPolygons(polygons)
@classmethod
def cylinder(cls, **kwargs):
""" Returns a cylinder.
Kwargs:
start (list): Start of cylinder, default [0, -1, 0].
end (list): End of cylinder, default [0, 1, 0].
radius (float): Radius of cylinder, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
start = Vertex(s, axisZ.negated())
end = Vertex(e, axisZ.unit())
polygons = []
def point(stack, angle, normalBlend):
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(ray.times(stack)).plus(out.times(r))
normal = out.times(1.0 - math.fabs(normalBlend)).plus(
axisZ.times(normalBlend))
return Vertex(pos, normal)
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
polygons.append(Polygon([start.clone(),
point(0., t0, -1.),
point(0., t1, -1.)]))
polygons.append(Polygon([point(0., t1, 0.),
point(0., t0, 0.),
point(1., t0, 0.),
point(1., t1, 0.)]))
polygons.append(Polygon([end.clone(),
point(1., t1, 1.),
point(1., t0, 1.)]))
return CSG.fromPolygons(polygons)
@classmethod
def cone(cls, **kwargs):
""" Returns a cone.
Kwargs:
start (list): Start of cone, default [0, -1, 0].
end (list): End of cone, default [0, 1, 0].
radius (float): Maximum radius of cone at start, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
startNormal = axisZ.negated()
start = Vertex(s, startNormal)
polygons = []
taperAngle = math.atan2(r, ray.length())
sinTaperAngle = math.sin(taperAngle)
cosTaperAngle = math.cos(taperAngle)
def point(angle):
# radial direction pointing out
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(out.times(r))
# normal taking into account the tapering of the cone
normal = out.times(cosTaperAngle).plus(axisZ.times(sinTaperAngle))
return pos, normal
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
# coordinates and associated normal pointing outwards of the cone's
# side
p0, n0 = point(t0)
p1, n1 = point(t1)
# average normal for the tip
nAvg = n0.plus(n1).times(0.5)
# polygon on the low side (disk sector)
polyStart = Polygon([start.clone(),
Vertex(p0, startNormal),
Vertex(p1, startNormal)])
polygons.append(polyStart)
# polygon extending from the low side to the tip
polySide = Polygon([Vertex(p0, n0), Vertex(e, nAvg), Vertex(p1, n1)])
polygons.append(polySide)
return CSG.fromPolygons(polygons)
|
timknip/pycsg | csg/core.py | CSG.cube | python | def cube(cls, center=[0,0,0], radius=[1,1,1]):
c = Vector(0, 0, 0)
r = [1, 1, 1]
if isinstance(center, list): c = Vector(center)
if isinstance(radius, list): r = radius
else: r = [radius, radius, radius]
polygons = list(map(
lambda v: Polygon(
list(map(lambda i:
Vertex(
Vector(
c.x + r[0] * (2 * bool(i & 1) - 1),
c.y + r[1] * (2 * bool(i & 2) - 1),
c.z + r[2] * (2 * bool(i & 4) - 1)
),
None
), v[0]))),
[
[[0, 4, 6, 2], [-1, 0, 0]],
[[1, 3, 7, 5], [+1, 0, 0]],
[[0, 1, 5, 4], [0, -1, 0]],
[[2, 6, 7, 3], [0, +1, 0]],
[[0, 2, 3, 1], [0, 0, -1]],
[[4, 5, 7, 6], [0, 0, +1]]
]))
return CSG.fromPolygons(polygons) | Construct an axis-aligned solid cuboid. Optional parameters are `center` and
`radius`, which default to `[0, 0, 0]` and `[1, 1, 1]`. The radius can be
specified using a single number or a list of three numbers, one for each axis.
Example code::
cube = CSG.cube(
center=[0, 0, 0],
radius=1
) | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L318-L356 | [
"def fromPolygons(cls, polygons):\n csg = CSG()\n csg.polygons = polygons\n return csg\n"
] | class CSG(object):
"""
Constructive Solid Geometry (CSG) is a modeling technique that uses Boolean
operations like union and intersection to combine 3D solids. This library
implements CSG operations on meshes elegantly and concisely using BSP trees,
and is meant to serve as an easily understandable implementation of the
algorithm. All edge cases involving overlapping coplanar polygons in both
solids are correctly handled.
Example usage::
from csg.core import CSG
cube = CSG.cube();
sphere = CSG.sphere({'radius': 1.3});
polygons = cube.subtract(sphere).toPolygons();
## Implementation Details
All CSG operations are implemented in terms of two functions, `clipTo()` and
`invert()`, which remove parts of a BSP tree inside another BSP tree and swap
solid and empty space, respectively. To find the union of `a` and `b`, we
want to remove everything in `a` inside `b` and everything in `b` inside `a`,
then combine polygons from `a` and `b` into one solid::
a.clipTo(b);
b.clipTo(a);
a.build(b.allPolygons());
The only tricky part is handling overlapping coplanar polygons in both trees.
The code above keeps both copies, but we need to keep them in one tree and
remove them in the other tree. To remove them from `b` we can clip the
inverse of `b` against `a`. The code for union now looks like this::
a.clipTo(b);
b.clipTo(a);
b.invert();
b.clipTo(a);
b.invert();
a.build(b.allPolygons());
Subtraction and intersection naturally follow from set operations. If
union is `A | B`, subtraction is `A - B = ~(~A | B)` and intersection is
`A & B = ~(~A | ~B)` where `~` is the complement operator.
## License
Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
Additions by Alex Pletzer (Pennsylvania State University)
"""
def __init__(self):
self.polygons = []
@classmethod
def fromPolygons(cls, polygons):
csg = CSG()
csg.polygons = polygons
return csg
def clone(self):
csg = CSG()
csg.polygons = list(map(lambda p: p.clone(), self.polygons))
return csg
def toPolygons(self):
return self.polygons
def refine(self):
"""
Return a refined CSG. To each polygon, a middle point is added to each edge and to the center
of the polygon
"""
newCSG = CSG()
for poly in self.polygons:
verts = poly.vertices
numVerts = len(verts)
if numVerts == 0:
continue
midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
midNormal = None
if verts[0].normal is not None:
midNormal = poly.plane.normal
midVert = Vertex(midPos, midNormal)
newVerts = verts + \
[verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
[midVert]
i = 0
vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
newPoly = Polygon(vs, poly.shared)
newPoly.shared = poly.shared
newPoly.plane = poly.plane
newCSG.polygons.append(newPoly)
for i in range(1, numVerts):
vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
newPoly = Polygon(vs, poly.shared)
newCSG.polygons.append(newPoly)
return newCSG
def translate(self, disp):
"""
Translate Geometry.
disp: displacement (array of floats)
"""
d = Vector(disp[0], disp[1], disp[2])
for poly in self.polygons:
for v in poly.vertices:
v.pos = v.pos.plus(d)
# no change to the normals
def rotate(self, axis, angleDeg):
"""
Rotate geometry.
axis: axis of rotation (array of floats)
angleDeg: rotation angle in degrees
"""
ax = Vector(axis[0], axis[1], axis[2]).unit()
cosAngle = math.cos(math.pi * angleDeg / 180.)
sinAngle = math.sin(math.pi * angleDeg / 180.)
def newVector(v):
vA = v.dot(ax)
vPerp = v.minus(ax.times(vA))
vPerpLen = vPerp.length()
if vPerpLen == 0:
# vector is parallel to axis, no need to rotate
return v
u1 = vPerp.unit()
u2 = u1.cross(ax)
vCosA = vPerpLen*cosAngle
vSinA = vPerpLen*sinAngle
return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA)))
for poly in self.polygons:
for vert in poly.vertices:
vert.pos = newVector(vert.pos)
normal = vert.normal
if normal.length() > 0:
vert.normal = newVector(vert.normal)
def toVerticesAndPolygons(self):
"""
Return list of vertices, polygons (cells), and the total
number of vertex indices in the polygon connectivity list
(count).
"""
offset = 1.234567890
verts = []
polys = []
vertexIndexMap = {}
count = 0
for poly in self.polygons:
verts = poly.vertices
cell = []
for v in poly.vertices:
p = v.pos
# use string key to remove degeneracy associated
# very close points. The format %.10e ensures that
# points differing in the 11 digits and higher are
# treated as the same. For instance 1.2e-10 and
# 1.3e-10 are essentially the same.
vKey = '%.10e,%.10e,%.10e' % (p[0] + offset,
p[1] + offset,
p[2] + offset)
if not vKey in vertexIndexMap:
vertexIndexMap[vKey] = len(vertexIndexMap)
index = vertexIndexMap[vKey]
cell.append(index)
count += 1
polys.append(cell)
# sort by index
sortedVertexIndex = sorted(vertexIndexMap.items(),
key=operator.itemgetter(1))
verts = []
for v, i in sortedVertexIndex:
p = []
for c in v.split(','):
p.append(float(c) - offset)
verts.append(tuple(p))
return verts, polys, count
def saveVTK(self, filename):
"""
Save polygons in VTK file.
"""
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n')
def union(self, csg):
"""
Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons())
def __add__(self, csg):
return self.union(csg)
def subtract(self, csg):
"""
Return a new CSG solid representing space in this solid but not in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.subtract(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +--+
+----+--+ | +----+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __sub__(self, csg):
return self.subtract(csg)
def intersect(self, csg):
"""
Return a new CSG solid representing space both this solid and in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.intersect(B)
+-------+
| |
| A |
| +--+----+ = +--+
+----+--+ | +--+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
b.clipTo(a)
b.invert()
a.clipTo(b)
b.clipTo(a)
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __mul__(self, csg):
return self.intersect(csg)
def inverse(self):
"""
Return a new CSG solid with solid and empty space switched. This solid is
not modified.
"""
csg = self.clone()
map(lambda p: p.flip(), csg.polygons)
return csg
@classmethod
def cube(cls, center=[0,0,0], radius=[1,1,1]):
"""
Construct an axis-aligned solid cuboid. Optional parameters are `center` and
`radius`, which default to `[0, 0, 0]` and `[1, 1, 1]`. The radius can be
specified using a single number or a list of three numbers, one for each axis.
Example code::
cube = CSG.cube(
center=[0, 0, 0],
radius=1
)
"""
c = Vector(0, 0, 0)
r = [1, 1, 1]
if isinstance(center, list): c = Vector(center)
if isinstance(radius, list): r = radius
else: r = [radius, radius, radius]
polygons = list(map(
lambda v: Polygon(
list(map(lambda i:
Vertex(
Vector(
c.x + r[0] * (2 * bool(i & 1) - 1),
c.y + r[1] * (2 * bool(i & 2) - 1),
c.z + r[2] * (2 * bool(i & 4) - 1)
),
None
), v[0]))),
[
[[0, 4, 6, 2], [-1, 0, 0]],
[[1, 3, 7, 5], [+1, 0, 0]],
[[0, 1, 5, 4], [0, -1, 0]],
[[2, 6, 7, 3], [0, +1, 0]],
[[0, 2, 3, 1], [0, 0, -1]],
[[4, 5, 7, 6], [0, 0, +1]]
]))
return CSG.fromPolygons(polygons)
@classmethod
def sphere(cls, **kwargs):
""" Returns a sphere.
Kwargs:
center (list): Center of sphere, default [0, 0, 0].
radius (float): Radius of sphere, default 1.0.
slices (int): Number of slices, default 16.
stacks (int): Number of stacks, default 8.
"""
center = kwargs.get('center', [0.0, 0.0, 0.0])
if isinstance(center, float):
center = [center, center, center]
c = Vector(center)
r = kwargs.get('radius', 1.0)
if isinstance(r, list) and len(r) > 2:
r = r[0]
slices = kwargs.get('slices', 16)
stacks = kwargs.get('stacks', 8)
polygons = []
def appendVertex(vertices, theta, phi):
d = Vector(
math.cos(theta) * math.sin(phi),
math.cos(phi),
math.sin(theta) * math.sin(phi))
vertices.append(Vertex(c.plus(d.times(r)), d))
dTheta = math.pi * 2.0 / float(slices)
dPhi = math.pi / float(stacks)
j0 = 0
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +--+
# | /
# |/
# +
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j1 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
j0 = stacks - 1
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +
# |\
# | \
# +--+
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j0 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
for j0 in range(1, stacks - 1):
j1 = j0 + 0.5
j2 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 0.5
i2 = i0 + 1
# +---+
# |\ /|
# | x |
# |/ \|
# +---+
verticesN = []
appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesN))
verticesS = []
appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesS))
verticesW = []
appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesW))
verticesE = []
appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesE))
return CSG.fromPolygons(polygons)
@classmethod
def cylinder(cls, **kwargs):
""" Returns a cylinder.
Kwargs:
start (list): Start of cylinder, default [0, -1, 0].
end (list): End of cylinder, default [0, 1, 0].
radius (float): Radius of cylinder, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
start = Vertex(s, axisZ.negated())
end = Vertex(e, axisZ.unit())
polygons = []
def point(stack, angle, normalBlend):
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(ray.times(stack)).plus(out.times(r))
normal = out.times(1.0 - math.fabs(normalBlend)).plus(
axisZ.times(normalBlend))
return Vertex(pos, normal)
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
polygons.append(Polygon([start.clone(),
point(0., t0, -1.),
point(0., t1, -1.)]))
polygons.append(Polygon([point(0., t1, 0.),
point(0., t0, 0.),
point(1., t0, 0.),
point(1., t1, 0.)]))
polygons.append(Polygon([end.clone(),
point(1., t1, 1.),
point(1., t0, 1.)]))
return CSG.fromPolygons(polygons)
@classmethod
def cone(cls, **kwargs):
""" Returns a cone.
Kwargs:
start (list): Start of cone, default [0, -1, 0].
end (list): End of cone, default [0, 1, 0].
radius (float): Maximum radius of cone at start, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
startNormal = axisZ.negated()
start = Vertex(s, startNormal)
polygons = []
taperAngle = math.atan2(r, ray.length())
sinTaperAngle = math.sin(taperAngle)
cosTaperAngle = math.cos(taperAngle)
def point(angle):
# radial direction pointing out
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(out.times(r))
# normal taking into account the tapering of the cone
normal = out.times(cosTaperAngle).plus(axisZ.times(sinTaperAngle))
return pos, normal
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
# coordinates and associated normal pointing outwards of the cone's
# side
p0, n0 = point(t0)
p1, n1 = point(t1)
# average normal for the tip
nAvg = n0.plus(n1).times(0.5)
# polygon on the low side (disk sector)
polyStart = Polygon([start.clone(),
Vertex(p0, startNormal),
Vertex(p1, startNormal)])
polygons.append(polyStart)
# polygon extending from the low side to the tip
polySide = Polygon([Vertex(p0, n0), Vertex(e, nAvg), Vertex(p1, n1)])
polygons.append(polySide)
return CSG.fromPolygons(polygons)
|
timknip/pycsg | csg/core.py | CSG.sphere | python | def sphere(cls, **kwargs):
center = kwargs.get('center', [0.0, 0.0, 0.0])
if isinstance(center, float):
center = [center, center, center]
c = Vector(center)
r = kwargs.get('radius', 1.0)
if isinstance(r, list) and len(r) > 2:
r = r[0]
slices = kwargs.get('slices', 16)
stacks = kwargs.get('stacks', 8)
polygons = []
def appendVertex(vertices, theta, phi):
d = Vector(
math.cos(theta) * math.sin(phi),
math.cos(phi),
math.sin(theta) * math.sin(phi))
vertices.append(Vertex(c.plus(d.times(r)), d))
dTheta = math.pi * 2.0 / float(slices)
dPhi = math.pi / float(stacks)
j0 = 0
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +--+
# | /
# |/
# +
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j1 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
j0 = stacks - 1
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +
# |\
# | \
# +--+
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j0 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
for j0 in range(1, stacks - 1):
j1 = j0 + 0.5
j2 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 0.5
i2 = i0 + 1
# +---+
# |\ /|
# | x |
# |/ \|
# +---+
verticesN = []
appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesN))
verticesS = []
appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesS))
verticesW = []
appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesW))
verticesE = []
appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesE))
return CSG.fromPolygons(polygons) | Returns a sphere.
Kwargs:
center (list): Center of sphere, default [0, 0, 0].
radius (float): Radius of sphere, default 1.0.
slices (int): Number of slices, default 16.
stacks (int): Number of stacks, default 8. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L359-L451 | [
"def fromPolygons(cls, polygons):\n csg = CSG()\n csg.polygons = polygons\n return csg\n",
"def appendVertex(vertices, theta, phi):\n d = Vector(\n math.cos(theta) * math.sin(phi),\n math.cos(phi),\n math.sin(theta) * math.sin(phi))\n vertices.append(Vertex(c.plus(d.times(r)), ... | class CSG(object):
"""
Constructive Solid Geometry (CSG) is a modeling technique that uses Boolean
operations like union and intersection to combine 3D solids. This library
implements CSG operations on meshes elegantly and concisely using BSP trees,
and is meant to serve as an easily understandable implementation of the
algorithm. All edge cases involving overlapping coplanar polygons in both
solids are correctly handled.
Example usage::
from csg.core import CSG
cube = CSG.cube();
sphere = CSG.sphere({'radius': 1.3});
polygons = cube.subtract(sphere).toPolygons();
## Implementation Details
All CSG operations are implemented in terms of two functions, `clipTo()` and
`invert()`, which remove parts of a BSP tree inside another BSP tree and swap
solid and empty space, respectively. To find the union of `a` and `b`, we
want to remove everything in `a` inside `b` and everything in `b` inside `a`,
then combine polygons from `a` and `b` into one solid::
a.clipTo(b);
b.clipTo(a);
a.build(b.allPolygons());
The only tricky part is handling overlapping coplanar polygons in both trees.
The code above keeps both copies, but we need to keep them in one tree and
remove them in the other tree. To remove them from `b` we can clip the
inverse of `b` against `a`. The code for union now looks like this::
a.clipTo(b);
b.clipTo(a);
b.invert();
b.clipTo(a);
b.invert();
a.build(b.allPolygons());
Subtraction and intersection naturally follow from set operations. If
union is `A | B`, subtraction is `A - B = ~(~A | B)` and intersection is
`A & B = ~(~A | ~B)` where `~` is the complement operator.
## License
Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
Additions by Alex Pletzer (Pennsylvania State University)
"""
def __init__(self):
self.polygons = []
@classmethod
def fromPolygons(cls, polygons):
csg = CSG()
csg.polygons = polygons
return csg
def clone(self):
csg = CSG()
csg.polygons = list(map(lambda p: p.clone(), self.polygons))
return csg
def toPolygons(self):
return self.polygons
def refine(self):
"""
Return a refined CSG. To each polygon, a middle point is added to each edge and to the center
of the polygon
"""
newCSG = CSG()
for poly in self.polygons:
verts = poly.vertices
numVerts = len(verts)
if numVerts == 0:
continue
midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
midNormal = None
if verts[0].normal is not None:
midNormal = poly.plane.normal
midVert = Vertex(midPos, midNormal)
newVerts = verts + \
[verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
[midVert]
i = 0
vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
newPoly = Polygon(vs, poly.shared)
newPoly.shared = poly.shared
newPoly.plane = poly.plane
newCSG.polygons.append(newPoly)
for i in range(1, numVerts):
vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
newPoly = Polygon(vs, poly.shared)
newCSG.polygons.append(newPoly)
return newCSG
def translate(self, disp):
"""
Translate Geometry.
disp: displacement (array of floats)
"""
d = Vector(disp[0], disp[1], disp[2])
for poly in self.polygons:
for v in poly.vertices:
v.pos = v.pos.plus(d)
# no change to the normals
def rotate(self, axis, angleDeg):
"""
Rotate geometry.
axis: axis of rotation (array of floats)
angleDeg: rotation angle in degrees
"""
ax = Vector(axis[0], axis[1], axis[2]).unit()
cosAngle = math.cos(math.pi * angleDeg / 180.)
sinAngle = math.sin(math.pi * angleDeg / 180.)
def newVector(v):
vA = v.dot(ax)
vPerp = v.minus(ax.times(vA))
vPerpLen = vPerp.length()
if vPerpLen == 0:
# vector is parallel to axis, no need to rotate
return v
u1 = vPerp.unit()
u2 = u1.cross(ax)
vCosA = vPerpLen*cosAngle
vSinA = vPerpLen*sinAngle
return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA)))
for poly in self.polygons:
for vert in poly.vertices:
vert.pos = newVector(vert.pos)
normal = vert.normal
if normal.length() > 0:
vert.normal = newVector(vert.normal)
def toVerticesAndPolygons(self):
"""
Return list of vertices, polygons (cells), and the total
number of vertex indices in the polygon connectivity list
(count).
"""
offset = 1.234567890
verts = []
polys = []
vertexIndexMap = {}
count = 0
for poly in self.polygons:
verts = poly.vertices
cell = []
for v in poly.vertices:
p = v.pos
# use string key to remove degeneracy associated
# very close points. The format %.10e ensures that
# points differing in the 11 digits and higher are
# treated as the same. For instance 1.2e-10 and
# 1.3e-10 are essentially the same.
vKey = '%.10e,%.10e,%.10e' % (p[0] + offset,
p[1] + offset,
p[2] + offset)
if not vKey in vertexIndexMap:
vertexIndexMap[vKey] = len(vertexIndexMap)
index = vertexIndexMap[vKey]
cell.append(index)
count += 1
polys.append(cell)
# sort by index
sortedVertexIndex = sorted(vertexIndexMap.items(),
key=operator.itemgetter(1))
verts = []
for v, i in sortedVertexIndex:
p = []
for c in v.split(','):
p.append(float(c) - offset)
verts.append(tuple(p))
return verts, polys, count
def saveVTK(self, filename):
"""
Save polygons in VTK file.
"""
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n')
def union(self, csg):
"""
Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons())
def __add__(self, csg):
return self.union(csg)
def subtract(self, csg):
"""
Return a new CSG solid representing space in this solid but not in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.subtract(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +--+
+----+--+ | +----+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __sub__(self, csg):
return self.subtract(csg)
def intersect(self, csg):
"""
Return a new CSG solid representing space both this solid and in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.intersect(B)
+-------+
| |
| A |
| +--+----+ = +--+
+----+--+ | +--+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
b.clipTo(a)
b.invert()
a.clipTo(b)
b.clipTo(a)
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __mul__(self, csg):
return self.intersect(csg)
def inverse(self):
"""
Return a new CSG solid with solid and empty space switched. This solid is
not modified.
"""
csg = self.clone()
map(lambda p: p.flip(), csg.polygons)
return csg
@classmethod
def cube(cls, center=[0,0,0], radius=[1,1,1]):
"""
Construct an axis-aligned solid cuboid. Optional parameters are `center` and
`radius`, which default to `[0, 0, 0]` and `[1, 1, 1]`. The radius can be
specified using a single number or a list of three numbers, one for each axis.
Example code::
cube = CSG.cube(
center=[0, 0, 0],
radius=1
)
"""
c = Vector(0, 0, 0)
r = [1, 1, 1]
if isinstance(center, list): c = Vector(center)
if isinstance(radius, list): r = radius
else: r = [radius, radius, radius]
polygons = list(map(
lambda v: Polygon(
list(map(lambda i:
Vertex(
Vector(
c.x + r[0] * (2 * bool(i & 1) - 1),
c.y + r[1] * (2 * bool(i & 2) - 1),
c.z + r[2] * (2 * bool(i & 4) - 1)
),
None
), v[0]))),
[
[[0, 4, 6, 2], [-1, 0, 0]],
[[1, 3, 7, 5], [+1, 0, 0]],
[[0, 1, 5, 4], [0, -1, 0]],
[[2, 6, 7, 3], [0, +1, 0]],
[[0, 2, 3, 1], [0, 0, -1]],
[[4, 5, 7, 6], [0, 0, +1]]
]))
return CSG.fromPolygons(polygons)
@classmethod
def sphere(cls, **kwargs):
""" Returns a sphere.
Kwargs:
center (list): Center of sphere, default [0, 0, 0].
radius (float): Radius of sphere, default 1.0.
slices (int): Number of slices, default 16.
stacks (int): Number of stacks, default 8.
"""
center = kwargs.get('center', [0.0, 0.0, 0.0])
if isinstance(center, float):
center = [center, center, center]
c = Vector(center)
r = kwargs.get('radius', 1.0)
if isinstance(r, list) and len(r) > 2:
r = r[0]
slices = kwargs.get('slices', 16)
stacks = kwargs.get('stacks', 8)
polygons = []
def appendVertex(vertices, theta, phi):
d = Vector(
math.cos(theta) * math.sin(phi),
math.cos(phi),
math.sin(theta) * math.sin(phi))
vertices.append(Vertex(c.plus(d.times(r)), d))
dTheta = math.pi * 2.0 / float(slices)
dPhi = math.pi / float(stacks)
j0 = 0
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +--+
# | /
# |/
# +
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j1 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
j0 = stacks - 1
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +
# |\
# | \
# +--+
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j0 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
for j0 in range(1, stacks - 1):
j1 = j0 + 0.5
j2 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 0.5
i2 = i0 + 1
# +---+
# |\ /|
# | x |
# |/ \|
# +---+
verticesN = []
appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesN))
verticesS = []
appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesS))
verticesW = []
appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesW))
verticesE = []
appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesE))
return CSG.fromPolygons(polygons)
@classmethod
def cylinder(cls, **kwargs):
    """Return a cylinder as a CSG solid.

    Kwargs:
        start (list): Start of cylinder, default [0, -1, 0].
        end (list): End of cylinder, default [0, 1, 0].
        radius (float): Radius of cylinder, default 1.0.
        slices (int): Number of slices, default 16.
    """
    p0 = kwargs.get('start', Vector(0.0, -1.0, 0.0))
    p1 = kwargs.get('end', Vector(0.0, 1.0, 0.0))
    if isinstance(p0, list):
        p0 = Vector(*p0)
    if isinstance(p1, list):
        p1 = Vector(*p1)
    radius = kwargs.get('radius', 1.0)
    nSlices = kwargs.get('slices', 16)
    ray = p1.minus(p0)
    # Build a local orthonormal frame with axisZ along the cylinder axis.
    axisZ = ray.unit()
    useY = math.fabs(axisZ.y) > 0.5
    axisX = Vector(float(useY), float(not useY), 0).cross(axisZ).unit()
    axisY = axisX.cross(axisZ).unit()
    bottomVert = Vertex(p0, axisZ.negated())
    topVert = Vertex(p1, axisZ.unit())
    polys = []

    def mkVertex(stack, angle, normalBlend):
        # Surface point at parameter `stack` (0 = bottom, 1 = top) and
        # azimuth `angle`; normalBlend tilts the normal toward the axis
        # (-1 = bottom cap, 0 = side, +1 = top cap).
        radial = axisX.times(math.cos(angle)).plus(
            axisY.times(math.sin(angle)))
        pos = p0.plus(ray.times(stack)).plus(radial.times(radius))
        normal = radial.times(1.0 - math.fabs(normalBlend)).plus(
            axisZ.times(normalBlend))
        return Vertex(pos, normal)

    step = math.pi * 2.0 / float(nSlices)
    for k in range(nSlices):
        a0 = k * step
        a1 = ((k + 1) % nSlices) * step
        # Bottom cap triangle.
        polys.append(Polygon([bottomVert.clone(),
                              mkVertex(0., a0, -1.),
                              mkVertex(0., a1, -1.)]))
        # Side quad.
        polys.append(Polygon([mkVertex(0., a1, 0.),
                              mkVertex(0., a0, 0.),
                              mkVertex(1., a0, 0.),
                              mkVertex(1., a1, 0.)]))
        # Top cap triangle.
        polys.append(Polygon([topVert.clone(),
                              mkVertex(1., a1, 1.),
                              mkVertex(1., a0, 1.)]))
    return CSG.fromPolygons(polys)
@classmethod
def cone(cls, **kwargs):
    """Return a cone as a CSG solid.

    Kwargs:
        start (list): Start of cone, default [0, -1, 0].
        end (list): End of cone (the tip), default [0, 1, 0].
        radius (float): Maximum radius of cone at start, default 1.0.
        slices (int): Number of slices, default 16.
    """
    base = kwargs.get('start', Vector(0.0, -1.0, 0.0))
    apex = kwargs.get('end', Vector(0.0, 1.0, 0.0))
    if isinstance(base, list):
        base = Vector(*base)
    if isinstance(apex, list):
        apex = Vector(*apex)
    radius = kwargs.get('radius', 1.0)
    nSlices = kwargs.get('slices', 16)
    ray = apex.minus(base)
    # Build a local orthonormal frame with axisZ along the cone axis.
    axisZ = ray.unit()
    useY = math.fabs(axisZ.y) > 0.5
    axisX = Vector(float(useY), float(not useY), 0).cross(axisZ).unit()
    axisY = axisX.cross(axisZ).unit()
    startNormal = axisZ.negated()
    baseVert = Vertex(base, startNormal)
    polys = []
    # Half-angle of the taper; side normals are tilted by this amount
    # away from the radial direction, toward the axis.
    taperAngle = math.atan2(radius, ray.length())
    sinTaper = math.sin(taperAngle)
    cosTaper = math.cos(taperAngle)

    def rim(angle):
        # Position on the base rim at `angle`, plus the outward-pointing
        # side normal there (accounts for the tapering of the cone).
        out = axisX.times(math.cos(angle)).plus(
            axisY.times(math.sin(angle)))
        pos = base.plus(out.times(radius))
        normal = out.times(cosTaper).plus(axisZ.times(sinTaper))
        return pos, normal

    step = math.pi * 2.0 / float(nSlices)
    for k in range(nSlices):
        a0 = k * step
        a1 = ((k + 1) % nSlices) * step
        p0, n0 = rim(a0)
        p1, n1 = rim(a1)
        # Tip normal: average of the two adjacent side normals.
        nTip = n0.plus(n1).times(0.5)
        # Base disk sector.
        polys.append(Polygon([baseVert.clone(),
                              Vertex(p0, startNormal),
                              Vertex(p1, startNormal)]))
        # Lateral triangle extending from the base up to the tip.
        polys.append(Polygon([Vertex(p0, n0), Vertex(apex, nTip),
                              Vertex(p1, n1)]))
    return CSG.fromPolygons(polys)
|
timknip/pycsg | csg/core.py | CSG.cylinder | python | def cylinder(cls, **kwargs):
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
start = Vertex(s, axisZ.negated())
end = Vertex(e, axisZ.unit())
polygons = []
def point(stack, angle, normalBlend):
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(ray.times(stack)).plus(out.times(r))
normal = out.times(1.0 - math.fabs(normalBlend)).plus(
axisZ.times(normalBlend))
return Vertex(pos, normal)
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
polygons.append(Polygon([start.clone(),
point(0., t0, -1.),
point(0., t1, -1.)]))
polygons.append(Polygon([point(0., t1, 0.),
point(0., t0, 0.),
point(1., t0, 0.),
point(1., t1, 0.)]))
polygons.append(Polygon([end.clone(),
point(1., t1, 1.),
point(1., t0, 1.)]))
return CSG.fromPolygons(polygons) | Returns a cylinder.
Kwargs:
start (list): Start of cylinder, default [0, -1, 0].
end (list): End of cylinder, default [0, 1, 0].
radius (float): Radius of cylinder, default 1.0.
slices (int): Number of slices, default 16. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L454-L508 | [
"def fromPolygons(cls, polygons):\n csg = CSG()\n csg.polygons = polygons\n return csg\n",
"def minus(self, a):\n \"\"\" Subtract. \"\"\"\n return Vector(self.x-a.x, self.y-a.y, self.z-a.z)\n",
"def cross(self, a):\n \"\"\" Cross. \"\"\"\n return Vector(\n self.y * a.z - self.z * a.y... | class CSG(object):
"""
Constructive Solid Geometry (CSG) is a modeling technique that uses Boolean
operations like union and intersection to combine 3D solids. This library
implements CSG operations on meshes elegantly and concisely using BSP trees,
and is meant to serve as an easily understandable implementation of the
algorithm. All edge cases involving overlapping coplanar polygons in both
solids are correctly handled.
Example usage::
from csg.core import CSG
cube = CSG.cube();
sphere = CSG.sphere({'radius': 1.3});
polygons = cube.subtract(sphere).toPolygons();
## Implementation Details
All CSG operations are implemented in terms of two functions, `clipTo()` and
`invert()`, which remove parts of a BSP tree inside another BSP tree and swap
solid and empty space, respectively. To find the union of `a` and `b`, we
want to remove everything in `a` inside `b` and everything in `b` inside `a`,
then combine polygons from `a` and `b` into one solid::
a.clipTo(b);
b.clipTo(a);
a.build(b.allPolygons());
The only tricky part is handling overlapping coplanar polygons in both trees.
The code above keeps both copies, but we need to keep them in one tree and
remove them in the other tree. To remove them from `b` we can clip the
inverse of `b` against `a`. The code for union now looks like this::
a.clipTo(b);
b.clipTo(a);
b.invert();
b.clipTo(a);
b.invert();
a.build(b.allPolygons());
Subtraction and intersection naturally follow from set operations. If
union is `A | B`, subtraction is `A - B = ~(~A | B)` and intersection is
`A & B = ~(~A | ~B)` where `~` is the complement operator.
## License
Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
Additions by Alex Pletzer (Pennsylvania State University)
"""
def __init__(self):
self.polygons = []
@classmethod
def fromPolygons(cls, polygons):
csg = CSG()
csg.polygons = polygons
return csg
def clone(self):
csg = CSG()
csg.polygons = list(map(lambda p: p.clone(), self.polygons))
return csg
def toPolygons(self):
return self.polygons
def refine(self):
"""
Return a refined CSG. To each polygon, a middle point is added to each edge and to the center
of the polygon
"""
newCSG = CSG()
for poly in self.polygons:
verts = poly.vertices
numVerts = len(verts)
if numVerts == 0:
continue
midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
midNormal = None
if verts[0].normal is not None:
midNormal = poly.plane.normal
midVert = Vertex(midPos, midNormal)
newVerts = verts + \
[verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
[midVert]
i = 0
vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
newPoly = Polygon(vs, poly.shared)
newPoly.shared = poly.shared
newPoly.plane = poly.plane
newCSG.polygons.append(newPoly)
for i in range(1, numVerts):
vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
newPoly = Polygon(vs, poly.shared)
newCSG.polygons.append(newPoly)
return newCSG
def translate(self, disp):
"""
Translate Geometry.
disp: displacement (array of floats)
"""
d = Vector(disp[0], disp[1], disp[2])
for poly in self.polygons:
for v in poly.vertices:
v.pos = v.pos.plus(d)
# no change to the normals
def rotate(self, axis, angleDeg):
"""
Rotate geometry.
axis: axis of rotation (array of floats)
angleDeg: rotation angle in degrees
"""
ax = Vector(axis[0], axis[1], axis[2]).unit()
cosAngle = math.cos(math.pi * angleDeg / 180.)
sinAngle = math.sin(math.pi * angleDeg / 180.)
def newVector(v):
vA = v.dot(ax)
vPerp = v.minus(ax.times(vA))
vPerpLen = vPerp.length()
if vPerpLen == 0:
# vector is parallel to axis, no need to rotate
return v
u1 = vPerp.unit()
u2 = u1.cross(ax)
vCosA = vPerpLen*cosAngle
vSinA = vPerpLen*sinAngle
return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA)))
for poly in self.polygons:
for vert in poly.vertices:
vert.pos = newVector(vert.pos)
normal = vert.normal
if normal.length() > 0:
vert.normal = newVector(vert.normal)
def toVerticesAndPolygons(self):
"""
Return list of vertices, polygons (cells), and the total
number of vertex indices in the polygon connectivity list
(count).
"""
offset = 1.234567890
verts = []
polys = []
vertexIndexMap = {}
count = 0
for poly in self.polygons:
verts = poly.vertices
cell = []
for v in poly.vertices:
p = v.pos
# use string key to remove degeneracy associated
# very close points. The format %.10e ensures that
# points differing in the 11 digits and higher are
# treated as the same. For instance 1.2e-10 and
# 1.3e-10 are essentially the same.
vKey = '%.10e,%.10e,%.10e' % (p[0] + offset,
p[1] + offset,
p[2] + offset)
if not vKey in vertexIndexMap:
vertexIndexMap[vKey] = len(vertexIndexMap)
index = vertexIndexMap[vKey]
cell.append(index)
count += 1
polys.append(cell)
# sort by index
sortedVertexIndex = sorted(vertexIndexMap.items(),
key=operator.itemgetter(1))
verts = []
for v, i in sortedVertexIndex:
p = []
for c in v.split(','):
p.append(float(c) - offset)
verts.append(tuple(p))
return verts, polys, count
def saveVTK(self, filename):
"""
Save polygons in VTK file.
"""
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n')
def union(self, csg):
"""
Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons())
def __add__(self, csg):
return self.union(csg)
def subtract(self, csg):
"""
Return a new CSG solid representing space in this solid but not in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.subtract(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +--+
+----+--+ | +----+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __sub__(self, csg):
return self.subtract(csg)
def intersect(self, csg):
"""
Return a new CSG solid representing space both this solid and in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.intersect(B)
+-------+
| |
| A |
| +--+----+ = +--+
+----+--+ | +--+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
b.clipTo(a)
b.invert()
a.clipTo(b)
b.clipTo(a)
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __mul__(self, csg):
return self.intersect(csg)
def inverse(self):
"""
Return a new CSG solid with solid and empty space switched. This solid is
not modified.
"""
csg = self.clone()
map(lambda p: p.flip(), csg.polygons)
return csg
@classmethod
def cube(cls, center=[0,0,0], radius=[1,1,1]):
"""
Construct an axis-aligned solid cuboid. Optional parameters are `center` and
`radius`, which default to `[0, 0, 0]` and `[1, 1, 1]`. The radius can be
specified using a single number or a list of three numbers, one for each axis.
Example code::
cube = CSG.cube(
center=[0, 0, 0],
radius=1
)
"""
c = Vector(0, 0, 0)
r = [1, 1, 1]
if isinstance(center, list): c = Vector(center)
if isinstance(radius, list): r = radius
else: r = [radius, radius, radius]
polygons = list(map(
lambda v: Polygon(
list(map(lambda i:
Vertex(
Vector(
c.x + r[0] * (2 * bool(i & 1) - 1),
c.y + r[1] * (2 * bool(i & 2) - 1),
c.z + r[2] * (2 * bool(i & 4) - 1)
),
None
), v[0]))),
[
[[0, 4, 6, 2], [-1, 0, 0]],
[[1, 3, 7, 5], [+1, 0, 0]],
[[0, 1, 5, 4], [0, -1, 0]],
[[2, 6, 7, 3], [0, +1, 0]],
[[0, 2, 3, 1], [0, 0, -1]],
[[4, 5, 7, 6], [0, 0, +1]]
]))
return CSG.fromPolygons(polygons)
@classmethod
def sphere(cls, **kwargs):
""" Returns a sphere.
Kwargs:
center (list): Center of sphere, default [0, 0, 0].
radius (float): Radius of sphere, default 1.0.
slices (int): Number of slices, default 16.
stacks (int): Number of stacks, default 8.
"""
center = kwargs.get('center', [0.0, 0.0, 0.0])
if isinstance(center, float):
center = [center, center, center]
c = Vector(center)
r = kwargs.get('radius', 1.0)
if isinstance(r, list) and len(r) > 2:
r = r[0]
slices = kwargs.get('slices', 16)
stacks = kwargs.get('stacks', 8)
polygons = []
def appendVertex(vertices, theta, phi):
d = Vector(
math.cos(theta) * math.sin(phi),
math.cos(phi),
math.sin(theta) * math.sin(phi))
vertices.append(Vertex(c.plus(d.times(r)), d))
dTheta = math.pi * 2.0 / float(slices)
dPhi = math.pi / float(stacks)
j0 = 0
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +--+
# | /
# |/
# +
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j1 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
j0 = stacks - 1
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +
# |\
# | \
# +--+
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j0 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
for j0 in range(1, stacks - 1):
j1 = j0 + 0.5
j2 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 0.5
i2 = i0 + 1
# +---+
# |\ /|
# | x |
# |/ \|
# +---+
verticesN = []
appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesN))
verticesS = []
appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesS))
verticesW = []
appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesW))
verticesE = []
appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesE))
return CSG.fromPolygons(polygons)
@classmethod
def cylinder(cls, **kwargs):
""" Returns a cylinder.
Kwargs:
start (list): Start of cylinder, default [0, -1, 0].
end (list): End of cylinder, default [0, 1, 0].
radius (float): Radius of cylinder, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
start = Vertex(s, axisZ.negated())
end = Vertex(e, axisZ.unit())
polygons = []
def point(stack, angle, normalBlend):
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(ray.times(stack)).plus(out.times(r))
normal = out.times(1.0 - math.fabs(normalBlend)).plus(
axisZ.times(normalBlend))
return Vertex(pos, normal)
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
polygons.append(Polygon([start.clone(),
point(0., t0, -1.),
point(0., t1, -1.)]))
polygons.append(Polygon([point(0., t1, 0.),
point(0., t0, 0.),
point(1., t0, 0.),
point(1., t1, 0.)]))
polygons.append(Polygon([end.clone(),
point(1., t1, 1.),
point(1., t0, 1.)]))
return CSG.fromPolygons(polygons)
@classmethod
def cone(cls, **kwargs):
""" Returns a cone.
Kwargs:
start (list): Start of cone, default [0, -1, 0].
end (list): End of cone, default [0, 1, 0].
radius (float): Maximum radius of cone at start, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
startNormal = axisZ.negated()
start = Vertex(s, startNormal)
polygons = []
taperAngle = math.atan2(r, ray.length())
sinTaperAngle = math.sin(taperAngle)
cosTaperAngle = math.cos(taperAngle)
def point(angle):
# radial direction pointing out
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(out.times(r))
# normal taking into account the tapering of the cone
normal = out.times(cosTaperAngle).plus(axisZ.times(sinTaperAngle))
return pos, normal
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
# coordinates and associated normal pointing outwards of the cone's
# side
p0, n0 = point(t0)
p1, n1 = point(t1)
# average normal for the tip
nAvg = n0.plus(n1).times(0.5)
# polygon on the low side (disk sector)
polyStart = Polygon([start.clone(),
Vertex(p0, startNormal),
Vertex(p1, startNormal)])
polygons.append(polyStart)
# polygon extending from the low side to the tip
polySide = Polygon([Vertex(p0, n0), Vertex(e, nAvg), Vertex(p1, n1)])
polygons.append(polySide)
return CSG.fromPolygons(polygons)
|
timknip/pycsg | csg/core.py | CSG.cone | python | def cone(cls, **kwargs):
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
startNormal = axisZ.negated()
start = Vertex(s, startNormal)
polygons = []
taperAngle = math.atan2(r, ray.length())
sinTaperAngle = math.sin(taperAngle)
cosTaperAngle = math.cos(taperAngle)
def point(angle):
# radial direction pointing out
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(out.times(r))
# normal taking into account the tapering of the cone
normal = out.times(cosTaperAngle).plus(axisZ.times(sinTaperAngle))
return pos, normal
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
# coordinates and associated normal pointing outwards of the cone's
# side
p0, n0 = point(t0)
p1, n1 = point(t1)
# average normal for the tip
nAvg = n0.plus(n1).times(0.5)
# polygon on the low side (disk sector)
polyStart = Polygon([start.clone(),
Vertex(p0, startNormal),
Vertex(p1, startNormal)])
polygons.append(polyStart)
# polygon extending from the low side to the tip
polySide = Polygon([Vertex(p0, n0), Vertex(e, nAvg), Vertex(p1, n1)])
polygons.append(polySide)
return CSG.fromPolygons(polygons) | Returns a cone.
Kwargs:
start (list): Start of cone, default [0, -1, 0].
end (list): End of cone, default [0, 1, 0].
radius (float): Maximum radius of cone at start, default 1.0.
slices (int): Number of slices, default 16. | train | https://github.com/timknip/pycsg/blob/b8f9710fd15c38dcc275d56a2108f604af38dcc8/csg/core.py#L511-L573 | [
"def fromPolygons(cls, polygons):\n csg = CSG()\n csg.polygons = polygons\n return csg\n",
"def minus(self, a):\n \"\"\" Subtract. \"\"\"\n return Vector(self.x-a.x, self.y-a.y, self.z-a.z)\n",
"def length(self):\n \"\"\" Length. \"\"\"\n return math.sqrt(self.dot(self))\n",
"def unit(sel... | class CSG(object):
"""
Constructive Solid Geometry (CSG) is a modeling technique that uses Boolean
operations like union and intersection to combine 3D solids. This library
implements CSG operations on meshes elegantly and concisely using BSP trees,
and is meant to serve as an easily understandable implementation of the
algorithm. All edge cases involving overlapping coplanar polygons in both
solids are correctly handled.
Example usage::
from csg.core import CSG
cube = CSG.cube();
sphere = CSG.sphere({'radius': 1.3});
polygons = cube.subtract(sphere).toPolygons();
## Implementation Details
All CSG operations are implemented in terms of two functions, `clipTo()` and
`invert()`, which remove parts of a BSP tree inside another BSP tree and swap
solid and empty space, respectively. To find the union of `a` and `b`, we
want to remove everything in `a` inside `b` and everything in `b` inside `a`,
then combine polygons from `a` and `b` into one solid::
a.clipTo(b);
b.clipTo(a);
a.build(b.allPolygons());
The only tricky part is handling overlapping coplanar polygons in both trees.
The code above keeps both copies, but we need to keep them in one tree and
remove them in the other tree. To remove them from `b` we can clip the
inverse of `b` against `a`. The code for union now looks like this::
a.clipTo(b);
b.clipTo(a);
b.invert();
b.clipTo(a);
b.invert();
a.build(b.allPolygons());
Subtraction and intersection naturally follow from set operations. If
union is `A | B`, subtraction is `A - B = ~(~A | B)` and intersection is
`A & B = ~(~A | ~B)` where `~` is the complement operator.
## License
Copyright (c) 2011 Evan Wallace (http://madebyevan.com/), under the MIT license.
Python port Copyright (c) 2012 Tim Knip (http://www.floorplanner.com), under the MIT license.
Additions by Alex Pletzer (Pennsylvania State University)
"""
def __init__(self):
self.polygons = []
@classmethod
def fromPolygons(cls, polygons):
csg = CSG()
csg.polygons = polygons
return csg
def clone(self):
csg = CSG()
csg.polygons = list(map(lambda p: p.clone(), self.polygons))
return csg
def toPolygons(self):
return self.polygons
def refine(self):
"""
Return a refined CSG. To each polygon, a middle point is added to each edge and to the center
of the polygon
"""
newCSG = CSG()
for poly in self.polygons:
verts = poly.vertices
numVerts = len(verts)
if numVerts == 0:
continue
midPos = reduce(operator.add, [v.pos for v in verts]) / float(numVerts)
midNormal = None
if verts[0].normal is not None:
midNormal = poly.plane.normal
midVert = Vertex(midPos, midNormal)
newVerts = verts + \
[verts[i].interpolate(verts[(i + 1)%numVerts], 0.5) for i in range(numVerts)] + \
[midVert]
i = 0
vs = [newVerts[i], newVerts[i+numVerts], newVerts[2*numVerts], newVerts[2*numVerts-1]]
newPoly = Polygon(vs, poly.shared)
newPoly.shared = poly.shared
newPoly.plane = poly.plane
newCSG.polygons.append(newPoly)
for i in range(1, numVerts):
vs = [newVerts[i], newVerts[numVerts+i], newVerts[2*numVerts], newVerts[numVerts+i-1]]
newPoly = Polygon(vs, poly.shared)
newCSG.polygons.append(newPoly)
return newCSG
def translate(self, disp):
"""
Translate Geometry.
disp: displacement (array of floats)
"""
d = Vector(disp[0], disp[1], disp[2])
for poly in self.polygons:
for v in poly.vertices:
v.pos = v.pos.plus(d)
# no change to the normals
def rotate(self, axis, angleDeg):
"""
Rotate geometry.
axis: axis of rotation (array of floats)
angleDeg: rotation angle in degrees
"""
ax = Vector(axis[0], axis[1], axis[2]).unit()
cosAngle = math.cos(math.pi * angleDeg / 180.)
sinAngle = math.sin(math.pi * angleDeg / 180.)
def newVector(v):
vA = v.dot(ax)
vPerp = v.minus(ax.times(vA))
vPerpLen = vPerp.length()
if vPerpLen == 0:
# vector is parallel to axis, no need to rotate
return v
u1 = vPerp.unit()
u2 = u1.cross(ax)
vCosA = vPerpLen*cosAngle
vSinA = vPerpLen*sinAngle
return ax.times(vA).plus(u1.times(vCosA).plus(u2.times(vSinA)))
for poly in self.polygons:
for vert in poly.vertices:
vert.pos = newVector(vert.pos)
normal = vert.normal
if normal.length() > 0:
vert.normal = newVector(vert.normal)
def toVerticesAndPolygons(self):
"""
Return list of vertices, polygons (cells), and the total
number of vertex indices in the polygon connectivity list
(count).
"""
offset = 1.234567890
verts = []
polys = []
vertexIndexMap = {}
count = 0
for poly in self.polygons:
verts = poly.vertices
cell = []
for v in poly.vertices:
p = v.pos
# use string key to remove degeneracy associated
# very close points. The format %.10e ensures that
# points differing in the 11 digits and higher are
# treated as the same. For instance 1.2e-10 and
# 1.3e-10 are essentially the same.
vKey = '%.10e,%.10e,%.10e' % (p[0] + offset,
p[1] + offset,
p[2] + offset)
if not vKey in vertexIndexMap:
vertexIndexMap[vKey] = len(vertexIndexMap)
index = vertexIndexMap[vKey]
cell.append(index)
count += 1
polys.append(cell)
# sort by index
sortedVertexIndex = sorted(vertexIndexMap.items(),
key=operator.itemgetter(1))
verts = []
for v, i in sortedVertexIndex:
p = []
for c in v.split(','):
p.append(float(c) - offset)
verts.append(tuple(p))
return verts, polys, count
def saveVTK(self, filename):
"""
Save polygons in VTK file.
"""
with open(filename, 'w') as f:
f.write('# vtk DataFile Version 3.0\n')
f.write('pycsg output\n')
f.write('ASCII\n')
f.write('DATASET POLYDATA\n')
verts, cells, count = self.toVerticesAndPolygons()
f.write('POINTS {0} float\n'.format(len(verts)))
for v in verts:
f.write('{0} {1} {2}\n'.format(v[0], v[1], v[2]))
numCells = len(cells)
f.write('POLYGONS {0} {1}\n'.format(numCells, count + numCells))
for cell in cells:
f.write('{0} '.format(len(cell)))
for index in cell:
f.write('{0} '.format(index))
f.write('\n')
def union(self, csg):
"""
Return a new CSG solid representing space in either this solid or in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.union(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +----+
+----+--+ | +----+ |
| B | | |
| | | |
+-------+ +-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons());
return CSG.fromPolygons(a.allPolygons())
def __add__(self, csg):
return self.union(csg)
def subtract(self, csg):
"""
Return a new CSG solid representing space in this solid but not in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.subtract(B)
+-------+ +-------+
| | | |
| A | | |
| +--+----+ = | +--+
+----+--+ | +----+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
a.clipTo(b)
b.clipTo(a)
b.invert()
b.clipTo(a)
b.invert()
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __sub__(self, csg):
return self.subtract(csg)
def intersect(self, csg):
"""
Return a new CSG solid representing space both this solid and in the
solid `csg`. Neither this solid nor the solid `csg` are modified.::
A.intersect(B)
+-------+
| |
| A |
| +--+----+ = +--+
+----+--+ | +--+
| B |
| |
+-------+
"""
a = BSPNode(self.clone().polygons)
b = BSPNode(csg.clone().polygons)
a.invert()
b.clipTo(a)
b.invert()
a.clipTo(b)
b.clipTo(a)
a.build(b.allPolygons())
a.invert()
return CSG.fromPolygons(a.allPolygons())
def __mul__(self, csg):
return self.intersect(csg)
def inverse(self):
"""
Return a new CSG solid with solid and empty space switched. This solid is
not modified.
"""
csg = self.clone()
map(lambda p: p.flip(), csg.polygons)
return csg
@classmethod
def cube(cls, center=[0,0,0], radius=[1,1,1]):
"""
Construct an axis-aligned solid cuboid. Optional parameters are `center` and
`radius`, which default to `[0, 0, 0]` and `[1, 1, 1]`. The radius can be
specified using a single number or a list of three numbers, one for each axis.
Example code::
cube = CSG.cube(
center=[0, 0, 0],
radius=1
)
"""
c = Vector(0, 0, 0)
r = [1, 1, 1]
if isinstance(center, list): c = Vector(center)
if isinstance(radius, list): r = radius
else: r = [radius, radius, radius]
polygons = list(map(
lambda v: Polygon(
list(map(lambda i:
Vertex(
Vector(
c.x + r[0] * (2 * bool(i & 1) - 1),
c.y + r[1] * (2 * bool(i & 2) - 1),
c.z + r[2] * (2 * bool(i & 4) - 1)
),
None
), v[0]))),
[
[[0, 4, 6, 2], [-1, 0, 0]],
[[1, 3, 7, 5], [+1, 0, 0]],
[[0, 1, 5, 4], [0, -1, 0]],
[[2, 6, 7, 3], [0, +1, 0]],
[[0, 2, 3, 1], [0, 0, -1]],
[[4, 5, 7, 6], [0, 0, +1]]
]))
return CSG.fromPolygons(polygons)
@classmethod
def sphere(cls, **kwargs):
""" Returns a sphere.
Kwargs:
center (list): Center of sphere, default [0, 0, 0].
radius (float): Radius of sphere, default 1.0.
slices (int): Number of slices, default 16.
stacks (int): Number of stacks, default 8.
"""
center = kwargs.get('center', [0.0, 0.0, 0.0])
if isinstance(center, float):
center = [center, center, center]
c = Vector(center)
r = kwargs.get('radius', 1.0)
if isinstance(r, list) and len(r) > 2:
r = r[0]
slices = kwargs.get('slices', 16)
stacks = kwargs.get('stacks', 8)
polygons = []
def appendVertex(vertices, theta, phi):
d = Vector(
math.cos(theta) * math.sin(phi),
math.cos(phi),
math.sin(theta) * math.sin(phi))
vertices.append(Vertex(c.plus(d.times(r)), d))
dTheta = math.pi * 2.0 / float(slices)
dPhi = math.pi / float(stacks)
j0 = 0
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +--+
# | /
# |/
# +
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j1 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
j0 = stacks - 1
j1 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 1
# +
# |\
# | \
# +--+
vertices = []
appendVertex(vertices, i0 * dTheta, j0 * dPhi)
appendVertex(vertices, i1 * dTheta, j0 * dPhi)
appendVertex(vertices, i0 * dTheta, j1 * dPhi)
polygons.append(Polygon(vertices))
for j0 in range(1, stacks - 1):
j1 = j0 + 0.5
j2 = j0 + 1
for i0 in range(0, slices):
i1 = i0 + 0.5
i2 = i0 + 1
# +---+
# |\ /|
# | x |
# |/ \|
# +---+
verticesN = []
appendVertex(verticesN, i1 * dTheta, j1 * dPhi)
appendVertex(verticesN, i2 * dTheta, j2 * dPhi)
appendVertex(verticesN, i0 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesN))
verticesS = []
appendVertex(verticesS, i1 * dTheta, j1 * dPhi)
appendVertex(verticesS, i0 * dTheta, j0 * dPhi)
appendVertex(verticesS, i2 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesS))
verticesW = []
appendVertex(verticesW, i1 * dTheta, j1 * dPhi)
appendVertex(verticesW, i0 * dTheta, j2 * dPhi)
appendVertex(verticesW, i0 * dTheta, j0 * dPhi)
polygons.append(Polygon(verticesW))
verticesE = []
appendVertex(verticesE, i1 * dTheta, j1 * dPhi)
appendVertex(verticesE, i2 * dTheta, j0 * dPhi)
appendVertex(verticesE, i2 * dTheta, j2 * dPhi)
polygons.append(Polygon(verticesE))
return CSG.fromPolygons(polygons)
@classmethod
def cylinder(cls, **kwargs):
""" Returns a cylinder.
Kwargs:
start (list): Start of cylinder, default [0, -1, 0].
end (list): End of cylinder, default [0, 1, 0].
radius (float): Radius of cylinder, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
start = Vertex(s, axisZ.negated())
end = Vertex(e, axisZ.unit())
polygons = []
def point(stack, angle, normalBlend):
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(ray.times(stack)).plus(out.times(r))
normal = out.times(1.0 - math.fabs(normalBlend)).plus(
axisZ.times(normalBlend))
return Vertex(pos, normal)
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
polygons.append(Polygon([start.clone(),
point(0., t0, -1.),
point(0., t1, -1.)]))
polygons.append(Polygon([point(0., t1, 0.),
point(0., t0, 0.),
point(1., t0, 0.),
point(1., t1, 0.)]))
polygons.append(Polygon([end.clone(),
point(1., t1, 1.),
point(1., t0, 1.)]))
return CSG.fromPolygons(polygons)
@classmethod
def cone(cls, **kwargs):
""" Returns a cone.
Kwargs:
start (list): Start of cone, default [0, -1, 0].
end (list): End of cone, default [0, 1, 0].
radius (float): Maximum radius of cone at start, default 1.0.
slices (int): Number of slices, default 16.
"""
s = kwargs.get('start', Vector(0.0, -1.0, 0.0))
e = kwargs.get('end', Vector(0.0, 1.0, 0.0))
if isinstance(s, list):
s = Vector(*s)
if isinstance(e, list):
e = Vector(*e)
r = kwargs.get('radius', 1.0)
slices = kwargs.get('slices', 16)
ray = e.minus(s)
axisZ = ray.unit()
isY = (math.fabs(axisZ.y) > 0.5)
axisX = Vector(float(isY), float(not isY), 0).cross(axisZ).unit()
axisY = axisX.cross(axisZ).unit()
startNormal = axisZ.negated()
start = Vertex(s, startNormal)
polygons = []
taperAngle = math.atan2(r, ray.length())
sinTaperAngle = math.sin(taperAngle)
cosTaperAngle = math.cos(taperAngle)
def point(angle):
# radial direction pointing out
out = axisX.times(math.cos(angle)).plus(
axisY.times(math.sin(angle)))
pos = s.plus(out.times(r))
# normal taking into account the tapering of the cone
normal = out.times(cosTaperAngle).plus(axisZ.times(sinTaperAngle))
return pos, normal
dt = math.pi * 2.0 / float(slices)
for i in range(0, slices):
t0 = i * dt
i1 = (i + 1) % slices
t1 = i1 * dt
# coordinates and associated normal pointing outwards of the cone's
# side
p0, n0 = point(t0)
p1, n1 = point(t1)
# average normal for the tip
nAvg = n0.plus(n1).times(0.5)
# polygon on the low side (disk sector)
polyStart = Polygon([start.clone(),
Vertex(p0, startNormal),
Vertex(p1, startNormal)])
polygons.append(polyStart)
# polygon extending from the low side to the tip
polySide = Polygon([Vertex(p0, n0), Vertex(e, nAvg), Vertex(p1, n1)])
polygons.append(polySide)
return CSG.fromPolygons(polygons)
|
anjishnu/ask-alexa-pykit | ask/write_sample.py | validate_input_format | python | def validate_input_format(utterance, intent):
slots = {slot["name"] for slot in intent["slots"]}
split_utt = re.split("{(.*)}", utterance)
banned = set("-/\\()^%$#@~`-_=+><;:") # Banned characters
for token in split_utt:
if (banned & set(token)):
print (" - Banned character found in substring", token)
print (" - Banned character list", banned)
return False
if "|" in token:
split_token = token.split("|")
if len(split_token)!=2:
print (" - Error, token is incorrect in", token, split_token)
return False
word, slot = split_token
if slot.strip() not in slots:
print (" -", slot, "is not a valid slot for this Intent, valid slots are", slots)
return False
return True | TODO add handling for bad input | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/ask/write_sample.py#L25-L47 | null | from __future__ import print_function
import readline
import json
import re
from .config.config import read_from_user
from argparse import ArgumentParser
try:
from intent_schema import IntentSchema
except:
from ask.intent_schema import IntentSchema
def print_description(intent):
print ("<> Enter data for <{intent}> OR Press enter with empty string to move onto next intent"
.format(intent=intent["intent"]))
print ("<> Enter '<' to delete last training utterance")
print ("<> Sample utterance to remind you of the format:")
print (">> what is the recipe for {ravioli|Food} ?")
if len(intent["slots"]) > 0:
print ("<> Available slots for this intent")
for slot in intent["slots"]:
print (" - - ", slot["name"], "<TYPE: {}>".format(slot["type"]))
def lowercase_utterance(utterance):
split_utt = re.split("({.*})", utterance)
def lower_case_split(token):
if "|" in token:
phrase, slot = token.split("|")
return "|".join([phrase.strip().lower(), slot.strip()])
else:
return token.lower()
return " ".join([lower_case_split(token) for token in split_utt])
def generate_training_data(schema):
print ("Loaded intent schema, populating intents")
training_data = []
for intent in schema.get_intents():
print_description(intent)
keep_prompting = True
while keep_prompting:
utterance = read_from_user(str,
str(len(training_data))+". "+intent["intent"]+'\t')
if utterance.strip() == "":
keep_prompting = False
elif utterance.strip() == "<":
print (" - Discarded utterance: ", training_data.pop())
elif validate_input_format(utterance, intent):
training_data.append("\t".join([intent["intent"], lowercase_utterance(utterance)]))
else:
print (" - Discarded utterance:", utterance)
return training_data
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--intent_schema', '-i', required=True)
parser.add_argument('--output', '-o', default='utterances.txt')
args = parser.parse_args()
intent_schema = IntentSchema.from_filename(args.intent_schema)
with open(args.output, 'w') as utterance_file:
utterance_file.write("\n".join(generate_training_data(intent_schema)))
|
anjishnu/ask-alexa-pykit | ask/config/config.py | read_from_user | python | def read_from_user(input_type, *args, **kwargs):
'''
Helper function to prompt user for input of a specific type
e.g. float, str, int
Designed to work with both python 2 and 3
Yes I know this is ugly.
'''
def _read_in(*args, **kwargs):
while True:
try: tmp = raw_input(*args, **kwargs)
except NameError: tmp = input(*args, **kwargs)
try: return input_type(tmp)
except: print ('Expected type', input_type)
return _read_in(*args, **kwargs) | Helper function to prompt user for input of a specific type
e.g. float, str, int
Designed to work with both python 2 and 3
Yes I know this is ugly. | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/ask/config/config.py#L18-L33 | [
"def _read_in(*args, **kwargs):\n while True:\n try: tmp = raw_input(*args, **kwargs)\n except NameError: tmp = input(*args, **kwargs)\n try: return input_type(tmp)\n except: print ('Expected type', input_type)\n"
] | """
This is the basic config file, encapsulating all configuration options
ALL FILES SHOULD LOAD THEIR CONFIGURATIONS FROM THIS CENTRAL LOCATION
"""
from __future__ import print_function
import os
import json
# ---- Helper Functions ----
# Get path relative to the current file
path_relative_to_file = lambda rel_path: os.path.normpath(os.path.join(os.path.dirname(__file__), rel_path))
# Load a json file as an object
load_json_schema = lambda schema_location : json.load(open(schema_location))
# Location of AMAZON.BUILTIN slot types
BUILTIN_SLOTS_LOCATION = path_relative_to_file(os.path.join('..', 'data', 'amazon_builtin_slots.tsv'))
def load_builtin_slots():
'''
Helper function to load builtin slots from the data location
'''
builtin_slots = {}
for index, line in enumerate(open(BUILTIN_SLOTS_LOCATION)):
o = line.strip().split('\t')
builtin_slots[index] = {'name' : o[0],
'description' : o[1] }
return builtin_slots
|
anjishnu/ask-alexa-pykit | ask/config/config.py | load_builtin_slots | python | def load_builtin_slots():
'''
Helper function to load builtin slots from the data location
'''
builtin_slots = {}
for index, line in enumerate(open(BUILTIN_SLOTS_LOCATION)):
o = line.strip().split('\t')
builtin_slots[index] = {'name' : o[0],
'description' : o[1] }
return builtin_slots | Helper function to load builtin slots from the data location | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/ask/config/config.py#L38-L47 | null | """
This is the basic config file, encapsulating all configuration options
ALL FILES SHOULD LOAD THEIR CONFIGURATIONS FROM THIS CENTRAL LOCATION
"""
from __future__ import print_function
import os
import json
# ---- Helper Functions ----
# Get path relative to the current file
path_relative_to_file = lambda rel_path: os.path.normpath(os.path.join(os.path.dirname(__file__), rel_path))
# Load a json file as an object
load_json_schema = lambda schema_location : json.load(open(schema_location))
def read_from_user(input_type, *args, **kwargs):
'''
Helper function to prompt user for input of a specific type
e.g. float, str, int
Designed to work with both python 2 and 3
Yes I know this is ugly.
'''
def _read_in(*args, **kwargs):
while True:
try: tmp = raw_input(*args, **kwargs)
except NameError: tmp = input(*args, **kwargs)
try: return input_type(tmp)
except: print ('Expected type', input_type)
return _read_in(*args, **kwargs)
# Location of AMAZON.BUILTIN slot types
BUILTIN_SLOTS_LOCATION = path_relative_to_file(os.path.join('..', 'data', 'amazon_builtin_slots.tsv'))
|
anjishnu/ask-alexa-pykit | ask/alexa_io.py | ResponseBuilder.create_response | python | def create_response(self, message=None, end_session=False, card_obj=None,
reprompt_message=None, is_ssml=None):
response = dict(self.base_response)
if message:
response['response'] = self.create_speech(message, is_ssml)
response['response']['shouldEndSession'] = end_session
if card_obj:
response['response']['card'] = card_obj
if reprompt_message:
response['response']['reprompt'] = self.create_speech(reprompt_message, is_ssml)
return Response(response) | message - text message to be spoken out by the Echo
end_session - flag to determine whether this interaction should end the session
card_obj = JSON card object to substitute the 'card' field in the raw_response | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/ask/alexa_io.py#L103-L118 | [
"def create_speech(self, message=None, is_ssml=False):\n data = {}\n if is_ssml:\n data['type'], data['ssml'] = \"SSML\", message\n else:\n data['type'] = \"PlainText\"\n data['text'] = message\n return {\"outputSpeech\": data}\n"
] | class ResponseBuilder(object):
"""
Simple class to help users to build responses
"""
base_response = eval(RAW_RESPONSE)
@classmethod
@classmethod
def respond(self, *args, **kwargs):
return self.create_response(*args, **kwargs)
@classmethod
def create_speech(self, message=None, is_ssml=False):
data = {}
if is_ssml:
data['type'], data['ssml'] = "SSML", message
else:
data['type'] = "PlainText"
data['text'] = message
return {"outputSpeech": data}
@classmethod
def create_card(self, title=None, subtitle=None, content=None, card_type="Simple"):
"""
card_obj = JSON card object to substitute the 'card' field in the raw_response
format:
{
"type": "Simple", #COMPULSORY
"title": "string", #OPTIONAL
"subtitle": "string", #OPTIONAL
"content": "string" #OPTIONAL
}
"""
card = {"type": card_type}
if title: card["title"] = title
if subtitle: card["subtitle"] = subtitle
if content: card["content"] = content
return card
|
anjishnu/ask-alexa-pykit | ask/alexa_io.py | ResponseBuilder.create_card | python | def create_card(self, title=None, subtitle=None, content=None, card_type="Simple"):
card = {"type": card_type}
if title: card["title"] = title
if subtitle: card["subtitle"] = subtitle
if content: card["content"] = content
return card | card_obj = JSON card object to substitute the 'card' field in the raw_response
format:
{
"type": "Simple", #COMPULSORY
"title": "string", #OPTIONAL
"subtitle": "string", #OPTIONAL
"content": "string" #OPTIONAL
} | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/ask/alexa_io.py#L135-L150 | null | class ResponseBuilder(object):
"""
Simple class to help users to build responses
"""
base_response = eval(RAW_RESPONSE)
@classmethod
def create_response(self, message=None, end_session=False, card_obj=None,
reprompt_message=None, is_ssml=None):
"""
message - text message to be spoken out by the Echo
end_session - flag to determine whether this interaction should end the session
card_obj = JSON card object to substitute the 'card' field in the raw_response
"""
response = dict(self.base_response)
if message:
response['response'] = self.create_speech(message, is_ssml)
response['response']['shouldEndSession'] = end_session
if card_obj:
response['response']['card'] = card_obj
if reprompt_message:
response['response']['reprompt'] = self.create_speech(reprompt_message, is_ssml)
return Response(response)
@classmethod
def respond(self, *args, **kwargs):
return self.create_response(*args, **kwargs)
@classmethod
def create_speech(self, message=None, is_ssml=False):
data = {}
if is_ssml:
data['type'], data['ssml'] = "SSML", message
else:
data['type'] = "PlainText"
data['text'] = message
return {"outputSpeech": data}
@classmethod
|
anjishnu/ask-alexa-pykit | ask/alexa_io.py | VoiceHandler.intent | python | def intent(self, intent):
''' Decorator to register intent handler'''
def _handler(func):
self._handlers['IntentRequest'][intent] = func
return func
return _handler | Decorator to register intent handler | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/ask/alexa_io.py#L177-L184 | null | class VoiceHandler(ResponseBuilder):
""" Decorator to store function metadata
Functions that are annotated with this label are
treated as voice handlers """
def __init__(self):
"""
>>> alexa = VoiceHandler()
>>> request =
>>> @alexa.intent('HelloWorldIntent')
... def hello_world(request):
... return alexa.create_response('hello world')
>>> alexa.route_request(request)
"""
self._handlers = { "IntentRequest" : {} }
self._default = '_default_'
def default(self, func):
''' Decorator to register default handler '''
self._handlers[self._default] = func
return func
def request(self, request_type):
''' Decorator to register generic request handler '''
def _handler(func):
self._handlers[request_type] = func
return func
return _handler
def route_request(self, request_json, metadata=None):
''' Route the request object to the right handler function '''
request = Request(request_json)
request.metadata = metadata
# add reprompt handler or some such for default?
handler_fn = self._handlers[self._default] # Set default handling for noisy requests
if not request.is_intent() and (request.request_type() in self._handlers):
''' Route request to a non intent handler '''
handler_fn = self._handlers[request.request_type()]
elif request.is_intent() and request.intent_name() in self._handlers['IntentRequest']:
''' Route to right intent handler '''
handler_fn = self._handlers['IntentRequest'][request.intent_name()]
response = handler_fn(request)
response.set_session(request.session)
return response.to_json()
|
anjishnu/ask-alexa-pykit | ask/alexa_io.py | VoiceHandler.request | python | def request(self, request_type):
''' Decorator to register generic request handler '''
def _handler(func):
self._handlers[request_type] = func
return func
return _handler | Decorator to register generic request handler | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/ask/alexa_io.py#L186-L193 | null | class VoiceHandler(ResponseBuilder):
""" Decorator to store function metadata
Functions that are annotated with this label are
treated as voice handlers """
def __init__(self):
"""
>>> alexa = VoiceHandler()
>>> request =
>>> @alexa.intent('HelloWorldIntent')
... def hello_world(request):
... return alexa.create_response('hello world')
>>> alexa.route_request(request)
"""
self._handlers = { "IntentRequest" : {} }
self._default = '_default_'
def default(self, func):
''' Decorator to register default handler '''
self._handlers[self._default] = func
return func
def intent(self, intent):
''' Decorator to register intent handler'''
def _handler(func):
self._handlers['IntentRequest'][intent] = func
return func
return _handler
def route_request(self, request_json, metadata=None):
''' Route the request object to the right handler function '''
request = Request(request_json)
request.metadata = metadata
# add reprompt handler or some such for default?
handler_fn = self._handlers[self._default] # Set default handling for noisy requests
if not request.is_intent() and (request.request_type() in self._handlers):
''' Route request to a non intent handler '''
handler_fn = self._handlers[request.request_type()]
elif request.is_intent() and request.intent_name() in self._handlers['IntentRequest']:
''' Route to right intent handler '''
handler_fn = self._handlers['IntentRequest'][request.intent_name()]
response = handler_fn(request)
response.set_session(request.session)
return response.to_json()
|
anjishnu/ask-alexa-pykit | ask/alexa_io.py | VoiceHandler.route_request | python | def route_request(self, request_json, metadata=None):
''' Route the request object to the right handler function '''
request = Request(request_json)
request.metadata = metadata
# add reprompt handler or some such for default?
handler_fn = self._handlers[self._default] # Set default handling for noisy requests
if not request.is_intent() and (request.request_type() in self._handlers):
''' Route request to a non intent handler '''
handler_fn = self._handlers[request.request_type()]
elif request.is_intent() and request.intent_name() in self._handlers['IntentRequest']:
''' Route to right intent handler '''
handler_fn = self._handlers['IntentRequest'][request.intent_name()]
response = handler_fn(request)
response.set_session(request.session)
return response.to_json() | Route the request object to the right handler function | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/ask/alexa_io.py#L195-L213 | [
"def is_intent(self):\n if self.intent_name() is None:\n return False\n return True\n"
] | class VoiceHandler(ResponseBuilder):
""" Decorator to store function metadata
Functions that are annotated with this label are
treated as voice handlers """
def __init__(self):
"""
>>> alexa = VoiceHandler()
>>> request =
>>> @alexa.intent('HelloWorldIntent')
... def hello_world(request):
... return alexa.create_response('hello world')
>>> alexa.route_request(request)
"""
self._handlers = { "IntentRequest" : {} }
self._default = '_default_'
def default(self, func):
''' Decorator to register default handler '''
self._handlers[self._default] = func
return func
def intent(self, intent):
''' Decorator to register intent handler'''
def _handler(func):
self._handlers['IntentRequest'][intent] = func
return func
return _handler
def request(self, request_type):
''' Decorator to register generic request handler '''
def _handler(func):
self._handlers[request_type] = func
return func
return _handler
|
anjishnu/ask-alexa-pykit | ask/intent_schema.py | IntentSchema._add_intent_interactive | python | def _add_intent_interactive(self, intent_num=0):
'''
Interactively add a new intent to the intent schema object
'''
print ("Name of intent number : ", intent_num)
slot_type_mappings = load_builtin_slots()
intent_name = read_from_user(str)
print ("How many slots?")
num_slots = read_from_user(int)
slot_list = []
for i in range(num_slots):
print ("Slot name no.", i+1)
slot_name = read_from_user(str).strip()
print ("Slot type? Enter a number for AMAZON supported types below,"
"else enter a string for a Custom Slot")
print (json.dumps(slot_type_mappings, indent=True))
slot_type_str = read_from_user(str)
try: slot_type = slot_type_mappings[int(slot_type_str)]['name']
except: slot_type = slot_type_str
slot_list += [self.build_slot(slot_name, slot_type)]
self.add_intent(intent_name, slot_list) | Interactively add a new intent to the intent schema object | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/ask/intent_schema.py#L65-L85 | [
"def read_from_user(input_type, *args, **kwargs):\n '''\n Helper function to prompt user for input of a specific type \n e.g. float, str, int \n Designed to work with both python 2 and 3 \n Yes I know this is ugly.\n '''\n\n def _read_in(*args, **kwargs):\n while True:\n try: ... | class IntentSchema(object):
'''
Wrapper class to manipulate Intent Schema
'''
def __init__(self, json_obj=None):
if json_obj:
# Use existing intent schema
self._obj = json_obj
else:
# Create one from scratch
self._obj = OrderedDict({ "intents" : [] })
# These intents are basically always needed
# for certification
self.add_intent('AMAZON.HelpIntent')
self.add_intent('AMAZON.StopIntent')
self.add_intent('AMAZON.CancelIntent')
def add_intent(self, intent_name, slots=None):
if not slots: slots = []
intent = OrderedDict()
intent ['intent'], intent['slots'] = intent_name, slots
self._obj['intents'].append(intent)
def build_slot(self, slot_name, slot_type):
slot = OrderedDict()
slot['name'], slot['type'] = slot_name, slot_type
return slot
def __str__(self):
return json.dumps(self._obj, indent=2)
def get_intents(self):
return self._obj['intents']
def get_intent_names(self):
return [intent['intent'] for intent in self.get_intents()]
@classmethod
def interactive_build(self, fpath=None):
intent_schema = IntentSchema.from_filename(fpath)
print ("How many intents would you like to add")
num = read_from_user(int)
for i in range(num):
intent_schema._add_intent_interactive(intent_num=i+1)
return intent_schema
def save_to_file(self, filename):
with open(filename, 'w') as fp:
print(self, file=fp)
@classmethod
def from_filename(self, filename):
'''
Build an IntentSchema from a file path
creates a new intent schema if the file does not exist, throws an error if the file
exists but cannot be loaded as a JSON
'''
if os.path.exists(filename):
with open(filename) as fp:
return IntentSchema(json.load(fp, object_pairs_hook=OrderedDict))
else:
print ('File does not exist')
return IntentSchema()
|
anjishnu/ask-alexa-pykit | ask/intent_schema.py | IntentSchema.from_filename | python | def from_filename(self, filename):
'''
Build an IntentSchema from a file path
creates a new intent schema if the file does not exist, throws an error if the file
exists but cannot be loaded as a JSON
'''
if os.path.exists(filename):
with open(filename) as fp:
return IntentSchema(json.load(fp, object_pairs_hook=OrderedDict))
else:
print ('File does not exist')
return IntentSchema() | Build an IntentSchema from a file path
creates a new intent schema if the file does not exist, throws an error if the file
exists but cannot be loaded as a JSON | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/ask/intent_schema.py#L89-L100 | null | class IntentSchema(object):
'''
Wrapper class to manipulate Intent Schema
'''
def __init__(self, json_obj=None):
if json_obj:
# Use existing intent schema
self._obj = json_obj
else:
# Create one from scratch
self._obj = OrderedDict({ "intents" : [] })
# These intents are basically always needed
# for certification
self.add_intent('AMAZON.HelpIntent')
self.add_intent('AMAZON.StopIntent')
self.add_intent('AMAZON.CancelIntent')
def add_intent(self, intent_name, slots=None):
if not slots: slots = []
intent = OrderedDict()
intent ['intent'], intent['slots'] = intent_name, slots
self._obj['intents'].append(intent)
def build_slot(self, slot_name, slot_type):
slot = OrderedDict()
slot['name'], slot['type'] = slot_name, slot_type
return slot
def __str__(self):
return json.dumps(self._obj, indent=2)
def get_intents(self):
return self._obj['intents']
def get_intent_names(self):
return [intent['intent'] for intent in self.get_intents()]
@classmethod
def interactive_build(self, fpath=None):
intent_schema = IntentSchema.from_filename(fpath)
print ("How many intents would you like to add")
num = read_from_user(int)
for i in range(num):
intent_schema._add_intent_interactive(intent_num=i+1)
return intent_schema
def save_to_file(self, filename):
with open(filename, 'w') as fp:
print(self, file=fp)
def _add_intent_interactive(self, intent_num=0):
'''
Interactively add a new intent to the intent schema object
'''
print ("Name of intent number : ", intent_num)
slot_type_mappings = load_builtin_slots()
intent_name = read_from_user(str)
print ("How many slots?")
num_slots = read_from_user(int)
slot_list = []
for i in range(num_slots):
print ("Slot name no.", i+1)
slot_name = read_from_user(str).strip()
print ("Slot type? Enter a number for AMAZON supported types below,"
"else enter a string for a Custom Slot")
print (json.dumps(slot_type_mappings, indent=True))
slot_type_str = read_from_user(str)
try: slot_type = slot_type_mappings[int(slot_type_str)]['name']
except: slot_type = slot_type_str
slot_list += [self.build_slot(slot_name, slot_type)]
self.add_intent(intent_name, slot_list)
@classmethod
|
anjishnu/ask-alexa-pykit | examples/twitter/lambda_function.py | launch_request_handler | python | def launch_request_handler(request):
user_id = request.access_token()
if user_id in twitter_cache.users():
user_cache = twitter_cache.get_user_state(user_id)
user_cache["amzn_id"]= request.user_id()
base_message = "Welcome to Twitter, {} . How may I help you today ?".format(user_cache["screen_name"])
print (user_cache)
if 'pending_action' in user_cache:
base_message += " You have one pending action . "
print ("Found pending action")
if 'description' in user_cache['pending_action']:
print ("Found description")
base_message += user_cache['pending_action']['description']
return r.create_response(base_message)
card = r.create_card(title="Please log into twitter", card_type="LinkAccount")
return r.create_response(message="Welcome to twitter, looks like you haven't logged in!"
" Log in via the alexa app.", card_obj=card,
end_session=True) | Annotate functions with @VoiceHandler so that they can be automatically mapped
to request types. Use the 'request_type' field to map them to non-intent requests | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/twitter/lambda_function.py#L23-L45 | [
"def users(self):\n return self.memcache['users']\n"
] | from ask import alexa
from config import TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET
from twitter import local_cache as twitter_cache
from twitter import (post_tweet, get_home_tweets, get_retweets_of_me,
get_my_favourite_tweets, get_my_favourite_tweets,
get_latest_twitter_mentions, search_for_tweets_about,
get_user_latest_tweets, get_user_twitter_details,
geo_search, closest_trend_search, list_trends)
# Run this code once on startup to load twitter keys into credentials
server_cache_state = twitter_cache.get_server_state()
if 'twitter_keys' not in server_cache_state:
server_cache_state['twitter_keys'] = (TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
def default_handler(request):
""" The default handler gets invoked if no handler is set for a request """
return launch_request_handler(request)
@alexa.request(request_type="LaunchRequest")
@alexa.request("SessionEndedRequest")
def session_ended_request_handler(request):
return alexa.create_response(message="Goodbye!")
@alexa.intent(intent='PostTweet')
def post_tweet_intent_handler(request):
"""
Use the 'intent' field in the VoiceHandler to map to the respective intent.
"""
tweet = request.get_slot_value("Tweet")
tweet = tweet if tweet else ""
if tweet:
user_state = twitter_cache.get_user_state(request.access_token())
def action():
return post_tweet(request.access_token(), tweet)
message = "I am ready to post the tweet, {} ,\n Please say yes to confirm or stop to cancel .".format(tweet)
user_state['pending_action'] = {"action" : action,
"description" : message}
return r.create_response(message=message, end_session=False)
else:
# No tweet could be disambiguated
message = " ".join(
[
"I'm sorry, I couldn't understand what you wanted to tweet .",
"Please prepend the message with either post or tweet ."
]
)
return alexa.create_response(message=message, end_session=False)
@alexa.intent(intent="SearchTrends")
def find_trends_handler(request):
uid = request.access_token()
user_cache = twitter_cache.get_user_state(uid)
resolved_location = False
message = ""
location = request.get_slot_value("Location")
should_end_session = True
if not location:
# Get trends for user's current location
user_details = get_user_twitter_details(uid)
location = user_details[0]['location']
if location:
message += "Finding trends near you . "
else:
message += "I could not figure out where you are, please set it up on your twitter account . "
if location:
response = geo_search(request.access_token(), location) # convert natural language text to location
top_result = response['result']['places'][0]
lon, lat = top_result['centroid']
trend_params = {"lat" : lat, "long" : lon}
trend_location = closest_trend_search(request.access_token(), trend_params) # find closest woeid which has trends
woeid = trend_location[0]['woeid']
trends = list_trends(request.access_token(), trend_location[0]['woeid']) # List top trends
trend_lst = [trend['name'] for trend in trends[0]['trends']]
message += "The top trending topics near {0} are, ".format(trend_location[0]['name'])
message += "\n".join(["{0}, {1}, ".format(index+1, trend) for index, trend in enumerate(trend_lst)])
return alexa.create_response(message=message, end_session=should_end_session)
@alexa.intent(intent="AMAZON.HelpIntent")
def help_intent_handler(request):
msg = ("I can do several things for you on twitter! "
"I can tell you about the top tweets on your home page, or the last tweets you favourited . "
"I can also tell you about recent tweets that mention you, or were posted by you . "
"When I am reading out a list of tweets, you can stop me and ask me to tell you about the tweet in more detail, or ask me to post a reply to it . "
"And of course, whenever post a tweet, say 'post hello world' or 'tweet hello world'. I am not good with hashtags or trending topics just yet, but I'm working on it! ")
return r.create_response(message=msg)
@alexa.intent(intent="AMAZON.StopIntent")
def stop_intent__handler(request):
return cancel_action_handler(request)
@alexa.intent(intent="AMAZON.CancelIntent")
def cancel_intent_handler(request):
return cancel_action_handler(request)
MAX_RESPONSE_TWEETS = 3
def tweet_list_handler(request, tweet_list_builder, msg_prefix=""):
""" This is a generic function to handle any intent that reads out a list of tweets"""
# tweet_list_builder is a function that takes a unique identifier and returns a list of things to say
tweets = tweet_list_builder(request.access_token())
print (len(tweets), 'tweets found')
if tweets:
twitter_cache.initialize_user_queue(user_id=request.access_token(),
queue=tweets)
text_to_read_out = twitter_cache.user_queue(request.access_token()).read_out_next(MAX_RESPONSE_TWEETS)
message = msg_prefix + text_to_read_out + ", say 'next' to hear more, or reply to a tweet by number."
return alexa.create_response(message=message,
end_session=False)
else:
return alexa.create_response(message="Sorry, no tweets found, please try something else",
end_session=False)
@alexa.intent(intent="SearchTweets")
def search_tweets_handler(request):
search_topic = request.get_slot_value("Topic")
max_tweets = 3
if search_topic:
message = "Searching twitter for tweets about {} . ".format(search_topic)
def search_tweets_builder(uid):
params = {
"q" : search_topic,
"result_type" : "popular"
}
return search_for_tweets_about(request.access_token(), params)
return tweet_list_handler(request, tweet_list_builder=search_tweets_builder, msg_prefix=message)
else:
return r.create_response("I couldn't find a topic to search for in your request")
@alexa.intent(intent="FindLatestMentions")
def list_mentions_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_latest_twitter_mentions, msg_prefix="Looking for tweets that mention you.")
@alexa.intent(intent="ListHomeTweets")
def list_home_tweets_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_home_tweets)
@alexa.intent(intent="UserTweets")
def list_user_tweets_handler(request):
""" by default gets tweets for current user """
return tweet_list_handler(request, tweet_list_builder=get_user_latest_tweets, msg_prefix="Looking for tweets posted by you.")
@alexa.intent(intent="RetweetsOfMe")
def list_retweets_of_me_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_retweets_of_me, msg_prefix="Looking for retweets.")
@alexa.intent(intent="FindFavouriteTweets")
def find_my_favourites_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_my_favourite_tweets, msg_prefix="Finding your favourite tweets.")
def focused_on_tweet(request):
"""
Return index if focused on tweet False if couldn't
"""
slots = request.get_slot_map()
if "Index" in slots and slots["Index"]:
index = int(slots['Index'])
elif "Ordinal" in slots and slots["Index"]:
parse_ordinal = lambda inp : int("".join([l for l in inp if l in string.digits]))
index = parse_ordinal(slots['Ordinal'])
else:
return False
index = index - 1 # Going from regular notation to CS notation
user_state = twitter_cache.get_user_state(request.access_token())
queue = user_state['user_queue'].queue()
if index < len(queue):
# Analyze tweet in queue
tweet_to_analyze = queue[index]
user_state['focus_tweet'] = tweet_to_analyze
return index + 1 # Returning to regular notation
twitter_cache.serialize()
return False
"""
Definining API for executing pending actions:
action = function that does everything you want and returns a 'message' to return.
description = read out in case there is a pending action at startup.
other metadata will be added as time progresses
"""
@alexa.intent("ReplyIntent")
def reply_handler(request):
message = "Sorry, I couldn't tell which tweet you want to reply to. "
slots = request.get_slot_map()
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if not slots["Tweet"]:
return reply_focus_handler(request)
else:
can_reply = False
if slots['Tweet'] and not (slots['Ordinal'] or slots['Index']):
user_state = twitter_cache.get_user_state(request.access_token())
if 'focus_tweet' in user_state: # User is focused on a tweet
can_reply = True
else:
index = focused_on_tweet(request)
if index: can_reply = True
if can_reply: # Successfully focused on a tweet
index, focus_tweet = user_state['focus_tweet']
tweet_message = "@{0} {1}".format(focus_tweet.get_screen_name(),
slots['Tweet'])
params = {"in_reply_to_status_id": focus_tweet.get_id()}
def action():
print ("Performing action! lambda functions are awesome!")
message = post_tweet(request.access_token(), tweet_message, params)
del user_state['focus_tweet']
return message
should_end_session = False
message = "I am ready to post the tweet, {}. Please say yes to confirm or stop to cancel.".format(slots['Tweet'])
user_state['pending_action'] = {"action" : action,
"description" : message }
return alexa.create_response(message=message, end_session=should_end_session)
@alexa.intent("YesIntent")
def confirm_action_handler(request):
message = "okay."
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if 'pending_action' in user_state:
params = user_state['pending_action']
# Perform action
message = params['action']()
if 'message' in params:
message = params['message']
if 'callback' in params:
params['callback']()
del user_state['pending_action']
print ("successfully executed command")
message = message + " would you like me to do anything else ? "
should_end_session = False
return alexa.create_response(message, end_session=should_end_session)
@alexa.intent("AMAZON.CancelIntent")
def cancel_action_handler(request):
message = "okay."
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if 'pending_action' in user_state:
del user_state['pending_action'] # Clearing out the user's pending action
print ("cleared user_state")
message += " i won't do it. would you like me to do something else ? "
should_end_session = False
return r.create_response(message, end_session=should_end_session)
@alexa.intent("ReplyFocus")
def reply_focus_handler(request):
msg = "Sorry, I couldn't tell which tweet you wanted to reply to."
index = focused_on_tweet(request)
if index:
return alexa.create_response(message="Do you want to reply to tweet {} ? If so say reply, followed by your message".format(index))
return alexa.create_response(message=msg, end_session=False)
@alexa.intent("MoreInfo")
def more_info_handler(request):
index = focused_on_tweet(request)
if index:
user_state = twitter_cache.get_user_state(request.access_token())
index, tweet = user_state['focus_tweet']
message = " ".join(["details about tweet number {}.".format(index+1), tweet.detailed_description(),
"To reply, say 'reply' followed by your message"])
return alexa.create_response(message=message, end_session=False)
return reply_focus_handler(request)
@alexa.intent("NextIntent")
def next_intent_handler(request):
"""
Takes care of things whenver the user says 'next'
"""
message = "Sorry, couldn't find anything in your next queue"
end_session = True
if True:
user_queue = twitter_cache.user_queue(request.access_token())
if not user_queue.is_finished():
message = user_queue.read_out_next(MAX_RESPONSE_TWEETS)
if not user_queue.is_finished():
end_session = False
message = message + ". Please, say 'next' if you want me to read out more. "
return alexa.create_response(message=message,
end_session=end_session)
@alexa.intent(intent="PreviousIntent")
def previous_intent_handler(request):
user_queue = twitter_cache.user_queue(request.access_token())
if user_queue and user_queue.has_prev():
message = user_queue.read_out_prev()
else:
message = "I couldn't find anything to repeat"
return alexa.create_response(message=message)
|
anjishnu/ask-alexa-pykit | examples/twitter/lambda_function.py | post_tweet_intent_handler | python | def post_tweet_intent_handler(request):
tweet = request.get_slot_value("Tweet")
tweet = tweet if tweet else ""
if tweet:
user_state = twitter_cache.get_user_state(request.access_token())
def action():
return post_tweet(request.access_token(), tweet)
message = "I am ready to post the tweet, {} ,\n Please say yes to confirm or stop to cancel .".format(tweet)
user_state['pending_action'] = {"action" : action,
"description" : message}
return r.create_response(message=message, end_session=False)
else:
# No tweet could be disambiguated
message = " ".join(
[
"I'm sorry, I couldn't understand what you wanted to tweet .",
"Please prepend the message with either post or tweet ."
]
)
return alexa.create_response(message=message, end_session=False) | Use the 'intent' field in the VoiceHandler to map to the respective intent. | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/twitter/lambda_function.py#L54-L77 | [
"def create_response(self, message=None, end_session=False, card_obj=None,\n reprompt_message=None, is_ssml=None):\n \"\"\"\n message - text message to be spoken out by the Echo\n end_session - flag to determine whether this interaction should end the session\n card_obj = JSON card ob... | from ask import alexa
from config import TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET
from twitter import local_cache as twitter_cache
from twitter import (post_tweet, get_home_tweets, get_retweets_of_me,
get_my_favourite_tweets, get_my_favourite_tweets,
get_latest_twitter_mentions, search_for_tweets_about,
get_user_latest_tweets, get_user_twitter_details,
geo_search, closest_trend_search, list_trends)
# Run this code once on startup to load twitter keys into credentials
server_cache_state = twitter_cache.get_server_state()
if 'twitter_keys' not in server_cache_state:
server_cache_state['twitter_keys'] = (TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
def default_handler(request):
""" The default handler gets invoked if no handler is set for a request """
return launch_request_handler(request)
@alexa.request(request_type="LaunchRequest")
def launch_request_handler(request):
""" Annotate functions with @VoiceHandler so that they can be automatically mapped
to request types. Use the 'request_type' field to map them to non-intent requests """
user_id = request.access_token()
if user_id in twitter_cache.users():
user_cache = twitter_cache.get_user_state(user_id)
user_cache["amzn_id"]= request.user_id()
base_message = "Welcome to Twitter, {} . How may I help you today ?".format(user_cache["screen_name"])
print (user_cache)
if 'pending_action' in user_cache:
base_message += " You have one pending action . "
print ("Found pending action")
if 'description' in user_cache['pending_action']:
print ("Found description")
base_message += user_cache['pending_action']['description']
return r.create_response(base_message)
card = r.create_card(title="Please log into twitter", card_type="LinkAccount")
return r.create_response(message="Welcome to twitter, looks like you haven't logged in!"
" Log in via the alexa app.", card_obj=card,
end_session=True)
@alexa.request("SessionEndedRequest")
def session_ended_request_handler(request):
return alexa.create_response(message="Goodbye!")
@alexa.intent(intent='PostTweet')
@alexa.intent(intent="SearchTrends")
def find_trends_handler(request):
uid = request.access_token()
user_cache = twitter_cache.get_user_state(uid)
resolved_location = False
message = ""
location = request.get_slot_value("Location")
should_end_session = True
if not location:
# Get trends for user's current location
user_details = get_user_twitter_details(uid)
location = user_details[0]['location']
if location:
message += "Finding trends near you . "
else:
message += "I could not figure out where you are, please set it up on your twitter account . "
if location:
response = geo_search(request.access_token(), location) # convert natural language text to location
top_result = response['result']['places'][0]
lon, lat = top_result['centroid']
trend_params = {"lat" : lat, "long" : lon}
trend_location = closest_trend_search(request.access_token(), trend_params) # find closest woeid which has trends
woeid = trend_location[0]['woeid']
trends = list_trends(request.access_token(), trend_location[0]['woeid']) # List top trends
trend_lst = [trend['name'] for trend in trends[0]['trends']]
message += "The top trending topics near {0} are, ".format(trend_location[0]['name'])
message += "\n".join(["{0}, {1}, ".format(index+1, trend) for index, trend in enumerate(trend_lst)])
return alexa.create_response(message=message, end_session=should_end_session)
@alexa.intent(intent="AMAZON.HelpIntent")
def help_intent_handler(request):
msg = ("I can do several things for you on twitter! "
"I can tell you about the top tweets on your home page, or the last tweets you favourited . "
"I can also tell you about recent tweets that mention you, or were posted by you . "
"When I am reading out a list of tweets, you can stop me and ask me to tell you about the tweet in more detail, or ask me to post a reply to it . "
"And of course, whenever post a tweet, say 'post hello world' or 'tweet hello world'. I am not good with hashtags or trending topics just yet, but I'm working on it! ")
return r.create_response(message=msg)
@alexa.intent(intent="AMAZON.StopIntent")
def stop_intent__handler(request):
return cancel_action_handler(request)
@alexa.intent(intent="AMAZON.CancelIntent")
def cancel_intent_handler(request):
return cancel_action_handler(request)
MAX_RESPONSE_TWEETS = 3
def tweet_list_handler(request, tweet_list_builder, msg_prefix=""):
""" This is a generic function to handle any intent that reads out a list of tweets"""
# tweet_list_builder is a function that takes a unique identifier and returns a list of things to say
tweets = tweet_list_builder(request.access_token())
print (len(tweets), 'tweets found')
if tweets:
twitter_cache.initialize_user_queue(user_id=request.access_token(),
queue=tweets)
text_to_read_out = twitter_cache.user_queue(request.access_token()).read_out_next(MAX_RESPONSE_TWEETS)
message = msg_prefix + text_to_read_out + ", say 'next' to hear more, or reply to a tweet by number."
return alexa.create_response(message=message,
end_session=False)
else:
return alexa.create_response(message="Sorry, no tweets found, please try something else",
end_session=False)
@alexa.intent(intent="SearchTweets")
def search_tweets_handler(request):
search_topic = request.get_slot_value("Topic")
max_tweets = 3
if search_topic:
message = "Searching twitter for tweets about {} . ".format(search_topic)
def search_tweets_builder(uid):
params = {
"q" : search_topic,
"result_type" : "popular"
}
return search_for_tweets_about(request.access_token(), params)
return tweet_list_handler(request, tweet_list_builder=search_tweets_builder, msg_prefix=message)
else:
return r.create_response("I couldn't find a topic to search for in your request")
@alexa.intent(intent="FindLatestMentions")
def list_mentions_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_latest_twitter_mentions, msg_prefix="Looking for tweets that mention you.")
@alexa.intent(intent="ListHomeTweets")
def list_home_tweets_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_home_tweets)
@alexa.intent(intent="UserTweets")
def list_user_tweets_handler(request):
""" by default gets tweets for current user """
return tweet_list_handler(request, tweet_list_builder=get_user_latest_tweets, msg_prefix="Looking for tweets posted by you.")
@alexa.intent(intent="RetweetsOfMe")
def list_retweets_of_me_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_retweets_of_me, msg_prefix="Looking for retweets.")
@alexa.intent(intent="FindFavouriteTweets")
def find_my_favourites_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_my_favourite_tweets, msg_prefix="Finding your favourite tweets.")
def focused_on_tweet(request):
"""
Return index if focused on tweet False if couldn't
"""
slots = request.get_slot_map()
if "Index" in slots and slots["Index"]:
index = int(slots['Index'])
elif "Ordinal" in slots and slots["Index"]:
parse_ordinal = lambda inp : int("".join([l for l in inp if l in string.digits]))
index = parse_ordinal(slots['Ordinal'])
else:
return False
index = index - 1 # Going from regular notation to CS notation
user_state = twitter_cache.get_user_state(request.access_token())
queue = user_state['user_queue'].queue()
if index < len(queue):
# Analyze tweet in queue
tweet_to_analyze = queue[index]
user_state['focus_tweet'] = tweet_to_analyze
return index + 1 # Returning to regular notation
twitter_cache.serialize()
return False
"""
Definining API for executing pending actions:
action = function that does everything you want and returns a 'message' to return.
description = read out in case there is a pending action at startup.
other metadata will be added as time progresses
"""
@alexa.intent("ReplyIntent")
def reply_handler(request):
message = "Sorry, I couldn't tell which tweet you want to reply to. "
slots = request.get_slot_map()
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if not slots["Tweet"]:
return reply_focus_handler(request)
else:
can_reply = False
if slots['Tweet'] and not (slots['Ordinal'] or slots['Index']):
user_state = twitter_cache.get_user_state(request.access_token())
if 'focus_tweet' in user_state: # User is focused on a tweet
can_reply = True
else:
index = focused_on_tweet(request)
if index: can_reply = True
if can_reply: # Successfully focused on a tweet
index, focus_tweet = user_state['focus_tweet']
tweet_message = "@{0} {1}".format(focus_tweet.get_screen_name(),
slots['Tweet'])
params = {"in_reply_to_status_id": focus_tweet.get_id()}
def action():
print ("Performing action! lambda functions are awesome!")
message = post_tweet(request.access_token(), tweet_message, params)
del user_state['focus_tweet']
return message
should_end_session = False
message = "I am ready to post the tweet, {}. Please say yes to confirm or stop to cancel.".format(slots['Tweet'])
user_state['pending_action'] = {"action" : action,
"description" : message }
return alexa.create_response(message=message, end_session=should_end_session)
@alexa.intent("YesIntent")
def confirm_action_handler(request):
message = "okay."
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if 'pending_action' in user_state:
params = user_state['pending_action']
# Perform action
message = params['action']()
if 'message' in params:
message = params['message']
if 'callback' in params:
params['callback']()
del user_state['pending_action']
print ("successfully executed command")
message = message + " would you like me to do anything else ? "
should_end_session = False
return alexa.create_response(message, end_session=should_end_session)
@alexa.intent("AMAZON.CancelIntent")
def cancel_action_handler(request):
message = "okay."
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if 'pending_action' in user_state:
del user_state['pending_action'] # Clearing out the user's pending action
print ("cleared user_state")
message += " i won't do it. would you like me to do something else ? "
should_end_session = False
return r.create_response(message, end_session=should_end_session)
@alexa.intent("ReplyFocus")
def reply_focus_handler(request):
msg = "Sorry, I couldn't tell which tweet you wanted to reply to."
index = focused_on_tweet(request)
if index:
return alexa.create_response(message="Do you want to reply to tweet {} ? If so say reply, followed by your message".format(index))
return alexa.create_response(message=msg, end_session=False)
@alexa.intent("MoreInfo")
def more_info_handler(request):
index = focused_on_tweet(request)
if index:
user_state = twitter_cache.get_user_state(request.access_token())
index, tweet = user_state['focus_tweet']
message = " ".join(["details about tweet number {}.".format(index+1), tweet.detailed_description(),
"To reply, say 'reply' followed by your message"])
return alexa.create_response(message=message, end_session=False)
return reply_focus_handler(request)
@alexa.intent("NextIntent")
def next_intent_handler(request):
"""
Takes care of things whenver the user says 'next'
"""
message = "Sorry, couldn't find anything in your next queue"
end_session = True
if True:
user_queue = twitter_cache.user_queue(request.access_token())
if not user_queue.is_finished():
message = user_queue.read_out_next(MAX_RESPONSE_TWEETS)
if not user_queue.is_finished():
end_session = False
message = message + ". Please, say 'next' if you want me to read out more. "
return alexa.create_response(message=message,
end_session=end_session)
@alexa.intent(intent="PreviousIntent")
def previous_intent_handler(request):
user_queue = twitter_cache.user_queue(request.access_token())
if user_queue and user_queue.has_prev():
message = user_queue.read_out_prev()
else:
message = "I couldn't find anything to repeat"
return alexa.create_response(message=message)
|
anjishnu/ask-alexa-pykit | examples/twitter/lambda_function.py | tweet_list_handler | python | def tweet_list_handler(request, tweet_list_builder, msg_prefix=""):
# tweet_list_builder is a function that takes a unique identifier and returns a list of things to say
tweets = tweet_list_builder(request.access_token())
print (len(tweets), 'tweets found')
if tweets:
twitter_cache.initialize_user_queue(user_id=request.access_token(),
queue=tweets)
text_to_read_out = twitter_cache.user_queue(request.access_token()).read_out_next(MAX_RESPONSE_TWEETS)
message = msg_prefix + text_to_read_out + ", say 'next' to hear more, or reply to a tweet by number."
return alexa.create_response(message=message,
end_session=False)
else:
return alexa.create_response(message="Sorry, no tweets found, please try something else",
end_session=False) | This is a generic function to handle any intent that reads out a list of tweets | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/twitter/lambda_function.py#L137-L152 | [
"def get_home_tweets(user_id, input_params={}):\n url = \"https://api.twitter.com/1.1/statuses/home_timeline.json\"\n print (\"Trying to get home tweets\")\n response = request_tweet_list(url, user_id)\n return response\n",
"def get_retweets_of_me(user_id, input_params={}):\n \"\"\" returns recentl... | from ask import alexa
from config import TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET
from twitter import local_cache as twitter_cache
from twitter import (post_tweet, get_home_tweets, get_retweets_of_me,
get_my_favourite_tweets, get_my_favourite_tweets,
get_latest_twitter_mentions, search_for_tweets_about,
get_user_latest_tweets, get_user_twitter_details,
geo_search, closest_trend_search, list_trends)
# Run this code once on startup to load twitter keys into credentials
server_cache_state = twitter_cache.get_server_state()
if 'twitter_keys' not in server_cache_state:
server_cache_state['twitter_keys'] = (TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
def default_handler(request):
""" The default handler gets invoked if no handler is set for a request """
return launch_request_handler(request)
@alexa.request(request_type="LaunchRequest")
def launch_request_handler(request):
""" Annotate functions with @VoiceHandler so that they can be automatically mapped
to request types. Use the 'request_type' field to map them to non-intent requests """
user_id = request.access_token()
if user_id in twitter_cache.users():
user_cache = twitter_cache.get_user_state(user_id)
user_cache["amzn_id"]= request.user_id()
base_message = "Welcome to Twitter, {} . How may I help you today ?".format(user_cache["screen_name"])
print (user_cache)
if 'pending_action' in user_cache:
base_message += " You have one pending action . "
print ("Found pending action")
if 'description' in user_cache['pending_action']:
print ("Found description")
base_message += user_cache['pending_action']['description']
return r.create_response(base_message)
card = r.create_card(title="Please log into twitter", card_type="LinkAccount")
return r.create_response(message="Welcome to twitter, looks like you haven't logged in!"
" Log in via the alexa app.", card_obj=card,
end_session=True)
@alexa.request("SessionEndedRequest")
def session_ended_request_handler(request):
return alexa.create_response(message="Goodbye!")
@alexa.intent(intent='PostTweet')
def post_tweet_intent_handler(request):
"""
Use the 'intent' field in the VoiceHandler to map to the respective intent.
"""
tweet = request.get_slot_value("Tweet")
tweet = tweet if tweet else ""
if tweet:
user_state = twitter_cache.get_user_state(request.access_token())
def action():
return post_tweet(request.access_token(), tweet)
message = "I am ready to post the tweet, {} ,\n Please say yes to confirm or stop to cancel .".format(tweet)
user_state['pending_action'] = {"action" : action,
"description" : message}
return r.create_response(message=message, end_session=False)
else:
# No tweet could be disambiguated
message = " ".join(
[
"I'm sorry, I couldn't understand what you wanted to tweet .",
"Please prepend the message with either post or tweet ."
]
)
return alexa.create_response(message=message, end_session=False)
@alexa.intent(intent="SearchTrends")
def find_trends_handler(request):
uid = request.access_token()
user_cache = twitter_cache.get_user_state(uid)
resolved_location = False
message = ""
location = request.get_slot_value("Location")
should_end_session = True
if not location:
# Get trends for user's current location
user_details = get_user_twitter_details(uid)
location = user_details[0]['location']
if location:
message += "Finding trends near you . "
else:
message += "I could not figure out where you are, please set it up on your twitter account . "
if location:
response = geo_search(request.access_token(), location) # convert natural language text to location
top_result = response['result']['places'][0]
lon, lat = top_result['centroid']
trend_params = {"lat" : lat, "long" : lon}
trend_location = closest_trend_search(request.access_token(), trend_params) # find closest woeid which has trends
woeid = trend_location[0]['woeid']
trends = list_trends(request.access_token(), trend_location[0]['woeid']) # List top trends
trend_lst = [trend['name'] for trend in trends[0]['trends']]
message += "The top trending topics near {0} are, ".format(trend_location[0]['name'])
message += "\n".join(["{0}, {1}, ".format(index+1, trend) for index, trend in enumerate(trend_lst)])
return alexa.create_response(message=message, end_session=should_end_session)
@alexa.intent(intent="AMAZON.HelpIntent")
def help_intent_handler(request):
msg = ("I can do several things for you on twitter! "
"I can tell you about the top tweets on your home page, or the last tweets you favourited . "
"I can also tell you about recent tweets that mention you, or were posted by you . "
"When I am reading out a list of tweets, you can stop me and ask me to tell you about the tweet in more detail, or ask me to post a reply to it . "
"And of course, whenever post a tweet, say 'post hello world' or 'tweet hello world'. I am not good with hashtags or trending topics just yet, but I'm working on it! ")
return r.create_response(message=msg)
@alexa.intent(intent="AMAZON.StopIntent")
def stop_intent__handler(request):
return cancel_action_handler(request)
@alexa.intent(intent="AMAZON.CancelIntent")
def cancel_intent_handler(request):
return cancel_action_handler(request)
MAX_RESPONSE_TWEETS = 3
@alexa.intent(intent="SearchTweets")
def search_tweets_handler(request):
search_topic = request.get_slot_value("Topic")
max_tweets = 3
if search_topic:
message = "Searching twitter for tweets about {} . ".format(search_topic)
def search_tweets_builder(uid):
params = {
"q" : search_topic,
"result_type" : "popular"
}
return search_for_tweets_about(request.access_token(), params)
return tweet_list_handler(request, tweet_list_builder=search_tweets_builder, msg_prefix=message)
else:
return r.create_response("I couldn't find a topic to search for in your request")
@alexa.intent(intent="FindLatestMentions")
def list_mentions_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_latest_twitter_mentions, msg_prefix="Looking for tweets that mention you.")
@alexa.intent(intent="ListHomeTweets")
def list_home_tweets_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_home_tweets)
@alexa.intent(intent="UserTweets")
def list_user_tweets_handler(request):
""" by default gets tweets for current user """
return tweet_list_handler(request, tweet_list_builder=get_user_latest_tweets, msg_prefix="Looking for tweets posted by you.")
@alexa.intent(intent="RetweetsOfMe")
def list_retweets_of_me_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_retweets_of_me, msg_prefix="Looking for retweets.")
@alexa.intent(intent="FindFavouriteTweets")
def find_my_favourites_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_my_favourite_tweets, msg_prefix="Finding your favourite tweets.")
def focused_on_tweet(request):
"""
Return index if focused on tweet False if couldn't
"""
slots = request.get_slot_map()
if "Index" in slots and slots["Index"]:
index = int(slots['Index'])
elif "Ordinal" in slots and slots["Index"]:
parse_ordinal = lambda inp : int("".join([l for l in inp if l in string.digits]))
index = parse_ordinal(slots['Ordinal'])
else:
return False
index = index - 1 # Going from regular notation to CS notation
user_state = twitter_cache.get_user_state(request.access_token())
queue = user_state['user_queue'].queue()
if index < len(queue):
# Analyze tweet in queue
tweet_to_analyze = queue[index]
user_state['focus_tweet'] = tweet_to_analyze
return index + 1 # Returning to regular notation
twitter_cache.serialize()
return False
"""
Definining API for executing pending actions:
action = function that does everything you want and returns a 'message' to return.
description = read out in case there is a pending action at startup.
other metadata will be added as time progresses
"""
@alexa.intent("ReplyIntent")
def reply_handler(request):
message = "Sorry, I couldn't tell which tweet you want to reply to. "
slots = request.get_slot_map()
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if not slots["Tweet"]:
return reply_focus_handler(request)
else:
can_reply = False
if slots['Tweet'] and not (slots['Ordinal'] or slots['Index']):
user_state = twitter_cache.get_user_state(request.access_token())
if 'focus_tweet' in user_state: # User is focused on a tweet
can_reply = True
else:
index = focused_on_tweet(request)
if index: can_reply = True
if can_reply: # Successfully focused on a tweet
index, focus_tweet = user_state['focus_tweet']
tweet_message = "@{0} {1}".format(focus_tweet.get_screen_name(),
slots['Tweet'])
params = {"in_reply_to_status_id": focus_tweet.get_id()}
def action():
print ("Performing action! lambda functions are awesome!")
message = post_tweet(request.access_token(), tweet_message, params)
del user_state['focus_tweet']
return message
should_end_session = False
message = "I am ready to post the tweet, {}. Please say yes to confirm or stop to cancel.".format(slots['Tweet'])
user_state['pending_action'] = {"action" : action,
"description" : message }
return alexa.create_response(message=message, end_session=should_end_session)
@alexa.intent("YesIntent")
def confirm_action_handler(request):
message = "okay."
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if 'pending_action' in user_state:
params = user_state['pending_action']
# Perform action
message = params['action']()
if 'message' in params:
message = params['message']
if 'callback' in params:
params['callback']()
del user_state['pending_action']
print ("successfully executed command")
message = message + " would you like me to do anything else ? "
should_end_session = False
return alexa.create_response(message, end_session=should_end_session)
@alexa.intent("AMAZON.CancelIntent")
def cancel_action_handler(request):
message = "okay."
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if 'pending_action' in user_state:
del user_state['pending_action'] # Clearing out the user's pending action
print ("cleared user_state")
message += " i won't do it. would you like me to do something else ? "
should_end_session = False
return r.create_response(message, end_session=should_end_session)
@alexa.intent("ReplyFocus")
def reply_focus_handler(request):
msg = "Sorry, I couldn't tell which tweet you wanted to reply to."
index = focused_on_tweet(request)
if index:
return alexa.create_response(message="Do you want to reply to tweet {} ? If so say reply, followed by your message".format(index))
return alexa.create_response(message=msg, end_session=False)
@alexa.intent("MoreInfo")
def more_info_handler(request):
index = focused_on_tweet(request)
if index:
user_state = twitter_cache.get_user_state(request.access_token())
index, tweet = user_state['focus_tweet']
message = " ".join(["details about tweet number {}.".format(index+1), tweet.detailed_description(),
"To reply, say 'reply' followed by your message"])
return alexa.create_response(message=message, end_session=False)
return reply_focus_handler(request)
@alexa.intent("NextIntent")
def next_intent_handler(request):
"""
Takes care of things whenver the user says 'next'
"""
message = "Sorry, couldn't find anything in your next queue"
end_session = True
if True:
user_queue = twitter_cache.user_queue(request.access_token())
if not user_queue.is_finished():
message = user_queue.read_out_next(MAX_RESPONSE_TWEETS)
if not user_queue.is_finished():
end_session = False
message = message + ". Please, say 'next' if you want me to read out more. "
return alexa.create_response(message=message,
end_session=end_session)
@alexa.intent(intent="PreviousIntent")
def previous_intent_handler(request):
user_queue = twitter_cache.user_queue(request.access_token())
if user_queue and user_queue.has_prev():
message = user_queue.read_out_prev()
else:
message = "I couldn't find anything to repeat"
return alexa.create_response(message=message)
|
anjishnu/ask-alexa-pykit | examples/twitter/lambda_function.py | focused_on_tweet | python | def focused_on_tweet(request):
slots = request.get_slot_map()
if "Index" in slots and slots["Index"]:
index = int(slots['Index'])
elif "Ordinal" in slots and slots["Index"]:
parse_ordinal = lambda inp : int("".join([l for l in inp if l in string.digits]))
index = parse_ordinal(slots['Ordinal'])
else:
return False
index = index - 1 # Going from regular notation to CS notation
user_state = twitter_cache.get_user_state(request.access_token())
queue = user_state['user_queue'].queue()
if index < len(queue):
# Analyze tweet in queue
tweet_to_analyze = queue[index]
user_state['focus_tweet'] = tweet_to_analyze
return index + 1 # Returning to regular notation
twitter_cache.serialize()
return False | Return index if focused on tweet False if couldn't | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/twitter/lambda_function.py#L198-L221 | [
"parse_ordinal = lambda inp : int(\"\".join([l for l in inp if l in string.digits]))\n",
"def get_user_state(self, user_id):\n return self.memcache['users'][user_id]\n"
] | from ask import alexa
from config import TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET
from twitter import local_cache as twitter_cache
from twitter import (post_tweet, get_home_tweets, get_retweets_of_me,
get_my_favourite_tweets, get_my_favourite_tweets,
get_latest_twitter_mentions, search_for_tweets_about,
get_user_latest_tweets, get_user_twitter_details,
geo_search, closest_trend_search, list_trends)
# Run this code once on startup to load twitter keys into credentials
server_cache_state = twitter_cache.get_server_state()
if 'twitter_keys' not in server_cache_state:
server_cache_state['twitter_keys'] = (TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
def default_handler(request):
""" The default handler gets invoked if no handler is set for a request """
return launch_request_handler(request)
@alexa.request(request_type="LaunchRequest")
def launch_request_handler(request):
""" Annotate functions with @VoiceHandler so that they can be automatically mapped
to request types. Use the 'request_type' field to map them to non-intent requests """
user_id = request.access_token()
if user_id in twitter_cache.users():
user_cache = twitter_cache.get_user_state(user_id)
user_cache["amzn_id"]= request.user_id()
base_message = "Welcome to Twitter, {} . How may I help you today ?".format(user_cache["screen_name"])
print (user_cache)
if 'pending_action' in user_cache:
base_message += " You have one pending action . "
print ("Found pending action")
if 'description' in user_cache['pending_action']:
print ("Found description")
base_message += user_cache['pending_action']['description']
return r.create_response(base_message)
card = r.create_card(title="Please log into twitter", card_type="LinkAccount")
return r.create_response(message="Welcome to twitter, looks like you haven't logged in!"
" Log in via the alexa app.", card_obj=card,
end_session=True)
@alexa.request("SessionEndedRequest")
def session_ended_request_handler(request):
return alexa.create_response(message="Goodbye!")
@alexa.intent(intent='PostTweet')
def post_tweet_intent_handler(request):
"""
Use the 'intent' field in the VoiceHandler to map to the respective intent.
"""
tweet = request.get_slot_value("Tweet")
tweet = tweet if tweet else ""
if tweet:
user_state = twitter_cache.get_user_state(request.access_token())
def action():
return post_tweet(request.access_token(), tweet)
message = "I am ready to post the tweet, {} ,\n Please say yes to confirm or stop to cancel .".format(tweet)
user_state['pending_action'] = {"action" : action,
"description" : message}
return r.create_response(message=message, end_session=False)
else:
# No tweet could be disambiguated
message = " ".join(
[
"I'm sorry, I couldn't understand what you wanted to tweet .",
"Please prepend the message with either post or tweet ."
]
)
return alexa.create_response(message=message, end_session=False)
@alexa.intent(intent="SearchTrends")
def find_trends_handler(request):
uid = request.access_token()
user_cache = twitter_cache.get_user_state(uid)
resolved_location = False
message = ""
location = request.get_slot_value("Location")
should_end_session = True
if not location:
# Get trends for user's current location
user_details = get_user_twitter_details(uid)
location = user_details[0]['location']
if location:
message += "Finding trends near you . "
else:
message += "I could not figure out where you are, please set it up on your twitter account . "
if location:
response = geo_search(request.access_token(), location) # convert natural language text to location
top_result = response['result']['places'][0]
lon, lat = top_result['centroid']
trend_params = {"lat" : lat, "long" : lon}
trend_location = closest_trend_search(request.access_token(), trend_params) # find closest woeid which has trends
woeid = trend_location[0]['woeid']
trends = list_trends(request.access_token(), trend_location[0]['woeid']) # List top trends
trend_lst = [trend['name'] for trend in trends[0]['trends']]
message += "The top trending topics near {0} are, ".format(trend_location[0]['name'])
message += "\n".join(["{0}, {1}, ".format(index+1, trend) for index, trend in enumerate(trend_lst)])
return alexa.create_response(message=message, end_session=should_end_session)
@alexa.intent(intent="AMAZON.HelpIntent")
def help_intent_handler(request):
msg = ("I can do several things for you on twitter! "
"I can tell you about the top tweets on your home page, or the last tweets you favourited . "
"I can also tell you about recent tweets that mention you, or were posted by you . "
"When I am reading out a list of tweets, you can stop me and ask me to tell you about the tweet in more detail, or ask me to post a reply to it . "
"And of course, whenever post a tweet, say 'post hello world' or 'tweet hello world'. I am not good with hashtags or trending topics just yet, but I'm working on it! ")
return r.create_response(message=msg)
@alexa.intent(intent="AMAZON.StopIntent")
def stop_intent__handler(request):
return cancel_action_handler(request)
@alexa.intent(intent="AMAZON.CancelIntent")
def cancel_intent_handler(request):
return cancel_action_handler(request)
MAX_RESPONSE_TWEETS = 3
def tweet_list_handler(request, tweet_list_builder, msg_prefix=""):
""" This is a generic function to handle any intent that reads out a list of tweets"""
# tweet_list_builder is a function that takes a unique identifier and returns a list of things to say
tweets = tweet_list_builder(request.access_token())
print (len(tweets), 'tweets found')
if tweets:
twitter_cache.initialize_user_queue(user_id=request.access_token(),
queue=tweets)
text_to_read_out = twitter_cache.user_queue(request.access_token()).read_out_next(MAX_RESPONSE_TWEETS)
message = msg_prefix + text_to_read_out + ", say 'next' to hear more, or reply to a tweet by number."
return alexa.create_response(message=message,
end_session=False)
else:
return alexa.create_response(message="Sorry, no tweets found, please try something else",
end_session=False)
@alexa.intent(intent="SearchTweets")
def search_tweets_handler(request):
search_topic = request.get_slot_value("Topic")
max_tweets = 3
if search_topic:
message = "Searching twitter for tweets about {} . ".format(search_topic)
def search_tweets_builder(uid):
params = {
"q" : search_topic,
"result_type" : "popular"
}
return search_for_tweets_about(request.access_token(), params)
return tweet_list_handler(request, tweet_list_builder=search_tweets_builder, msg_prefix=message)
else:
return r.create_response("I couldn't find a topic to search for in your request")
@alexa.intent(intent="FindLatestMentions")
def list_mentions_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_latest_twitter_mentions, msg_prefix="Looking for tweets that mention you.")
@alexa.intent(intent="ListHomeTweets")
def list_home_tweets_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_home_tweets)
@alexa.intent(intent="UserTweets")
def list_user_tweets_handler(request):
""" by default gets tweets for current user """
return tweet_list_handler(request, tweet_list_builder=get_user_latest_tweets, msg_prefix="Looking for tweets posted by you.")
@alexa.intent(intent="RetweetsOfMe")
def list_retweets_of_me_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_retweets_of_me, msg_prefix="Looking for retweets.")
@alexa.intent(intent="FindFavouriteTweets")
def find_my_favourites_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_my_favourite_tweets, msg_prefix="Finding your favourite tweets.")
def focused_on_tweet(request):
"""
Return index if focused on tweet False if couldn't
"""
slots = request.get_slot_map()
if "Index" in slots and slots["Index"]:
index = int(slots['Index'])
elif "Ordinal" in slots and slots["Index"]:
parse_ordinal = lambda inp : int("".join([l for l in inp if l in string.digits]))
index = parse_ordinal(slots['Ordinal'])
else:
return False
index = index - 1 # Going from regular notation to CS notation
user_state = twitter_cache.get_user_state(request.access_token())
queue = user_state['user_queue'].queue()
if index < len(queue):
# Analyze tweet in queue
tweet_to_analyze = queue[index]
user_state['focus_tweet'] = tweet_to_analyze
return index + 1 # Returning to regular notation
twitter_cache.serialize()
return False
"""
Definining API for executing pending actions:
action = function that does everything you want and returns a 'message' to return.
description = read out in case there is a pending action at startup.
other metadata will be added as time progresses
"""
@alexa.intent("ReplyIntent")
def reply_handler(request):
message = "Sorry, I couldn't tell which tweet you want to reply to. "
slots = request.get_slot_map()
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if not slots["Tweet"]:
return reply_focus_handler(request)
else:
can_reply = False
if slots['Tweet'] and not (slots['Ordinal'] or slots['Index']):
user_state = twitter_cache.get_user_state(request.access_token())
if 'focus_tweet' in user_state: # User is focused on a tweet
can_reply = True
else:
index = focused_on_tweet(request)
if index: can_reply = True
if can_reply: # Successfully focused on a tweet
index, focus_tweet = user_state['focus_tweet']
tweet_message = "@{0} {1}".format(focus_tweet.get_screen_name(),
slots['Tweet'])
params = {"in_reply_to_status_id": focus_tweet.get_id()}
def action():
print ("Performing action! lambda functions are awesome!")
message = post_tweet(request.access_token(), tweet_message, params)
del user_state['focus_tweet']
return message
should_end_session = False
message = "I am ready to post the tweet, {}. Please say yes to confirm or stop to cancel.".format(slots['Tweet'])
user_state['pending_action'] = {"action" : action,
"description" : message }
return alexa.create_response(message=message, end_session=should_end_session)
@alexa.intent("YesIntent")
def confirm_action_handler(request):
message = "okay."
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if 'pending_action' in user_state:
params = user_state['pending_action']
# Perform action
message = params['action']()
if 'message' in params:
message = params['message']
if 'callback' in params:
params['callback']()
del user_state['pending_action']
print ("successfully executed command")
message = message + " would you like me to do anything else ? "
should_end_session = False
return alexa.create_response(message, end_session=should_end_session)
@alexa.intent("AMAZON.CancelIntent")
def cancel_action_handler(request):
message = "okay."
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if 'pending_action' in user_state:
del user_state['pending_action'] # Clearing out the user's pending action
print ("cleared user_state")
message += " i won't do it. would you like me to do something else ? "
should_end_session = False
return r.create_response(message, end_session=should_end_session)
@alexa.intent("ReplyFocus")
def reply_focus_handler(request):
msg = "Sorry, I couldn't tell which tweet you wanted to reply to."
index = focused_on_tweet(request)
if index:
return alexa.create_response(message="Do you want to reply to tweet {} ? If so say reply, followed by your message".format(index))
return alexa.create_response(message=msg, end_session=False)
@alexa.intent("MoreInfo")
def more_info_handler(request):
index = focused_on_tweet(request)
if index:
user_state = twitter_cache.get_user_state(request.access_token())
index, tweet = user_state['focus_tweet']
message = " ".join(["details about tweet number {}.".format(index+1), tweet.detailed_description(),
"To reply, say 'reply' followed by your message"])
return alexa.create_response(message=message, end_session=False)
return reply_focus_handler(request)
@alexa.intent("NextIntent")
def next_intent_handler(request):
"""
Takes care of things whenver the user says 'next'
"""
message = "Sorry, couldn't find anything in your next queue"
end_session = True
if True:
user_queue = twitter_cache.user_queue(request.access_token())
if not user_queue.is_finished():
message = user_queue.read_out_next(MAX_RESPONSE_TWEETS)
if not user_queue.is_finished():
end_session = False
message = message + ". Please, say 'next' if you want me to read out more. "
return alexa.create_response(message=message,
end_session=end_session)
@alexa.intent(intent="PreviousIntent")
def previous_intent_handler(request):
user_queue = twitter_cache.user_queue(request.access_token())
if user_queue and user_queue.has_prev():
message = user_queue.read_out_prev()
else:
message = "I couldn't find anything to repeat"
return alexa.create_response(message=message)
|
anjishnu/ask-alexa-pykit | examples/twitter/lambda_function.py | next_intent_handler | python | def next_intent_handler(request):
message = "Sorry, couldn't find anything in your next queue"
end_session = True
if True:
user_queue = twitter_cache.user_queue(request.access_token())
if not user_queue.is_finished():
message = user_queue.read_out_next(MAX_RESPONSE_TWEETS)
if not user_queue.is_finished():
end_session = False
message = message + ". Please, say 'next' if you want me to read out more. "
return alexa.create_response(message=message,
end_session=end_session) | Takes care of things whenver the user says 'next' | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/twitter/lambda_function.py#L322-L337 | [
"def create_response(self, message=None, end_session=False, card_obj=None,\n reprompt_message=None, is_ssml=None):\n \"\"\"\n message - text message to be spoken out by the Echo\n end_session - flag to determine whether this interaction should end the session\n card_obj = JSON card ob... | from ask import alexa
from config import TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET
from twitter import local_cache as twitter_cache
from twitter import (post_tweet, get_home_tweets, get_retweets_of_me,
get_my_favourite_tweets, get_my_favourite_tweets,
get_latest_twitter_mentions, search_for_tweets_about,
get_user_latest_tweets, get_user_twitter_details,
geo_search, closest_trend_search, list_trends)
# Run this code once on startup to load twitter keys into credentials
server_cache_state = twitter_cache.get_server_state()
if 'twitter_keys' not in server_cache_state:
server_cache_state['twitter_keys'] = (TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
def default_handler(request):
""" The default handler gets invoked if no handler is set for a request """
return launch_request_handler(request)
@alexa.request(request_type="LaunchRequest")
def launch_request_handler(request):
""" Annotate functions with @VoiceHandler so that they can be automatically mapped
to request types. Use the 'request_type' field to map them to non-intent requests """
user_id = request.access_token()
if user_id in twitter_cache.users():
user_cache = twitter_cache.get_user_state(user_id)
user_cache["amzn_id"]= request.user_id()
base_message = "Welcome to Twitter, {} . How may I help you today ?".format(user_cache["screen_name"])
print (user_cache)
if 'pending_action' in user_cache:
base_message += " You have one pending action . "
print ("Found pending action")
if 'description' in user_cache['pending_action']:
print ("Found description")
base_message += user_cache['pending_action']['description']
return r.create_response(base_message)
card = r.create_card(title="Please log into twitter", card_type="LinkAccount")
return r.create_response(message="Welcome to twitter, looks like you haven't logged in!"
" Log in via the alexa app.", card_obj=card,
end_session=True)
@alexa.request("SessionEndedRequest")
def session_ended_request_handler(request):
return alexa.create_response(message="Goodbye!")
@alexa.intent(intent='PostTweet')
def post_tweet_intent_handler(request):
"""
Use the 'intent' field in the VoiceHandler to map to the respective intent.
"""
tweet = request.get_slot_value("Tweet")
tweet = tweet if tweet else ""
if tweet:
user_state = twitter_cache.get_user_state(request.access_token())
def action():
return post_tweet(request.access_token(), tweet)
message = "I am ready to post the tweet, {} ,\n Please say yes to confirm or stop to cancel .".format(tweet)
user_state['pending_action'] = {"action" : action,
"description" : message}
return r.create_response(message=message, end_session=False)
else:
# No tweet could be disambiguated
message = " ".join(
[
"I'm sorry, I couldn't understand what you wanted to tweet .",
"Please prepend the message with either post or tweet ."
]
)
return alexa.create_response(message=message, end_session=False)
@alexa.intent(intent="SearchTrends")
def find_trends_handler(request):
uid = request.access_token()
user_cache = twitter_cache.get_user_state(uid)
resolved_location = False
message = ""
location = request.get_slot_value("Location")
should_end_session = True
if not location:
# Get trends for user's current location
user_details = get_user_twitter_details(uid)
location = user_details[0]['location']
if location:
message += "Finding trends near you . "
else:
message += "I could not figure out where you are, please set it up on your twitter account . "
if location:
response = geo_search(request.access_token(), location) # convert natural language text to location
top_result = response['result']['places'][0]
lon, lat = top_result['centroid']
trend_params = {"lat" : lat, "long" : lon}
trend_location = closest_trend_search(request.access_token(), trend_params) # find closest woeid which has trends
woeid = trend_location[0]['woeid']
trends = list_trends(request.access_token(), trend_location[0]['woeid']) # List top trends
trend_lst = [trend['name'] for trend in trends[0]['trends']]
message += "The top trending topics near {0} are, ".format(trend_location[0]['name'])
message += "\n".join(["{0}, {1}, ".format(index+1, trend) for index, trend in enumerate(trend_lst)])
return alexa.create_response(message=message, end_session=should_end_session)
@alexa.intent(intent="AMAZON.HelpIntent")
def help_intent_handler(request):
msg = ("I can do several things for you on twitter! "
"I can tell you about the top tweets on your home page, or the last tweets you favourited . "
"I can also tell you about recent tweets that mention you, or were posted by you . "
"When I am reading out a list of tweets, you can stop me and ask me to tell you about the tweet in more detail, or ask me to post a reply to it . "
"And of course, whenever post a tweet, say 'post hello world' or 'tweet hello world'. I am not good with hashtags or trending topics just yet, but I'm working on it! ")
return r.create_response(message=msg)
@alexa.intent(intent="AMAZON.StopIntent")
def stop_intent__handler(request):
return cancel_action_handler(request)
@alexa.intent(intent="AMAZON.CancelIntent")
def cancel_intent_handler(request):
return cancel_action_handler(request)
MAX_RESPONSE_TWEETS = 3
def tweet_list_handler(request, tweet_list_builder, msg_prefix=""):
""" This is a generic function to handle any intent that reads out a list of tweets"""
# tweet_list_builder is a function that takes a unique identifier and returns a list of things to say
tweets = tweet_list_builder(request.access_token())
print (len(tweets), 'tweets found')
if tweets:
twitter_cache.initialize_user_queue(user_id=request.access_token(),
queue=tweets)
text_to_read_out = twitter_cache.user_queue(request.access_token()).read_out_next(MAX_RESPONSE_TWEETS)
message = msg_prefix + text_to_read_out + ", say 'next' to hear more, or reply to a tweet by number."
return alexa.create_response(message=message,
end_session=False)
else:
return alexa.create_response(message="Sorry, no tweets found, please try something else",
end_session=False)
@alexa.intent(intent="SearchTweets")
def search_tweets_handler(request):
search_topic = request.get_slot_value("Topic")
max_tweets = 3
if search_topic:
message = "Searching twitter for tweets about {} . ".format(search_topic)
def search_tweets_builder(uid):
params = {
"q" : search_topic,
"result_type" : "popular"
}
return search_for_tweets_about(request.access_token(), params)
return tweet_list_handler(request, tweet_list_builder=search_tweets_builder, msg_prefix=message)
else:
return r.create_response("I couldn't find a topic to search for in your request")
@alexa.intent(intent="FindLatestMentions")
def list_mentions_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_latest_twitter_mentions, msg_prefix="Looking for tweets that mention you.")
@alexa.intent(intent="ListHomeTweets")
def list_home_tweets_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_home_tweets)
@alexa.intent(intent="UserTweets")
def list_user_tweets_handler(request):
""" by default gets tweets for current user """
return tweet_list_handler(request, tweet_list_builder=get_user_latest_tweets, msg_prefix="Looking for tweets posted by you.")
@alexa.intent(intent="RetweetsOfMe")
def list_retweets_of_me_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_retweets_of_me, msg_prefix="Looking for retweets.")
@alexa.intent(intent="FindFavouriteTweets")
def find_my_favourites_handler(request):
return tweet_list_handler(request, tweet_list_builder=get_my_favourite_tweets, msg_prefix="Finding your favourite tweets.")
def focused_on_tweet(request):
"""
Return index if focused on tweet False if couldn't
"""
slots = request.get_slot_map()
if "Index" in slots and slots["Index"]:
index = int(slots['Index'])
elif "Ordinal" in slots and slots["Index"]:
parse_ordinal = lambda inp : int("".join([l for l in inp if l in string.digits]))
index = parse_ordinal(slots['Ordinal'])
else:
return False
index = index - 1 # Going from regular notation to CS notation
user_state = twitter_cache.get_user_state(request.access_token())
queue = user_state['user_queue'].queue()
if index < len(queue):
# Analyze tweet in queue
tweet_to_analyze = queue[index]
user_state['focus_tweet'] = tweet_to_analyze
return index + 1 # Returning to regular notation
twitter_cache.serialize()
return False
"""
Definining API for executing pending actions:
action = function that does everything you want and returns a 'message' to return.
description = read out in case there is a pending action at startup.
other metadata will be added as time progresses
"""
@alexa.intent("ReplyIntent")
def reply_handler(request):
message = "Sorry, I couldn't tell which tweet you want to reply to. "
slots = request.get_slot_map()
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if not slots["Tweet"]:
return reply_focus_handler(request)
else:
can_reply = False
if slots['Tweet'] and not (slots['Ordinal'] or slots['Index']):
user_state = twitter_cache.get_user_state(request.access_token())
if 'focus_tweet' in user_state: # User is focused on a tweet
can_reply = True
else:
index = focused_on_tweet(request)
if index: can_reply = True
if can_reply: # Successfully focused on a tweet
index, focus_tweet = user_state['focus_tweet']
tweet_message = "@{0} {1}".format(focus_tweet.get_screen_name(),
slots['Tweet'])
params = {"in_reply_to_status_id": focus_tweet.get_id()}
def action():
print ("Performing action! lambda functions are awesome!")
message = post_tweet(request.access_token(), tweet_message, params)
del user_state['focus_tweet']
return message
should_end_session = False
message = "I am ready to post the tweet, {}. Please say yes to confirm or stop to cancel.".format(slots['Tweet'])
user_state['pending_action'] = {"action" : action,
"description" : message }
return alexa.create_response(message=message, end_session=should_end_session)
@alexa.intent("YesIntent")
def confirm_action_handler(request):
message = "okay."
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if 'pending_action' in user_state:
params = user_state['pending_action']
# Perform action
message = params['action']()
if 'message' in params:
message = params['message']
if 'callback' in params:
params['callback']()
del user_state['pending_action']
print ("successfully executed command")
message = message + " would you like me to do anything else ? "
should_end_session = False
return alexa.create_response(message, end_session=should_end_session)
@alexa.intent("AMAZON.CancelIntent")
def cancel_action_handler(request):
message = "okay."
user_state = twitter_cache.get_user_state(request.access_token())
should_end_session = True
if 'pending_action' in user_state:
del user_state['pending_action'] # Clearing out the user's pending action
print ("cleared user_state")
message += " i won't do it. would you like me to do something else ? "
should_end_session = False
return r.create_response(message, end_session=should_end_session)
@alexa.intent("ReplyFocus")
def reply_focus_handler(request):
msg = "Sorry, I couldn't tell which tweet you wanted to reply to."
index = focused_on_tweet(request)
if index:
return alexa.create_response(message="Do you want to reply to tweet {} ? If so say reply, followed by your message".format(index))
return alexa.create_response(message=msg, end_session=False)
@alexa.intent("MoreInfo")
def more_info_handler(request):
index = focused_on_tweet(request)
if index:
user_state = twitter_cache.get_user_state(request.access_token())
index, tweet = user_state['focus_tweet']
message = " ".join(["details about tweet number {}.".format(index+1), tweet.detailed_description(),
"To reply, say 'reply' followed by your message"])
return alexa.create_response(message=message, end_session=False)
return reply_focus_handler(request)
@alexa.intent("NextIntent")
@alexa.intent(intent="PreviousIntent")
def previous_intent_handler(request):
user_queue = twitter_cache.user_queue(request.access_token())
if user_queue and user_queue.has_prev():
message = user_queue.read_out_prev()
else:
message = "I couldn't find anything to repeat"
return alexa.create_response(message=message)
|
anjishnu/ask-alexa-pykit | examples/useful_science/useful_science.py | SimplePostsCache.refresh_cache | python | def refresh_cache(self, cat_id):
'''
Repopulate cache
'''
self.cache[cat_id] = most_recent_25_posts_by_category(cat_id)
self.last_refresh[cat_id] = datetime.now()
print ('Cache refresh at...', str(self.last_refresh[cat_id])) | Repopulate cache | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/useful_science/useful_science.py#L57-L63 | [
"def most_recent_25_posts_by_category(category_id):\n if not category_id:\n return most_recent_25_posts()\n end_point = \"http://www.usefulscience.org/api/posts/{}\".format(category_id)\n response = requests.get(end_point)\n return response.json()['posts']\n"
] | class SimplePostsCache(object):
'''
Seconds
'''
def __init__(self, refresh_rate=_1_MINUTE):
self.cache = {cat_id : list()
for cat_name, cat_id in categories.items()}
self.refresh_rate = refresh_rate # Seconds
# Splitting refresh times by category so that one unlucky person doesn't
# have to wait for 10 API calls to complete
self.last_refresh = {cat_id : datetime.now()
for cat_name, cat_id in categories.items()}
for cat_id in self.last_refresh:
self.refresh_cache(cat_id)
# print (json.dumps(self.cache, indent=4))
def get_post(self, category):
cat_id = categories[category]
if ((datetime.now() - self.last_refresh[cat_id]).seconds
> self.refresh_rate):
# Time for a refresh !
self.refresh_cache(cat_id)
return random.choice(self.cache[cat_id])['post']
|
anjishnu/ask-alexa-pykit | lambda_function.py | get_recipe_intent_handler | python | def get_recipe_intent_handler(request):
# Get variables like userId, slots, intent name etc from the 'Request' object
ingredient = request.slots["Ingredient"] # Gets an Ingredient Slot from the Request object.
if ingredient == None:
return alexa.create_response("Could not find an ingredient!")
# All manipulations to the request's session object are automatically reflected in the request returned to Amazon.
# For e.g. This statement adds a new session attribute (automatically returned with the response) storing the
# Last seen ingredient value in the 'last_ingredient' key.
request.session['last_ingredient'] = ingredient # Automatically returned as a sessionAttribute
# Modifying state like this saves us from explicitly having to return Session objects after every response
# alexa can also build cards which can be sent as part of the response
card = alexa.create_card(title="GetRecipeIntent activated", subtitle=None,
content="asked alexa to find a recipe using {}".format(ingredient))
return alexa.create_response("Finding a recipe with the ingredient {}".format(ingredient),
end_session=False, card_obj=card) | You can insert arbitrary business logic code here | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/lambda_function.py#L47-L71 | [
"def create_response(self, message=None, end_session=False, card_obj=None,\n reprompt_message=None, is_ssml=None):\n \"\"\"\n message - text message to be spoken out by the Echo\n end_session - flag to determine whether this interaction should end the session\n card_obj = JSON card ob... | """
In this file we specify default event handlers which are then populated into the handler map using metaprogramming
Copyright Anjishnu Kumar 2015
Happy Hacking!
"""
from ask import alexa
def lambda_handler(request_obj, context=None):
'''
This is the main function to enter to enter into this code.
If you are hosting this code on AWS Lambda, this should be the entry point.
Otherwise your server can hit this code as long as you remember that the
input 'request_obj' is JSON request converted into a nested python object.
'''
metadata = {'user_name' : 'SomeRandomDude'} # add your own metadata to the request using key value pairs
''' inject user relevant metadata into the request if you want to, here.
e.g. Something like :
... metadata = {'user_name' : some_database.query_user_name(request.get_user_id())}
Then in the handler function you can do something like -
... return alexa.create_response('Hello there {}!'.format(request.metadata['user_name']))
'''
return alexa.route_request(request_obj, metadata)
@alexa.default
def default_handler(request):
""" The default handler gets invoked if no handler is set for a request type """
return alexa.respond('Just ask').with_card('Hello World')
@alexa.request("LaunchRequest")
def launch_request_handler(request):
''' Handler for LaunchRequest '''
return alexa.create_response(message="Hello Welcome to My Recipes!")
@alexa.request("SessionEndedRequest")
def session_ended_request_handler(request):
return alexa.create_response(message="Goodbye!")
@alexa.intent('GetRecipeIntent')
def get_recipe_intent_handler(request):
"""
You can insert arbitrary business logic code here
"""
# Get variables like userId, slots, intent name etc from the 'Request' object
ingredient = request.slots["Ingredient"] # Gets an Ingredient Slot from the Request object.
if ingredient == None:
return alexa.create_response("Could not find an ingredient!")
# All manipulations to the request's session object are automatically reflected in the request returned to Amazon.
# For e.g. This statement adds a new session attribute (automatically returned with the response) storing the
# Last seen ingredient value in the 'last_ingredient' key.
request.session['last_ingredient'] = ingredient # Automatically returned as a sessionAttribute
# Modifying state like this saves us from explicitly having to return Session objects after every response
# alexa can also build cards which can be sent as part of the response
card = alexa.create_card(title="GetRecipeIntent activated", subtitle=None,
content="asked alexa to find a recipe using {}".format(ingredient))
return alexa.create_response("Finding a recipe with the ingredient {}".format(ingredient),
end_session=False, card_obj=card)
@alexa.intent('NextRecipeIntent')
def next_recipe_intent_handler(request):
"""
You can insert arbitrary business logic code here
"""
return alexa.create_response(message="Getting Next Recipe ... 123")
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--serve','-s', action='store_true', default=False)
args = parser.parse_args()
if args.serve:
###
# This will only be run if you try to run the server in local mode
##
print('Serving ASK functionality locally.')
import flask
server = flask.Flask(__name__)
@server.route('/')
def alexa_skills_kit_requests():
request_obj = flask.request.get_json()
return lambda_handler(request_obj)
server.run()
|
anjishnu/ask-alexa-pykit | examples/twitter/twitter.py | strip_html | python | def strip_html(text):
def reply_to(text):
replying_to = []
split_text = text.split()
for index, token in enumerate(split_text):
if token.startswith('@'): replying_to.append(token[1:])
else:
message = split_text[index:]
break
rply_msg = ""
if len(replying_to) > 0:
rply_msg = "Replying to "
for token in replying_to[:-1]: rply_msg += token+","
if len(replying_to)>1: rply_msg += 'and '
rply_msg += replying_to[-1]+". "
return rply_msg + " ".join(message)
text = reply_to(text)
text = text.replace('@', ' ')
return " ".join([token for token in text.split()
if ('http:' not in token) and ('https:' not in token)]) | Get rid of ugly twitter html | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/twitter/twitter.py#L169-L190 | [
"def reply_to(text):\n replying_to = []\n split_text = text.split()\n for index, token in enumerate(split_text):\n if token.startswith('@'): replying_to.append(token[1:])\n else:\n message = split_text[index:]\n break\n rply_msg = \"\"\n if len(replying_to) > 0:\n ... | import requests
import jsonpickle
from requests_oauthlib import OAuth1
from urllib.parse import parse_qs, urlencode
import cherrypy
from collections import defaultdict
import json
import os
import re
from collections import defaultdict
# For readable serializations
jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
class LocalCache(object):
""" Generic class for encapsulating twitter credential caching """
server_data_template = "{}.server"
user_data_template = "{0}.user.{1}"
def __init__(self, backup = "tmp/twitter.cache"):
self.backup = backup #Unique identifier for the backup of this cache
self.memcache = {
"users" : defaultdict(lambda : {}),
"server": defaultdict(lambda : {})
}
self.deserialize()
def users(self):
return self.memcache['users']
def set_user_state(self, user_id, state):
self.memcache['users'][user_id] = state
def update_user_state(self, user_id, state = {}):
self.memcache['users'][user_id].update(state)
def get_user_state(self, user_id):
return self.memcache['users'][user_id]
def clear_user_state(self, user_id):
return self.memcache['users'][user_id].clear()
def update_server_state(self, state_dict):
self.memcache['server'].update(state_dict)
def get_server_state(self):
return self.memcache['server']
def clear_server_state(self):
return self.memcache['server'].clear()
def initialize_user_queue(self, user_id, queue):
self.memcache['users'][user_id]['user_queue'] = ReadableQueue(queue)
def user_queue(self, user_id):
if 'user_queue' in self.memcache['users'][user_id]:
return self.memcache['users'][user_id]['user_queue']
def server_fname(self):
return self.server_data_template.format(self.backup)
def user_fname(self, user):
return self.user_data_template.format(self.backup, user)
def deserialize(self):
cache_loaded = False
if os.path.exists(self.server_fname()) and not os.path.isdir(self.backup):
try:
self.memcache = { "server" : {},
"users" : {} }
with open(self.server_fname()) as backupfile:
print ("Attempting to reload cache")
self.memcache['server'] = jsonpickle.decode(backupfile.read())
print ("Server cache loaded", json.dumps(self.memcache, indent=4))
for user in self.memcache['server']['user_list']:
# Try to load as much user data as possible
if os.path.exists(self.user_fname(user)):
print ("found path for user", user)
with open(self.user_fname(user)) as userfile:
user_data = jsonpickle.decode(userfile.read())
self.memcache['users'][user] = user_data
cache_loaded = True
except Exception as e:
print ("Cache file corrupted...")
raise e
if not cache_loaded:
print ("Cache could not be loaded")
pass
else:
print ("CACHE LOADED SUCCESSFULLY!")
def serialize(self):
json_to_serialize = self.memcache['server']
user_list = list(self.users().keys())
json_to_serialize.update({"user_list" : user_list})
with open(self.server_fname(), 'w') as backup_server:
# Serialize Server:
json_encoded = jsonpickle.encode(json_to_serialize)
backup_server.write(json_encoded)
for user in user_list:
user_data = self.get_user_state(user)
json_encoded = jsonpickle.encode(user_data)
with open(self.user_fname(user), 'w') as userfile:
userfile.write(json_encoded)
class ReadableQueue(object):
def __init__(self, queue=[], pos=0):
self.hashmap = { "queue" : [(i, e) for i,e in enumerate(queue)],
"pos" : pos }
return
def queue(self):
return self.hashmap['queue']
def is_empty(self):
return len(self.queue()) == 0
def is_finished(self):
return self.pos() == len(self.queue())
def pos(self):
return self.hashmap['pos']
def set_pos(self, val):
self.hashmap['pos'] = val
def get_next(self, offset=1):
if self.pos() < len(self.queue()):
temp_queue = self.queue()[self.pos(): self.pos() + offset]
self.set_pos(self.pos() + offset)
if self.pos() > len(self.queue()): self.set_pos(len(self.queue()))
return temp_queue
def read_out_next(self, offset=1):
return " ".join([readable.read_out(index) for index,readable in self.get_next(offset)])
def has_prev(self):
return self.pos() > 0
def get_prev(self, offset=1):
if self.pos() > 0:
self.set_pos(self.pos() - offset)
if self.pos() < 0:
offset = offset + self.pos()
# [1, current(2), 3] get_prev(offeset=3)
# pos :=> -2, offset :=> 3-2 = 1, pos :=> 0, then read 0 to 1
self.set_pos(0)
return self.queue()[self.pos() : offset]
return None
def read_out_prev(self, offset=1):
return " ".join([readable.read_out() for readable in self.get_prev(offset)])
#Local cache caches tokens for different users
local_cache = LocalCache()
def strip_html(text):
""" Get rid of ugly twitter html """
def reply_to(text):
replying_to = []
split_text = text.split()
for index, token in enumerate(split_text):
if token.startswith('@'): replying_to.append(token[1:])
else:
message = split_text[index:]
break
rply_msg = ""
if len(replying_to) > 0:
rply_msg = "Replying to "
for token in replying_to[:-1]: rply_msg += token+","
if len(replying_to)>1: rply_msg += 'and '
rply_msg += replying_to[-1]+". "
return rply_msg + " ".join(message)
text = reply_to(text)
text = text.replace('@', ' ')
return " ".join([token for token in text.split()
if ('http:' not in token) and ('https:' not in token)])
class Tweet(object):
def __init__(self, json_obj):
self.tweet = json_obj
def get_id(self):
return self.tweet['id']
def get_raw_text(self):
return self.tweet['text']
def _process_text(self):
text = strip_html(self.tweet['text'])
user_mentions = self.tweet['entities']['user_mentions']
text = text.replace('@', 'at ')
for user in user_mentions:
text = text.replace(user['screen_name'], user['name'])
return text
def get_screen_name(self):
return self.tweet['user']['screen_name']
def get_user_name(self):
return self.tweet['user']['name']
def read_out(self, index):
text = self._process_text()
return "tweet number {num} by {user} : {text} ,".format(num=index+1,
user=self.get_user_name(),
text = text)
def detailed_description(self):
response_builder = ["This tweet was posted by {user_name} whose twitter handle is {screen_name} the account description reads: {description}."
.format(screen_name=self.tweet['user']['screen_name'],
user_name=self.tweet['user']['name'],
description=self.tweet['user']['description'])]
if self.tweet['retweeted']:
response_builder += ["It's been retweeted {} times.".format(self.tweet['retweet_count'])]
if self.tweet['favorited']:
response_builder += ["{} people have favorited it.".format(self.tweet['favorites_count'])]
if self.tweet["in_reply_to_screen_name"]:
response_builder += ["it was posted in response to user {}.".format(self.tweet['in_reply_to_screen_name'])]
response_builder += ["the text of the tweet is, {}.".format(self._process_text())]
return " ".join(response_builder)
def user_mentions(self):
return self.tweet['user_mentions']
def get_cached_access_pair(uid):
if uid in local_cache.users():
access_token = local_cache.get_user_state(uid)['access_token']
access_secret = local_cache.get_user_state(uid)['access_secret']
return access_token, access_secret
else:
raise ValueError
def get_request_token(callback_url=None):
url = "https://api.twitter.com/oauth/request_token"
consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']
auth = OAuth1(consumer_key, consumer_secret)
params = { "oauth_callback" : callback_url }
r = requests.post(url, auth=auth, params=params)
response_obj = parse_qs(r.text)
local_cache.update_server_state({ "request_token" : response_obj['oauth_token'][0],
"request_secret": response_obj['oauth_token_secret'][0] })
return response_obj['oauth_token_secret'], response_obj['oauth_token']
def authenticate_user_page(callback_url="", metadata=None):
url = "https://api.twitter.com/oauth/authenticate"
oauth_secret, oauth_token = get_request_token(callback_url)
local_cache.update_server_state({'metadata' : metadata })
params = { "force_login" : True,
"oauth_token": oauth_token }
r = requests.get(url, params=params)
return r.text
def post_tweet(user_id, message, additional_params={}):
"""
Helper function to post a tweet
"""
url = "https://api.twitter.com/1.1/statuses/update.json"
params = { "status" : message }
params.update(additional_params)
r = make_twitter_request(url, user_id, params, request_type='POST')
print (r.text)
return "Successfully posted a tweet {}".format(message)
def get_access_token(oauth_token, oauth_verifier):
url = "https://api.twitter.com/oauth/access_token"
params = {"oauth_verifier" : oauth_verifier}
server_state = local_cache.get_server_state()
request_token = server_state['request_token']
request_secret = server_state['request_secret']
consumer_key, consumer_secret = server_state['twitter_keys']
auth = OAuth1(consumer_key, consumer_secret, request_token, request_secret)
r = requests.post(url, params = params, auth=auth)
response_obj = parse_qs(r.text)
uid = response_obj['oauth_token'][0]
print ("Access token", uid)
local_cache.set_user_state(user_id = uid,
state = { "access_token" : response_obj['oauth_token'][0],
"access_secret" : response_obj['oauth_token_secret'][0],
'twitter_user_id': response_obj['user_id'][0],
'screen_name' : response_obj ['screen_name'][0]
})
local_cache.serialize()
fragments = {
"state" : local_cache.get_server_state()['metadata']['state'],
"access_token" : uid,
"token_type" : "Bearer"
}
return urlencode(fragments)
def get_twitter_auth(user_id):
consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']
access_token, access_secret = get_cached_access_pair(user_id)
return OAuth1(consumer_key, consumer_secret, access_token, access_secret)
def process_tweets(tweet_list):
""" Clean tweets and enumerate, preserving only things that we are interested in """
return [Tweet(tweet) for tweet in tweet_list]
def make_twitter_request(url, user_id, params={}, request_type='GET'):
""" Generically make a request to twitter API using a particular user's authorization """
if request_type == "GET":
return requests.get(url, auth=get_twitter_auth(user_id), params=params)
elif request_type == "POST":
return requests.post(url, auth=get_twitter_auth(user_id), params=params)
def get_user_twitter_details(user_id, params={}):
url = "https://api.twitter.com/1.1/users/lookup.json"
user_cache = local_cache.get_user_state(user_id)
params.update({"user_id": user_cache['twitter_user_id'] })
response = make_twitter_request(url, user_id, params)
return response.json()
def geo_search(user_id, search_location):
"""
Search for a location - free form
"""
url = "https://api.twitter.com/1.1/geo/search.json"
params = {"query" : search_location }
response = make_twitter_request(url, user_id, params).json()
return response
def closest_trend_search(user_id, params={}):
#url = "https://api.twitter.com/1.1/trends/place.json"
url = "https://api.twitter.com/1.1/trends/closest.json"
response = make_twitter_request(url, user_id, params).json()
return response
def list_trends(user_id, woe_id):
url = "https://api.twitter.com/1.1/trends/place.json"
params = { "id" : woe_id }
response = make_twitter_request(url, user_id, params).json()
return response
def read_out_tweets(processed_tweets, speech_convertor=None):
"""
Input - list of processed 'Tweets'
output - list of spoken responses
"""
return ["tweet number {num} by {user}. {text}.".format(num=index+1, user=user, text=text)
for index, (user, text) in enumerate(processed_tweets)]
def request_tweet_list(url, user_id, params={}):
return process_tweets(make_twitter_request(url, user_id).json())
def get_home_tweets(user_id, input_params={}):
url = "https://api.twitter.com/1.1/statuses/home_timeline.json"
print ("Trying to get home tweets")
response = request_tweet_list(url, user_id)
return response
def get_retweets_of_me(user_id, input_params={}):
""" returns recently retweeted tweets """
url = "https://api.twitter.com/1.1/statuses/retweets_of_me.json"
print ("trying to get retweets")
return request_tweet_list(url, user_id)
def get_my_favourite_tweets(user_id, input_params = {}):
""" Returns a user's favourite tweets """
url = "https://api.twitter.com/1.1/favorites/list.json"
return request_tweet_list(url, user_id)
def get_user_latest_tweets(user_id, params={}):
url = "https://api.twitter.com/1.1/statuses/user_timeline.json?"
return request_tweet_list(url, user_id, params)
def get_latest_twitter_mentions(user_id):
url = "https://api.twitter.com/1.1/statuses/mentions_timeline.json"
return request_tweet_list(url, user_id)
def search_for_tweets_about(user_id, params):
""" Search twitter API """
url = "https://api.twitter.com/1.1/search/tweets.json"
response = make_twitter_request(url, user_id, params)
return process_tweets(response.json()["statuses"])
|
anjishnu/ask-alexa-pykit | examples/twitter/twitter.py | post_tweet | python | def post_tweet(user_id, message, additional_params={}):
url = "https://api.twitter.com/1.1/statuses/update.json"
params = { "status" : message }
params.update(additional_params)
r = make_twitter_request(url, user_id, params, request_type='POST')
print (r.text)
return "Successfully posted a tweet {}".format(message) | Helper function to post a tweet | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/twitter/twitter.py#L274-L283 | [
"def make_twitter_request(url, user_id, params={}, request_type='GET'):\n \"\"\" Generically make a request to twitter API using a particular user's authorization \"\"\"\n if request_type == \"GET\":\n return requests.get(url, auth=get_twitter_auth(user_id), params=params)\n elif request_type == \"P... | import requests
import jsonpickle
from requests_oauthlib import OAuth1
from urllib.parse import parse_qs, urlencode
import cherrypy
from collections import defaultdict
import json
import os
import re
from collections import defaultdict
# For readable serializations
jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
class LocalCache(object):
""" Generic class for encapsulating twitter credential caching """
server_data_template = "{}.server"
user_data_template = "{0}.user.{1}"
def __init__(self, backup = "tmp/twitter.cache"):
self.backup = backup #Unique identifier for the backup of this cache
self.memcache = {
"users" : defaultdict(lambda : {}),
"server": defaultdict(lambda : {})
}
self.deserialize()
def users(self):
return self.memcache['users']
def set_user_state(self, user_id, state):
self.memcache['users'][user_id] = state
def update_user_state(self, user_id, state = {}):
self.memcache['users'][user_id].update(state)
def get_user_state(self, user_id):
return self.memcache['users'][user_id]
def clear_user_state(self, user_id):
return self.memcache['users'][user_id].clear()
def update_server_state(self, state_dict):
self.memcache['server'].update(state_dict)
def get_server_state(self):
return self.memcache['server']
def clear_server_state(self):
return self.memcache['server'].clear()
def initialize_user_queue(self, user_id, queue):
self.memcache['users'][user_id]['user_queue'] = ReadableQueue(queue)
def user_queue(self, user_id):
if 'user_queue' in self.memcache['users'][user_id]:
return self.memcache['users'][user_id]['user_queue']
def server_fname(self):
return self.server_data_template.format(self.backup)
def user_fname(self, user):
return self.user_data_template.format(self.backup, user)
def deserialize(self):
cache_loaded = False
if os.path.exists(self.server_fname()) and not os.path.isdir(self.backup):
try:
self.memcache = { "server" : {},
"users" : {} }
with open(self.server_fname()) as backupfile:
print ("Attempting to reload cache")
self.memcache['server'] = jsonpickle.decode(backupfile.read())
print ("Server cache loaded", json.dumps(self.memcache, indent=4))
for user in self.memcache['server']['user_list']:
# Try to load as much user data as possible
if os.path.exists(self.user_fname(user)):
print ("found path for user", user)
with open(self.user_fname(user)) as userfile:
user_data = jsonpickle.decode(userfile.read())
self.memcache['users'][user] = user_data
cache_loaded = True
except Exception as e:
print ("Cache file corrupted...")
raise e
if not cache_loaded:
print ("Cache could not be loaded")
pass
else:
print ("CACHE LOADED SUCCESSFULLY!")
def serialize(self):
json_to_serialize = self.memcache['server']
user_list = list(self.users().keys())
json_to_serialize.update({"user_list" : user_list})
with open(self.server_fname(), 'w') as backup_server:
# Serialize Server:
json_encoded = jsonpickle.encode(json_to_serialize)
backup_server.write(json_encoded)
for user in user_list:
user_data = self.get_user_state(user)
json_encoded = jsonpickle.encode(user_data)
with open(self.user_fname(user), 'w') as userfile:
userfile.write(json_encoded)
class ReadableQueue(object):
def __init__(self, queue=[], pos=0):
self.hashmap = { "queue" : [(i, e) for i,e in enumerate(queue)],
"pos" : pos }
return
def queue(self):
return self.hashmap['queue']
def is_empty(self):
return len(self.queue()) == 0
def is_finished(self):
return self.pos() == len(self.queue())
def pos(self):
return self.hashmap['pos']
def set_pos(self, val):
self.hashmap['pos'] = val
def get_next(self, offset=1):
if self.pos() < len(self.queue()):
temp_queue = self.queue()[self.pos(): self.pos() + offset]
self.set_pos(self.pos() + offset)
if self.pos() > len(self.queue()): self.set_pos(len(self.queue()))
return temp_queue
def read_out_next(self, offset=1):
return " ".join([readable.read_out(index) for index,readable in self.get_next(offset)])
def has_prev(self):
return self.pos() > 0
def get_prev(self, offset=1):
if self.pos() > 0:
self.set_pos(self.pos() - offset)
if self.pos() < 0:
offset = offset + self.pos()
# [1, current(2), 3] get_prev(offeset=3)
# pos :=> -2, offset :=> 3-2 = 1, pos :=> 0, then read 0 to 1
self.set_pos(0)
return self.queue()[self.pos() : offset]
return None
def read_out_prev(self, offset=1):
return " ".join([readable.read_out() for readable in self.get_prev(offset)])
#Local cache caches tokens for different users
local_cache = LocalCache()
def strip_html(text):
""" Get rid of ugly twitter html """
def reply_to(text):
replying_to = []
split_text = text.split()
for index, token in enumerate(split_text):
if token.startswith('@'): replying_to.append(token[1:])
else:
message = split_text[index:]
break
rply_msg = ""
if len(replying_to) > 0:
rply_msg = "Replying to "
for token in replying_to[:-1]: rply_msg += token+","
if len(replying_to)>1: rply_msg += 'and '
rply_msg += replying_to[-1]+". "
return rply_msg + " ".join(message)
text = reply_to(text)
text = text.replace('@', ' ')
return " ".join([token for token in text.split()
if ('http:' not in token) and ('https:' not in token)])
class Tweet(object):
def __init__(self, json_obj):
self.tweet = json_obj
def get_id(self):
return self.tweet['id']
def get_raw_text(self):
return self.tweet['text']
def _process_text(self):
text = strip_html(self.tweet['text'])
user_mentions = self.tweet['entities']['user_mentions']
text = text.replace('@', 'at ')
for user in user_mentions:
text = text.replace(user['screen_name'], user['name'])
return text
def get_screen_name(self):
return self.tweet['user']['screen_name']
def get_user_name(self):
return self.tweet['user']['name']
def read_out(self, index):
text = self._process_text()
return "tweet number {num} by {user} : {text} ,".format(num=index+1,
user=self.get_user_name(),
text = text)
def detailed_description(self):
response_builder = ["This tweet was posted by {user_name} whose twitter handle is {screen_name} the account description reads: {description}."
.format(screen_name=self.tweet['user']['screen_name'],
user_name=self.tweet['user']['name'],
description=self.tweet['user']['description'])]
if self.tweet['retweeted']:
response_builder += ["It's been retweeted {} times.".format(self.tweet['retweet_count'])]
if self.tweet['favorited']:
response_builder += ["{} people have favorited it.".format(self.tweet['favorites_count'])]
if self.tweet["in_reply_to_screen_name"]:
response_builder += ["it was posted in response to user {}.".format(self.tweet['in_reply_to_screen_name'])]
response_builder += ["the text of the tweet is, {}.".format(self._process_text())]
return " ".join(response_builder)
def user_mentions(self):
return self.tweet['user_mentions']
def get_cached_access_pair(uid):
if uid in local_cache.users():
access_token = local_cache.get_user_state(uid)['access_token']
access_secret = local_cache.get_user_state(uid)['access_secret']
return access_token, access_secret
else:
raise ValueError
def get_request_token(callback_url=None):
url = "https://api.twitter.com/oauth/request_token"
consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']
auth = OAuth1(consumer_key, consumer_secret)
params = { "oauth_callback" : callback_url }
r = requests.post(url, auth=auth, params=params)
response_obj = parse_qs(r.text)
local_cache.update_server_state({ "request_token" : response_obj['oauth_token'][0],
"request_secret": response_obj['oauth_token_secret'][0] })
return response_obj['oauth_token_secret'], response_obj['oauth_token']
def authenticate_user_page(callback_url="", metadata=None):
url = "https://api.twitter.com/oauth/authenticate"
oauth_secret, oauth_token = get_request_token(callback_url)
local_cache.update_server_state({'metadata' : metadata })
params = { "force_login" : True,
"oauth_token": oauth_token }
r = requests.get(url, params=params)
return r.text
def get_access_token(oauth_token, oauth_verifier):
url = "https://api.twitter.com/oauth/access_token"
params = {"oauth_verifier" : oauth_verifier}
server_state = local_cache.get_server_state()
request_token = server_state['request_token']
request_secret = server_state['request_secret']
consumer_key, consumer_secret = server_state['twitter_keys']
auth = OAuth1(consumer_key, consumer_secret, request_token, request_secret)
r = requests.post(url, params = params, auth=auth)
response_obj = parse_qs(r.text)
uid = response_obj['oauth_token'][0]
print ("Access token", uid)
local_cache.set_user_state(user_id = uid,
state = { "access_token" : response_obj['oauth_token'][0],
"access_secret" : response_obj['oauth_token_secret'][0],
'twitter_user_id': response_obj['user_id'][0],
'screen_name' : response_obj ['screen_name'][0]
})
local_cache.serialize()
fragments = {
"state" : local_cache.get_server_state()['metadata']['state'],
"access_token" : uid,
"token_type" : "Bearer"
}
return urlencode(fragments)
def get_twitter_auth(user_id):
consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']
access_token, access_secret = get_cached_access_pair(user_id)
return OAuth1(consumer_key, consumer_secret, access_token, access_secret)
def process_tweets(tweet_list):
""" Clean tweets and enumerate, preserving only things that we are interested in """
return [Tweet(tweet) for tweet in tweet_list]
def make_twitter_request(url, user_id, params={}, request_type='GET'):
""" Generically make a request to twitter API using a particular user's authorization """
if request_type == "GET":
return requests.get(url, auth=get_twitter_auth(user_id), params=params)
elif request_type == "POST":
return requests.post(url, auth=get_twitter_auth(user_id), params=params)
def get_user_twitter_details(user_id, params={}):
url = "https://api.twitter.com/1.1/users/lookup.json"
user_cache = local_cache.get_user_state(user_id)
params.update({"user_id": user_cache['twitter_user_id'] })
response = make_twitter_request(url, user_id, params)
return response.json()
def geo_search(user_id, search_location):
"""
Search for a location - free form
"""
url = "https://api.twitter.com/1.1/geo/search.json"
params = {"query" : search_location }
response = make_twitter_request(url, user_id, params).json()
return response
def closest_trend_search(user_id, params={}):
#url = "https://api.twitter.com/1.1/trends/place.json"
url = "https://api.twitter.com/1.1/trends/closest.json"
response = make_twitter_request(url, user_id, params).json()
return response
def list_trends(user_id, woe_id):
url = "https://api.twitter.com/1.1/trends/place.json"
params = { "id" : woe_id }
response = make_twitter_request(url, user_id, params).json()
return response
def read_out_tweets(processed_tweets, speech_convertor=None):
"""
Input - list of processed 'Tweets'
output - list of spoken responses
"""
return ["tweet number {num} by {user}. {text}.".format(num=index+1, user=user, text=text)
for index, (user, text) in enumerate(processed_tweets)]
def request_tweet_list(url, user_id, params={}):
return process_tweets(make_twitter_request(url, user_id).json())
def get_home_tweets(user_id, input_params={}):
url = "https://api.twitter.com/1.1/statuses/home_timeline.json"
print ("Trying to get home tweets")
response = request_tweet_list(url, user_id)
return response
def get_retweets_of_me(user_id, input_params={}):
""" returns recently retweeted tweets """
url = "https://api.twitter.com/1.1/statuses/retweets_of_me.json"
print ("trying to get retweets")
return request_tweet_list(url, user_id)
def get_my_favourite_tweets(user_id, input_params = {}):
""" Returns a user's favourite tweets """
url = "https://api.twitter.com/1.1/favorites/list.json"
return request_tweet_list(url, user_id)
def get_user_latest_tweets(user_id, params={}):
url = "https://api.twitter.com/1.1/statuses/user_timeline.json?"
return request_tweet_list(url, user_id, params)
def get_latest_twitter_mentions(user_id):
url = "https://api.twitter.com/1.1/statuses/mentions_timeline.json"
return request_tweet_list(url, user_id)
def search_for_tweets_about(user_id, params):
""" Search twitter API """
url = "https://api.twitter.com/1.1/search/tweets.json"
response = make_twitter_request(url, user_id, params)
return process_tweets(response.json()["statuses"])
|
anjishnu/ask-alexa-pykit | examples/twitter/twitter.py | make_twitter_request | python | def make_twitter_request(url, user_id, params={}, request_type='GET'):
if request_type == "GET":
return requests.get(url, auth=get_twitter_auth(user_id), params=params)
elif request_type == "POST":
return requests.post(url, auth=get_twitter_auth(user_id), params=params) | Generically make a request to twitter API using a particular user's authorization | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/twitter/twitter.py#L333-L338 | [
"def get_twitter_auth(user_id):\n consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']\n access_token, access_secret = get_cached_access_pair(user_id)\n return OAuth1(consumer_key, consumer_secret, access_token, access_secret)\n"
] | import requests
import jsonpickle
from requests_oauthlib import OAuth1
from urllib.parse import parse_qs, urlencode
import cherrypy
from collections import defaultdict
import json
import os
import re
from collections import defaultdict
# For readable serializations
jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
class LocalCache(object):
    """ Generic class for encapsulating twitter credential caching """
    # Filename templates: one "<backup>.server" file for server-wide state and
    # one "<backup>.user.<id>" file per known user.
    server_data_template = "{}.server"
    user_data_template = "{0}.user.{1}"

    def __init__(self, backup = "tmp/twitter.cache"):
        self.backup = backup #Unique identifier for the backup of this cache
        # In-memory store; defaultdicts let callers index unknown ids safely.
        self.memcache = {
            "users" : defaultdict(lambda : {}),
            "server": defaultdict(lambda : {})
        }
        # Attempt to repopulate from disk immediately on construction.
        self.deserialize()

    def users(self):
        # Mapping of user_id -> per-user state dict.
        return self.memcache['users']

    def set_user_state(self, user_id, state):
        # Replace (not merge) the whole state dict for `user_id`.
        self.memcache['users'][user_id] = state

    def update_user_state(self, user_id, state = {}):
        # Merge `state` into the user's existing dict.
        # NOTE(review): mutable default is shared across calls, but it is only
        # read here (never mutated), so it is harmless in practice.
        self.memcache['users'][user_id].update(state)

    def get_user_state(self, user_id):
        return self.memcache['users'][user_id]

    def clear_user_state(self, user_id):
        return self.memcache['users'][user_id].clear()

    def update_server_state(self, state_dict):
        self.memcache['server'].update(state_dict)

    def get_server_state(self):
        return self.memcache['server']

    def clear_server_state(self):
        return self.memcache['server'].clear()

    def initialize_user_queue(self, user_id, queue):
        # Wrap the iterable of readable items in a positioned ReadableQueue.
        self.memcache['users'][user_id]['user_queue'] = ReadableQueue(queue)

    def user_queue(self, user_id):
        # Returns the user's ReadableQueue, or implicitly None if none exists.
        if 'user_queue' in self.memcache['users'][user_id]:
            return self.memcache['users'][user_id]['user_queue']

    def server_fname(self):
        # Path of the serialized server-state file.
        return self.server_data_template.format(self.backup)

    def user_fname(self, user):
        # Path of the serialized state file for one user.
        return self.user_data_template.format(self.backup, user)

    def deserialize(self):
        """Reload server state from disk, then as many per-user files as exist."""
        cache_loaded = False
        if os.path.exists(self.server_fname()) and not os.path.isdir(self.backup):
            try:
                # Start from plain dicts; jsonpickle.decode restores the values.
                self.memcache = { "server" : {},
                                  "users" : {} }
                with open(self.server_fname()) as backupfile:
                    print ("Attempting to reload cache")
                    self.memcache['server'] = jsonpickle.decode(backupfile.read())
                    print ("Server cache loaded", json.dumps(self.memcache, indent=4))
                    # 'user_list' was written by serialize(); it drives the
                    # per-user file loads below.
                    for user in self.memcache['server']['user_list']:
                        # Try to load as much user data as possible
                        if os.path.exists(self.user_fname(user)):
                            print ("found path for user", user)
                            with open(self.user_fname(user)) as userfile:
                                user_data = jsonpickle.decode(userfile.read())
                                self.memcache['users'][user] = user_data
                    cache_loaded = True
            except Exception as e:
                print ("Cache file corrupted...")
                raise e
        if not cache_loaded:
            print ("Cache could not be loaded")
            pass
        else:
            print ("CACHE LOADED SUCCESSFULLY!")

    def serialize(self):
        """Write server state plus one file per known user to disk."""
        json_to_serialize = self.memcache['server']
        user_list = list(self.users().keys())
        # Record the user ids so deserialize() knows which files to look for.
        json_to_serialize.update({"user_list" : user_list})
        with open(self.server_fname(), 'w') as backup_server:
            # Serialize Server:
            json_encoded = jsonpickle.encode(json_to_serialize)
            backup_server.write(json_encoded)
        for user in user_list:
            user_data = self.get_user_state(user)
            json_encoded = jsonpickle.encode(user_data)
            with open(self.user_fname(user), 'w') as userfile:
                userfile.write(json_encoded)
class ReadableQueue(object):
    """A position-tracked queue of "readable" items (objects exposing
    read_out(index), e.g. Tweet), supporting paged forward/backward reads.

    State lives in a plain dict ({"queue": [(index, item), ...], "pos": int})
    so the object round-trips through jsonpickle.
    """

    def __init__(self, queue=None, pos=0):
        # Bug fix: avoid the shared mutable default `queue=[]`; None -> empty.
        items = [] if queue is None else queue
        self.hashmap = {"queue": [(i, e) for i, e in enumerate(items)],
                        "pos": pos}

    def queue(self):
        return self.hashmap['queue']

    def is_empty(self):
        return len(self.queue()) == 0

    def is_finished(self):
        # True once the cursor has advanced past the last element.
        return self.pos() == len(self.queue())

    def pos(self):
        return self.hashmap['pos']

    def set_pos(self, val):
        self.hashmap['pos'] = val

    def get_next(self, offset=1):
        """Return up to `offset` (index, item) pairs starting at the cursor and
        advance the cursor (clamped to the queue length).
        Returns None when the cursor is already at or past the end."""
        if self.pos() < len(self.queue()):
            temp_queue = self.queue()[self.pos(): self.pos() + offset]
            self.set_pos(self.pos() + offset)
            if self.pos() > len(self.queue()):
                self.set_pos(len(self.queue()))
            return temp_queue

    def read_out_next(self, offset=1):
        return " ".join([readable.read_out(index)
                         for index, readable in self.get_next(offset)])

    def has_prev(self):
        return self.pos() > 0

    def get_prev(self, offset=1):
        """Step the cursor back `offset` places and return the (index, item)
        pairs stepped over, clamped at the front. Returns None at the front."""
        if self.pos() > 0:
            self.set_pos(self.pos() - offset)
            if self.pos() < 0:
                # Underflow: shrink the window to what actually exists, e.g.
                # [1, current(2), 3] get_prev(offset=3)
                # pos :=> -2, offset :=> 3-2 = 1, pos :=> 0, then read 0 to 1
                offset = offset + self.pos()
                self.set_pos(0)
            # Bug fix: slice relative to the new cursor position. The original
            # returned queue[pos:offset], which is empty whenever pos >= offset.
            return self.queue()[self.pos(): self.pos() + offset]
        return None

    def read_out_prev(self, offset=1):
        # Bug fix: entries are (index, item) pairs and read_out() requires the
        # index; also tolerate get_prev() returning None at the front.
        previous = self.get_prev(offset)
        if not previous:
            return ""
        return " ".join([readable.read_out(index)
                         for index, readable in previous])
#Local cache caches tokens for different users
local_cache = LocalCache()
def strip_html(text):
    """Clean raw tweet text for speech: turn a leading run of @-mentions into
    a spoken "Replying to ..." prefix and drop bare http(s) link tokens."""
    def reply_to(text):
        # Collect the leading @handles; everything after them is the message.
        replying_to = []
        message = []  # Bug fix: was unbound when text is empty or all mentions.
        split_text = text.split()
        for index, token in enumerate(split_text):
            if token.startswith('@'):
                replying_to.append(token[1:])
            else:
                message = split_text[index:]
                break
        rply_msg = ""
        if len(replying_to) > 0:
            # Renders e.g. "Replying to a,and b. " for multiple handles.
            rply_msg = "Replying to "
            for token in replying_to[:-1]:
                rply_msg += token+","
            if len(replying_to) > 1:
                rply_msg += 'and '
            rply_msg += replying_to[-1]+". "
        return rply_msg + " ".join(message)

    text = reply_to(text)
    text = text.replace('@', ' ')
    # Drop any token that still carries a link.
    return " ".join([token for token in text.split()
                     if ('http:' not in token) and ('https:' not in token)])
class Tweet(object):
    """Thin wrapper around a Twitter API tweet JSON dict that produces
    speakable text for a voice interface."""

    def __init__(self, json_obj):
        # Raw tweet dict as returned by the Twitter REST API.
        self.tweet = json_obj

    def get_id(self):
        return self.tweet['id']

    def get_raw_text(self):
        return self.tweet['text']

    def _process_text(self):
        # Strip links/mention markup, then replace each mentioned screen name
        # with the display name so the text reads naturally out loud.
        text = strip_html(self.tweet['text'])
        user_mentions = self.tweet['entities']['user_mentions']
        text = text.replace('@', 'at ')
        for user in user_mentions:
            text = text.replace(user['screen_name'], user['name'])
        return text

    def get_screen_name(self):
        return self.tweet['user']['screen_name']

    def get_user_name(self):
        return self.tweet['user']['name']

    def read_out(self, index):
        """One-line spoken summary; `index` is zero-based, spoken 1-based."""
        text = self._process_text()
        return "tweet number {num} by {user} : {text} ,".format(num=index+1,
                                                                user=self.get_user_name(),
                                                                text = text)

    def detailed_description(self):
        """Longer spoken description including author and engagement info."""
        response_builder = ["This tweet was posted by {user_name} whose twitter handle is {screen_name} the account description reads: {description}."
                            .format(screen_name=self.tweet['user']['screen_name'],
                                    user_name=self.tweet['user']['name'],
                                    description=self.tweet['user']['description'])]
        if self.tweet['retweeted']:
            response_builder += ["It's been retweeted {} times.".format(self.tweet['retweet_count'])]
        if self.tweet['favorited']:
            # NOTE(review): the v1.1 tweet object names this field
            # 'favorite_count'; 'favorites_count' may KeyError — confirm.
            response_builder += ["{} people have favorited it.".format(self.tweet['favorites_count'])]
        if self.tweet["in_reply_to_screen_name"]:
            response_builder += ["it was posted in response to user {}.".format(self.tweet['in_reply_to_screen_name'])]
        response_builder += ["the text of the tweet is, {}.".format(self._process_text())]
        return " ".join(response_builder)

    def user_mentions(self):
        # Bug fix: mentions live under 'entities' in the tweet payload
        # (as _process_text already assumes), not at the top level.
        return self.tweet['entities']['user_mentions']
def get_cached_access_pair(uid):
    """Return the (access_token, access_secret) cached for `uid`.

    Raises:
        ValueError: if the user has never completed the OAuth flow.
    """
    if uid in local_cache.users():
        access_token = local_cache.get_user_state(uid)['access_token']
        access_secret = local_cache.get_user_state(uid)['access_secret']
        return access_token, access_secret
    # Improved: include the offending id instead of a bare ValueError.
    raise ValueError("no cached access tokens for user {!r}".format(uid))
def get_request_token(callback_url=None):
    """First leg of the OAuth1 dance: obtain a temporary request token.

    Stashes the token/secret in the server cache for the later
    access-token exchange, then returns (secret, token) — in that order.
    NOTE(review): the returned values are parse_qs lists (e.g. ['tok']),
    not plain strings; callers receive single-element lists — confirm.
    """
    url = "https://api.twitter.com/oauth/request_token"
    consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']
    auth = OAuth1(consumer_key, consumer_secret)
    params = { "oauth_callback" : callback_url }
    r = requests.post(url, auth=auth, params=params)
    response_obj = parse_qs(r.text)
    # Saved so get_access_token() can sign the exchange request later.
    local_cache.update_server_state({ "request_token" : response_obj['oauth_token'][0],
                                      "request_secret": response_obj['oauth_token_secret'][0] })
    return response_obj['oauth_token_secret'], response_obj['oauth_token']
def authenticate_user_page(callback_url="", metadata=None):
    """Fetch the Twitter 'authenticate' page HTML for a fresh login.

    `metadata` (e.g. Alexa account-linking state) is stashed in the server
    cache so the OAuth callback can recover it later.
    """
    url = "https://api.twitter.com/oauth/authenticate"
    oauth_secret, oauth_token = get_request_token(callback_url)
    local_cache.update_server_state({'metadata' : metadata })
    # NOTE(review): oauth_token is the raw parse_qs value from
    # get_request_token (a single-element list) — verify requests encodes
    # it as the intended single query parameter.
    params = { "force_login" : True,
               "oauth_token": oauth_token }
    r = requests.get(url, params=params)
    return r.text
def post_tweet(user_id, message, additional_params={}):
    """Post `message` as a status update on behalf of `user_id`.

    Extra Twitter API fields may be supplied via `additional_params`
    (they override the default 'status' field if they collide).
    Returns a human-readable confirmation string.
    """
    endpoint = "https://api.twitter.com/1.1/statuses/update.json"
    payload = {"status": message}
    payload.update(additional_params)
    response = make_twitter_request(endpoint, user_id, payload, request_type='POST')
    print(response.text)
    return "Successfully posted a tweet {}".format(message)
def get_access_token(oauth_token, oauth_verifier):
    """Exchange a verified request token for a permanent access token.

    Called from the OAuth callback. Caches the user's credentials,
    persists the cache to disk, and returns URL-encoded fragments for
    redirecting back to the account-linking flow.
    """
    url = "https://api.twitter.com/oauth/access_token"
    params = {"oauth_verifier" : oauth_verifier}
    server_state = local_cache.get_server_state()
    # Request token/secret were stashed earlier by get_request_token().
    request_token = server_state['request_token']
    request_secret = server_state['request_secret']
    consumer_key, consumer_secret = server_state['twitter_keys']
    auth = OAuth1(consumer_key, consumer_secret, request_token, request_secret)
    r = requests.post(url, params = params, auth=auth)
    response_obj = parse_qs(r.text)
    # NOTE(review): the returned oauth_token doubles as this app's cache
    # user id — verify that is intentional.
    uid = response_obj['oauth_token'][0]
    print ("Access token", uid)
    local_cache.set_user_state(user_id = uid,
                               state = { "access_token" : response_obj['oauth_token'][0],
                                         "access_secret" : response_obj['oauth_token_secret'][0],
                                         'twitter_user_id': response_obj['user_id'][0],
                                         'screen_name' : response_obj ['screen_name'][0]
                               })
    # Persist immediately so credentials survive a server restart.
    local_cache.serialize()
    fragments = {
        "state" : local_cache.get_server_state()['metadata']['state'],
        "access_token" : uid,
        "token_type" : "Bearer"
    }
    return urlencode(fragments)
def get_twitter_auth(user_id):
    """Build an OAuth1 signer for `user_id` from cached credentials."""
    app_keys = local_cache.get_server_state()['twitter_keys']
    user_tokens = get_cached_access_pair(user_id)
    return OAuth1(app_keys[0], app_keys[1], user_tokens[0], user_tokens[1])
def process_tweets(tweet_list):
    """Wrap each raw tweet dict from the API in a Tweet helper object."""
    wrapped = []
    for raw_tweet in tweet_list:
        wrapped.append(Tweet(raw_tweet))
    return wrapped
def get_user_twitter_details(user_id, params={}):
    """Look up the user's Twitter profile via users/lookup.

    `params` may carry extra query fields; the cached twitter_user_id is
    always injected. Returns the decoded JSON response.
    """
    url = "https://api.twitter.com/1.1/users/lookup.json"
    user_cache = local_cache.get_user_state(user_id)
    # Bug fix: copy first — the original mutated the shared default dict
    # (and any caller-supplied dict) via params.update(...).
    params = dict(params)
    params.update({"user_id": user_cache['twitter_user_id']})
    response = make_twitter_request(url, user_id, params)
    return response.json()
def geo_search(user_id, search_location):
    """Free-form place search against the Twitter geo API.

    Returns the decoded JSON response.
    """
    endpoint = "https://api.twitter.com/1.1/geo/search.json"
    query_params = {"query": search_location}
    return make_twitter_request(endpoint, user_id, query_params).json()
def closest_trend_search(user_id, params={}):
    """Find the trend locations closest to the coordinates in `params`.

    Returns the decoded JSON response.
    """
    endpoint = "https://api.twitter.com/1.1/trends/closest.json"
    return make_twitter_request(endpoint, user_id, params).json()
def list_trends(user_id, woe_id):
    """Fetch trending topics for the place identified by `woe_id`.

    Returns the decoded JSON response.
    """
    endpoint = "https://api.twitter.com/1.1/trends/place.json"
    return make_twitter_request(endpoint, user_id, {"id": woe_id}).json()
def read_out_tweets(processed_tweets, speech_convertor=None):
    """Render (user, text) pairs as numbered spoken-response strings.

    `speech_convertor` is accepted for interface compatibility but unused.
    """
    responses = []
    for position, (user, text) in enumerate(processed_tweets, start=1):
        responses.append(
            "tweet number {num} by {user}. {text}.".format(num=position, user=user, text=text))
    return responses
def request_tweet_list(url, user_id, params={}):
    """GET `url` as `user_id` and wrap the resulting tweet list in Tweet objects.

    Bug fix: `params` was accepted but silently dropped; it is now forwarded
    to the API call so callers' query filters actually take effect.
    """
    return process_tweets(make_twitter_request(url, user_id, params).json())
def get_home_tweets(user_id, input_params={}):
    """Fetch the user's home timeline as Tweet objects.

    NOTE: input_params is accepted but not forwarded to the API call.
    """
    endpoint = "https://api.twitter.com/1.1/statuses/home_timeline.json"
    print ("Trying to get home tweets")
    return request_tweet_list(endpoint, user_id)
def get_retweets_of_me(user_id, input_params={}):
    """Fetch the user's tweets that were recently retweeted by others.

    NOTE: input_params is accepted but not forwarded to the API call.
    """
    endpoint = "https://api.twitter.com/1.1/statuses/retweets_of_me.json"
    print ("trying to get retweets")
    return request_tweet_list(endpoint, user_id)
def get_my_favourite_tweets(user_id, input_params = {}):
    """Fetch tweets the user has favourited.

    NOTE: input_params is accepted but not forwarded to the API call.
    """
    endpoint = "https://api.twitter.com/1.1/favorites/list.json"
    return request_tweet_list(endpoint, user_id)
def get_user_latest_tweets(user_id, params={}):
    """Fetch the user's own recent tweets, passing `params` as query filters."""
    endpoint = "https://api.twitter.com/1.1/statuses/user_timeline.json?"
    return request_tweet_list(endpoint, user_id, params)
def get_latest_twitter_mentions(user_id):
    """Fetch recent tweets that mention the user."""
    endpoint = "https://api.twitter.com/1.1/statuses/mentions_timeline.json"
    return request_tweet_list(endpoint, user_id)
def search_for_tweets_about(user_id, params):
    """Run a tweet search with the given query `params`; returns Tweet objects."""
    endpoint = "https://api.twitter.com/1.1/search/tweets.json"
    search_result = make_twitter_request(endpoint, user_id, params)
    return process_tweets(search_result.json()["statuses"])
|
anjishnu/ask-alexa-pykit | examples/twitter/twitter.py | geo_search | python | def geo_search(user_id, search_location):
url = "https://api.twitter.com/1.1/geo/search.json"
params = {"query" : search_location }
response = make_twitter_request(url, user_id, params).json()
return response | Search for a location - free form | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/twitter/twitter.py#L350-L357 | [
"def make_twitter_request(url, user_id, params={}, request_type='GET'):\n \"\"\" Generically make a request to twitter API using a particular user's authorization \"\"\"\n if request_type == \"GET\":\n return requests.get(url, auth=get_twitter_auth(user_id), params=params)\n elif request_type == \"P... | import requests
import jsonpickle
from requests_oauthlib import OAuth1
from urllib.parse import parse_qs, urlencode
import cherrypy
from collections import defaultdict
import json
import os
import re
from collections import defaultdict
# For readable serializations
jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
class LocalCache(object):
""" Generic class for encapsulating twitter credential caching """
server_data_template = "{}.server"
user_data_template = "{0}.user.{1}"
def __init__(self, backup = "tmp/twitter.cache"):
self.backup = backup #Unique identifier for the backup of this cache
self.memcache = {
"users" : defaultdict(lambda : {}),
"server": defaultdict(lambda : {})
}
self.deserialize()
def users(self):
return self.memcache['users']
def set_user_state(self, user_id, state):
self.memcache['users'][user_id] = state
def update_user_state(self, user_id, state = {}):
self.memcache['users'][user_id].update(state)
def get_user_state(self, user_id):
return self.memcache['users'][user_id]
def clear_user_state(self, user_id):
return self.memcache['users'][user_id].clear()
def update_server_state(self, state_dict):
self.memcache['server'].update(state_dict)
def get_server_state(self):
return self.memcache['server']
def clear_server_state(self):
return self.memcache['server'].clear()
def initialize_user_queue(self, user_id, queue):
self.memcache['users'][user_id]['user_queue'] = ReadableQueue(queue)
def user_queue(self, user_id):
if 'user_queue' in self.memcache['users'][user_id]:
return self.memcache['users'][user_id]['user_queue']
def server_fname(self):
return self.server_data_template.format(self.backup)
def user_fname(self, user):
return self.user_data_template.format(self.backup, user)
def deserialize(self):
cache_loaded = False
if os.path.exists(self.server_fname()) and not os.path.isdir(self.backup):
try:
self.memcache = { "server" : {},
"users" : {} }
with open(self.server_fname()) as backupfile:
print ("Attempting to reload cache")
self.memcache['server'] = jsonpickle.decode(backupfile.read())
print ("Server cache loaded", json.dumps(self.memcache, indent=4))
for user in self.memcache['server']['user_list']:
# Try to load as much user data as possible
if os.path.exists(self.user_fname(user)):
print ("found path for user", user)
with open(self.user_fname(user)) as userfile:
user_data = jsonpickle.decode(userfile.read())
self.memcache['users'][user] = user_data
cache_loaded = True
except Exception as e:
print ("Cache file corrupted...")
raise e
if not cache_loaded:
print ("Cache could not be loaded")
pass
else:
print ("CACHE LOADED SUCCESSFULLY!")
def serialize(self):
json_to_serialize = self.memcache['server']
user_list = list(self.users().keys())
json_to_serialize.update({"user_list" : user_list})
with open(self.server_fname(), 'w') as backup_server:
# Serialize Server:
json_encoded = jsonpickle.encode(json_to_serialize)
backup_server.write(json_encoded)
for user in user_list:
user_data = self.get_user_state(user)
json_encoded = jsonpickle.encode(user_data)
with open(self.user_fname(user), 'w') as userfile:
userfile.write(json_encoded)
class ReadableQueue(object):
def __init__(self, queue=[], pos=0):
self.hashmap = { "queue" : [(i, e) for i,e in enumerate(queue)],
"pos" : pos }
return
def queue(self):
return self.hashmap['queue']
def is_empty(self):
return len(self.queue()) == 0
def is_finished(self):
return self.pos() == len(self.queue())
def pos(self):
return self.hashmap['pos']
def set_pos(self, val):
self.hashmap['pos'] = val
def get_next(self, offset=1):
if self.pos() < len(self.queue()):
temp_queue = self.queue()[self.pos(): self.pos() + offset]
self.set_pos(self.pos() + offset)
if self.pos() > len(self.queue()): self.set_pos(len(self.queue()))
return temp_queue
def read_out_next(self, offset=1):
return " ".join([readable.read_out(index) for index,readable in self.get_next(offset)])
def has_prev(self):
return self.pos() > 0
def get_prev(self, offset=1):
if self.pos() > 0:
self.set_pos(self.pos() - offset)
if self.pos() < 0:
offset = offset + self.pos()
# [1, current(2), 3] get_prev(offeset=3)
# pos :=> -2, offset :=> 3-2 = 1, pos :=> 0, then read 0 to 1
self.set_pos(0)
return self.queue()[self.pos() : offset]
return None
def read_out_prev(self, offset=1):
return " ".join([readable.read_out() for readable in self.get_prev(offset)])
#Local cache caches tokens for different users
local_cache = LocalCache()
def strip_html(text):
""" Get rid of ugly twitter html """
def reply_to(text):
replying_to = []
split_text = text.split()
for index, token in enumerate(split_text):
if token.startswith('@'): replying_to.append(token[1:])
else:
message = split_text[index:]
break
rply_msg = ""
if len(replying_to) > 0:
rply_msg = "Replying to "
for token in replying_to[:-1]: rply_msg += token+","
if len(replying_to)>1: rply_msg += 'and '
rply_msg += replying_to[-1]+". "
return rply_msg + " ".join(message)
text = reply_to(text)
text = text.replace('@', ' ')
return " ".join([token for token in text.split()
if ('http:' not in token) and ('https:' not in token)])
class Tweet(object):
def __init__(self, json_obj):
self.tweet = json_obj
def get_id(self):
return self.tweet['id']
def get_raw_text(self):
return self.tweet['text']
def _process_text(self):
text = strip_html(self.tweet['text'])
user_mentions = self.tweet['entities']['user_mentions']
text = text.replace('@', 'at ')
for user in user_mentions:
text = text.replace(user['screen_name'], user['name'])
return text
def get_screen_name(self):
return self.tweet['user']['screen_name']
def get_user_name(self):
return self.tweet['user']['name']
def read_out(self, index):
text = self._process_text()
return "tweet number {num} by {user} : {text} ,".format(num=index+1,
user=self.get_user_name(),
text = text)
def detailed_description(self):
response_builder = ["This tweet was posted by {user_name} whose twitter handle is {screen_name} the account description reads: {description}."
.format(screen_name=self.tweet['user']['screen_name'],
user_name=self.tweet['user']['name'],
description=self.tweet['user']['description'])]
if self.tweet['retweeted']:
response_builder += ["It's been retweeted {} times.".format(self.tweet['retweet_count'])]
if self.tweet['favorited']:
response_builder += ["{} people have favorited it.".format(self.tweet['favorites_count'])]
if self.tweet["in_reply_to_screen_name"]:
response_builder += ["it was posted in response to user {}.".format(self.tweet['in_reply_to_screen_name'])]
response_builder += ["the text of the tweet is, {}.".format(self._process_text())]
return " ".join(response_builder)
def user_mentions(self):
return self.tweet['user_mentions']
def get_cached_access_pair(uid):
if uid in local_cache.users():
access_token = local_cache.get_user_state(uid)['access_token']
access_secret = local_cache.get_user_state(uid)['access_secret']
return access_token, access_secret
else:
raise ValueError
def get_request_token(callback_url=None):
url = "https://api.twitter.com/oauth/request_token"
consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']
auth = OAuth1(consumer_key, consumer_secret)
params = { "oauth_callback" : callback_url }
r = requests.post(url, auth=auth, params=params)
response_obj = parse_qs(r.text)
local_cache.update_server_state({ "request_token" : response_obj['oauth_token'][0],
"request_secret": response_obj['oauth_token_secret'][0] })
return response_obj['oauth_token_secret'], response_obj['oauth_token']
def authenticate_user_page(callback_url="", metadata=None):
url = "https://api.twitter.com/oauth/authenticate"
oauth_secret, oauth_token = get_request_token(callback_url)
local_cache.update_server_state({'metadata' : metadata })
params = { "force_login" : True,
"oauth_token": oauth_token }
r = requests.get(url, params=params)
return r.text
def post_tweet(user_id, message, additional_params={}):
"""
Helper function to post a tweet
"""
url = "https://api.twitter.com/1.1/statuses/update.json"
params = { "status" : message }
params.update(additional_params)
r = make_twitter_request(url, user_id, params, request_type='POST')
print (r.text)
return "Successfully posted a tweet {}".format(message)
def get_access_token(oauth_token, oauth_verifier):
url = "https://api.twitter.com/oauth/access_token"
params = {"oauth_verifier" : oauth_verifier}
server_state = local_cache.get_server_state()
request_token = server_state['request_token']
request_secret = server_state['request_secret']
consumer_key, consumer_secret = server_state['twitter_keys']
auth = OAuth1(consumer_key, consumer_secret, request_token, request_secret)
r = requests.post(url, params = params, auth=auth)
response_obj = parse_qs(r.text)
uid = response_obj['oauth_token'][0]
print ("Access token", uid)
local_cache.set_user_state(user_id = uid,
state = { "access_token" : response_obj['oauth_token'][0],
"access_secret" : response_obj['oauth_token_secret'][0],
'twitter_user_id': response_obj['user_id'][0],
'screen_name' : response_obj ['screen_name'][0]
})
local_cache.serialize()
fragments = {
"state" : local_cache.get_server_state()['metadata']['state'],
"access_token" : uid,
"token_type" : "Bearer"
}
return urlencode(fragments)
def get_twitter_auth(user_id):
consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']
access_token, access_secret = get_cached_access_pair(user_id)
return OAuth1(consumer_key, consumer_secret, access_token, access_secret)
def process_tweets(tweet_list):
""" Clean tweets and enumerate, preserving only things that we are interested in """
return [Tweet(tweet) for tweet in tweet_list]
def make_twitter_request(url, user_id, params={}, request_type='GET'):
    """ Generically make a request to twitter API using a particular user's authorization """
    if request_type == "GET":
        return requests.get(url, auth=get_twitter_auth(user_id), params=params)
    elif request_type == "POST":
        return requests.post(url, auth=get_twitter_auth(user_id), params=params)
    # Bug fix: previously fell through and returned None, which caused an
    # opaque AttributeError when callers invoked .json() on the result.
    raise ValueError("Unsupported request_type: {!r}".format(request_type))
def get_user_twitter_details(user_id, params={}):
url = "https://api.twitter.com/1.1/users/lookup.json"
user_cache = local_cache.get_user_state(user_id)
params.update({"user_id": user_cache['twitter_user_id'] })
response = make_twitter_request(url, user_id, params)
return response.json()
def closest_trend_search(user_id, params={}):
#url = "https://api.twitter.com/1.1/trends/place.json"
url = "https://api.twitter.com/1.1/trends/closest.json"
response = make_twitter_request(url, user_id, params).json()
return response
def list_trends(user_id, woe_id):
url = "https://api.twitter.com/1.1/trends/place.json"
params = { "id" : woe_id }
response = make_twitter_request(url, user_id, params).json()
return response
def read_out_tweets(processed_tweets, speech_convertor=None):
"""
Input - list of processed 'Tweets'
output - list of spoken responses
"""
return ["tweet number {num} by {user}. {text}.".format(num=index+1, user=user, text=text)
for index, (user, text) in enumerate(processed_tweets)]
def request_tweet_list(url, user_id, params={}):
return process_tweets(make_twitter_request(url, user_id).json())
def get_home_tweets(user_id, input_params={}):
url = "https://api.twitter.com/1.1/statuses/home_timeline.json"
print ("Trying to get home tweets")
response = request_tweet_list(url, user_id)
return response
def get_retweets_of_me(user_id, input_params={}):
""" returns recently retweeted tweets """
url = "https://api.twitter.com/1.1/statuses/retweets_of_me.json"
print ("trying to get retweets")
return request_tweet_list(url, user_id)
def get_my_favourite_tweets(user_id, input_params = {}):
""" Returns a user's favourite tweets """
url = "https://api.twitter.com/1.1/favorites/list.json"
return request_tweet_list(url, user_id)
def get_user_latest_tweets(user_id, params={}):
url = "https://api.twitter.com/1.1/statuses/user_timeline.json?"
return request_tweet_list(url, user_id, params)
def get_latest_twitter_mentions(user_id):
url = "https://api.twitter.com/1.1/statuses/mentions_timeline.json"
return request_tweet_list(url, user_id)
def search_for_tweets_about(user_id, params):
""" Search twitter API """
url = "https://api.twitter.com/1.1/search/tweets.json"
response = make_twitter_request(url, user_id, params)
return process_tweets(response.json()["statuses"])
|
anjishnu/ask-alexa-pykit | examples/twitter/twitter.py | read_out_tweets | python | def read_out_tweets(processed_tweets, speech_convertor=None):
return ["tweet number {num} by {user}. {text}.".format(num=index+1, user=user, text=text)
for index, (user, text) in enumerate(processed_tweets)] | Input - list of processed 'Tweets'
output - list of spoken responses | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/twitter/twitter.py#L374-L380 | null | import requests
import jsonpickle
from requests_oauthlib import OAuth1
from urllib.parse import parse_qs, urlencode
import cherrypy
from collections import defaultdict
import json
import os
import re
from collections import defaultdict
# For readable serializations
jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
class LocalCache(object):
""" Generic class for encapsulating twitter credential caching """
server_data_template = "{}.server"
user_data_template = "{0}.user.{1}"
def __init__(self, backup = "tmp/twitter.cache"):
self.backup = backup #Unique identifier for the backup of this cache
self.memcache = {
"users" : defaultdict(lambda : {}),
"server": defaultdict(lambda : {})
}
self.deserialize()
def users(self):
return self.memcache['users']
def set_user_state(self, user_id, state):
self.memcache['users'][user_id] = state
def update_user_state(self, user_id, state = {}):
self.memcache['users'][user_id].update(state)
def get_user_state(self, user_id):
return self.memcache['users'][user_id]
def clear_user_state(self, user_id):
return self.memcache['users'][user_id].clear()
def update_server_state(self, state_dict):
self.memcache['server'].update(state_dict)
def get_server_state(self):
return self.memcache['server']
def clear_server_state(self):
return self.memcache['server'].clear()
def initialize_user_queue(self, user_id, queue):
self.memcache['users'][user_id]['user_queue'] = ReadableQueue(queue)
def user_queue(self, user_id):
if 'user_queue' in self.memcache['users'][user_id]:
return self.memcache['users'][user_id]['user_queue']
def server_fname(self):
return self.server_data_template.format(self.backup)
def user_fname(self, user):
return self.user_data_template.format(self.backup, user)
def deserialize(self):
cache_loaded = False
if os.path.exists(self.server_fname()) and not os.path.isdir(self.backup):
try:
self.memcache = { "server" : {},
"users" : {} }
with open(self.server_fname()) as backupfile:
print ("Attempting to reload cache")
self.memcache['server'] = jsonpickle.decode(backupfile.read())
print ("Server cache loaded", json.dumps(self.memcache, indent=4))
for user in self.memcache['server']['user_list']:
# Try to load as much user data as possible
if os.path.exists(self.user_fname(user)):
print ("found path for user", user)
with open(self.user_fname(user)) as userfile:
user_data = jsonpickle.decode(userfile.read())
self.memcache['users'][user] = user_data
cache_loaded = True
except Exception as e:
print ("Cache file corrupted...")
raise e
if not cache_loaded:
print ("Cache could not be loaded")
pass
else:
print ("CACHE LOADED SUCCESSFULLY!")
def serialize(self):
json_to_serialize = self.memcache['server']
user_list = list(self.users().keys())
json_to_serialize.update({"user_list" : user_list})
with open(self.server_fname(), 'w') as backup_server:
# Serialize Server:
json_encoded = jsonpickle.encode(json_to_serialize)
backup_server.write(json_encoded)
for user in user_list:
user_data = self.get_user_state(user)
json_encoded = jsonpickle.encode(user_data)
with open(self.user_fname(user), 'w') as userfile:
userfile.write(json_encoded)
class ReadableQueue(object):
def __init__(self, queue=[], pos=0):
self.hashmap = { "queue" : [(i, e) for i,e in enumerate(queue)],
"pos" : pos }
return
def queue(self):
return self.hashmap['queue']
def is_empty(self):
return len(self.queue()) == 0
def is_finished(self):
return self.pos() == len(self.queue())
def pos(self):
return self.hashmap['pos']
def set_pos(self, val):
self.hashmap['pos'] = val
def get_next(self, offset=1):
if self.pos() < len(self.queue()):
temp_queue = self.queue()[self.pos(): self.pos() + offset]
self.set_pos(self.pos() + offset)
if self.pos() > len(self.queue()): self.set_pos(len(self.queue()))
return temp_queue
def read_out_next(self, offset=1):
return " ".join([readable.read_out(index) for index,readable in self.get_next(offset)])
def has_prev(self):
return self.pos() > 0
def get_prev(self, offset=1):
if self.pos() > 0:
self.set_pos(self.pos() - offset)
if self.pos() < 0:
offset = offset + self.pos()
# [1, current(2), 3] get_prev(offeset=3)
# pos :=> -2, offset :=> 3-2 = 1, pos :=> 0, then read 0 to 1
self.set_pos(0)
return self.queue()[self.pos() : offset]
return None
def read_out_prev(self, offset=1):
return " ".join([readable.read_out() for readable in self.get_prev(offset)])
#Local cache caches tokens for different users
local_cache = LocalCache()
def strip_html(text):
""" Get rid of ugly twitter html """
def reply_to(text):
replying_to = []
split_text = text.split()
for index, token in enumerate(split_text):
if token.startswith('@'): replying_to.append(token[1:])
else:
message = split_text[index:]
break
rply_msg = ""
if len(replying_to) > 0:
rply_msg = "Replying to "
for token in replying_to[:-1]: rply_msg += token+","
if len(replying_to)>1: rply_msg += 'and '
rply_msg += replying_to[-1]+". "
return rply_msg + " ".join(message)
text = reply_to(text)
text = text.replace('@', ' ')
return " ".join([token for token in text.split()
if ('http:' not in token) and ('https:' not in token)])
class Tweet(object):
def __init__(self, json_obj):
self.tweet = json_obj
def get_id(self):
return self.tweet['id']
def get_raw_text(self):
return self.tweet['text']
def _process_text(self):
text = strip_html(self.tweet['text'])
user_mentions = self.tweet['entities']['user_mentions']
text = text.replace('@', 'at ')
for user in user_mentions:
text = text.replace(user['screen_name'], user['name'])
return text
def get_screen_name(self):
return self.tweet['user']['screen_name']
def get_user_name(self):
return self.tweet['user']['name']
def read_out(self, index):
text = self._process_text()
return "tweet number {num} by {user} : {text} ,".format(num=index+1,
user=self.get_user_name(),
text = text)
def detailed_description(self):
response_builder = ["This tweet was posted by {user_name} whose twitter handle is {screen_name} the account description reads: {description}."
.format(screen_name=self.tweet['user']['screen_name'],
user_name=self.tweet['user']['name'],
description=self.tweet['user']['description'])]
if self.tweet['retweeted']:
response_builder += ["It's been retweeted {} times.".format(self.tweet['retweet_count'])]
if self.tweet['favorited']:
response_builder += ["{} people have favorited it.".format(self.tweet['favorites_count'])]
if self.tweet["in_reply_to_screen_name"]:
response_builder += ["it was posted in response to user {}.".format(self.tweet['in_reply_to_screen_name'])]
response_builder += ["the text of the tweet is, {}.".format(self._process_text())]
return " ".join(response_builder)
def user_mentions(self):
return self.tweet['user_mentions']
def get_cached_access_pair(uid):
if uid in local_cache.users():
access_token = local_cache.get_user_state(uid)['access_token']
access_secret = local_cache.get_user_state(uid)['access_secret']
return access_token, access_secret
else:
raise ValueError
def get_request_token(callback_url=None):
url = "https://api.twitter.com/oauth/request_token"
consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']
auth = OAuth1(consumer_key, consumer_secret)
params = { "oauth_callback" : callback_url }
r = requests.post(url, auth=auth, params=params)
response_obj = parse_qs(r.text)
local_cache.update_server_state({ "request_token" : response_obj['oauth_token'][0],
"request_secret": response_obj['oauth_token_secret'][0] })
return response_obj['oauth_token_secret'], response_obj['oauth_token']
def authenticate_user_page(callback_url="", metadata=None):
url = "https://api.twitter.com/oauth/authenticate"
oauth_secret, oauth_token = get_request_token(callback_url)
local_cache.update_server_state({'metadata' : metadata })
params = { "force_login" : True,
"oauth_token": oauth_token }
r = requests.get(url, params=params)
return r.text
def post_tweet(user_id, message, additional_params={}):
"""
Helper function to post a tweet
"""
url = "https://api.twitter.com/1.1/statuses/update.json"
params = { "status" : message }
params.update(additional_params)
r = make_twitter_request(url, user_id, params, request_type='POST')
print (r.text)
return "Successfully posted a tweet {}".format(message)
def get_access_token(oauth_token, oauth_verifier):
    """Exchange a request token + verifier for long-lived credentials.

    Caches the credentials keyed by the returned oauth_token and returns
    URL-encoded fragments for the OAuth redirect back to the client.
    """
    server_state = local_cache.get_server_state()
    consumer_key, consumer_secret = server_state['twitter_keys']
    auth = OAuth1(consumer_key, consumer_secret,
                  server_state['request_token'],
                  server_state['request_secret'])
    response = requests.post("https://api.twitter.com/oauth/access_token",
                             params={"oauth_verifier": oauth_verifier},
                             auth=auth)
    fields = parse_qs(response.text)
    uid = fields['oauth_token'][0]
    print ("Access token", uid)
    local_cache.set_user_state(
        user_id=uid,
        state={"access_token": fields['oauth_token'][0],
               "access_secret": fields['oauth_token_secret'][0],
               'twitter_user_id': fields['user_id'][0],
               'screen_name': fields['screen_name'][0]})
    local_cache.serialize()
    fragments = {
        "state": local_cache.get_server_state()['metadata']['state'],
        "access_token": uid,
        "token_type": "Bearer"
    }
    return urlencode(fragments)
def get_twitter_auth(user_id):
    """Build an OAuth1 signer for *user_id* from cached credentials."""
    key, secret = local_cache.get_server_state()['twitter_keys']
    token, token_secret = get_cached_access_pair(user_id)
    return OAuth1(key, secret, token, token_secret)
def process_tweets(tweet_list):
    """Wrap each raw tweet dict in a Tweet helper object."""
    return list(map(Tweet, tweet_list))
def make_twitter_request(url, user_id, params={}, request_type='GET'):
    """Issue an authorized Twitter API request for *user_id*.

    Fix: an unsupported *request_type* now raises ValueError instead of
    silently returning None (which crashed callers with an opaque
    AttributeError on .json()).
    """
    auth = get_twitter_auth(user_id)
    if request_type == "GET":
        return requests.get(url, auth=auth, params=params)
    if request_type == "POST":
        return requests.post(url, auth=auth, params=params)
    raise ValueError("unsupported request_type: %r" % (request_type,))
def get_user_twitter_details(user_id, params={}):
    """Fetch the Twitter profile for *user_id*'s cached twitter_user_id.

    Fix: copy *params* before adding keys -- the original mutated the
    shared default dict, leaking 'user_id' across unrelated calls.
    """
    url = "https://api.twitter.com/1.1/users/lookup.json"
    user_cache = local_cache.get_user_state(user_id)
    query = dict(params)
    query["user_id"] = user_cache['twitter_user_id']
    response = make_twitter_request(url, user_id, query)
    return response.json()
def geo_search(user_id, search_location):
    """
    Search for a location - free form
    """
    url = "https://api.twitter.com/1.1/geo/search.json"
    return make_twitter_request(url, user_id, {"query": search_location}).json()
def closest_trend_search(user_id, params={}):
    """Find trend locations closest to the coordinates in *params*."""
    url = "https://api.twitter.com/1.1/trends/closest.json"
    return make_twitter_request(url, user_id, params).json()
def list_trends(user_id, woe_id):
    """List trending topics for the given Where-On-Earth id."""
    url = "https://api.twitter.com/1.1/trends/place.json"
    return make_twitter_request(url, user_id, {"id": woe_id}).json()
def request_tweet_list(url, user_id, params={}):
    """Fetch *url* for *user_id* and wrap the result as Tweet objects.

    Fix: *params* was accepted but never forwarded, so every caller's
    query parameters (e.g. get_user_latest_tweets) were dropped.
    """
    return process_tweets(make_twitter_request(url, user_id, params).json())
def get_home_tweets(user_id, input_params={}):
    """Return the tweets on the user's home timeline."""
    print ("Trying to get home tweets")
    url = "https://api.twitter.com/1.1/statuses/home_timeline.json"
    return request_tweet_list(url, user_id)
def get_retweets_of_me(user_id, input_params={}):
    """ returns recently retweeted tweets """
    print ("trying to get retweets")
    return request_tweet_list(
        "https://api.twitter.com/1.1/statuses/retweets_of_me.json", user_id)
def get_my_favourite_tweets(user_id, input_params={}):
    """ Returns a user's favourite tweets """
    return request_tweet_list(
        "https://api.twitter.com/1.1/favorites/list.json", user_id)
def get_user_latest_tweets(user_id, params={}):
    """Return the user's own most recent tweets."""
    url = "https://api.twitter.com/1.1/statuses/user_timeline.json?"
    return request_tweet_list(url, user_id, params)
def get_latest_twitter_mentions(user_id):
    """Return recent tweets that mention the user."""
    return request_tweet_list(
        "https://api.twitter.com/1.1/statuses/mentions_timeline.json", user_id)
def search_for_tweets_about(user_id, params):
    """ Search twitter API """
    url = "https://api.twitter.com/1.1/search/tweets.json"
    results = make_twitter_request(url, user_id, params).json()
    return process_tweets(results["statuses"])
|
anjishnu/ask-alexa-pykit | examples/twitter/twitter.py | search_for_tweets_about | python | def search_for_tweets_about(user_id, params):
url = "https://api.twitter.com/1.1/search/tweets.json"
response = make_twitter_request(url, user_id, params)
return process_tweets(response.json()["statuses"]) | Search twitter API | train | https://github.com/anjishnu/ask-alexa-pykit/blob/a47c278ca7a60532bbe1a9b789f6c37e609fea8b/examples/twitter/twitter.py#L417-L421 | [
"def make_twitter_request(url, user_id, params={}, request_type='GET'):\n \"\"\" Generically make a request to twitter API using a particular user's authorization \"\"\"\n if request_type == \"GET\":\n return requests.get(url, auth=get_twitter_auth(user_id), params=params)\n elif request_type == \"P... | import requests
import jsonpickle
from requests_oauthlib import OAuth1
from urllib.parse import parse_qs, urlencode
import cherrypy
from collections import defaultdict
import json
import os
import re
from collections import defaultdict
# For readable serializations
jsonpickle.set_encoder_options('json', sort_keys=True, indent=4)
class LocalCache(object):
    """Two-level in-memory cache ('server' and per-user state) that can
    be persisted to disk with jsonpickle.

    Layout on disk: '<backup>.server' holds the server state plus a
    'user_list' index; each user lives in '<backup>.user.<id>'.
    """

    server_data_template = "{}.server"
    user_data_template = "{0}.user.{1}"

    def __init__(self, backup="tmp/twitter.cache"):
        # 'backup' is the filename prefix for all persisted state.
        self.backup = backup
        self.memcache = {
            "users": defaultdict(dict),
            "server": defaultdict(dict),
        }
        self.deserialize()

    def users(self):
        """Return the mapping of user id -> cached user state."""
        return self.memcache['users']

    def set_user_state(self, user_id, state):
        self.memcache['users'][user_id] = state

    def update_user_state(self, user_id, state={}):
        self.memcache['users'][user_id].update(state)

    def get_user_state(self, user_id):
        return self.memcache['users'][user_id]

    def clear_user_state(self, user_id):
        return self.memcache['users'][user_id].clear()

    def update_server_state(self, state_dict):
        self.memcache['server'].update(state_dict)

    def get_server_state(self):
        return self.memcache['server']

    def clear_server_state(self):
        return self.memcache['server'].clear()

    def initialize_user_queue(self, user_id, queue):
        self.memcache['users'][user_id]['user_queue'] = ReadableQueue(queue)

    def user_queue(self, user_id):
        # Returns None implicitly when no queue was initialized.
        state = self.memcache['users'][user_id]
        if 'user_queue' in state:
            return state['user_queue']

    def server_fname(self):
        return self.server_data_template.format(self.backup)

    def user_fname(self, user):
        return self.user_data_template.format(self.backup, user)

    def deserialize(self):
        """Reload persisted state from disk, if a backup exists."""
        cache_loaded = False
        if os.path.exists(self.server_fname()) and not os.path.isdir(self.backup):
            try:
                self.memcache = {"server": {}, "users": {}}
                with open(self.server_fname()) as backupfile:
                    print ("Attempting to reload cache")
                    self.memcache['server'] = jsonpickle.decode(backupfile.read())
                    print ("Server cache loaded", json.dumps(self.memcache, indent=4))
                # Load as much per-user data as is still present on disk.
                for user in self.memcache['server']['user_list']:
                    if os.path.exists(self.user_fname(user)):
                        print ("found path for user", user)
                        with open(self.user_fname(user)) as userfile:
                            self.memcache['users'][user] = jsonpickle.decode(userfile.read())
                    cache_loaded = True
            except Exception:
                print ("Cache file corrupted...")
                raise
        if cache_loaded:
            print ("CACHE LOADED SUCCESSFULLY!")
        else:
            print ("Cache could not be loaded")

    def serialize(self):
        """Write server state (plus a user index) and each user's state."""
        server_blob = self.memcache['server']
        user_list = list(self.users().keys())
        server_blob.update({"user_list": user_list})
        with open(self.server_fname(), 'w') as backup_server:
            backup_server.write(jsonpickle.encode(server_blob))
        for user in user_list:
            with open(self.user_fname(user), 'w') as userfile:
                userfile.write(jsonpickle.encode(self.get_user_state(user)))
class ReadableQueue(object):
    """A cursor over a list of 'readable' items (objects exposing
    read_out(index)), stored as (index, item) pairs.

    Fixes relative to the original:
    * read_out_next/read_out_prev no longer crash when the cursor is
      exhausted (get_next/get_prev return None) -- they return "".
    * read_out_prev now unpacks the (index, readable) pairs and passes
      the index to read_out(), matching read_out_next (the original
      iterated the tuples directly and called read_out() with no args).
    * get_prev returns the *offset* items before the cursor
      (queue[start:end]) -- the original sliced queue[pos:offset].
    """

    def __init__(self, queue=[], pos=0):
        # Stored in a plain dict so jsonpickle serialization stays flat.
        self.hashmap = {"queue": [(i, e) for i, e in enumerate(queue)],
                        "pos": pos}

    def queue(self):
        return self.hashmap['queue']

    def is_empty(self):
        return len(self.queue()) == 0

    def is_finished(self):
        return self.pos() == len(self.queue())

    def pos(self):
        return self.hashmap['pos']

    def set_pos(self, val):
        self.hashmap['pos'] = val

    def get_next(self, offset=1):
        """Return up to *offset* (index, item) pairs and advance; None if exhausted."""
        if self.pos() < len(self.queue()):
            batch = self.queue()[self.pos():self.pos() + offset]
            self.set_pos(min(self.pos() + offset, len(self.queue())))
            return batch
        return None

    def read_out_next(self, offset=1):
        batch = self.get_next(offset)
        if not batch:
            return ""
        return " ".join(readable.read_out(index) for index, readable in batch)

    def has_prev(self):
        return self.pos() > 0

    def get_prev(self, offset=1):
        """Return up to *offset* pairs before the cursor and rewind; None at start."""
        if not self.has_prev():
            return None
        end = self.pos()
        start = max(0, end - offset)
        self.set_pos(start)
        return self.queue()[start:end]

    def read_out_prev(self, offset=1):
        batch = self.get_prev(offset)
        if not batch:
            return ""
        return " ".join(readable.read_out(index) for index, readable in batch)
#Local cache caches tokens for different users
local_cache = LocalCache()
def strip_html(text):
    """Clean tweet text for speech output.

    Expands leading @-mentions into a "Replying to ..." prefix, removes
    remaining '@' characters, drops URL tokens and normalises whitespace.

    Fix: text that is empty or consists only of @-mentions no longer
    raises UnboundLocalError ('message' was never assigned).
    """
    def reply_to(text):
        replying_to = []
        split_text = text.split()
        message = []  # fix: default when every token is an @-mention
        for index, token in enumerate(split_text):
            if token.startswith('@'):
                replying_to.append(token[1:])
            else:
                message = split_text[index:]
                break
        rply_msg = ""
        if len(replying_to) > 0:
            rply_msg = "Replying to "
            for token in replying_to[:-1]:
                rply_msg += token + ","
            if len(replying_to) > 1:
                rply_msg += 'and '
            rply_msg += replying_to[-1] + ". "
        return rply_msg + " ".join(message)

    text = reply_to(text)
    text = text.replace('@', ' ')
    # Drop URL tokens entirely; " ".join also collapses extra whitespace.
    return " ".join([token for token in text.split()
                     if ('http:' not in token) and ('https:' not in token)])
class Tweet(object):
    """Read-only helper around one raw tweet payload (decoded JSON dict)."""

    def __init__(self, json_obj):
        self.tweet = json_obj

    def get_id(self):
        """Return the tweet id."""
        return self.tweet['id']

    def get_raw_text(self):
        """Return the tweet text exactly as delivered."""
        return self.tweet['text']

    def _process_text(self):
        """Return the tweet text cleaned up for text-to-speech."""
        text = strip_html(self.tweet['text'])
        mentions = self.tweet['entities']['user_mentions']
        text = text.replace('@', 'at ')
        # Speak display names instead of @handles.
        for mention in mentions:
            text = text.replace(mention['screen_name'], mention['name'])
        return text

    def get_screen_name(self):
        return self.tweet['user']['screen_name']

    def get_user_name(self):
        return self.tweet['user']['name']

    def read_out(self, index):
        return "tweet number {num} by {user} : {text} ,".format(
            num=index + 1,
            user=self.get_user_name(),
            text=self._process_text())

    def detailed_description(self):
        parts = ["This tweet was posted by {user_name} whose twitter handle is {screen_name} the account description reads: {description}."
                 .format(screen_name=self.tweet['user']['screen_name'],
                         user_name=self.tweet['user']['name'],
                         description=self.tweet['user']['description'])]
        if self.tweet['retweeted']:
            parts.append("It's been retweeted {} times.".format(self.tweet['retweet_count']))
        if self.tweet['favorited']:
            # NOTE(review): Twitter's API names this field 'favorite_count';
            # 'favorites_count' may KeyError here -- confirm against real payloads.
            parts.append("{} people have favorited it.".format(self.tweet['favorites_count']))
        if self.tweet["in_reply_to_screen_name"]:
            parts.append("it was posted in response to user {}.".format(self.tweet['in_reply_to_screen_name']))
        parts.append("the text of the tweet is, {}.".format(self._process_text()))
        return " ".join(parts)

    def user_mentions(self):
        return self.tweet['user_mentions']
def get_cached_access_pair(uid):
    """Return the cached (access_token, access_secret) pair for *uid*.

    Raises:
        ValueError: if the user has no cached credentials (now carries a
            message; the original raised a bare ValueError).
    """
    if uid not in local_cache.users():
        raise ValueError("no cached access token/secret for user %r" % (uid,))
    state = local_cache.get_user_state(uid)
    return state['access_token'], state['access_secret']
def get_request_token(callback_url=None):
    """Obtain an OAuth request token/secret from Twitter and cache both.

    Returns (token_secret_list, token_list) as produced by parse_qs.
    """
    consumer_key, consumer_secret = local_cache.get_server_state()['twitter_keys']
    r = requests.post("https://api.twitter.com/oauth/request_token",
                      auth=OAuth1(consumer_key, consumer_secret),
                      params={"oauth_callback": callback_url})
    response_obj = parse_qs(r.text)
    local_cache.update_server_state(
        {"request_token": response_obj['oauth_token'][0],
         "request_secret": response_obj['oauth_token_secret'][0]})
    return response_obj['oauth_token_secret'], response_obj['oauth_token']
def authenticate_user_page(callback_url="", metadata=None):
    """Return the HTML of Twitter's authenticate page for a new login."""
    _secret, oauth_token = get_request_token(callback_url)
    # Keep the request metadata around for get_access_token.
    local_cache.update_server_state({'metadata': metadata})
    r = requests.get("https://api.twitter.com/oauth/authenticate",
                     params={"force_login": True,
                             "oauth_token": oauth_token})
    return r.text
def post_tweet(user_id, message, additional_params={}):
    """
    Helper function to post a tweet
    """
    payload = {"status": message}
    payload.update(additional_params)
    r = make_twitter_request("https://api.twitter.com/1.1/statuses/update.json",
                             user_id, payload, request_type='POST')
    print (r.text)
    return "Successfully posted a tweet {}".format(message)
def get_access_token(oauth_token, oauth_verifier):
    """Trade a request token + verifier for access credentials.

    Caches the credentials keyed by the returned oauth_token, persists
    the cache, and returns URL-encoded fragments for the redirect.
    """
    server_state = local_cache.get_server_state()
    consumer_key, consumer_secret = server_state['twitter_keys']
    auth = OAuth1(consumer_key, consumer_secret,
                  server_state['request_token'],
                  server_state['request_secret'])
    r = requests.post("https://api.twitter.com/oauth/access_token",
                      params={"oauth_verifier": oauth_verifier},
                      auth=auth)
    response_obj = parse_qs(r.text)
    uid = response_obj['oauth_token'][0]
    print ("Access token", uid)
    local_cache.set_user_state(
        user_id=uid,
        state={"access_token": response_obj['oauth_token'][0],
               "access_secret": response_obj['oauth_token_secret'][0],
               'twitter_user_id': response_obj['user_id'][0],
               'screen_name': response_obj['screen_name'][0]})
    local_cache.serialize()
    fragments = {
        "state": local_cache.get_server_state()['metadata']['state'],
        "access_token": uid,
        "token_type": "Bearer"
    }
    return urlencode(fragments)
def get_twitter_auth(user_id):
    """Build an OAuth1 signer for *user_id* from cached credentials."""
    key, secret = local_cache.get_server_state()['twitter_keys']
    token, token_secret = get_cached_access_pair(user_id)
    return OAuth1(key, secret, token, token_secret)
def process_tweets(tweet_list):
    """ Clean tweets and enumerate, preserving only things that we are interested in """
    return list(map(Tweet, tweet_list))
def make_twitter_request(url, user_id, params={}, request_type='GET'):
    """Generically make a request to the Twitter API as *user_id*.

    Fix: an unsupported *request_type* now raises ValueError instead of
    silently returning None.
    """
    auth = get_twitter_auth(user_id)
    if request_type == "GET":
        return requests.get(url, auth=auth, params=params)
    if request_type == "POST":
        return requests.post(url, auth=auth, params=params)
    raise ValueError("unsupported request_type: %r" % (request_type,))
def get_user_twitter_details(user_id, params={}):
    """Fetch the Twitter profile for *user_id*'s cached twitter_user_id.

    Fix: copy *params* instead of mutating it -- the original updated
    the shared default dict, leaking 'user_id' between calls.
    """
    url = "https://api.twitter.com/1.1/users/lookup.json"
    user_cache = local_cache.get_user_state(user_id)
    query = dict(params)
    query["user_id"] = user_cache['twitter_user_id']
    response = make_twitter_request(url, user_id, query)
    return response.json()
def geo_search(user_id, search_location):
    """
    Search for a location - free form
    """
    url = "https://api.twitter.com/1.1/geo/search.json"
    return make_twitter_request(url, user_id, {"query": search_location}).json()
def closest_trend_search(user_id, params={}):
    """Find trend locations closest to the coordinates in *params*."""
    url = "https://api.twitter.com/1.1/trends/closest.json"
    return make_twitter_request(url, user_id, params).json()
def list_trends(user_id, woe_id):
    """List trending topics for the given Where-On-Earth id."""
    url = "https://api.twitter.com/1.1/trends/place.json"
    return make_twitter_request(url, user_id, {"id": woe_id}).json()
def read_out_tweets(processed_tweets, speech_convertor=None):
    """
    Input - list of processed 'Tweets' as (user, text) pairs
    output - list of spoken responses

    *speech_convertor* is accepted for interface compatibility but is
    not used by the current implementation.
    """
    responses = []
    for position, (user, text) in enumerate(processed_tweets, start=1):
        responses.append("tweet number {num} by {user}. {text}.".format(
            num=position, user=user, text=text))
    return responses
def request_tweet_list(url, user_id, params={}):
    """Fetch *url* for *user_id* and wrap the result as Tweet objects.

    Fix: forward *params* to the request -- it was accepted but silently
    dropped, so callers' query parameters never reached the API.
    """
    return process_tweets(make_twitter_request(url, user_id, params).json())
def get_home_tweets(user_id, input_params={}):
    """Return the tweets on the user's home timeline."""
    print ("Trying to get home tweets")
    return request_tweet_list(
        "https://api.twitter.com/1.1/statuses/home_timeline.json", user_id)
def get_retweets_of_me(user_id, input_params={}):
    """ returns recently retweeted tweets """
    print ("trying to get retweets")
    return request_tweet_list(
        "https://api.twitter.com/1.1/statuses/retweets_of_me.json", user_id)
def get_my_favourite_tweets(user_id, input_params={}):
    """ Returns a user's favourite tweets """
    return request_tweet_list(
        "https://api.twitter.com/1.1/favorites/list.json", user_id)
def get_user_latest_tweets(user_id, params={}):
    """Return the user's own most recent tweets."""
    url = "https://api.twitter.com/1.1/statuses/user_timeline.json?"
    return request_tweet_list(url, user_id, params)
def get_latest_twitter_mentions(user_id):
    """Return recent tweets that mention the user."""
    return request_tweet_list(
        "https://api.twitter.com/1.1/statuses/mentions_timeline.json", user_id)
|
KennethWilke/PingdomLib | pingdomlib/pingdom.py | Pingdom._serializeBooleans | python | def _serializeBooleans(params):
"
serialized = {}
for name, value in params.items():
if value is True:
value = 'true'
elif value is False:
value = 'false'
serialized[name] = value
return serialized
for k, v in params.items():
if isinstance(v, bool):
params[k] = str(v).lower() | Convert all booleans to lowercase strings | train | https://github.com/KennethWilke/PingdomLib/blob/3ed1e481f9c9d16b032558d62fb05c2166e162ed/pingdomlib/pingdom.py#L37-L50 | null | class Pingdom(object):
"""Main connection object to interact with pingdom
Attributes:
* pushChanges -- This boolean controls if changes are automatically
pushed to pingdom
* shortlimit -- String containing short api rate limit details
* longlimit -- String containing long api rate limit details
"""
def __init__(self, username, password, apikey, accountemail=None,
             pushchanges=True, server=server_address):
    """Store credentials and build the versioned API base URL."""
    self.pushChanges = pushchanges
    self.username = username
    self.password = password
    self.apikey = apikey
    self.accountemail = accountemail
    self.url = '%s/api/%s/' % (server, api_version)
    # Rate-limit headers reported by the most recent API response.
    self.shortlimit = ''
    self.longlimit = ''
def request(self, method, url, parameters=dict()):
    """Dispatch an authenticated request to the Pingdom API.

    Fix: removed a stray @staticmethod decorator -- the method uses
    ``self`` throughout (credentials, api key, rate-limit bookkeeping)
    and every call site invokes it as ``self.request(...)``, so the
    decorator would shift 'self' onto the method argument.
    """
    # Pingdom requires lowercase 'true'/'false'; urllib would send 'True'.
    parameters = self._serializeBooleans(parameters)
    headers = {'App-Key': self.apikey}
    if self.accountemail:
        headers.update({'Account-Email': self.accountemail})
    # Method selection handling
    if method.upper() == 'GET':
        response = requests.get(self.url + url, params=parameters,
                                auth=(self.username, self.password),
                                headers=headers)
    elif method.upper() == 'POST':
        response = requests.post(self.url + url, data=parameters,
                                 auth=(self.username, self.password),
                                 headers=headers)
    elif method.upper() == 'PUT':
        response = requests.put(self.url + url, data=parameters,
                                auth=(self.username, self.password),
                                headers=headers)
    elif method.upper() == 'DELETE':
        response = requests.delete(self.url + url, params=parameters,
                                   auth=(self.username, self.password),
                                   headers=headers)
    else:
        raise Exception("Invalid method in pingdom request")
    # Store pingdom api limits for callers to inspect.
    self.shortlimit = response.headers.get('Req-Limit-Short', self.shortlimit)
    self.longlimit = response.headers.get('Req-Limit-Long', self.longlimit)
    # Verify OK response
    if response.status_code != 200:
        sys.stderr.write('ERROR from %s: %d' % (response.url,
                                                response.status_code))
        sys.stderr.write('Returned data: %s\n' % response.json())
        response.raise_for_status()
    return response
def actions(self, **parameters):
    """Return the actions (alerts) generated for this account.

    Optional keyword filters (see the Pingdom API docs for details):
    from/to (UNIX timestamps), limit (max 300, default 100), offset,
    checkids, contactids, status ('sent', 'delivered', 'error',
    'not_delivered', 'no_credits'), via ('email', 'sms', 'twitter',
    'iphone', 'android').

    Returns the decoded 'actions' structure, i.e. a dict of the form
    {'alerts': [{'contactname': ..., 'checkid': ..., 'time': ...,
    'via': ..., 'status': ..., 'messageshort': ..., 'messagefull': ...,
    'sentto': ..., 'charged': ...}, ...]}.
    """
    valid = ('from', 'to', 'limit', 'offset', 'checkids', 'contactids',
             'status', 'via')
    # Warn user about unhandled parameters
    for name in parameters:
        if name not in valid:
            sys.stderr.write('%s not a valid argument for actions()\n'
                             % name)
    response = self.request('GET', 'actions', parameters)
    return response.json()['actions']
def alerts(self, **parameters):
    """Shorthand for actions()['alerts']; accepts the same filters."""
    return self.actions(**parameters)['alerts']
def getChecks(self, **parameters):
    """Pull all checks from Pingdom as PingdomCheck objects.

    Optional keywords: limit (max 25000, default 25000), offset
    (requires limit, default 0), tags (filter by tag string).
    """
    valid = ('limit', 'offset', 'tags')
    # Warn user about unhandled parameters
    for name in parameters:
        if name not in valid:
            sys.stderr.write('%s not a valid argument for getChecks()\n'
                             % name)
    response = self.request('GET', 'checks', parameters)
    return [PingdomCheck(self, data) for data in response.json()['checks']]
def getCheck(self, checkid):
    """Return a PingdomCheck with full details for *checkid*."""
    check = PingdomCheck(self, {'id': checkid})
    check.getDetails()
    return check
def getResults(self, checkid):
    """Return the raw detailed results for check *checkid*."""
    return self.request('GET', 'results/%s' % checkid).json()
def newCheck(self, name, host, checktype='http', **kwargs):
    """Create a new check and return it as a PingdomCheck instance.

    Parameters:
    * name -- new check name
    * host -- target hostname
    * checktype -- one of 'http', 'httpcustom', 'tcp', 'ping', 'dns',
      'udp', 'smtp', 'pop3', 'imap'

    All additional keyword arguments are passed straight through to the
    API; the accepted set depends on *checktype* (see the Pingdom API
    documentation for the full per-type option lists: url, encryption,
    port, auth, shouldcontain/shouldnotcontain, postdata and
    requestheader<NAME> for http; stringtosend/stringtoexpect for
    tcp/udp; expectedip/nameserver for dns; etc., plus the common alert
    options such as paused, resolution, contactids, sendto*,
    sendnotificationwhendown, notifyagainevery, notifywhenbackup and
    use_legacy_notifications).  Unknown keywords produce a warning on
    stderr but are still sent.

    Fixes:
    * kwargs.iteritems() -> kwargs.items() -- iteritems() does not exist
      on Python 3, which this file otherwise targets (urllib.parse).
    * The nine near-identical validation branches are collapsed into a
      data table, so the per-type option lists live in one place.
    """
    common = ['paused', 'resolution', 'contactids', 'sendtoemail',
              'sendtosms', 'sendtotwitter', 'sendtoiphone',
              'sendtoandroid', 'sendnotificationwhendown',
              'notifyagainevery', 'notifywhenbackup', 'type', 'hostname',
              'use_legacy_notifications']
    specific = {
        'http': ['alert_policy', 'autoresolve', 'url', 'encryption',
                 'port', 'auth', 'shouldcontain', 'shouldnotcontain',
                 'postdata'],
        'httpcustom': ['url', 'encryption', 'port', 'auth',
                       'additionalurls'],
        'tcp': ['alert_policy', 'autoresolve', 'port', 'stringtosend',
                'stringtoexpect'],
        'ping': [],
        'dns': ['expectedip', 'nameserver'],
        'udp': ['port', 'stringtosend', 'stringtoexpect'],
        'smtp': ['port', 'auth', 'stringtoexpect', 'encryption'],
        'pop3': ['port', 'stringtoexpect', 'encryption'],
        'imap': ['port', 'stringtoexpect', 'encryption'],
    }
    if checktype not in specific:
        raise Exception("Invalid checktype in newCheck()")
    allowed = set(common) | set(specific[checktype])
    # Warn user about unhandled parameters
    for key in kwargs:
        # 'http' additionally accepts arbitrary requestheader<NAME> keys.
        if key in allowed or (checktype == 'http' and
                              key.startswith('requestheader')):
            continue
        sys.stderr.write("'%s' is not a valid argument of newCheck() "
                         "for type '%s'\n" % (key, checktype))
    parameters = {'name': name, 'host': host, 'type': checktype}
    for key, value in kwargs.items():
        parameters[key] = value
    checkinfo = self.request("POST", 'checks', parameters)
    return self.getCheck(checkinfo.json()['check']['id'])
def modifyChecks(self, **kwargs):
    """Pause or change resolution for multiple checks in one bulk call.

    Keywords: paused (bool), resolution (1/5/15/30/60 minutes),
    checkids (comma-separated string of check identifiers; invalid
    identifiers are ignored by the API).

    Fix: the unknown-argument warning named newCheck() (copy-paste bug);
    it now correctly says modifyChecks().
    """
    for key in kwargs:
        if key not in ('paused', 'resolution', 'checkids'):
            sys.stderr.write("'%s' is not a valid argument "
                             "of modifyChecks()\n" % key)
    return self.request("PUT", "checks", kwargs).json()['message']
def deleteChecks(self, checkids):
    """Permanently remove the given checks. THIS CANNOT BE UNDONE!

    checkids -- comma-separated string of check identifiers to delete
    Returns the API status message.
    """
    response = self.request("DELETE", "checks", {'delcheckids': checkids})
    return response.json()['message']
def credits(self):
    """Return the account's credits information as a dict."""
    response = self.request("GET", "credits")
    return response.json()['credits']
def probes(self, **kwargs):
    """Return a list of all Pingdom probe servers.

    Optional keyword parameters:
        limit          -- Integer, cap on the number of returned probes
        offset         -- Integer, listing offset (requires limit),
                          default 0
        onlyactive     -- Boolean, return only active probes,
                          default False
        includedeleted -- Boolean, include retired probes, default False

    Each probe is a dict with keys: id, country, city, name, active,
    hostname, ip and countryiso.
    """
    recognized = ('limit', 'offset', 'onlyactive', 'includedeleted')
    # Warn about any argument the endpoint does not understand
    for key in kwargs:
        if key not in recognized:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of probes()\n')
    response = self.request("GET", "probes", kwargs)
    return response.json()['probes']
def references(self):
    """Fetch the reference tables of identifiers used elsewhere in the API.

    Returns a dict with keys 'regions', 'timezones', 'datetimeformats',
    'numberformats', 'countries' and 'phonecodes'. Each entry is a list
    of dicts pairing an identifier with its description (regions also
    carry the corresponding country/format/timezone identifiers, and
    phonecodes carry countryid, name and phonecode). See the Pingdom API
    "Reference" resource for the exact field layout.
    """
    response = self.request("GET", "reference")
    return response.json()
def traceroute(self, host, probeid):
    """Run a traceroute against `host` from the Pingdom probe `probeid`.

    Returns a dict with 'result' (the traceroute output), 'probeid' and
    'probedescription'.
    """
    params = {'host': host, 'probeid': probeid}
    return self.request('GET', 'traceroute', params).json()['traceroute']
def servertime(self):
    """Return the API server's current time as a UNIX timestamp."""
    response = self.request('GET', 'servertime')
    return response.json()['servertime']
def getContacts(self, **kwargs):
    """Return every notification contact as a PingdomContact instance.

    Optional keyword parameters:
        limit  -- Integer, cap on contacts returned (default 100)
        offset -- Integer, listing offset, requires limit (default 0)
    """
    # Warn about any argument the endpoint does not understand
    for key in kwargs:
        if key not in ('limit', 'offset'):
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of getContacts()\n')
    response = self.request("GET", "notification_contacts", kwargs)
    return [PingdomContact(self, contact)
            for contact in response.json()['contacts']]
def newContact(self, name, **kwargs):
    """Create a notification contact named `name`; return a PingdomContact.

    Optional keyword parameters: email, cellphone, countrycode,
    countryiso, defaultsmsprovider, directtwitter, twitteruser
    (see the Pingdom API "Contacts" resource for their semantics).
    """
    recognized = ('email', 'cellphone', 'countrycode', 'countryiso',
                  'defaultsmsprovider', 'directtwitter', 'twitteruser')
    # Warn about any argument the endpoint does not understand
    for key in kwargs:
        if key not in recognized:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of newContact()\n')
    kwargs['name'] = name
    response = self.request("POST", "notification_contacts", kwargs)
    return PingdomContact(self, response.json()['contact'])
def modifyContacts(self, contactids, paused):
    """Bulk-modify contacts: set the paused state of the given contacts.

    contactids -- comma-separated contact identifiers
    paused     -- desired paused state
    Returns the API status message.
    """
    payload = {'contactids': contactids, 'paused': paused}
    response = self.request("PUT", "notification_contacts", payload)
    return response.json()['message']
def deleteContacts(self, contactids):
    """Permanently delete the given contacts. THIS CANNOT BE UNDONE!

    contactids -- comma-separated contact identifiers
    Returns the API status message.
    """
    # NOTE(review): the payload key is 'delcheckids' even though these
    # are contact ids -- looks like a copy from deleteChecks(); verify
    # against the Pingdom API reference before changing it.
    payload = {'delcheckids': contactids}
    response = self.request("DELETE", "notification_contacts", payload)
    return response.json()['message']
def singleTest(self, host, checktype, **kwargs):
    """Perform a single test from a Pingdom probe against `host`.

    Please note that this method is meant to be used sparingly, not to
    set up your own monitoring solution.

    host      -- hostname to test
    checktype -- one of 'http', 'httpcustom', 'tcp', 'ping', 'dns',
                 'udp', 'smtp', 'pop3' or 'imap'

    Optional keyword parameters: probeid (Integer, default is a random
    probe) plus the type-specific options documented on newCheck().

    Returns a dict with keys: status ('up'/'down'), responsetime (ms),
    statusdesc, statusdesclong, probeid and probedesc.

    Raises Exception for an unknown checktype.
    """
    # Per-type whitelist of accepted keyword arguments; replaces nine
    # nearly identical if/elif branches. Unknown kwargs only produce a
    # warning on stderr -- they are still forwarded to the API.
    valid_args = {
        'http': ['probeid', 'url', 'encryption', 'port', 'auth',
                 'shouldcontain', 'shouldnotcontain', 'postdata'],
        'httpcustom': ['probeid', 'url', 'encryption', 'port', 'auth',
                       'additionalurls'],
        'tcp': ['probeid', 'port', 'stringtosend', 'stringtoexpect'],
        'ping': ['probeid'],
        'dns': ['probeid', 'expectedip', 'nameserver'],
        'udp': ['probeid', 'port', 'stringtosend', 'stringtoexpect'],
        'smtp': ['probeid', 'port', 'auth', 'stringtoexpect',
                 'encryption'],
        'pop3': ['probeid', 'port', 'stringtoexpect', 'encryption'],
        'imap': ['probeid', 'port', 'stringtoexpect', 'encryption'],
    }
    if checktype not in valid_args:
        raise Exception("Invalid checktype in singleTest()")
    for key in kwargs:
        if key not in valid_args[checktype]:
            # 'http' checks additionally accept requestheader<NAME> keys
            if checktype == 'http' and key.startswith('requestheader'):
                continue
            sys.stderr.write("'%s'" % key + ' is not a valid ' +
                             'argument of singleTest() for type ' +
                             "'%s'\n" % checktype)
    parameters = {'host': host, 'type': checktype}
    # dict.update replaces the Python-2-only .iteritems() loop so this
    # also runs on Python 3.
    parameters.update(kwargs)
    checkinfo = self.request('GET', "single", parameters)
    return checkinfo.json()['result']
def getSettings(self):
    """Return all account-specific settings as a dict.

    The dict includes name/company/contact fields, address fields,
    'country' and 'timezone' sub-dicts, date/time/number format
    identifiers, 'publicreportscode' and the 'settingssaved' flag.
    See the Pingdom API "Settings" resource for the full field list.
    """
    response = self.request('GET', 'settings')
    return response.json()['settings']
def modifySettings(self, **kwargs):
    """Modify account-specific settings; returns the API status message.

    Accepted keyword parameters (see the Pingdom API "Settings" resource
    for full semantics and types): firstname, lastname, company, email,
    cellphone, cellcountrycode, cellcountryiso, phone, phonecountrycode,
    phonecountryiso, address, address2, zip, location, state, countryiso,
    vatcode, autologout, regionid, timezoneid, datetimeformatid,
    numberformatid, pubrcustomdesign, pubrtextcolor,
    pubrbackgroundcolor, pubrlogourl, pubrmonths, pubrshowoverview,
    pubrcustomdomain.

    Note that changing 'email' also changes the address used to
    authenticate against this API and the Pingdom panel.
    """
    recognized = ('firstname', 'lastname', 'company', 'email',
                  'cellphone', 'cellcountrycode', 'cellcountryiso',
                  'phone', 'phonecountrycode', 'phonecountryiso',
                  'address', 'address2', 'zip', 'location', 'state',
                  'countryiso', 'vatcode', 'autologout', 'regionid',
                  'timezoneid', 'datetimeformatid', 'numberformatid',
                  'pubrcustomdesign', 'pubrtextcolor',
                  'pubrbackgroundcolor', 'pubrlogourl', 'pubrmonths',
                  'pubrshowoverview', 'pubrcustomdomain')
    # Warn about any argument the endpoint does not understand
    for key in kwargs:
        if key not in recognized:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of modifySettings()\n')
    return self.request('PUT', 'settings', kwargs).json()['message']
def getEmailReports(self):
    """Return all email report subscriptions as PingdomEmailReport objects."""
    response = self.request('GET', 'reports.email')
    subscriptions = response.json()['subscriptions']
    return [PingdomEmailReport(self, sub) for sub in subscriptions]
def newEmailReport(self, name, **kwargs):
    """Create a new email report named `name`.

    Returns the API status message.

    Optional keyword parameters:
        checkid          -- Integer check identifier; omit for an
                            overview report
        frequency        -- String: 'monthly', 'weekly' or 'daily'
        contactids       -- String, comma-separated receiving contact ids
        additionalemails -- String, comma-separated extra recipient
                            email addresses
    """
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['checkid', 'frequency', 'contactids',
                       'additionalemails']:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of newEmailReport()\n')
    parameters = {'name': name}
    # dict.update replaces the Python-2-only .iteritems() loop so this
    # also runs on Python 3.
    parameters.update(kwargs)
    return self.request('POST', 'reports.email',
                        parameters).json()['message']
def getPublicReports(self):
    """Return the list of public (web-based) reports.

    Each entry is a dict with 'checkid', 'checkname' and 'reporturl'.
    """
    response = self.request('GET', 'reports.public')
    return response.json()['public']
def getSharedReports(self):
    """Return all shared (banner) reports as PingdomSharedReport objects."""
    banners = self.request('GET',
                           'reports.shared').json()['shared']['banners']
    return [PingdomSharedReport(self, banner) for banner in banners]
def newSharedReport(self, checkid, **kwargs):
    """Create a shared report (banner) for the check `checkid`.

    Returns the API status message.

    Optional keyword parameters:
        auto      -- Boolean, automatic period (if False, requires
                     fromyear/frommonth/fromday/toyear/tomonth/today)
        type      -- String banner type: 'uptime' or 'response'
        fromyear, frommonth, fromday -- Integers, period start
        toyear, tomonth, today       -- Integers, period end
    """
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['auto', 'type', 'fromyear', 'frommonth', 'fromday',
                       'toyear', 'tomonth', 'today', 'sharedtype']:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of newSharedReport()\n')
    parameters = {'checkid': checkid, 'sharedtype': 'banner'}
    # dict.update replaces the Python-2-only .iteritems() loop so this
    # also runs on Python 3.
    parameters.update(kwargs)
    return self.request('POST', 'reports.shared',
                        parameters).json()['message']
|
def request(self, method, url, parameters=None):
    """Send an authenticated request to the Pingdom API.

    method     -- HTTP verb: 'GET', 'POST', 'PUT' or 'DELETE'
                  (case-insensitive)
    url        -- resource path appended to the API base URL
    parameters -- optional dict of query/body parameters

    Updates self.shortlimit / self.longlimit from the rate-limit
    response headers, writes an error to stderr and raises for non-200
    responses, and returns the requests Response object.

    Raises Exception for an unsupported HTTP method.
    """
    if parameters is None:
        # Fixed: the original used a mutable default ('parameters=dict()').
        parameters = {}
    # The requests library uses urllib, which serializes to
    # "True"/"False" while Pingdom requires lowercase.
    parameters = self._serializeBooleans(parameters)

    headers = {'App-Key': self.apikey}
    if self.accountemail:
        headers.update({'Account-Email': self.accountemail})

    # Method selection handling
    auth = (self.username, self.password)
    verb = method.upper()
    if verb == 'GET':
        response = requests.get(self.url + url, params=parameters,
                                auth=auth, headers=headers)
    elif verb == 'POST':
        response = requests.post(self.url + url, data=parameters,
                                 auth=auth, headers=headers)
    elif verb == 'PUT':
        response = requests.put(self.url + url, data=parameters,
                                auth=auth, headers=headers)
    elif verb == 'DELETE':
        response = requests.delete(self.url + url, params=parameters,
                                   auth=auth, headers=headers)
    else:
        raise Exception("Invalid method in pingdom request")

    # Store pingdom api limits reported by the server
    self.shortlimit = response.headers.get('Req-Limit-Short',
                                           self.shortlimit)
    self.longlimit = response.headers.get('Req-Limit-Long',
                                          self.longlimit)

    # Verify OK response
    if response.status_code != 200:
        sys.stderr.write('ERROR from %s: %d' % (response.url,
                                                response.status_code))
        sys.stderr.write('Returned data: %s\n' % response.json())
        response.raise_for_status()
    return response
"def _serializeBooleans(params):\n \"\"\"\"Convert all booleans to lowercase strings\"\"\"\n serialized = {}\n for name, value in params.items():\n if value is True:\n value = 'true'\n elif value is False:\n value = 'false'\n serialized[name] = value\n return s... | class Pingdom(object):
"""Main connection object to interact with pingdom
Attributes:
* pushChanges -- This boolean controls if changes are automatically
pushed to pingdom
* shortlimit -- String containing short api rate limit details
* longlimit -- String containing long api rate limit details
"""
def __init__(self, username, password, apikey, accountemail=None,
             pushchanges=True, server=server_address):
    """Set up credentials and API endpoint state.

    username / password -- Pingdom account login
    apikey              -- Pingdom application key
    accountemail        -- optional multi-user account email, sent as a
                           request header when present
    pushchanges         -- when True, changes are pushed automatically
    server              -- API server base address
    """
    self.username = username
    self.password = password
    self.apikey = apikey
    self.accountemail = accountemail
    self.pushChanges = pushchanges
    self.url = '%s/api/%s/' % (server, api_version)
    # Rate-limit strings reported back by the API on each request
    self.shortlimit = ''
    self.longlimit = ''
@staticmethod
def _serializeBooleans(params):
""""Convert all booleans to lowercase strings"""
serialized = {}
for name, value in params.items():
if value is True:
value = 'true'
elif value is False:
value = 'false'
serialized[name] = value
return serialized
for k, v in params.items():
if isinstance(v, bool):
params[k] = str(v).lower()
def actions(self, **parameters):
    """Return actions (alerts) that have been generated for your account.

    Optional keyword parameters:
        from / to  -- Integer UNIX timestamps bounding generation time
        limit      -- Integer (max 300, default 100)
        offset     -- Integer listing offset (default 0)
        checkids   -- String, comma-separated check ids to filter on
        contactids -- String, comma-separated contact ids to filter on
        status     -- String, comma-separated statuses ('sent',
                      'delivered', 'error', 'not_delivered',
                      'no_credits')
        via        -- String, comma-separated mediums ('email', 'sms',
                      'twitter', 'iphone', 'android')

    Returns a dict {'alerts': [...]} where each alert carries
    contactname, contactid, checkid, time, via, status,
    messageshort/messagefull, sentto and the 'charged' flag.
    """
    recognized = ('from', 'to', 'limit', 'offset', 'checkids',
                  'contactids', 'status', 'via')
    # Warn about any argument the endpoint does not understand
    for key in parameters:
        if key not in recognized:
            sys.stderr.write('%s not a valid argument for actions()\n'
                             % key)
    response = self.request('GET', 'actions', parameters)
    return response.json()['actions']
def alerts(self, **parameters):
    """Shorthand for actions(): returns just the list of alerts.

    Accepts the same keyword parameters as actions().
    """
    return self.actions(**parameters)['alerts']
def getChecks(self, **parameters):
    """Return all checks from Pingdom as PingdomCheck instances.

    Optional keyword parameters:
        limit  -- Integer, cap on checks returned (max/default 25000)
        offset -- Integer, listing offset (requires limit), default 0
        tags   -- String, filter the listing by tag(s)
    """
    # Warn about any argument the endpoint does not understand
    for key in parameters:
        if key not in ('limit', 'offset', 'tags'):
            sys.stderr.write('%s not a valid argument for getChecks()\n'
                             % key)
    response = self.request('GET', 'checks', parameters)
    return [PingdomCheck(self, check)
            for check in response.json()['checks']]
def getCheck(self, checkid):
    """Return a PingdomCheck populated with the details of `checkid`."""
    detailed = PingdomCheck(self, {'id': checkid})
    detailed.getDetails()
    return detailed
def getResults(self, checkid):
    """Return detailed raw results for the check `checkid` as a dict."""
    return self.request('GET', 'results/%s' % checkid).json()
def newCheck(self, name, host, checktype='http', **kwargs):
    """Create a new check; returns the resulting PingdomCheck instance.

    name      -- new check name
    host      -- target hostname
    checktype -- one of 'http', 'httpcustom', 'tcp', 'ping', 'dns',
                 'udp', 'smtp', 'pop3' or 'imap' (default 'http')

    Common optional keyword parameters (all types):
        paused (Boolean, default False),
        resolution (Integer: 1/5/15/30/60, default 5),
        contactids (String), sendtoemail / sendtosms / sendtotwitter /
        sendtoiphone / sendtoandroid (Boolean, default False),
        sendnotificationwhendown (Integer, default 2),
        notifyagainevery (Integer, default 0),
        notifywhenbackup (Boolean, default True),
        use_legacy_notifications (Boolean, default False)

    Type-specific optional keyword parameters:
        http       -- url, encryption, port, auth, shouldcontain,
                      shouldnotcontain, postdata, requestheader<NAME>,
                      alert_policy, autoresolve
        httpcustom -- url (mandatory), encryption, port, auth,
                      additionalurls
        tcp        -- port (mandatory), stringtosend, stringtoexpect,
                      alert_policy, autoresolve
        dns        -- expectedip (mandatory), nameserver (mandatory)
        udp        -- port (mandatory), stringtosend, stringtoexpect
        smtp       -- port, auth, stringtoexpect, encryption
        pop3       -- port, stringtoexpect, encryption
        imap       -- port, stringtoexpect, encryption

    See the Pingdom API "Checks" resource for defaults and the full
    semantics of each option.

    Raises Exception for an unknown checktype.
    """
    # Keyword arguments accepted by every check type.
    common_args = ['paused', 'resolution', 'contactids', 'sendtoemail',
                   'sendtosms', 'sendtotwitter', 'sendtoiphone',
                   'sendtoandroid', 'sendnotificationwhendown',
                   'notifyagainevery', 'notifywhenbackup', 'type',
                   'hostname', 'use_legacy_notifications']
    # Extra arguments per check type; replaces nine nearly identical
    # if/elif branches that each repeated the common list above.
    extra_args = {
        'http': ['alert_policy', 'autoresolve', 'url', 'encryption',
                 'port', 'auth', 'shouldcontain', 'shouldnotcontain',
                 'postdata'],
        'httpcustom': ['url', 'encryption', 'port', 'auth',
                       'additionalurls'],
        'tcp': ['alert_policy', 'autoresolve', 'port', 'stringtosend',
                'stringtoexpect'],
        'ping': [],
        'dns': ['expectedip', 'nameserver'],
        'udp': ['port', 'stringtosend', 'stringtoexpect'],
        'smtp': ['port', 'auth', 'stringtoexpect', 'encryption'],
        'pop3': ['port', 'stringtoexpect', 'encryption'],
        'imap': ['port', 'stringtoexpect', 'encryption'],
    }
    if checktype not in extra_args:
        raise Exception("Invalid checktype in newCheck()")
    accepted = common_args + extra_args[checktype]
    # Warn user about unhandled parameters (still forwarded to the API)
    for key in kwargs:
        if key not in accepted:
            # 'http' checks additionally accept requestheader<NAME> keys
            if checktype == 'http' and key.startswith('requestheader'):
                continue
            sys.stderr.write("'%s'" % key + ' is not a valid ' +
                             'argument of newCheck() for type ' +
                             "'%s'\n" % checktype)
    parameters = {'name': name, 'host': host, 'type': checktype}
    # dict.update replaces the Python-2-only .iteritems() loop so this
    # also runs on Python 3.
    parameters.update(kwargs)
    checkinfo = self.request("POST", 'checks', parameters)
    return self.getCheck(checkinfo.json()['check']['id'])
def modifyChecks(self, **kwargs):
"""Pause or change resolution for multiple checks in one bulk call.
Parameters:
* paused -- Check should be paused
Type: Boolean
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
* checkids -- Comma-separated list of identifiers for checks to be
modified. Invalid check identifiers will be ignored.
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'checkids']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newCheck()\n')
return self.request("PUT", "checks", kwargs).json()['message']
def deleteChecks(self, checkids):
"""Deletes a list of checks, CANNOT BE REVERSED!
Provide a comma-separated list of checkid's to delete
"""
return self.request("DELETE", "checks",
{'delcheckids': checkids}).json()['message']
def credits(self):
"""Gets credits list"""
return self.request("GET", "credits").json()['credits']
def probes(self, **kwargs):
"""Returns a list of all Pingdom probe servers
Parameters:
* limit -- Limits the number of returned probes to the specified
quantity
Type: Integer
* offset -- Offset for listing (requires limit).
Type: Integer
Default: 0
* onlyactive -- Return only active probes
Type: Boolean
Default: False
* includedeleted -- Include old probes that are no longer in use
Type: Boolean
Default: False
Returned structure:
[
{
'id' : <Integer> Unique probe id
'country' : <String> Country
'city' : <String> City
'name' : <String> Name
'active' : <Boolean> True if probe is active
'hostname' : <String> DNS name
'ip' : <String> IP address
'countryiso': <String> Country ISO code
},
...
]
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset', 'onlyactive', 'includedeleted']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of probes()\n')
return self.request("GET", "probes", kwargs).json()['probes']
def references(self):
"""Get a reference of regions, timezones and date/time/number formats
and their identifiers.
Returned structure:
{
'regions' :
[
{
'id' : <Integer> Region identifier
'description' : <String> Region description
'countryid' : <Integer> Corresponding country
identifier
'datetimeformatid' : <Integer> Corresponding datetimeformat
identifier
'numberformatid' : <Integer> Corresponding numberformat
identifer
'timezoneid' : <Integer> Corresponding timezone
identifier
},
...
],
'timezones' :
[
{
'id' : <Integer> Time zone identifier
'description' : <String> Time zone description
},
...
],
'datetimeformats' :
[
{
'id' : <Integer> Date/time format identifer
'description' : <String> Date/time format description
},
...
],
'numberformats' :
[
{
'id' : <Integer> Number format identifier
'description' : <String> Number format description
},
...
],
'countries' :
[
{
'id' : <Integer> Country id
'iso' : <String> Country ISO code
},
...
],
'phonecodes' :
[
{
'countryid' : <Integer> Country id
'name' : <String> Country name
'phonecode' : <String> Area phone code
},
...
]
}"""
return self.request("GET", "reference").json()
def traceroute(self, host, probeid):
"""Perform a traceroute to a specified target from a specified Pingdom
probe.
Provide hostname to check and probeid to check from
Returned structure:
{
'result' : <String> Traceroute output
'probeid' : <Integer> Probe identifier
'probedescription' : <String> Probe description
}
"""
response = self.request('GET', 'traceroute', {'host': host,
'probeid': probeid})
return response.json()['traceroute']
def servertime(self):
"""Get the current time of the API server in UNIX format"""
return self.request('GET', 'servertime').json()['servertime']
def getContacts(self, **kwargs):
"""Returns a list of all contacts.
Optional Parameters:
* limit -- Limits the number of returned contacts to the specified
quantity.
Type: Integer
Default: 100
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
Returned structure:
[
'id' : <Integer> Contact identifier
'name' : <String> Contact name
'email' : <String> Contact email
'cellphone' : <String> Contact telephone
'countryiso' : <String> Cellphone country ISO code
'defaultsmsprovider' : <String> Default SMS provider
'directtwitter' : <Boolean> Send Tweets as direct messages
'twitteruser' : <String> Twitter username
'paused' : <Boolean> True if contact is pasued
'iphonetokens' : <String list> iPhone tokens
'androidtokens' : <String list> android tokens
]
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of getContacts()\n')
return [PingdomContact(self, x) for x in
self.request("GET", "notification_contacts", kwargs).json()['contacts']]
def newContact(self, name, **kwargs):
"""Create a new contact.
Provide new contact name and any optional arguments. Returns new
PingdomContact instance
Optional Parameters:
* email -- Contact email address
Type: String
* cellphone -- Cellphone number, without the country code part. In
some countries you are supposed to exclude leading zeroes.
(Requires countrycode and countryiso)
Type: String
* countrycode -- Cellphone country code (Requires cellphone and
countryiso)
Type: String
* countryiso -- Cellphone country ISO code. For example: US (USA),
GB (Britain) or SE (Sweden) (Requires cellphone and
countrycode)
Type: String
* defaultsmsprovider -- Default SMS provider
Type: String ['clickatell', 'bulksms', 'esendex',
'cellsynt']
* directtwitter -- Send tweets as direct messages
Type: Boolean
Default: True
* twitteruser -- Twitter user
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['email', 'cellphone', 'countrycode', 'countryiso',
'defaultsmsprovider', 'directtwitter',
'twitteruser']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newContact()\n')
kwargs['name'] = name
contactinfo = self.request("POST", "notification_contacts",
kwargs).json()['contact']
return PingdomContact(self, contactinfo)
def modifyContacts(self, contactids, paused):
"""Modifies a list of contacts.
Provide comma separated list of contact ids and desired paused state
Returns status message
"""
response = self.request("PUT", "notification_contacts", {'contactids': contactids,
'paused': paused})
return response.json()['message']
    def deleteContacts(self, contactids):
        """Deletes a list of contacts. CANNOT BE REVERSED!

        Provide a comma-separated list of contactid's to delete.

        Returns status message.
        """

        # NOTE(review): the ids are sent under the key 'delcheckids',
        # mirroring deleteChecks() -- verify against the Pingdom API docs
        # whether this endpoint actually expects 'delcontactids'.
        return self.request("DELETE", "notification_contacts",
                            {'delcheckids': contactids}).json()['message']
def singleTest(self, host, checktype, **kwargs):
"""Performs a single test using a specified Pingdom probe against a
specified target. Please note that this method is meant to be used
sparingly, not to set up your own monitoring solution.
Provide hostname and check type, followed by any optional arguments.
Types available:
* http
* httpcustom
* tcp
* ping
* dns
* udp
* smtp
* pop3
Optional arguments:
* probeid -- Probe to use for check
Type: Integer
Default: A random probe
See newCheck() docstring for type-specific arguments
Returned structure:
{
'status' : <String> Test result status ['up, 'down']
'responsetime' : <Integer> Response time in milliseconds
'statusdesc' : <String> Short status description
'statusdesclong' : <String> Long status description
'probeid' : <Integer> Probe identifier
'probedesc' : <String> Probe description
}
"""
if checktype == 'http':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata']:
if key.startswith('requestheader') is not True:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'http'\n")
elif checktype == 'httpcustom':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'url',
'encryption', 'port', 'auth', 'additionalurls']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'httpcustom'\n")
elif checktype == 'tcp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtosend', 'stringtoexpect']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'tcp'\n")
elif checktype == 'ping':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'ping'\n")
elif checktype == 'dns':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'expectedip',
'nameserver']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'dns'\n")
elif checktype == 'udp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtosend', 'stringtoexpect']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'udp'\n")
elif checktype == 'smtp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port', 'auth',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'smtp'\n")
elif checktype == 'pop3':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'pop3'\n")
elif checktype == 'imap':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'imap'\n")
else:
raise Exception("Invalid checktype in singleTest()")
parameters = {'host': host, 'type': checktype}
for key, value in kwargs.iteritems():
parameters[key] = value
checkinfo = self.request('GET', "single", parameters)
return checkinfo.json()['result']
def getSettings(self):
"""Returns all account-specific settings.
Returned structure:
{
'firstname' : <String> First name
'lastname' : <String> Last name
'company' : <String> Company
'email' : <String> Email
'phone' : <String> Phone
'phonecountryiso' : <String> Phone country ISO code
'cellphone' : <String> Cellphone
'cellphonecountryiso' : <String> Cellphone country ISO code
'address' : <String> Address line 1
'address2' : <String> Address line 2
'zip' : <String> Zip, postal code or equivalent
'location' : <String> City / location
'state' : <String> State or equivalent
'autologout' : <Boolean> Enable auto-logout
'country' :
{
'name' : <String> Country name
'iso' : <String> Country ISO-code
'countryid' : <Integer> Country identifier
}
'vatcode' : <String> For certain EU countries, VAT-code
'region' : <String> Region
'regionid' : <Integer> Region identifier, see reference
'accountcreated' : <Integer> Account creation timestamp
'timezone' :
{
'id' : <String> Timezone name
'description' : <String> Timezone description
'timezoneid' : <Integer> Timezone identifier
}
'dateformat' : <String> Date format
'timeformat' : <String> Time format
'datetimeformatid' : <Integer> Date/time format identifier
'numberformat' : <String> Number format
'numberformatexample' : <String> Example of number presentation
'numberformatid' : <Integer> Number format identifier
'publicreportscode' : <String> URL code
'settingssaved' : <Boolean> True if user has saved initial
settings in control panel
}
"""
return self.request('GET', 'settings').json()['settings']
def modifySettings(self, **kwargs):
"""Modify account-specific settings.
Returns status message for operation
Optional parameters:
* firstname -- First name
Type: String
* lastname -- Last name
Type: String
* company -- Company
Type: String
* email -- Email (Please note that your email is used for
authentication purposes such as using this API or logging into
the Pingdom Panel)
Type: String
* cellphone -- Cellphone (without country code)
(Requires cellcountrycode and cellcountryiso)
Type: String
* cellcountrycode -- Cellphone country code, for example 1 (USA)
or 46 (Sweden)
Type: Integer
* cellcountryiso -- Cellphone country ISO code, for example
US(USA) or SE (Sweden)
Type: String
* phone -- Phone (without country code) (Requires phonecountrycode
and phonecountryiso)
Type: String
* phonecountrycode -- Phone country code, for example 1 (USA)
or 46 (Sweden)
Type: Integer
* phonecountryiso -- Phone country ISO code, for example US (USA)
or SE (Sweden)
Type: String
* address -- Address line 1
Type: String
* address2 -- Address line 2
Type: String
* zip -- Zip, postal code or equivalent
Type: String
* location -- City / location
Type: String
* state -- State, province or equivalent
Type: String
* countryiso -- Country ISO code, for example US (USA)
or SE (Sweden)
Type: String
* vatcode -- For certain EU countries, VAT-code.
Example: SE123456789
Type: String
* autologout -- Enable auto-logout
Type: Boolean
* regionid -- Region identifier, for localization purposes.
0 for "Custom"/none. See the API resource "Reference" for more
information
Type: Integer
* timezoneid -- Time zone identifier. See the API resource
"Reference" for more information
Type: Integer
* datetimeformatid -- Date/time format identifier. See the API
resource "Reference" for more information
Type: Integer
* numberformatid -- Number format identifier. See the API resource
"Reference" for more information
Type: Integer
* pubrcustomdesign -- Use custom design for public reports
Type: Boolean
* pubrtextcolor -- Public reports, custom text color
(Example: FEFFFE or 99CC00)
Type: String
* pubrbackgroundcolor -- Public reports, background color
(Example: FEFFFE or 99CC00)
Type: String
* pubrlogourl -- Public reports, URL to custom logotype.
This parameter is currently disabled for public use.
(Example: stats.pingdom.com/images/logo.png)
Type: String
* pubrmonths -- Public reports, nuber of months to show
Type: String ['none', 'all', '3']
* pubrshowoverview -- Public reports, enable overview
Type: Boolean
* pubrcustomdomain -- Public reports, custom domain. Must be a DNS
CNAME with target stats.pingdom.com
Type: Boolean
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['firstname', 'lastname', 'company', 'email',
'cellphone', 'cellcountrycode', 'cellcountryiso',
'phone', 'phonecountrycode', 'phonecountryiso',
'address', 'address2', 'zip', 'location', 'state',
'countryiso', 'vatcode', 'autologout', 'regionid',
'timezoneid', 'datetimeformatid', 'numberformatid',
'pubrcustomdesign', 'pubrtextcolor',
'pubrbackgroundcolor', 'pubrlogourl', 'pubrmonths',
'pubrshowoverview', 'pubrcustomdomain']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of modifySettings()\n')
return self.request('PUT', 'settings', kwargs).json()['message']
def getEmailReports(self):
"""Returns a list of PingdomEmailReport instances."""
reports = [PingdomEmailReport(self, x) for x in
self.request('GET',
'reports.email').json()['subscriptions']]
return reports
def newEmailReport(self, name, **kwargs):
"""Creates a new email report
Returns status message for operation
Optional parameters:
* checkid -- Check identifier. If omitted, this will be an
overview report
Type: Integer
* frequency -- Report frequency
Type: String ['monthly', 'weekly', 'daily']
* contactids -- Comma separated list of receiving contact
identifiers
Type: String
* additionalemails -- Comma separated list of additional receiving
emails
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['checkid', 'frequency', 'contactids',
'additionalemails']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newEmailReport()\n')
parameters = {'name': name}
for key, value in kwargs.iteritems():
parameters[key] = value
return self.request('POST', 'reports.email',
parameters).json()['message']
def getPublicReports(self):
"""Returns a list of public (web-based) reports
Returned structure:
[
{
'checkid' : <Integer> Check identifier
'checkname' : <String> Check name
'reporturl' : <String> URL to report
},
...
]
"""
return self.request('GET', 'reports.public').json()['public']
def getSharedReports(self):
"""Returns a list of PingdomSharedReport instances"""
response = self.request('GET',
'reports.shared').json()['shared']['banners']
reports = [PingdomSharedReport(self, x) for x in response]
return reports
def newSharedReport(self, checkid, **kwargs):
"""Create a shared report (banner).
Returns status message for operation
Optional parameters:
* auto -- Automatic period (If false, requires: fromyear,
frommonth, fromday, toyear, tomonth, today)
Type: Boolean
* type -- Banner type
Type: String ['uptime', 'response']
* fromyear -- Period start: year
Type: Integer
* frommonth -- Period start: month
Type: Integer
* fromday -- Period start: day
Type: Integer
* toyear -- Period end: year
Type: Integer
* tomonth -- Period end: month
Type: Integer
* today -- Period end: day
Type: Integer
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['auto', 'type', 'fromyear', 'frommonth', 'fromday',
'toyear', 'tomonth', 'today', 'sharedtype']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newSharedReport()\n')
parameters = {'checkid': checkid, 'sharedtype': 'banner'}
for key, value in kwargs.iteritems():
parameters[key] = value
return self.request('POST', 'reports.shared',
parameters).json()['message']
|
KennethWilke/PingdomLib | pingdomlib/pingdom.py | Pingdom.actions | python | def actions(self, **parameters):
# Warn user about unhandled parameters
for key in parameters:
if key not in ['from', 'to', 'limit', 'offset', 'checkids',
'contactids', 'status', 'via']:
sys.stderr.write('%s not a valid argument for actions()\n'
% key)
response = self.request('GET', 'actions', parameters)
return response.json()['actions'] | Returns a list of actions (alerts) that have been generated for
your account.
Optional Parameters:
* from -- Only include actions generated later than this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* to -- Only include actions generated prior to this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* limit -- Limits the number of returned results to the specified
quantity.
Type: Integer (max 300)
Default: 100
* offset -- Offset for listing.
Type: Integer
Default: 0
* checkids -- Comma-separated list of check identifiers. Limit
results to actions generated from these checks.
Type: String
Default: All
* contactids -- Comma-separated list of contact identifiers.
Limit results to actions sent to these contacts.
Type: String
Default: All
* status -- Comma-separated list of statuses. Limit results to
actions with these statuses.
Type: String ['sent', 'delivered', 'error',
'not_delivered', 'no_credits']
Default: All
* via -- Comma-separated list of via mediums. Limit results to
actions with these mediums.
Type: String ['email', 'sms', 'twitter', 'iphone',
'android']
Default: All
Returned structure:
{
'alerts' : [
{
'contactname' : <String> Name of alerted contact
'contactid' : <String> Identifier of alerted contact
'checkid' : <String> Identifier of check
'time' : <Integer> Time of alert generation. Format
UNIX time
'via' : <String> Alert medium ['email', 'sms',
'twitter', 'iphone',
'android']
'status' : <String> Alert status ['sent', 'delivered',
'error',
'notdelivered',
'nocredits']
'messageshort': <String> Short description of message
'messagefull' : <String> Full message body
'sentto' : <String> Target address, phone number, etc
'charged' : <Boolean> True if your account was charged
for this message
},
...
]
} | train | https://github.com/KennethWilke/PingdomLib/blob/3ed1e481f9c9d16b032558d62fb05c2166e162ed/pingdomlib/pingdom.py#L99-L183 | [
"def request(self, method, url, parameters=dict()):\n \"\"\"Requests wrapper function\"\"\"\n\n # The requests library uses urllib, which serializes to \"True\"/\"False\" while Pingdom requires lowercase\n parameters = self._serializeBooleans(parameters)\n\n headers = {'App-Key': self.apikey}\n if se... | class Pingdom(object):
"""Main connection object to interact with pingdom
Attributes:
* pushChanges -- This boolean controls if changes are automatically
pushed to pingdom
* shortlimit -- String containing short api rate limit details
* longlimit -- String containing long api rate limit details
"""
def __init__(self, username, password, apikey, accountemail=None,
pushchanges=True, server=server_address):
self.pushChanges = pushchanges
self.username = username
self.password = password
self.apikey = apikey
self.accountemail = accountemail
self.url = '%s/api/%s/' % (server, api_version)
self.shortlimit = ''
self.longlimit = ''
@staticmethod
def _serializeBooleans(params):
""""Convert all booleans to lowercase strings"""
serialized = {}
for name, value in params.items():
if value is True:
value = 'true'
elif value is False:
value = 'false'
serialized[name] = value
return serialized
for k, v in params.items():
if isinstance(v, bool):
params[k] = str(v).lower()
    def request(self, method, url, parameters=dict()):
        """Send an authenticated HTTP request to the Pingdom API.

        `method` is one of GET/POST/PUT/DELETE (case-insensitive); `url`
        is the endpoint path appended to the base API URL; `parameters`
        are sent as query parameters (GET/DELETE) or form data
        (POST/PUT).  Refreshes the stored rate-limit strings, writes
        details to stderr and raises on a non-200 response, and returns
        the `requests` response object.
        """

        # The requests library uses urllib, which serializes to
        # "True"/"False" while Pingdom requires lowercase.
        # (The mutable default `parameters=dict()` is shared across
        # calls but harmless: it is only rebound here, never mutated.)
        parameters = self._serializeBooleans(parameters)

        headers = {'App-Key': self.apikey}
        if self.accountemail:
            # Multi-user accounts authenticate as a specific account email.
            headers.update({'Account-Email': self.accountemail})

        # Method selection handling
        if method.upper() == 'GET':
            response = requests.get(self.url + url, params=parameters,
                                    auth=(self.username, self.password),
                                    headers=headers)
        elif method.upper() == 'POST':
            response = requests.post(self.url + url, data=parameters,
                                     auth=(self.username, self.password),
                                     headers=headers)
        elif method.upper() == 'PUT':
            response = requests.put(self.url + url, data=parameters,
                                    auth=(self.username, self.password),
                                    headers=headers)
        elif method.upper() == 'DELETE':
            response = requests.delete(self.url + url, params=parameters,
                                       auth=(self.username, self.password),
                                       headers=headers)
        else:
            raise Exception("Invalid method in pingdom request")

        # Store pingdom api limits (kept unchanged if headers are absent).
        self.shortlimit = response.headers.get(
            'Req-Limit-Short',
            self.shortlimit)
        self.longlimit = response.headers.get(
            'Req-Limit-Long',
            self.longlimit)

        # Verify OK response
        if response.status_code != 200:
            # NOTE(review): response.json() will itself raise if the
            # error body is not JSON -- consider response.text here.
            sys.stderr.write('ERROR from %s: %d' % (response.url,
                                                    response.status_code))
            sys.stderr.write('Returned data: %s\n' % response.json())
            response.raise_for_status()

        return response
def alerts(self, **parameters):
"""A short-hand version of 'actions', returns list of alerts.
See parameters for actions()"""
return self.actions(**parameters)['alerts']
def getChecks(self, **parameters):
"""Pulls all checks from pingdom
Optional Parameters:
* limit -- Limits the number of returned probes to the
specified quantity.
Type: Integer (max 25000)
Default: 25000
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
* tags -- Filter listing by tag/s
Type: String
Default: None
"""
# Warn user about unhandled parameters
for key in parameters:
if key not in ['limit', 'offset', 'tags']:
sys.stderr.write('%s not a valid argument for getChecks()\n'
% key)
response = self.request('GET', 'checks', parameters)
return [PingdomCheck(self, x) for x in response.json()['checks']]
def getCheck(self, checkid):
"""Returns a detailed description of a specified check."""
check = PingdomCheck(self, {'id': checkid})
check.getDetails()
return check
def getResults(self, checkid):
""" Returns detailed results for a specified check id."""
response = self.request('GET','results/%s' % checkid)
return response.json()
def newCheck(self, name, host, checktype='http', **kwargs):
"""Creates a new check with settings specified by provided parameters.
Provide new check name, hostname and type along with any additional
optional parameters passed as keywords. Returns new PingdomCheck
instance
Types available:
* http
* httpcustom
* tcp
* ping
* dns
* udp
* smtp
* pop3
Optional parameters:
* paused -- Check should be paused
Type: Boolean
Default: False
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
Default: 5
* contactids -- Comma separated list of contact IDs
Type: String
Default: None
* sendtoemail -- Send alerts as email
Type: Boolean
Default: False
* sendtosms -- Send alerts as SMS
Type: Boolean
Default: False
* sendtotwitter -- Send alerts through Twitter
Type: Boolean
Default: False
* sendtoiphone -- Send alerts to iPhone
Type: Boolean
Default: False
* sendtoandroid -- Send alerts to Android
Type: Boolean
Default: False
* sendnotificationwhendown -- Send notification when check is down
the given number of times
Type: Integer
Default: 2
* notifyagainevery -- Set how many results to wait for in between
notices
Type: Integer
Default: 0
* notifywhenbackup -- Notify when back up again
Type: Boolean
Default: True
* use_legacy_notifications -- Use the old notifications instead of
BeepManager
Type: Boolean
Default: False
HTTP check options:
* url -- Target path on server
Type: String
Default: /
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* shouldcontain -- Target site should contain this string.
Cannot be combined with 'shouldnotcontain'
Type: String
Default: None
* shouldnotcontain -- Target site should not contain this string.
Cannot be combined with 'shouldcontain'
Type: String
Default: None
* postdata -- Data that should be posted to the web page,
for example submission data for a sign-up or login form.
The data needs to be formatted in the same way as a web browser
would send it to the web server
Type: String
Default: None
* requestheader<NAME> -- Custom HTTP header, replace <NAME> with
desired header name. Header in form: Header:Value
Type: String
Default: None
HTTPCustom check options:
* url -- Target path on server
Type: String
Mandatory
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* additionalurls -- Colon-separated list of additonal URLS with
hostname included
Type: String
Default: None
TCP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
DNS check options:
* expectedip -- Expected IP
Type: String
Mandatory
* nameserver -- Nameserver to check
Type: String
Mandatory
UDP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
SMTP check options:
* port -- Target server port
Type: Integer
Default: 25
* auth -- Username and password for target SMTP authentication.
Example: user:password
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
POP3 check options:
* port -- Target server port
Type: Integer
Default: 110
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
IMAP check options:
* port -- Target server port
Type: Integer
Default: 143
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
"""
if checktype == 'http':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata',
'use_legacy_notifications']:
if key.startswith('requestheader') is not True:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'http'\n")
elif checktype == 'httpcustom':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'additionalurls',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'httpcustom'\n")
elif checktype == 'tcp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'tcp'\n")
elif checktype == 'ping':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'ping'\n")
elif checktype == 'dns':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'expectedip', 'nameserver',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'dns'\n")
elif checktype == 'udp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'udp'\n")
elif checktype == 'smtp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'auth', 'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'smtp'\n")
elif checktype == 'pop3':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'pop3'\n")
elif checktype == 'imap':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'imap'\n")
else:
raise Exception("Invalid checktype in newCheck()")
parameters = {'name': name, 'host': host, 'type': checktype}
for key, value in kwargs.iteritems():
parameters[key] = value
checkinfo = self.request("POST", 'checks', parameters)
return self.getCheck(checkinfo.json()['check']['id'])
def modifyChecks(self, **kwargs):
"""Pause or change resolution for multiple checks in one bulk call.
Parameters:
* paused -- Check should be paused
Type: Boolean
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
* checkids -- Comma-separated list of identifiers for checks to be
modified. Invalid check identifiers will be ignored.
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'checkids']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newCheck()\n')
return self.request("PUT", "checks", kwargs).json()['message']
def deleteChecks(self, checkids):
"""Deletes a list of checks, CANNOT BE REVERSED!
Provide a comma-separated list of checkid's to delete
"""
return self.request("DELETE", "checks",
{'delcheckids': checkids}).json()['message']
def credits(self):
"""Gets credits list"""
return self.request("GET", "credits").json()['credits']
def probes(self, **kwargs):
"""Returns a list of all Pingdom probe servers
Parameters:
* limit -- Limits the number of returned probes to the specified
quantity
Type: Integer
* offset -- Offset for listing (requires limit).
Type: Integer
Default: 0
* onlyactive -- Return only active probes
Type: Boolean
Default: False
* includedeleted -- Include old probes that are no longer in use
Type: Boolean
Default: False
Returned structure:
[
{
'id' : <Integer> Unique probe id
'country' : <String> Country
'city' : <String> City
'name' : <String> Name
'active' : <Boolean> True if probe is active
'hostname' : <String> DNS name
'ip' : <String> IP address
'countryiso': <String> Country ISO code
},
...
]
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset', 'onlyactive', 'includedeleted']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of probes()\n')
return self.request("GET", "probes", kwargs).json()['probes']
def references(self):
"""Get a reference of regions, timezones and date/time/number formats
and their identifiers.
Returned structure:
{
'regions' :
[
{
'id' : <Integer> Region identifier
'description' : <String> Region description
'countryid' : <Integer> Corresponding country
identifier
'datetimeformatid' : <Integer> Corresponding datetimeformat
identifier
'numberformatid' : <Integer> Corresponding numberformat
identifer
'timezoneid' : <Integer> Corresponding timezone
identifier
},
...
],
'timezones' :
[
{
'id' : <Integer> Time zone identifier
'description' : <String> Time zone description
},
...
],
'datetimeformats' :
[
{
'id' : <Integer> Date/time format identifer
'description' : <String> Date/time format description
},
...
],
'numberformats' :
[
{
'id' : <Integer> Number format identifier
'description' : <String> Number format description
},
...
],
'countries' :
[
{
'id' : <Integer> Country id
'iso' : <String> Country ISO code
},
...
],
'phonecodes' :
[
{
'countryid' : <Integer> Country id
'name' : <String> Country name
'phonecode' : <String> Area phone code
},
...
]
}"""
return self.request("GET", "reference").json()
def traceroute(self, host, probeid):
"""Perform a traceroute to a specified target from a specified Pingdom
probe.
Provide hostname to check and probeid to check from
Returned structure:
{
'result' : <String> Traceroute output
'probeid' : <Integer> Probe identifier
'probedescription' : <String> Probe description
}
"""
response = self.request('GET', 'traceroute', {'host': host,
'probeid': probeid})
return response.json()['traceroute']
def servertime(self):
"""Get the current time of the API server in UNIX format"""
return self.request('GET', 'servertime').json()['servertime']
def getContacts(self, **kwargs):
"""Returns a list of all contacts.
Optional Parameters:
* limit -- Limits the number of returned contacts to the specified
quantity.
Type: Integer
Default: 100
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
Returned structure:
[
'id' : <Integer> Contact identifier
'name' : <String> Contact name
'email' : <String> Contact email
'cellphone' : <String> Contact telephone
'countryiso' : <String> Cellphone country ISO code
'defaultsmsprovider' : <String> Default SMS provider
'directtwitter' : <Boolean> Send Tweets as direct messages
'twitteruser' : <String> Twitter username
'paused' : <Boolean> True if contact is pasued
'iphonetokens' : <String list> iPhone tokens
'androidtokens' : <String list> android tokens
]
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of getContacts()\n')
return [PingdomContact(self, x) for x in
self.request("GET", "notification_contacts", kwargs).json()['contacts']]
def newContact(self, name, **kwargs):
"""Create a new contact.
Provide new contact name and any optional arguments. Returns new
PingdomContact instance
Optional Parameters:
* email -- Contact email address
Type: String
* cellphone -- Cellphone number, without the country code part. In
some countries you are supposed to exclude leading zeroes.
(Requires countrycode and countryiso)
Type: String
* countrycode -- Cellphone country code (Requires cellphone and
countryiso)
Type: String
* countryiso -- Cellphone country ISO code. For example: US (USA),
GB (Britain) or SE (Sweden) (Requires cellphone and
countrycode)
Type: String
* defaultsmsprovider -- Default SMS provider
Type: String ['clickatell', 'bulksms', 'esendex',
'cellsynt']
* directtwitter -- Send tweets as direct messages
Type: Boolean
Default: True
* twitteruser -- Twitter user
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['email', 'cellphone', 'countrycode', 'countryiso',
'defaultsmsprovider', 'directtwitter',
'twitteruser']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newContact()\n')
kwargs['name'] = name
contactinfo = self.request("POST", "notification_contacts",
kwargs).json()['contact']
return PingdomContact(self, contactinfo)
def modifyContacts(self, contactids, paused):
"""Modifies a list of contacts.
Provide comma separated list of contact ids and desired paused state
Returns status message
"""
response = self.request("PUT", "notification_contacts", {'contactids': contactids,
'paused': paused})
return response.json()['message']
def deleteContacts(self, contactids):
"""Deletes a list of contacts. CANNOT BE REVERSED!
Provide a comma-separated list of contactid's to delete
Returns status message
"""
return self.request("DELETE", "notification_contacts",
{'delcheckids': contactids}).json()['message']
def singleTest(self, host, checktype, **kwargs):
"""Performs a single test using a specified Pingdom probe against a
specified target. Please note that this method is meant to be used
sparingly, not to set up your own monitoring solution.
Provide hostname and check type, followed by any optional arguments.
Types available:
* http
* httpcustom
* tcp
* ping
* dns
* udp
* smtp
* pop3
Optional arguments:
* probeid -- Probe to use for check
Type: Integer
Default: A random probe
See newCheck() docstring for type-specific arguments
Returned structure:
{
'status' : <String> Test result status ['up, 'down']
'responsetime' : <Integer> Response time in milliseconds
'statusdesc' : <String> Short status description
'statusdesclong' : <String> Long status description
'probeid' : <Integer> Probe identifier
'probedesc' : <String> Probe description
}
"""
if checktype == 'http':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata']:
if key.startswith('requestheader') is not True:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'http'\n")
elif checktype == 'httpcustom':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'url',
'encryption', 'port', 'auth', 'additionalurls']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'httpcustom'\n")
elif checktype == 'tcp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtosend', 'stringtoexpect']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'tcp'\n")
elif checktype == 'ping':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'ping'\n")
elif checktype == 'dns':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'expectedip',
'nameserver']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'dns'\n")
elif checktype == 'udp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtosend', 'stringtoexpect']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'udp'\n")
elif checktype == 'smtp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port', 'auth',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'smtp'\n")
elif checktype == 'pop3':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'pop3'\n")
elif checktype == 'imap':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'imap'\n")
else:
raise Exception("Invalid checktype in singleTest()")
parameters = {'host': host, 'type': checktype}
for key, value in kwargs.iteritems():
parameters[key] = value
checkinfo = self.request('GET', "single", parameters)
return checkinfo.json()['result']
def getSettings(self):
"""Returns all account-specific settings.
Returned structure:
{
'firstname' : <String> First name
'lastname' : <String> Last name
'company' : <String> Company
'email' : <String> Email
'phone' : <String> Phone
'phonecountryiso' : <String> Phone country ISO code
'cellphone' : <String> Cellphone
'cellphonecountryiso' : <String> Cellphone country ISO code
'address' : <String> Address line 1
'address2' : <String> Address line 2
'zip' : <String> Zip, postal code or equivalent
'location' : <String> City / location
'state' : <String> State or equivalent
'autologout' : <Boolean> Enable auto-logout
'country' :
{
'name' : <String> Country name
'iso' : <String> Country ISO-code
'countryid' : <Integer> Country identifier
}
'vatcode' : <String> For certain EU countries, VAT-code
'region' : <String> Region
'regionid' : <Integer> Region identifier, see reference
'accountcreated' : <Integer> Account creation timestamp
'timezone' :
{
'id' : <String> Timezone name
'description' : <String> Timezone description
'timezoneid' : <Integer> Timezone identifier
}
'dateformat' : <String> Date format
'timeformat' : <String> Time format
'datetimeformatid' : <Integer> Date/time format identifier
'numberformat' : <String> Number format
'numberformatexample' : <String> Example of number presentation
'numberformatid' : <Integer> Number format identifier
'publicreportscode' : <String> URL code
'settingssaved' : <Boolean> True if user has saved initial
settings in control panel
}
"""
return self.request('GET', 'settings').json()['settings']
def modifySettings(self, **kwargs):
"""Modify account-specific settings.
Returns status message for operation
Optional parameters:
* firstname -- First name
Type: String
* lastname -- Last name
Type: String
* company -- Company
Type: String
* email -- Email (Please note that your email is used for
authentication purposes such as using this API or logging into
the Pingdom Panel)
Type: String
* cellphone -- Cellphone (without country code)
(Requires cellcountrycode and cellcountryiso)
Type: String
* cellcountrycode -- Cellphone country code, for example 1 (USA)
or 46 (Sweden)
Type: Integer
* cellcountryiso -- Cellphone country ISO code, for example
US(USA) or SE (Sweden)
Type: String
* phone -- Phone (without country code) (Requires phonecountrycode
and phonecountryiso)
Type: String
* phonecountrycode -- Phone country code, for example 1 (USA)
or 46 (Sweden)
Type: Integer
* phonecountryiso -- Phone country ISO code, for example US (USA)
or SE (Sweden)
Type: String
* address -- Address line 1
Type: String
* address2 -- Address line 2
Type: String
* zip -- Zip, postal code or equivalent
Type: String
* location -- City / location
Type: String
* state -- State, province or equivalent
Type: String
* countryiso -- Country ISO code, for example US (USA)
or SE (Sweden)
Type: String
* vatcode -- For certain EU countries, VAT-code.
Example: SE123456789
Type: String
* autologout -- Enable auto-logout
Type: Boolean
* regionid -- Region identifier, for localization purposes.
0 for "Custom"/none. See the API resource "Reference" for more
information
Type: Integer
* timezoneid -- Time zone identifier. See the API resource
"Reference" for more information
Type: Integer
* datetimeformatid -- Date/time format identifier. See the API
resource "Reference" for more information
Type: Integer
* numberformatid -- Number format identifier. See the API resource
"Reference" for more information
Type: Integer
* pubrcustomdesign -- Use custom design for public reports
Type: Boolean
* pubrtextcolor -- Public reports, custom text color
(Example: FEFFFE or 99CC00)
Type: String
* pubrbackgroundcolor -- Public reports, background color
(Example: FEFFFE or 99CC00)
Type: String
* pubrlogourl -- Public reports, URL to custom logotype.
This parameter is currently disabled for public use.
(Example: stats.pingdom.com/images/logo.png)
Type: String
* pubrmonths -- Public reports, nuber of months to show
Type: String ['none', 'all', '3']
* pubrshowoverview -- Public reports, enable overview
Type: Boolean
* pubrcustomdomain -- Public reports, custom domain. Must be a DNS
CNAME with target stats.pingdom.com
Type: Boolean
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['firstname', 'lastname', 'company', 'email',
'cellphone', 'cellcountrycode', 'cellcountryiso',
'phone', 'phonecountrycode', 'phonecountryiso',
'address', 'address2', 'zip', 'location', 'state',
'countryiso', 'vatcode', 'autologout', 'regionid',
'timezoneid', 'datetimeformatid', 'numberformatid',
'pubrcustomdesign', 'pubrtextcolor',
'pubrbackgroundcolor', 'pubrlogourl', 'pubrmonths',
'pubrshowoverview', 'pubrcustomdomain']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of modifySettings()\n')
return self.request('PUT', 'settings', kwargs).json()['message']
def getEmailReports(self):
"""Returns a list of PingdomEmailReport instances."""
reports = [PingdomEmailReport(self, x) for x in
self.request('GET',
'reports.email').json()['subscriptions']]
return reports
def newEmailReport(self, name, **kwargs):
"""Creates a new email report
Returns status message for operation
Optional parameters:
* checkid -- Check identifier. If omitted, this will be an
overview report
Type: Integer
* frequency -- Report frequency
Type: String ['monthly', 'weekly', 'daily']
* contactids -- Comma separated list of receiving contact
identifiers
Type: String
* additionalemails -- Comma separated list of additional receiving
emails
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['checkid', 'frequency', 'contactids',
'additionalemails']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newEmailReport()\n')
parameters = {'name': name}
for key, value in kwargs.iteritems():
parameters[key] = value
return self.request('POST', 'reports.email',
parameters).json()['message']
def getPublicReports(self):
"""Returns a list of public (web-based) reports
Returned structure:
[
{
'checkid' : <Integer> Check identifier
'checkname' : <String> Check name
'reporturl' : <String> URL to report
},
...
]
"""
return self.request('GET', 'reports.public').json()['public']
def getSharedReports(self):
"""Returns a list of PingdomSharedReport instances"""
response = self.request('GET',
'reports.shared').json()['shared']['banners']
reports = [PingdomSharedReport(self, x) for x in response]
return reports
def newSharedReport(self, checkid, **kwargs):
"""Create a shared report (banner).
Returns status message for operation
Optional parameters:
* auto -- Automatic period (If false, requires: fromyear,
frommonth, fromday, toyear, tomonth, today)
Type: Boolean
* type -- Banner type
Type: String ['uptime', 'response']
* fromyear -- Period start: year
Type: Integer
* frommonth -- Period start: month
Type: Integer
* fromday -- Period start: day
Type: Integer
* toyear -- Period end: year
Type: Integer
* tomonth -- Period end: month
Type: Integer
* today -- Period end: day
Type: Integer
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['auto', 'type', 'fromyear', 'frommonth', 'fromday',
'toyear', 'tomonth', 'today', 'sharedtype']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newSharedReport()\n')
parameters = {'checkid': checkid, 'sharedtype': 'banner'}
for key, value in kwargs.iteritems():
parameters[key] = value
return self.request('POST', 'reports.shared',
parameters).json()['message']
|
KennethWilke/PingdomLib | pingdomlib/pingdom.py | Pingdom.getChecks | python | def getChecks(self, **parameters):
# Warn user about unhandled parameters
for key in parameters:
if key not in ['limit', 'offset', 'tags']:
sys.stderr.write('%s not a valid argument for getChecks()\n'
% key)
response = self.request('GET', 'checks', parameters)
return [PingdomCheck(self, x) for x in response.json()['checks']] | Pulls all checks from pingdom
Optional Parameters:
* limit -- Limits the number of returned probes to the
specified quantity.
Type: Integer (max 25000)
Default: 25000
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
* tags -- Filter listing by tag/s
Type: String
Default: None | train | https://github.com/KennethWilke/PingdomLib/blob/3ed1e481f9c9d16b032558d62fb05c2166e162ed/pingdomlib/pingdom.py#L191-L219 | [
"def request(self, method, url, parameters=dict()):\n \"\"\"Requests wrapper function\"\"\"\n\n # The requests library uses urllib, which serializes to \"True\"/\"False\" while Pingdom requires lowercase\n parameters = self._serializeBooleans(parameters)\n\n headers = {'App-Key': self.apikey}\n if se... | class Pingdom(object):
"""Main connection object to interact with pingdom
Attributes:
* pushChanges -- This boolean controls if changes are automatically
pushed to pingdom
* shortlimit -- String containing short api rate limit details
* longlimit -- String containing long api rate limit details
"""
def __init__(self, username, password, apikey, accountemail=None,
pushchanges=True, server=server_address):
self.pushChanges = pushchanges
self.username = username
self.password = password
self.apikey = apikey
self.accountemail = accountemail
self.url = '%s/api/%s/' % (server, api_version)
self.shortlimit = ''
self.longlimit = ''
@staticmethod
def _serializeBooleans(params):
""""Convert all booleans to lowercase strings"""
serialized = {}
for name, value in params.items():
if value is True:
value = 'true'
elif value is False:
value = 'false'
serialized[name] = value
return serialized
for k, v in params.items():
if isinstance(v, bool):
params[k] = str(v).lower()
def request(self, method, url, parameters=dict()):
"""Requests wrapper function"""
# The requests library uses urllib, which serializes to "True"/"False" while Pingdom requires lowercase
parameters = self._serializeBooleans(parameters)
headers = {'App-Key': self.apikey}
if self.accountemail:
headers.update({'Account-Email': self.accountemail})
# Method selection handling
if method.upper() == 'GET':
response = requests.get(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'POST':
response = requests.post(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'PUT':
response = requests.put(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'DELETE':
response = requests.delete(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
else:
raise Exception("Invalid method in pingdom request")
# Store pingdom api limits
self.shortlimit = response.headers.get(
'Req-Limit-Short',
self.shortlimit)
self.longlimit = response.headers.get(
'Req-Limit-Long',
self.longlimit)
# Verify OK response
if response.status_code != 200:
sys.stderr.write('ERROR from %s: %d' % (response.url,
response.status_code))
sys.stderr.write('Returned data: %s\n' % response.json())
response.raise_for_status()
return response
def actions(self, **parameters):
"""Returns a list of actions (alerts) that have been generated for
your account.
Optional Parameters:
* from -- Only include actions generated later than this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* to -- Only include actions generated prior to this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* limit -- Limits the number of returned results to the specified
quantity.
Type: Integer (max 300)
Default: 100
* offset -- Offset for listing.
Type: Integer
Default: 0
* checkids -- Comma-separated list of check identifiers. Limit
results to actions generated from these checks.
Type: String
Default: All
* contactids -- Comma-separated list of contact identifiers.
Limit results to actions sent to these contacts.
Type: String
Default: All
* status -- Comma-separated list of statuses. Limit results to
actions with these statuses.
Type: String ['sent', 'delivered', 'error',
'not_delivered', 'no_credits']
Default: All
* via -- Comma-separated list of via mediums. Limit results to
actions with these mediums.
Type: String ['email', 'sms', 'twitter', 'iphone',
'android']
Default: All
Returned structure:
{
'alerts' : [
{
'contactname' : <String> Name of alerted contact
'contactid' : <String> Identifier of alerted contact
'checkid' : <String> Identifier of check
'time' : <Integer> Time of alert generation. Format
UNIX time
'via' : <String> Alert medium ['email', 'sms',
'twitter', 'iphone',
'android']
'status' : <String> Alert status ['sent', 'delivered',
'error',
'notdelivered',
'nocredits']
'messageshort': <String> Short description of message
'messagefull' : <String> Full message body
'sentto' : <String> Target address, phone number, etc
'charged' : <Boolean> True if your account was charged
for this message
},
...
]
}
"""
# Warn user about unhandled parameters
for key in parameters:
if key not in ['from', 'to', 'limit', 'offset', 'checkids',
'contactids', 'status', 'via']:
sys.stderr.write('%s not a valid argument for actions()\n'
% key)
response = self.request('GET', 'actions', parameters)
return response.json()['actions']
def alerts(self, **parameters):
"""A short-hand version of 'actions', returns list of alerts.
See parameters for actions()"""
return self.actions(**parameters)['alerts']
def getCheck(self, checkid):
"""Returns a detailed description of a specified check."""
check = PingdomCheck(self, {'id': checkid})
check.getDetails()
return check
def getResults(self, checkid):
""" Returns detailed results for a specified check id."""
response = self.request('GET','results/%s' % checkid)
return response.json()
def newCheck(self, name, host, checktype='http', **kwargs):
"""Creates a new check with settings specified by provided parameters.
Provide new check name, hostname and type along with any additional
optional parameters passed as keywords. Returns new PingdomCheck
instance
Types available:
* http
* httpcustom
* tcp
* ping
* dns
* udp
* smtp
* pop3
Optional parameters:
* paused -- Check should be paused
Type: Boolean
Default: False
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
Default: 5
* contactids -- Comma separated list of contact IDs
Type: String
Default: None
* sendtoemail -- Send alerts as email
Type: Boolean
Default: False
* sendtosms -- Send alerts as SMS
Type: Boolean
Default: False
* sendtotwitter -- Send alerts through Twitter
Type: Boolean
Default: False
* sendtoiphone -- Send alerts to iPhone
Type: Boolean
Default: False
* sendtoandroid -- Send alerts to Android
Type: Boolean
Default: False
* sendnotificationwhendown -- Send notification when check is down
the given number of times
Type: Integer
Default: 2
* notifyagainevery -- Set how many results to wait for in between
notices
Type: Integer
Default: 0
* notifywhenbackup -- Notify when back up again
Type: Boolean
Default: True
* use_legacy_notifications -- Use the old notifications instead of
BeepManager
Type: Boolean
Default: False
HTTP check options:
* url -- Target path on server
Type: String
Default: /
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* shouldcontain -- Target site should contain this string.
Cannot be combined with 'shouldnotcontain'
Type: String
Default: None
* shouldnotcontain -- Target site should not contain this string.
Cannot be combined with 'shouldcontain'
Type: String
Default: None
* postdata -- Data that should be posted to the web page,
for example submission data for a sign-up or login form.
The data needs to be formatted in the same way as a web browser
would send it to the web server
Type: String
Default: None
* requestheader<NAME> -- Custom HTTP header, replace <NAME> with
desired header name. Header in form: Header:Value
Type: String
Default: None
HTTPCustom check options:
* url -- Target path on server
Type: String
Mandatory
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* additionalurls -- Colon-separated list of additonal URLS with
hostname included
Type: String
Default: None
TCP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
DNS check options:
* expectedip -- Expected IP
Type: String
Mandatory
* nameserver -- Nameserver to check
Type: String
Mandatory
UDP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
SMTP check options:
* port -- Target server port
Type: Integer
Default: 25
* auth -- Username and password for target SMTP authentication.
Example: user:password
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
POP3 check options:
* port -- Target server port
Type: Integer
Default: 110
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
IMAP check options:
* port -- Target server port
Type: Integer
Default: 143
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
"""
if checktype == 'http':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata',
'use_legacy_notifications']:
if key.startswith('requestheader') is not True:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'http'\n")
elif checktype == 'httpcustom':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'additionalurls',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'httpcustom'\n")
elif checktype == 'tcp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'tcp'\n")
elif checktype == 'ping':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'ping'\n")
elif checktype == 'dns':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'expectedip', 'nameserver',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'dns'\n")
elif checktype == 'udp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'udp'\n")
elif checktype == 'smtp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'auth', 'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'smtp'\n")
elif checktype == 'pop3':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'pop3'\n")
elif checktype == 'imap':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'imap'\n")
else:
raise Exception("Invalid checktype in newCheck()")
parameters = {'name': name, 'host': host, 'type': checktype}
for key, value in kwargs.iteritems():
parameters[key] = value
checkinfo = self.request("POST", 'checks', parameters)
return self.getCheck(checkinfo.json()['check']['id'])
def modifyChecks(self, **kwargs):
"""Pause or change resolution for multiple checks in one bulk call.
Parameters:
* paused -- Check should be paused
Type: Boolean
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
* checkids -- Comma-separated list of identifiers for checks to be
modified. Invalid check identifiers will be ignored.
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'checkids']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newCheck()\n')
return self.request("PUT", "checks", kwargs).json()['message']
def deleteChecks(self, checkids):
"""Deletes a list of checks, CANNOT BE REVERSED!
Provide a comma-separated list of checkid's to delete
"""
return self.request("DELETE", "checks",
{'delcheckids': checkids}).json()['message']
def credits(self):
"""Gets credits list"""
return self.request("GET", "credits").json()['credits']
def probes(self, **kwargs):
"""Returns a list of all Pingdom probe servers
Parameters:
* limit -- Limits the number of returned probes to the specified
quantity
Type: Integer
* offset -- Offset for listing (requires limit).
Type: Integer
Default: 0
* onlyactive -- Return only active probes
Type: Boolean
Default: False
* includedeleted -- Include old probes that are no longer in use
Type: Boolean
Default: False
Returned structure:
[
{
'id' : <Integer> Unique probe id
'country' : <String> Country
'city' : <String> City
'name' : <String> Name
'active' : <Boolean> True if probe is active
'hostname' : <String> DNS name
'ip' : <String> IP address
'countryiso': <String> Country ISO code
},
...
]
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset', 'onlyactive', 'includedeleted']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of probes()\n')
return self.request("GET", "probes", kwargs).json()['probes']
def references(self):
"""Get a reference of regions, timezones and date/time/number formats
and their identifiers.
Returned structure:
{
'regions' :
[
{
'id' : <Integer> Region identifier
'description' : <String> Region description
'countryid' : <Integer> Corresponding country
identifier
'datetimeformatid' : <Integer> Corresponding datetimeformat
identifier
'numberformatid' : <Integer> Corresponding numberformat
identifer
'timezoneid' : <Integer> Corresponding timezone
identifier
},
...
],
'timezones' :
[
{
'id' : <Integer> Time zone identifier
'description' : <String> Time zone description
},
...
],
'datetimeformats' :
[
{
'id' : <Integer> Date/time format identifer
'description' : <String> Date/time format description
},
...
],
'numberformats' :
[
{
'id' : <Integer> Number format identifier
'description' : <String> Number format description
},
...
],
'countries' :
[
{
'id' : <Integer> Country id
'iso' : <String> Country ISO code
},
...
],
'phonecodes' :
[
{
'countryid' : <Integer> Country id
'name' : <String> Country name
'phonecode' : <String> Area phone code
},
...
]
}"""
return self.request("GET", "reference").json()
def traceroute(self, host, probeid):
"""Perform a traceroute to a specified target from a specified Pingdom
probe.
Provide hostname to check and probeid to check from
Returned structure:
{
'result' : <String> Traceroute output
'probeid' : <Integer> Probe identifier
'probedescription' : <String> Probe description
}
"""
response = self.request('GET', 'traceroute', {'host': host,
'probeid': probeid})
return response.json()['traceroute']
def servertime(self):
"""Get the current time of the API server in UNIX format"""
return self.request('GET', 'servertime').json()['servertime']
def getContacts(self, **kwargs):
"""Returns a list of all contacts.
Optional Parameters:
* limit -- Limits the number of returned contacts to the specified
quantity.
Type: Integer
Default: 100
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
Returned structure:
[
'id' : <Integer> Contact identifier
'name' : <String> Contact name
'email' : <String> Contact email
'cellphone' : <String> Contact telephone
'countryiso' : <String> Cellphone country ISO code
'defaultsmsprovider' : <String> Default SMS provider
'directtwitter' : <Boolean> Send Tweets as direct messages
'twitteruser' : <String> Twitter username
'paused' : <Boolean> True if contact is pasued
'iphonetokens' : <String list> iPhone tokens
'androidtokens' : <String list> android tokens
]
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of getContacts()\n')
return [PingdomContact(self, x) for x in
self.request("GET", "notification_contacts", kwargs).json()['contacts']]
def newContact(self, name, **kwargs):
"""Create a new contact.
Provide new contact name and any optional arguments. Returns new
PingdomContact instance
Optional Parameters:
* email -- Contact email address
Type: String
* cellphone -- Cellphone number, without the country code part. In
some countries you are supposed to exclude leading zeroes.
(Requires countrycode and countryiso)
Type: String
* countrycode -- Cellphone country code (Requires cellphone and
countryiso)
Type: String
* countryiso -- Cellphone country ISO code. For example: US (USA),
GB (Britain) or SE (Sweden) (Requires cellphone and
countrycode)
Type: String
* defaultsmsprovider -- Default SMS provider
Type: String ['clickatell', 'bulksms', 'esendex',
'cellsynt']
* directtwitter -- Send tweets as direct messages
Type: Boolean
Default: True
* twitteruser -- Twitter user
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['email', 'cellphone', 'countrycode', 'countryiso',
'defaultsmsprovider', 'directtwitter',
'twitteruser']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newContact()\n')
kwargs['name'] = name
contactinfo = self.request("POST", "notification_contacts",
kwargs).json()['contact']
return PingdomContact(self, contactinfo)
def modifyContacts(self, contactids, paused):
"""Modifies a list of contacts.
Provide comma separated list of contact ids and desired paused state
Returns status message
"""
response = self.request("PUT", "notification_contacts", {'contactids': contactids,
'paused': paused})
return response.json()['message']
    def deleteContacts(self, contactids):
        """Deletes a list of contacts. CANNOT BE REVERSED!

        Provide a comma-separated list of contactid's to delete
        Returns status message
        """

        # NOTE(review): the query parameter name 'delcheckids' looks copied
        # from deleteChecks(); verify against the Pingdom API whether the
        # notification_contacts DELETE endpoint really expects 'delcheckids'
        # rather than a contact-specific parameter name.
        return self.request("DELETE", "notification_contacts",
                            {'delcheckids': contactids}).json()['message']
def singleTest(self, host, checktype, **kwargs):
"""Performs a single test using a specified Pingdom probe against a
specified target. Please note that this method is meant to be used
sparingly, not to set up your own monitoring solution.
Provide hostname and check type, followed by any optional arguments.
Types available:
* http
* httpcustom
* tcp
* ping
* dns
* udp
* smtp
* pop3
Optional arguments:
* probeid -- Probe to use for check
Type: Integer
Default: A random probe
See newCheck() docstring for type-specific arguments
Returned structure:
{
'status' : <String> Test result status ['up, 'down']
'responsetime' : <Integer> Response time in milliseconds
'statusdesc' : <String> Short status description
'statusdesclong' : <String> Long status description
'probeid' : <Integer> Probe identifier
'probedesc' : <String> Probe description
}
"""
if checktype == 'http':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata']:
if key.startswith('requestheader') is not True:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'http'\n")
elif checktype == 'httpcustom':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'url',
'encryption', 'port', 'auth', 'additionalurls']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'httpcustom'\n")
elif checktype == 'tcp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtosend', 'stringtoexpect']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'tcp'\n")
elif checktype == 'ping':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'ping'\n")
elif checktype == 'dns':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'expectedip',
'nameserver']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'dns'\n")
elif checktype == 'udp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtosend', 'stringtoexpect']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'udp'\n")
elif checktype == 'smtp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port', 'auth',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'smtp'\n")
elif checktype == 'pop3':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'pop3'\n")
elif checktype == 'imap':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'imap'\n")
else:
raise Exception("Invalid checktype in singleTest()")
parameters = {'host': host, 'type': checktype}
for key, value in kwargs.iteritems():
parameters[key] = value
checkinfo = self.request('GET', "single", parameters)
return checkinfo.json()['result']
def getSettings(self):
"""Returns all account-specific settings.
Returned structure:
{
'firstname' : <String> First name
'lastname' : <String> Last name
'company' : <String> Company
'email' : <String> Email
'phone' : <String> Phone
'phonecountryiso' : <String> Phone country ISO code
'cellphone' : <String> Cellphone
'cellphonecountryiso' : <String> Cellphone country ISO code
'address' : <String> Address line 1
'address2' : <String> Address line 2
'zip' : <String> Zip, postal code or equivalent
'location' : <String> City / location
'state' : <String> State or equivalent
'autologout' : <Boolean> Enable auto-logout
'country' :
{
'name' : <String> Country name
'iso' : <String> Country ISO-code
'countryid' : <Integer> Country identifier
}
'vatcode' : <String> For certain EU countries, VAT-code
'region' : <String> Region
'regionid' : <Integer> Region identifier, see reference
'accountcreated' : <Integer> Account creation timestamp
'timezone' :
{
'id' : <String> Timezone name
'description' : <String> Timezone description
'timezoneid' : <Integer> Timezone identifier
}
'dateformat' : <String> Date format
'timeformat' : <String> Time format
'datetimeformatid' : <Integer> Date/time format identifier
'numberformat' : <String> Number format
'numberformatexample' : <String> Example of number presentation
'numberformatid' : <Integer> Number format identifier
'publicreportscode' : <String> URL code
'settingssaved' : <Boolean> True if user has saved initial
settings in control panel
}
"""
return self.request('GET', 'settings').json()['settings']
def modifySettings(self, **kwargs):
"""Modify account-specific settings.
Returns status message for operation
Optional parameters:
* firstname -- First name
Type: String
* lastname -- Last name
Type: String
* company -- Company
Type: String
* email -- Email (Please note that your email is used for
authentication purposes such as using this API or logging into
the Pingdom Panel)
Type: String
* cellphone -- Cellphone (without country code)
(Requires cellcountrycode and cellcountryiso)
Type: String
* cellcountrycode -- Cellphone country code, for example 1 (USA)
or 46 (Sweden)
Type: Integer
* cellcountryiso -- Cellphone country ISO code, for example
US(USA) or SE (Sweden)
Type: String
* phone -- Phone (without country code) (Requires phonecountrycode
and phonecountryiso)
Type: String
* phonecountrycode -- Phone country code, for example 1 (USA)
or 46 (Sweden)
Type: Integer
* phonecountryiso -- Phone country ISO code, for example US (USA)
or SE (Sweden)
Type: String
* address -- Address line 1
Type: String
* address2 -- Address line 2
Type: String
* zip -- Zip, postal code or equivalent
Type: String
* location -- City / location
Type: String
* state -- State, province or equivalent
Type: String
* countryiso -- Country ISO code, for example US (USA)
or SE (Sweden)
Type: String
* vatcode -- For certain EU countries, VAT-code.
Example: SE123456789
Type: String
* autologout -- Enable auto-logout
Type: Boolean
* regionid -- Region identifier, for localization purposes.
0 for "Custom"/none. See the API resource "Reference" for more
information
Type: Integer
* timezoneid -- Time zone identifier. See the API resource
"Reference" for more information
Type: Integer
* datetimeformatid -- Date/time format identifier. See the API
resource "Reference" for more information
Type: Integer
* numberformatid -- Number format identifier. See the API resource
"Reference" for more information
Type: Integer
* pubrcustomdesign -- Use custom design for public reports
Type: Boolean
* pubrtextcolor -- Public reports, custom text color
(Example: FEFFFE or 99CC00)
Type: String
* pubrbackgroundcolor -- Public reports, background color
(Example: FEFFFE or 99CC00)
Type: String
* pubrlogourl -- Public reports, URL to custom logotype.
This parameter is currently disabled for public use.
(Example: stats.pingdom.com/images/logo.png)
Type: String
* pubrmonths -- Public reports, nuber of months to show
Type: String ['none', 'all', '3']
* pubrshowoverview -- Public reports, enable overview
Type: Boolean
* pubrcustomdomain -- Public reports, custom domain. Must be a DNS
CNAME with target stats.pingdom.com
Type: Boolean
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['firstname', 'lastname', 'company', 'email',
'cellphone', 'cellcountrycode', 'cellcountryiso',
'phone', 'phonecountrycode', 'phonecountryiso',
'address', 'address2', 'zip', 'location', 'state',
'countryiso', 'vatcode', 'autologout', 'regionid',
'timezoneid', 'datetimeformatid', 'numberformatid',
'pubrcustomdesign', 'pubrtextcolor',
'pubrbackgroundcolor', 'pubrlogourl', 'pubrmonths',
'pubrshowoverview', 'pubrcustomdomain']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of modifySettings()\n')
return self.request('PUT', 'settings', kwargs).json()['message']
def getEmailReports(self):
"""Returns a list of PingdomEmailReport instances."""
reports = [PingdomEmailReport(self, x) for x in
self.request('GET',
'reports.email').json()['subscriptions']]
return reports
def newEmailReport(self, name, **kwargs):
"""Creates a new email report
Returns status message for operation
Optional parameters:
* checkid -- Check identifier. If omitted, this will be an
overview report
Type: Integer
* frequency -- Report frequency
Type: String ['monthly', 'weekly', 'daily']
* contactids -- Comma separated list of receiving contact
identifiers
Type: String
* additionalemails -- Comma separated list of additional receiving
emails
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['checkid', 'frequency', 'contactids',
'additionalemails']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newEmailReport()\n')
parameters = {'name': name}
for key, value in kwargs.iteritems():
parameters[key] = value
return self.request('POST', 'reports.email',
parameters).json()['message']
def getPublicReports(self):
"""Returns a list of public (web-based) reports
Returned structure:
[
{
'checkid' : <Integer> Check identifier
'checkname' : <String> Check name
'reporturl' : <String> URL to report
},
...
]
"""
return self.request('GET', 'reports.public').json()['public']
def getSharedReports(self):
"""Returns a list of PingdomSharedReport instances"""
response = self.request('GET',
'reports.shared').json()['shared']['banners']
reports = [PingdomSharedReport(self, x) for x in response]
return reports
def newSharedReport(self, checkid, **kwargs):
"""Create a shared report (banner).
Returns status message for operation
Optional parameters:
* auto -- Automatic period (If false, requires: fromyear,
frommonth, fromday, toyear, tomonth, today)
Type: Boolean
* type -- Banner type
Type: String ['uptime', 'response']
* fromyear -- Period start: year
Type: Integer
* frommonth -- Period start: month
Type: Integer
* fromday -- Period start: day
Type: Integer
* toyear -- Period end: year
Type: Integer
* tomonth -- Period end: month
Type: Integer
* today -- Period end: day
Type: Integer
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['auto', 'type', 'fromyear', 'frommonth', 'fromday',
'toyear', 'tomonth', 'today', 'sharedtype']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newSharedReport()\n')
parameters = {'checkid': checkid, 'sharedtype': 'banner'}
for key, value in kwargs.iteritems():
parameters[key] = value
return self.request('POST', 'reports.shared',
parameters).json()['message']
|
KennethWilke/PingdomLib | pingdomlib/pingdom.py | Pingdom.getCheck | python | def getCheck(self, checkid):
check = PingdomCheck(self, {'id': checkid})
check.getDetails()
return check | Returns a detailed description of a specified check. | train | https://github.com/KennethWilke/PingdomLib/blob/3ed1e481f9c9d16b032558d62fb05c2166e162ed/pingdomlib/pingdom.py#L221-L226 | [
"def getDetails(self):\n \"\"\"Update check details, returns dictionary of details\"\"\"\n\n response = self.pingdom.request('GET', 'checks/%s' % self.id)\n self.__addDetails__(response.json()['check'])\n return response.json()['check']\n"
] | class Pingdom(object):
"""Main connection object to interact with pingdom
Attributes:
* pushChanges -- This boolean controls if changes are automatically
pushed to pingdom
* shortlimit -- String containing short api rate limit details
* longlimit -- String containing long api rate limit details
"""
def __init__(self, username, password, apikey, accountemail=None,
pushchanges=True, server=server_address):
self.pushChanges = pushchanges
self.username = username
self.password = password
self.apikey = apikey
self.accountemail = accountemail
self.url = '%s/api/%s/' % (server, api_version)
self.shortlimit = ''
self.longlimit = ''
@staticmethod
def _serializeBooleans(params):
""""Convert all booleans to lowercase strings"""
serialized = {}
for name, value in params.items():
if value is True:
value = 'true'
elif value is False:
value = 'false'
serialized[name] = value
return serialized
for k, v in params.items():
if isinstance(v, bool):
params[k] = str(v).lower()
def request(self, method, url, parameters=dict()):
"""Requests wrapper function"""
# The requests library uses urllib, which serializes to "True"/"False" while Pingdom requires lowercase
parameters = self._serializeBooleans(parameters)
headers = {'App-Key': self.apikey}
if self.accountemail:
headers.update({'Account-Email': self.accountemail})
# Method selection handling
if method.upper() == 'GET':
response = requests.get(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'POST':
response = requests.post(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'PUT':
response = requests.put(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'DELETE':
response = requests.delete(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
else:
raise Exception("Invalid method in pingdom request")
# Store pingdom api limits
self.shortlimit = response.headers.get(
'Req-Limit-Short',
self.shortlimit)
self.longlimit = response.headers.get(
'Req-Limit-Long',
self.longlimit)
# Verify OK response
if response.status_code != 200:
sys.stderr.write('ERROR from %s: %d' % (response.url,
response.status_code))
sys.stderr.write('Returned data: %s\n' % response.json())
response.raise_for_status()
return response
def actions(self, **parameters):
"""Returns a list of actions (alerts) that have been generated for
your account.
Optional Parameters:
* from -- Only include actions generated later than this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* to -- Only include actions generated prior to this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* limit -- Limits the number of returned results to the specified
quantity.
Type: Integer (max 300)
Default: 100
* offset -- Offset for listing.
Type: Integer
Default: 0
* checkids -- Comma-separated list of check identifiers. Limit
results to actions generated from these checks.
Type: String
Default: All
* contactids -- Comma-separated list of contact identifiers.
Limit results to actions sent to these contacts.
Type: String
Default: All
* status -- Comma-separated list of statuses. Limit results to
actions with these statuses.
Type: String ['sent', 'delivered', 'error',
'not_delivered', 'no_credits']
Default: All
* via -- Comma-separated list of via mediums. Limit results to
actions with these mediums.
Type: String ['email', 'sms', 'twitter', 'iphone',
'android']
Default: All
Returned structure:
{
'alerts' : [
{
'contactname' : <String> Name of alerted contact
'contactid' : <String> Identifier of alerted contact
'checkid' : <String> Identifier of check
'time' : <Integer> Time of alert generation. Format
UNIX time
'via' : <String> Alert medium ['email', 'sms',
'twitter', 'iphone',
'android']
'status' : <String> Alert status ['sent', 'delivered',
'error',
'notdelivered',
'nocredits']
'messageshort': <String> Short description of message
'messagefull' : <String> Full message body
'sentto' : <String> Target address, phone number, etc
'charged' : <Boolean> True if your account was charged
for this message
},
...
]
}
"""
# Warn user about unhandled parameters
for key in parameters:
if key not in ['from', 'to', 'limit', 'offset', 'checkids',
'contactids', 'status', 'via']:
sys.stderr.write('%s not a valid argument for actions()\n'
% key)
response = self.request('GET', 'actions', parameters)
return response.json()['actions']
def alerts(self, **parameters):
"""A short-hand version of 'actions', returns list of alerts.
See parameters for actions()"""
return self.actions(**parameters)['alerts']
def getChecks(self, **parameters):
"""Pulls all checks from pingdom
Optional Parameters:
* limit -- Limits the number of returned probes to the
specified quantity.
Type: Integer (max 25000)
Default: 25000
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
* tags -- Filter listing by tag/s
Type: String
Default: None
"""
# Warn user about unhandled parameters
for key in parameters:
if key not in ['limit', 'offset', 'tags']:
sys.stderr.write('%s not a valid argument for getChecks()\n'
% key)
response = self.request('GET', 'checks', parameters)
return [PingdomCheck(self, x) for x in response.json()['checks']]
def getResults(self, checkid):
""" Returns detailed results for a specified check id."""
response = self.request('GET','results/%s' % checkid)
return response.json()
def newCheck(self, name, host, checktype='http', **kwargs):
"""Creates a new check with settings specified by provided parameters.
Provide new check name, hostname and type along with any additional
optional parameters passed as keywords. Returns new PingdomCheck
instance
Types available:
* http
* httpcustom
* tcp
* ping
* dns
* udp
* smtp
* pop3
Optional parameters:
* paused -- Check should be paused
Type: Boolean
Default: False
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
Default: 5
* contactids -- Comma separated list of contact IDs
Type: String
Default: None
* sendtoemail -- Send alerts as email
Type: Boolean
Default: False
* sendtosms -- Send alerts as SMS
Type: Boolean
Default: False
* sendtotwitter -- Send alerts through Twitter
Type: Boolean
Default: False
* sendtoiphone -- Send alerts to iPhone
Type: Boolean
Default: False
* sendtoandroid -- Send alerts to Android
Type: Boolean
Default: False
* sendnotificationwhendown -- Send notification when check is down
the given number of times
Type: Integer
Default: 2
* notifyagainevery -- Set how many results to wait for in between
notices
Type: Integer
Default: 0
* notifywhenbackup -- Notify when back up again
Type: Boolean
Default: True
* use_legacy_notifications -- Use the old notifications instead of
BeepManager
Type: Boolean
Default: False
HTTP check options:
* url -- Target path on server
Type: String
Default: /
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* shouldcontain -- Target site should contain this string.
Cannot be combined with 'shouldnotcontain'
Type: String
Default: None
* shouldnotcontain -- Target site should not contain this string.
Cannot be combined with 'shouldcontain'
Type: String
Default: None
* postdata -- Data that should be posted to the web page,
for example submission data for a sign-up or login form.
The data needs to be formatted in the same way as a web browser
would send it to the web server
Type: String
Default: None
* requestheader<NAME> -- Custom HTTP header, replace <NAME> with
desired header name. Header in form: Header:Value
Type: String
Default: None
HTTPCustom check options:
* url -- Target path on server
Type: String
Mandatory
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* additionalurls -- Colon-separated list of additonal URLS with
hostname included
Type: String
Default: None
TCP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
DNS check options:
* expectedip -- Expected IP
Type: String
Mandatory
* nameserver -- Nameserver to check
Type: String
Mandatory
UDP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
SMTP check options:
* port -- Target server port
Type: Integer
Default: 25
* auth -- Username and password for target SMTP authentication.
Example: user:password
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
POP3 check options:
* port -- Target server port
Type: Integer
Default: 110
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
IMAP check options:
* port -- Target server port
Type: Integer
Default: 143
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
"""
if checktype == 'http':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata',
'use_legacy_notifications']:
if key.startswith('requestheader') is not True:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'http'\n")
elif checktype == 'httpcustom':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'additionalurls',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'httpcustom'\n")
elif checktype == 'tcp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'tcp'\n")
elif checktype == 'ping':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'ping'\n")
elif checktype == 'dns':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'expectedip', 'nameserver',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'dns'\n")
elif checktype == 'udp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'udp'\n")
elif checktype == 'smtp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'auth', 'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'smtp'\n")
elif checktype == 'pop3':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'pop3'\n")
elif checktype == 'imap':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'imap'\n")
else:
raise Exception("Invalid checktype in newCheck()")
parameters = {'name': name, 'host': host, 'type': checktype}
for key, value in kwargs.iteritems():
parameters[key] = value
checkinfo = self.request("POST", 'checks', parameters)
return self.getCheck(checkinfo.json()['check']['id'])
def modifyChecks(self, **kwargs):
    """Pause or change resolution for multiple checks in one bulk call.

    Parameters:
        * paused -- Check should be paused
                Type: Boolean
        * resolution -- Check resolution time (in minutes)
                Type: Integer [1, 5, 15, 30, 60]
        * checkids -- Comma-separated list of identifiers for checks to be
            modified. Invalid check identifiers will be ignored.
                Type: String

    Returns the status message reported by the API.
    """

    # Warn user about unhandled parameters.  Fixed: the warning previously
    # named "newCheck()" due to a copy-paste error.
    for key in kwargs:
        if key not in ['paused', 'resolution', 'checkids']:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of modifyChecks()\n')

    return self.request("PUT", "checks", kwargs).json()['message']
def deleteChecks(self, checkids):
    """Delete the given checks permanently. CANNOT BE REVERSED!

    `checkids` is a comma-separated list of check identifiers.
    Returns the API status message.
    """
    response = self.request("DELETE", "checks", {'delcheckids': checkids})
    return response.json()['message']
def credits(self):
    """Fetch the account's credits information from the API."""
    response = self.request("GET", "credits")
    return response.json()['credits']
def probes(self, **kwargs):
    """Return a list of all Pingdom probe servers.

    Optional parameters:
        * limit -- cap the number of returned probes (Integer)
        * offset -- listing offset, requires limit (Integer, default 0)
        * onlyactive -- return only active probes (Boolean, default False)
        * includedeleted -- include retired probes (Boolean, default False)

    Each returned entry is a dict with 'id', 'country', 'city', 'name',
    'active', 'hostname', 'ip' and 'countryiso' keys.
    """
    valid = ('limit', 'offset', 'onlyactive', 'includedeleted')
    # Warn (on stderr) about any unrecognized keyword argument.
    for key in kwargs:
        if key not in valid:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of probes()\n')
    return self.request("GET", "probes", kwargs).json()['probes']
def references(self):
    """Fetch reference data for regions, timezones and formats.

    Returns the raw API dict with keys 'regions', 'timezones',
    'datetimeformats', 'numberformats', 'countries' and 'phonecodes';
    each value is a list of dicts pairing numeric identifiers with
    human-readable descriptions (plus country/format cross-references).
    """
    response = self.request("GET", "reference")
    return response.json()
def traceroute(self, host, probeid):
    """Run a traceroute to `host` from the Pingdom probe `probeid`.

    Returns a dict with 'result' (raw traceroute output), 'probeid'
    and 'probedescription'.
    """
    params = {'host': host, 'probeid': probeid}
    response = self.request('GET', 'traceroute', params)
    return response.json()['traceroute']
def servertime(self):
    """Return the API server's current time as a UNIX timestamp."""
    response = self.request('GET', 'servertime')
    return response.json()['servertime']
def getContacts(self, **kwargs):
    """Return all notification contacts as PingdomContact instances.

    Optional parameters:
        * limit -- maximum number of contacts to return
                Type: Integer (default 100)
        * offset -- listing offset, requires limit
                Type: Integer (default 0)
    """
    # Warn (on stderr) about any unrecognized keyword argument.
    for key in kwargs:
        if key not in ('limit', 'offset'):
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of getContacts()\n')
    response = self.request("GET", "notification_contacts", kwargs)
    return [PingdomContact(self, item)
            for item in response.json()['contacts']]
def newContact(self, name, **kwargs):
    """Create a new notification contact named `name`.

    Optional parameters (see the Pingdom API for details):
        * email -- contact email address (String)
        * cellphone -- cellphone number without country code; requires
          countrycode and countryiso (String)
        * countrycode -- cellphone country code (String)
        * countryiso -- cellphone country ISO code, e.g. US, GB, SE (String)
        * defaultsmsprovider -- one of 'clickatell', 'bulksms',
          'esendex', 'cellsynt' (String)
        * directtwitter -- send tweets as direct messages (Boolean)
        * twitteruser -- Twitter user (String)

    Returns a PingdomContact instance for the new contact.
    """
    valid = ('email', 'cellphone', 'countrycode', 'countryiso',
             'defaultsmsprovider', 'directtwitter', 'twitteruser')
    # Warn (on stderr) about any unrecognized keyword argument.
    for key in kwargs:
        if key not in valid:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of newContact()\n')
    kwargs['name'] = name
    response = self.request("POST", "notification_contacts", kwargs)
    return PingdomContact(self, response.json()['contact'])
def modifyContacts(self, contactids, paused):
    """Set the paused state of several contacts at once.

    `contactids` is a comma-separated list of contact identifiers.
    Returns the API status message.
    """
    payload = {'contactids': contactids, 'paused': paused}
    response = self.request("PUT", "notification_contacts", payload)
    return response.json()['message']
def deleteContacts(self, contactids):
        """Delete the given notification contacts. CANNOT BE REVERSED!

        Provide a comma-separated list of contact ids to delete.
        Returns the API status message.
        """
        # NOTE(review): the request parameter is 'delcheckids', the same
        # name deleteChecks() uses for *check* ids -- looks like a
        # copy-paste; the contacts endpoint may expect a different name
        # (e.g. 'delcontactids').  Confirm against the Pingdom API docs.
        return self.request("DELETE", "notification_contacts",
                            {'delcheckids': contactids}).json()['message']
def singleTest(self, host, checktype, **kwargs):
    """Perform a single test from a Pingdom probe against a target host.

    Please note that this method is meant to be used sparingly, not to
    set up your own monitoring solution.

    Arguments:
        host -- hostname to test
        checktype -- one of 'http', 'httpcustom', 'tcp', 'ping', 'dns',
            'udp', 'smtp', 'pop3', 'imap'

    Optional keyword arguments:
        probeid -- probe to use for the check (Integer; default: a
            random probe)
        ...plus the type-specific options accepted by newCheck() for
        the chosen check type (see its docstring).

    Returns a dict with 'status' ('up'/'down'), 'responsetime' (ms),
    'statusdesc', 'statusdesclong', 'probeid' and 'probedesc'.

    Raises Exception for an unknown checktype.
    """
    # Allowed keyword arguments per check type.  This replaces nine
    # near-identical if/elif validation branches.
    validkeys = {
        'http': ['probeid', 'url', 'encryption', 'port', 'auth',
                 'shouldcontain', 'shouldnotcontain', 'postdata'],
        'httpcustom': ['probeid', 'url', 'encryption', 'port', 'auth',
                       'additionalurls'],
        'tcp': ['probeid', 'port', 'stringtosend', 'stringtoexpect'],
        'ping': ['probeid'],
        'dns': ['probeid', 'expectedip', 'nameserver'],
        'udp': ['probeid', 'port', 'stringtosend', 'stringtoexpect'],
        'smtp': ['probeid', 'port', 'auth', 'stringtoexpect',
                 'encryption'],
        'pop3': ['probeid', 'port', 'stringtoexpect', 'encryption'],
        'imap': ['probeid', 'port', 'stringtoexpect', 'encryption'],
    }
    if checktype not in validkeys:
        raise Exception("Invalid checktype in singleTest()")

    # Warn user about unhandled parameters
    allowed = validkeys[checktype]
    for key in kwargs:
        if key in allowed:
            continue
        # Custom 'requestheader<NAME>' arguments are valid only for
        # plain http checks.
        if checktype == 'http' and key.startswith('requestheader'):
            continue
        sys.stderr.write("'%s'" % key + ' is not a valid ' +
                         'argument of singleTest() for type ' +
                         "'%s'\n" % checktype)

    parameters = {'host': host, 'type': checktype}
    # .items() instead of the Python-2-only .iteritems() so the method
    # works under Python 3 as well.
    for key, value in kwargs.items():
        parameters[key] = value
    return self.request('GET', "single", parameters).json()['result']
def getSettings(self):
    """Fetch all account-specific settings.

    Returns the API 'settings' dict: personal details (firstname,
    lastname, company, email, phone/cellphone with ISO country codes,
    address fields), locale information ('country', 'region',
    'timezone', date/time/number format fields), 'autologout',
    'vatcode', 'accountcreated', 'publicreportscode' and
    'settingssaved'.
    """
    response = self.request('GET', 'settings')
    return response.json()['settings']
def modifySettings(self, **kwargs):
    """Modify account-specific settings; returns the API status message.

    Accepted keyword arguments (see the Pingdom API for semantics):
    personal details -- firstname, lastname, company, email, cellphone,
    cellcountrycode, cellcountryiso, phone, phonecountrycode,
    phonecountryiso, address, address2, zip, location, state,
    countryiso, vatcode; behavior/locale -- autologout, regionid,
    timezoneid, datetimeformatid, numberformatid; public reports --
    pubrcustomdesign, pubrtextcolor, pubrbackgroundcolor, pubrlogourl,
    pubrmonths, pubrshowoverview, pubrcustomdomain.
    """
    valid = frozenset([
        'firstname', 'lastname', 'company', 'email',
        'cellphone', 'cellcountrycode', 'cellcountryiso',
        'phone', 'phonecountrycode', 'phonecountryiso',
        'address', 'address2', 'zip', 'location', 'state',
        'countryiso', 'vatcode', 'autologout', 'regionid',
        'timezoneid', 'datetimeformatid', 'numberformatid',
        'pubrcustomdesign', 'pubrtextcolor', 'pubrbackgroundcolor',
        'pubrlogourl', 'pubrmonths', 'pubrshowoverview',
        'pubrcustomdomain',
    ])
    # Warn (on stderr) about any unrecognized keyword argument.
    for key in kwargs:
        if key not in valid:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of modifySettings()\n')
    return self.request('PUT', 'settings', kwargs).json()['message']
def getEmailReports(self):
    """Return all email report subscriptions as PingdomEmailReport objects."""
    subscriptions = self.request('GET', 'reports.email')
    return [PingdomEmailReport(self, sub)
            for sub in subscriptions.json()['subscriptions']]
def newEmailReport(self, name, **kwargs):
    """Create a new email report named `name`.

    Optional parameters:
        * checkid -- check identifier; if omitted, this will be an
          overview report (Integer)
        * frequency -- 'monthly', 'weekly' or 'daily' (String)
        * contactids -- comma-separated receiving contact identifiers
          (String)
        * additionalemails -- comma-separated additional receiving
          emails (String)

    Returns the API status message.
    """
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['checkid', 'frequency', 'contactids',
                       'additionalemails']:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of newEmailReport()\n')
    parameters = {'name': name}
    # .items() instead of the Python-2-only .iteritems() so the method
    # works under Python 3 as well.
    for key, value in kwargs.items():
        parameters[key] = value
    return self.request('POST', 'reports.email',
                        parameters).json()['message']
def getPublicReports(self):
    """Return the list of public (web-based) reports.

    Each entry is a dict with 'checkid', 'checkname' and 'reporturl'.
    """
    response = self.request('GET', 'reports.public')
    return response.json()['public']
def getSharedReports(self):
    """Return all shared reports (banners) as PingdomSharedReport objects."""
    banners = self.request('GET',
                           'reports.shared').json()['shared']['banners']
    return [PingdomSharedReport(self, banner) for banner in banners]
def newSharedReport(self, checkid, **kwargs):
    """Create a shared report (banner) for check `checkid`.

    Optional parameters:
        * auto -- automatic period; if False, requires fromyear,
          frommonth, fromday, toyear, tomonth and today (Boolean)
        * type -- banner type: 'uptime' or 'response' (String)
        * fromyear / frommonth / fromday -- period start (Integer)
        * toyear / tomonth / today -- period end (Integer)

    Returns the API status message.
    """
    valid = ['auto', 'type', 'fromyear', 'frommonth', 'fromday',
             'toyear', 'tomonth', 'today', 'sharedtype']
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in valid:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of newSharedReport()\n')
    parameters = {'checkid': checkid, 'sharedtype': 'banner'}
    # .items() instead of the Python-2-only .iteritems() so the method
    # works under Python 3 as well.
    for key, value in kwargs.items():
        parameters[key] = value
    return self.request('POST', 'reports.shared',
                        parameters).json()['message']
|
KennethWilke/PingdomLib | pingdomlib/pingdom.py | Pingdom.probes | python | def probes(self, **kwargs):
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset', 'onlyactive', 'includedeleted']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of probes()\n')
return self.request("GET", "probes", kwargs).json()['probes'] | Returns a list of all Pingdom probe servers
Parameters:
* limit -- Limits the number of returned probes to the specified
quantity
Type: Integer
* offset -- Offset for listing (requires limit).
Type: Integer
Default: 0
* onlyactive -- Return only active probes
Type: Boolean
Default: False
* includedeleted -- Include old probes that are no longer in use
Type: Boolean
Default: False
Returned structure:
[
{
'id' : <Integer> Unique probe id
'country' : <String> Country
'city' : <String> City
'name' : <String> Name
'active' : <Boolean> True if probe is active
'hostname' : <String> DNS name
'ip' : <String> IP address
'countryiso': <String> Country ISO code
},
...
] | train | https://github.com/KennethWilke/PingdomLib/blob/3ed1e481f9c9d16b032558d62fb05c2166e162ed/pingdomlib/pingdom.py#L621-L664 | [
"def request(self, method, url, parameters=dict()):\n \"\"\"Requests wrapper function\"\"\"\n\n # The requests library uses urllib, which serializes to \"True\"/\"False\" while Pingdom requires lowercase\n parameters = self._serializeBooleans(parameters)\n\n headers = {'App-Key': self.apikey}\n if se... | class Pingdom(object):
"""Main connection object to interact with pingdom
Attributes:
* pushChanges -- This boolean controls if changes are automatically
pushed to pingdom
* shortlimit -- String containing short api rate limit details
* longlimit -- String containing long api rate limit details
"""
def __init__(self, username, password, apikey, accountemail=None,
pushchanges=True, server=server_address):
self.pushChanges = pushchanges
self.username = username
self.password = password
self.apikey = apikey
self.accountemail = accountemail
self.url = '%s/api/%s/' % (server, api_version)
self.shortlimit = ''
self.longlimit = ''
@staticmethod
def _serializeBooleans(params):
""""Convert all booleans to lowercase strings"""
serialized = {}
for name, value in params.items():
if value is True:
value = 'true'
elif value is False:
value = 'false'
serialized[name] = value
return serialized
for k, v in params.items():
if isinstance(v, bool):
params[k] = str(v).lower()
def request(self, method, url, parameters=dict()):
"""Requests wrapper function"""
# The requests library uses urllib, which serializes to "True"/"False" while Pingdom requires lowercase
parameters = self._serializeBooleans(parameters)
headers = {'App-Key': self.apikey}
if self.accountemail:
headers.update({'Account-Email': self.accountemail})
# Method selection handling
if method.upper() == 'GET':
response = requests.get(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'POST':
response = requests.post(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'PUT':
response = requests.put(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'DELETE':
response = requests.delete(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
else:
raise Exception("Invalid method in pingdom request")
# Store pingdom api limits
self.shortlimit = response.headers.get(
'Req-Limit-Short',
self.shortlimit)
self.longlimit = response.headers.get(
'Req-Limit-Long',
self.longlimit)
# Verify OK response
if response.status_code != 200:
sys.stderr.write('ERROR from %s: %d' % (response.url,
response.status_code))
sys.stderr.write('Returned data: %s\n' % response.json())
response.raise_for_status()
return response
def actions(self, **parameters):
"""Returns a list of actions (alerts) that have been generated for
your account.
Optional Parameters:
* from -- Only include actions generated later than this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* to -- Only include actions generated prior to this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* limit -- Limits the number of returned results to the specified
quantity.
Type: Integer (max 300)
Default: 100
* offset -- Offset for listing.
Type: Integer
Default: 0
* checkids -- Comma-separated list of check identifiers. Limit
results to actions generated from these checks.
Type: String
Default: All
* contactids -- Comma-separated list of contact identifiers.
Limit results to actions sent to these contacts.
Type: String
Default: All
* status -- Comma-separated list of statuses. Limit results to
actions with these statuses.
Type: String ['sent', 'delivered', 'error',
'not_delivered', 'no_credits']
Default: All
* via -- Comma-separated list of via mediums. Limit results to
actions with these mediums.
Type: String ['email', 'sms', 'twitter', 'iphone',
'android']
Default: All
Returned structure:
{
'alerts' : [
{
'contactname' : <String> Name of alerted contact
'contactid' : <String> Identifier of alerted contact
'checkid' : <String> Identifier of check
'time' : <Integer> Time of alert generation. Format
UNIX time
'via' : <String> Alert medium ['email', 'sms',
'twitter', 'iphone',
'android']
'status' : <String> Alert status ['sent', 'delivered',
'error',
'notdelivered',
'nocredits']
'messageshort': <String> Short description of message
'messagefull' : <String> Full message body
'sentto' : <String> Target address, phone number, etc
'charged' : <Boolean> True if your account was charged
for this message
},
...
]
}
"""
# Warn user about unhandled parameters
for key in parameters:
if key not in ['from', 'to', 'limit', 'offset', 'checkids',
'contactids', 'status', 'via']:
sys.stderr.write('%s not a valid argument for actions()\n'
% key)
response = self.request('GET', 'actions', parameters)
return response.json()['actions']
def alerts(self, **parameters):
"""A short-hand version of 'actions', returns list of alerts.
See parameters for actions()"""
return self.actions(**parameters)['alerts']
def getChecks(self, **parameters):
"""Pulls all checks from pingdom
Optional Parameters:
* limit -- Limits the number of returned probes to the
specified quantity.
Type: Integer (max 25000)
Default: 25000
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
* tags -- Filter listing by tag/s
Type: String
Default: None
"""
# Warn user about unhandled parameters
for key in parameters:
if key not in ['limit', 'offset', 'tags']:
sys.stderr.write('%s not a valid argument for getChecks()\n'
% key)
response = self.request('GET', 'checks', parameters)
return [PingdomCheck(self, x) for x in response.json()['checks']]
def getCheck(self, checkid):
"""Returns a detailed description of a specified check."""
check = PingdomCheck(self, {'id': checkid})
check.getDetails()
return check
def getResults(self, checkid):
""" Returns detailed results for a specified check id."""
response = self.request('GET','results/%s' % checkid)
return response.json()
def newCheck(self, name, host, checktype='http', **kwargs):
"""Creates a new check with settings specified by provided parameters.
Provide new check name, hostname and type along with any additional
optional parameters passed as keywords. Returns new PingdomCheck
instance
Types available:
* http
* httpcustom
* tcp
* ping
* dns
* udp
* smtp
* pop3
Optional parameters:
* paused -- Check should be paused
Type: Boolean
Default: False
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
Default: 5
* contactids -- Comma separated list of contact IDs
Type: String
Default: None
* sendtoemail -- Send alerts as email
Type: Boolean
Default: False
* sendtosms -- Send alerts as SMS
Type: Boolean
Default: False
* sendtotwitter -- Send alerts through Twitter
Type: Boolean
Default: False
* sendtoiphone -- Send alerts to iPhone
Type: Boolean
Default: False
* sendtoandroid -- Send alerts to Android
Type: Boolean
Default: False
* sendnotificationwhendown -- Send notification when check is down
the given number of times
Type: Integer
Default: 2
* notifyagainevery -- Set how many results to wait for in between
notices
Type: Integer
Default: 0
* notifywhenbackup -- Notify when back up again
Type: Boolean
Default: True
* use_legacy_notifications -- Use the old notifications instead of
BeepManager
Type: Boolean
Default: False
HTTP check options:
* url -- Target path on server
Type: String
Default: /
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* shouldcontain -- Target site should contain this string.
Cannot be combined with 'shouldnotcontain'
Type: String
Default: None
* shouldnotcontain -- Target site should not contain this string.
Cannot be combined with 'shouldcontain'
Type: String
Default: None
* postdata -- Data that should be posted to the web page,
for example submission data for a sign-up or login form.
The data needs to be formatted in the same way as a web browser
would send it to the web server
Type: String
Default: None
* requestheader<NAME> -- Custom HTTP header, replace <NAME> with
desired header name. Header in form: Header:Value
Type: String
Default: None
HTTPCustom check options:
* url -- Target path on server
Type: String
Mandatory
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* additionalurls -- Colon-separated list of additonal URLS with
hostname included
Type: String
Default: None
TCP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
DNS check options:
* expectedip -- Expected IP
Type: String
Mandatory
* nameserver -- Nameserver to check
Type: String
Mandatory
UDP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
SMTP check options:
* port -- Target server port
Type: Integer
Default: 25
* auth -- Username and password for target SMTP authentication.
Example: user:password
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
POP3 check options:
* port -- Target server port
Type: Integer
Default: 110
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
IMAP check options:
* port -- Target server port
Type: Integer
Default: 143
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
"""
if checktype == 'http':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata',
'use_legacy_notifications']:
if key.startswith('requestheader') is not True:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'http'\n")
elif checktype == 'httpcustom':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'additionalurls',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'httpcustom'\n")
elif checktype == 'tcp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'tcp'\n")
elif checktype == 'ping':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'ping'\n")
elif checktype == 'dns':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'expectedip', 'nameserver',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'dns'\n")
elif checktype == 'udp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'udp'\n")
elif checktype == 'smtp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'auth', 'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'smtp'\n")
elif checktype == 'pop3':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'pop3'\n")
elif checktype == 'imap':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'imap'\n")
else:
raise Exception("Invalid checktype in newCheck()")
parameters = {'name': name, 'host': host, 'type': checktype}
for key, value in kwargs.iteritems():
parameters[key] = value
checkinfo = self.request("POST", 'checks', parameters)
return self.getCheck(checkinfo.json()['check']['id'])
def modifyChecks(self, **kwargs):
"""Pause or change resolution for multiple checks in one bulk call.
Parameters:
* paused -- Check should be paused
Type: Boolean
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
* checkids -- Comma-separated list of identifiers for checks to be
modified. Invalid check identifiers will be ignored.
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'checkids']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newCheck()\n')
return self.request("PUT", "checks", kwargs).json()['message']
def deleteChecks(self, checkids):
"""Deletes a list of checks, CANNOT BE REVERSED!
Provide a comma-separated list of checkid's to delete
"""
return self.request("DELETE", "checks",
{'delcheckids': checkids}).json()['message']
def credits(self):
"""Gets credits list"""
return self.request("GET", "credits").json()['credits']
def references(self):
"""Get a reference of regions, timezones and date/time/number formats
and their identifiers.
Returned structure:
{
'regions' :
[
{
'id' : <Integer> Region identifier
'description' : <String> Region description
'countryid' : <Integer> Corresponding country
identifier
'datetimeformatid' : <Integer> Corresponding datetimeformat
identifier
'numberformatid' : <Integer> Corresponding numberformat
identifer
'timezoneid' : <Integer> Corresponding timezone
identifier
},
...
],
'timezones' :
[
{
'id' : <Integer> Time zone identifier
'description' : <String> Time zone description
},
...
],
'datetimeformats' :
[
{
'id' : <Integer> Date/time format identifer
'description' : <String> Date/time format description
},
...
],
'numberformats' :
[
{
'id' : <Integer> Number format identifier
'description' : <String> Number format description
},
...
],
'countries' :
[
{
'id' : <Integer> Country id
'iso' : <String> Country ISO code
},
...
],
'phonecodes' :
[
{
'countryid' : <Integer> Country id
'name' : <String> Country name
'phonecode' : <String> Area phone code
},
...
]
}"""
return self.request("GET", "reference").json()
def traceroute(self, host, probeid):
"""Perform a traceroute to a specified target from a specified Pingdom
probe.
Provide hostname to check and probeid to check from
Returned structure:
{
'result' : <String> Traceroute output
'probeid' : <Integer> Probe identifier
'probedescription' : <String> Probe description
}
"""
response = self.request('GET', 'traceroute', {'host': host,
'probeid': probeid})
return response.json()['traceroute']
def servertime(self):
"""Get the current time of the API server in UNIX format"""
return self.request('GET', 'servertime').json()['servertime']
def getContacts(self, **kwargs):
"""Returns a list of all contacts.
Optional Parameters:
* limit -- Limits the number of returned contacts to the specified
quantity.
Type: Integer
Default: 100
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
Returned structure:
[
'id' : <Integer> Contact identifier
'name' : <String> Contact name
'email' : <String> Contact email
'cellphone' : <String> Contact telephone
'countryiso' : <String> Cellphone country ISO code
'defaultsmsprovider' : <String> Default SMS provider
'directtwitter' : <Boolean> Send Tweets as direct messages
'twitteruser' : <String> Twitter username
'paused' : <Boolean> True if contact is pasued
'iphonetokens' : <String list> iPhone tokens
'androidtokens' : <String list> android tokens
]
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of getContacts()\n')
return [PingdomContact(self, x) for x in
self.request("GET", "notification_contacts", kwargs).json()['contacts']]
def newContact(self, name, **kwargs):
"""Create a new contact.
Provide new contact name and any optional arguments. Returns new
PingdomContact instance
Optional Parameters:
* email -- Contact email address
Type: String
* cellphone -- Cellphone number, without the country code part. In
some countries you are supposed to exclude leading zeroes.
(Requires countrycode and countryiso)
Type: String
* countrycode -- Cellphone country code (Requires cellphone and
countryiso)
Type: String
* countryiso -- Cellphone country ISO code. For example: US (USA),
GB (Britain) or SE (Sweden) (Requires cellphone and
countrycode)
Type: String
* defaultsmsprovider -- Default SMS provider
Type: String ['clickatell', 'bulksms', 'esendex',
'cellsynt']
* directtwitter -- Send tweets as direct messages
Type: Boolean
Default: True
* twitteruser -- Twitter user
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['email', 'cellphone', 'countrycode', 'countryiso',
'defaultsmsprovider', 'directtwitter',
'twitteruser']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newContact()\n')
kwargs['name'] = name
contactinfo = self.request("POST", "notification_contacts",
kwargs).json()['contact']
return PingdomContact(self, contactinfo)
def modifyContacts(self, contactids, paused):
"""Modifies a list of contacts.
Provide comma separated list of contact ids and desired paused state
Returns status message
"""
response = self.request("PUT", "notification_contacts", {'contactids': contactids,
'paused': paused})
return response.json()['message']
    def deleteContacts(self, contactids):
        """Deletes a list of contacts. CANNOT BE REVERSED!

        Provide a comma-separated list of contactid's to delete.

        Returns status message
        """

        # NOTE(review): the request parameter is named 'delcheckids' even
        # though this endpoint deletes *contacts* -- it looks copy-pasted
        # from deleteChecks(); confirm against the Pingdom API docs before
        # changing it.
        return self.request("DELETE", "notification_contacts",
                            {'delcheckids': contactids}).json()['message']
def singleTest(self, host, checktype, **kwargs):
"""Performs a single test using a specified Pingdom probe against a
specified target. Please note that this method is meant to be used
sparingly, not to set up your own monitoring solution.
Provide hostname and check type, followed by any optional arguments.
Types available:
* http
* httpcustom
* tcp
* ping
* dns
* udp
* smtp
* pop3
Optional arguments:
* probeid -- Probe to use for check
Type: Integer
Default: A random probe
See newCheck() docstring for type-specific arguments
Returned structure:
{
'status' : <String> Test result status ['up, 'down']
'responsetime' : <Integer> Response time in milliseconds
'statusdesc' : <String> Short status description
'statusdesclong' : <String> Long status description
'probeid' : <Integer> Probe identifier
'probedesc' : <String> Probe description
}
"""
if checktype == 'http':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata']:
if key.startswith('requestheader') is not True:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'http'\n")
elif checktype == 'httpcustom':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'url',
'encryption', 'port', 'auth', 'additionalurls']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'httpcustom'\n")
elif checktype == 'tcp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtosend', 'stringtoexpect']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'tcp'\n")
elif checktype == 'ping':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'ping'\n")
elif checktype == 'dns':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'expectedip',
'nameserver']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'dns'\n")
elif checktype == 'udp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtosend', 'stringtoexpect']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'udp'\n")
elif checktype == 'smtp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port', 'auth',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'smtp'\n")
elif checktype == 'pop3':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'pop3'\n")
elif checktype == 'imap':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'imap'\n")
else:
raise Exception("Invalid checktype in singleTest()")
parameters = {'host': host, 'type': checktype}
for key, value in kwargs.iteritems():
parameters[key] = value
checkinfo = self.request('GET', "single", parameters)
return checkinfo.json()['result']
def getSettings(self):
"""Returns all account-specific settings.
Returned structure:
{
'firstname' : <String> First name
'lastname' : <String> Last name
'company' : <String> Company
'email' : <String> Email
'phone' : <String> Phone
'phonecountryiso' : <String> Phone country ISO code
'cellphone' : <String> Cellphone
'cellphonecountryiso' : <String> Cellphone country ISO code
'address' : <String> Address line 1
'address2' : <String> Address line 2
'zip' : <String> Zip, postal code or equivalent
'location' : <String> City / location
'state' : <String> State or equivalent
'autologout' : <Boolean> Enable auto-logout
'country' :
{
'name' : <String> Country name
'iso' : <String> Country ISO-code
'countryid' : <Integer> Country identifier
}
'vatcode' : <String> For certain EU countries, VAT-code
'region' : <String> Region
'regionid' : <Integer> Region identifier, see reference
'accountcreated' : <Integer> Account creation timestamp
'timezone' :
{
'id' : <String> Timezone name
'description' : <String> Timezone description
'timezoneid' : <Integer> Timezone identifier
}
'dateformat' : <String> Date format
'timeformat' : <String> Time format
'datetimeformatid' : <Integer> Date/time format identifier
'numberformat' : <String> Number format
'numberformatexample' : <String> Example of number presentation
'numberformatid' : <Integer> Number format identifier
'publicreportscode' : <String> URL code
'settingssaved' : <Boolean> True if user has saved initial
settings in control panel
}
"""
return self.request('GET', 'settings').json()['settings']
def modifySettings(self, **kwargs):
"""Modify account-specific settings.
Returns status message for operation
Optional parameters:
* firstname -- First name
Type: String
* lastname -- Last name
Type: String
* company -- Company
Type: String
* email -- Email (Please note that your email is used for
authentication purposes such as using this API or logging into
the Pingdom Panel)
Type: String
* cellphone -- Cellphone (without country code)
(Requires cellcountrycode and cellcountryiso)
Type: String
* cellcountrycode -- Cellphone country code, for example 1 (USA)
or 46 (Sweden)
Type: Integer
* cellcountryiso -- Cellphone country ISO code, for example
US(USA) or SE (Sweden)
Type: String
* phone -- Phone (without country code) (Requires phonecountrycode
and phonecountryiso)
Type: String
* phonecountrycode -- Phone country code, for example 1 (USA)
or 46 (Sweden)
Type: Integer
* phonecountryiso -- Phone country ISO code, for example US (USA)
or SE (Sweden)
Type: String
* address -- Address line 1
Type: String
* address2 -- Address line 2
Type: String
* zip -- Zip, postal code or equivalent
Type: String
* location -- City / location
Type: String
* state -- State, province or equivalent
Type: String
* countryiso -- Country ISO code, for example US (USA)
or SE (Sweden)
Type: String
* vatcode -- For certain EU countries, VAT-code.
Example: SE123456789
Type: String
* autologout -- Enable auto-logout
Type: Boolean
* regionid -- Region identifier, for localization purposes.
0 for "Custom"/none. See the API resource "Reference" for more
information
Type: Integer
* timezoneid -- Time zone identifier. See the API resource
"Reference" for more information
Type: Integer
* datetimeformatid -- Date/time format identifier. See the API
resource "Reference" for more information
Type: Integer
* numberformatid -- Number format identifier. See the API resource
"Reference" for more information
Type: Integer
* pubrcustomdesign -- Use custom design for public reports
Type: Boolean
* pubrtextcolor -- Public reports, custom text color
(Example: FEFFFE or 99CC00)
Type: String
* pubrbackgroundcolor -- Public reports, background color
(Example: FEFFFE or 99CC00)
Type: String
* pubrlogourl -- Public reports, URL to custom logotype.
This parameter is currently disabled for public use.
(Example: stats.pingdom.com/images/logo.png)
Type: String
* pubrmonths -- Public reports, nuber of months to show
Type: String ['none', 'all', '3']
* pubrshowoverview -- Public reports, enable overview
Type: Boolean
* pubrcustomdomain -- Public reports, custom domain. Must be a DNS
CNAME with target stats.pingdom.com
Type: Boolean
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['firstname', 'lastname', 'company', 'email',
'cellphone', 'cellcountrycode', 'cellcountryiso',
'phone', 'phonecountrycode', 'phonecountryiso',
'address', 'address2', 'zip', 'location', 'state',
'countryiso', 'vatcode', 'autologout', 'regionid',
'timezoneid', 'datetimeformatid', 'numberformatid',
'pubrcustomdesign', 'pubrtextcolor',
'pubrbackgroundcolor', 'pubrlogourl', 'pubrmonths',
'pubrshowoverview', 'pubrcustomdomain']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of modifySettings()\n')
return self.request('PUT', 'settings', kwargs).json()['message']
def getEmailReports(self):
"""Returns a list of PingdomEmailReport instances."""
reports = [PingdomEmailReport(self, x) for x in
self.request('GET',
'reports.email').json()['subscriptions']]
return reports
def newEmailReport(self, name, **kwargs):
"""Creates a new email report
Returns status message for operation
Optional parameters:
* checkid -- Check identifier. If omitted, this will be an
overview report
Type: Integer
* frequency -- Report frequency
Type: String ['monthly', 'weekly', 'daily']
* contactids -- Comma separated list of receiving contact
identifiers
Type: String
* additionalemails -- Comma separated list of additional receiving
emails
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['checkid', 'frequency', 'contactids',
'additionalemails']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newEmailReport()\n')
parameters = {'name': name}
for key, value in kwargs.iteritems():
parameters[key] = value
return self.request('POST', 'reports.email',
parameters).json()['message']
def getPublicReports(self):
"""Returns a list of public (web-based) reports
Returned structure:
[
{
'checkid' : <Integer> Check identifier
'checkname' : <String> Check name
'reporturl' : <String> URL to report
},
...
]
"""
return self.request('GET', 'reports.public').json()['public']
def getSharedReports(self):
"""Returns a list of PingdomSharedReport instances"""
response = self.request('GET',
'reports.shared').json()['shared']['banners']
reports = [PingdomSharedReport(self, x) for x in response]
return reports
def newSharedReport(self, checkid, **kwargs):
"""Create a shared report (banner).
Returns status message for operation
Optional parameters:
* auto -- Automatic period (If false, requires: fromyear,
frommonth, fromday, toyear, tomonth, today)
Type: Boolean
* type -- Banner type
Type: String ['uptime', 'response']
* fromyear -- Period start: year
Type: Integer
* frommonth -- Period start: month
Type: Integer
* fromday -- Period start: day
Type: Integer
* toyear -- Period end: year
Type: Integer
* tomonth -- Period end: month
Type: Integer
* today -- Period end: day
Type: Integer
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['auto', 'type', 'fromyear', 'frommonth', 'fromday',
'toyear', 'tomonth', 'today', 'sharedtype']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newSharedReport()\n')
parameters = {'checkid': checkid, 'sharedtype': 'banner'}
for key, value in kwargs.iteritems():
parameters[key] = value
return self.request('POST', 'reports.shared',
parameters).json()['message']
|
KennethWilke/PingdomLib | pingdomlib/pingdom.py | Pingdom.traceroute | python | def traceroute(self, host, probeid):
response = self.request('GET', 'traceroute', {'host': host,
'probeid': probeid})
return response.json()['traceroute'] | Perform a traceroute to a specified target from a specified Pingdom
probe.
Provide hostname to check and probeid to check from
Returned structure:
{
'result' : <String> Traceroute output
'probeid' : <Integer> Probe identifier
'probedescription' : <String> Probe description
} | train | https://github.com/KennethWilke/PingdomLib/blob/3ed1e481f9c9d16b032558d62fb05c2166e162ed/pingdomlib/pingdom.py#L733-L749 | [
"def request(self, method, url, parameters=dict()):\n \"\"\"Requests wrapper function\"\"\"\n\n # The requests library uses urllib, which serializes to \"True\"/\"False\" while Pingdom requires lowercase\n parameters = self._serializeBooleans(parameters)\n\n headers = {'App-Key': self.apikey}\n if se... | class Pingdom(object):
"""Main connection object to interact with pingdom
Attributes:
* pushChanges -- This boolean controls if changes are automatically
pushed to pingdom
* shortlimit -- String containing short api rate limit details
* longlimit -- String containing long api rate limit details
"""
    def __init__(self, username, password, apikey, accountemail=None,
                 pushchanges=True, server=server_address):
        """Store credentials and build the base API URL.

        Arguments:
            username -- Pingdom account username
            password -- Pingdom account password
            apikey -- Pingdom application API key (sent as App-Key header)
            accountemail -- optional account email; when set it is sent as
                the Account-Email header on every request
            pushchanges -- when True, changes are pushed to Pingdom
                automatically
            server -- API server address (defaults to the module-level
                server_address)
        """
        self.pushChanges = pushchanges
        self.username = username
        self.password = password
        self.apikey = apikey
        self.accountemail = accountemail
        # Base URL every request() call appends its resource path to
        self.url = '%s/api/%s/' % (server, api_version)
        # Most recent Req-Limit-Short / Req-Limit-Long headers seen by
        # request(); empty until the first API call
        self.shortlimit = ''
        self.longlimit = ''
@staticmethod
def _serializeBooleans(params):
""""Convert all booleans to lowercase strings"""
serialized = {}
for name, value in params.items():
if value is True:
value = 'true'
elif value is False:
value = 'false'
serialized[name] = value
return serialized
for k, v in params.items():
if isinstance(v, bool):
params[k] = str(v).lower()
def request(self, method, url, parameters=dict()):
"""Requests wrapper function"""
# The requests library uses urllib, which serializes to "True"/"False" while Pingdom requires lowercase
parameters = self._serializeBooleans(parameters)
headers = {'App-Key': self.apikey}
if self.accountemail:
headers.update({'Account-Email': self.accountemail})
# Method selection handling
if method.upper() == 'GET':
response = requests.get(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'POST':
response = requests.post(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'PUT':
response = requests.put(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'DELETE':
response = requests.delete(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
else:
raise Exception("Invalid method in pingdom request")
# Store pingdom api limits
self.shortlimit = response.headers.get(
'Req-Limit-Short',
self.shortlimit)
self.longlimit = response.headers.get(
'Req-Limit-Long',
self.longlimit)
# Verify OK response
if response.status_code != 200:
sys.stderr.write('ERROR from %s: %d' % (response.url,
response.status_code))
sys.stderr.write('Returned data: %s\n' % response.json())
response.raise_for_status()
return response
def actions(self, **parameters):
"""Returns a list of actions (alerts) that have been generated for
your account.
Optional Parameters:
* from -- Only include actions generated later than this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* to -- Only include actions generated prior to this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* limit -- Limits the number of returned results to the specified
quantity.
Type: Integer (max 300)
Default: 100
* offset -- Offset for listing.
Type: Integer
Default: 0
* checkids -- Comma-separated list of check identifiers. Limit
results to actions generated from these checks.
Type: String
Default: All
* contactids -- Comma-separated list of contact identifiers.
Limit results to actions sent to these contacts.
Type: String
Default: All
* status -- Comma-separated list of statuses. Limit results to
actions with these statuses.
Type: String ['sent', 'delivered', 'error',
'not_delivered', 'no_credits']
Default: All
* via -- Comma-separated list of via mediums. Limit results to
actions with these mediums.
Type: String ['email', 'sms', 'twitter', 'iphone',
'android']
Default: All
Returned structure:
{
'alerts' : [
{
'contactname' : <String> Name of alerted contact
'contactid' : <String> Identifier of alerted contact
'checkid' : <String> Identifier of check
'time' : <Integer> Time of alert generation. Format
UNIX time
'via' : <String> Alert medium ['email', 'sms',
'twitter', 'iphone',
'android']
'status' : <String> Alert status ['sent', 'delivered',
'error',
'notdelivered',
'nocredits']
'messageshort': <String> Short description of message
'messagefull' : <String> Full message body
'sentto' : <String> Target address, phone number, etc
'charged' : <Boolean> True if your account was charged
for this message
},
...
]
}
"""
# Warn user about unhandled parameters
for key in parameters:
if key not in ['from', 'to', 'limit', 'offset', 'checkids',
'contactids', 'status', 'via']:
sys.stderr.write('%s not a valid argument for actions()\n'
% key)
response = self.request('GET', 'actions', parameters)
return response.json()['actions']
def alerts(self, **parameters):
"""A short-hand version of 'actions', returns list of alerts.
See parameters for actions()"""
return self.actions(**parameters)['alerts']
def getChecks(self, **parameters):
"""Pulls all checks from pingdom
Optional Parameters:
* limit -- Limits the number of returned probes to the
specified quantity.
Type: Integer (max 25000)
Default: 25000
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
* tags -- Filter listing by tag/s
Type: String
Default: None
"""
# Warn user about unhandled parameters
for key in parameters:
if key not in ['limit', 'offset', 'tags']:
sys.stderr.write('%s not a valid argument for getChecks()\n'
% key)
response = self.request('GET', 'checks', parameters)
return [PingdomCheck(self, x) for x in response.json()['checks']]
def getCheck(self, checkid):
"""Returns a detailed description of a specified check."""
check = PingdomCheck(self, {'id': checkid})
check.getDetails()
return check
def getResults(self, checkid):
""" Returns detailed results for a specified check id."""
response = self.request('GET','results/%s' % checkid)
return response.json()
def newCheck(self, name, host, checktype='http', **kwargs):
"""Creates a new check with settings specified by provided parameters.
Provide new check name, hostname and type along with any additional
optional parameters passed as keywords. Returns new PingdomCheck
instance
Types available:
* http
* httpcustom
* tcp
* ping
* dns
* udp
* smtp
* pop3
Optional parameters:
* paused -- Check should be paused
Type: Boolean
Default: False
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
Default: 5
* contactids -- Comma separated list of contact IDs
Type: String
Default: None
* sendtoemail -- Send alerts as email
Type: Boolean
Default: False
* sendtosms -- Send alerts as SMS
Type: Boolean
Default: False
* sendtotwitter -- Send alerts through Twitter
Type: Boolean
Default: False
* sendtoiphone -- Send alerts to iPhone
Type: Boolean
Default: False
* sendtoandroid -- Send alerts to Android
Type: Boolean
Default: False
* sendnotificationwhendown -- Send notification when check is down
the given number of times
Type: Integer
Default: 2
* notifyagainevery -- Set how many results to wait for in between
notices
Type: Integer
Default: 0
* notifywhenbackup -- Notify when back up again
Type: Boolean
Default: True
* use_legacy_notifications -- Use the old notifications instead of
BeepManager
Type: Boolean
Default: False
HTTP check options:
* url -- Target path on server
Type: String
Default: /
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* shouldcontain -- Target site should contain this string.
Cannot be combined with 'shouldnotcontain'
Type: String
Default: None
* shouldnotcontain -- Target site should not contain this string.
Cannot be combined with 'shouldcontain'
Type: String
Default: None
* postdata -- Data that should be posted to the web page,
for example submission data for a sign-up or login form.
The data needs to be formatted in the same way as a web browser
would send it to the web server
Type: String
Default: None
* requestheader<NAME> -- Custom HTTP header, replace <NAME> with
desired header name. Header in form: Header:Value
Type: String
Default: None
HTTPCustom check options:
* url -- Target path on server
Type: String
Mandatory
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* additionalurls -- Colon-separated list of additonal URLS with
hostname included
Type: String
Default: None
TCP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
DNS check options:
* expectedip -- Expected IP
Type: String
Mandatory
* nameserver -- Nameserver to check
Type: String
Mandatory
UDP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
SMTP check options:
* port -- Target server port
Type: Integer
Default: 25
* auth -- Username and password for target SMTP authentication.
Example: user:password
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
POP3 check options:
* port -- Target server port
Type: Integer
Default: 110
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
IMAP check options:
* port -- Target server port
Type: Integer
Default: 143
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
"""
if checktype == 'http':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata',
'use_legacy_notifications']:
if key.startswith('requestheader') is not True:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'http'\n")
elif checktype == 'httpcustom':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'additionalurls',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'httpcustom'\n")
elif checktype == 'tcp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'tcp'\n")
elif checktype == 'ping':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'ping'\n")
elif checktype == 'dns':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'expectedip', 'nameserver',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'dns'\n")
elif checktype == 'udp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'udp'\n")
elif checktype == 'smtp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'auth', 'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'smtp'\n")
elif checktype == 'pop3':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'pop3'\n")
elif checktype == 'imap':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'imap'\n")
else:
raise Exception("Invalid checktype in newCheck()")
parameters = {'name': name, 'host': host, 'type': checktype}
for key, value in kwargs.iteritems():
parameters[key] = value
checkinfo = self.request("POST", 'checks', parameters)
return self.getCheck(checkinfo.json()['check']['id'])
def modifyChecks(self, **kwargs):
"""Pause or change resolution for multiple checks in one bulk call.
Parameters:
* paused -- Check should be paused
Type: Boolean
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
* checkids -- Comma-separated list of identifiers for checks to be
modified. Invalid check identifiers will be ignored.
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'checkids']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newCheck()\n')
return self.request("PUT", "checks", kwargs).json()['message']
def deleteChecks(self, checkids):
"""Deletes a list of checks, CANNOT BE REVERSED!
Provide a comma-separated list of checkid's to delete
"""
return self.request("DELETE", "checks",
{'delcheckids': checkids}).json()['message']
def credits(self):
"""Gets credits list"""
return self.request("GET", "credits").json()['credits']
def probes(self, **kwargs):
"""Returns a list of all Pingdom probe servers
Parameters:
* limit -- Limits the number of returned probes to the specified
quantity
Type: Integer
* offset -- Offset for listing (requires limit).
Type: Integer
Default: 0
* onlyactive -- Return only active probes
Type: Boolean
Default: False
* includedeleted -- Include old probes that are no longer in use
Type: Boolean
Default: False
Returned structure:
[
{
'id' : <Integer> Unique probe id
'country' : <String> Country
'city' : <String> City
'name' : <String> Name
'active' : <Boolean> True if probe is active
'hostname' : <String> DNS name
'ip' : <String> IP address
'countryiso': <String> Country ISO code
},
...
]
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset', 'onlyactive', 'includedeleted']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of probes()\n')
return self.request("GET", "probes", kwargs).json()['probes']
def references(self):
"""Get a reference of regions, timezones and date/time/number formats
and their identifiers.
Returned structure:
{
'regions' :
[
{
'id' : <Integer> Region identifier
'description' : <String> Region description
'countryid' : <Integer> Corresponding country
identifier
'datetimeformatid' : <Integer> Corresponding datetimeformat
identifier
'numberformatid' : <Integer> Corresponding numberformat
identifer
'timezoneid' : <Integer> Corresponding timezone
identifier
},
...
],
'timezones' :
[
{
'id' : <Integer> Time zone identifier
'description' : <String> Time zone description
},
...
],
'datetimeformats' :
[
{
'id' : <Integer> Date/time format identifer
'description' : <String> Date/time format description
},
...
],
'numberformats' :
[
{
'id' : <Integer> Number format identifier
'description' : <String> Number format description
},
...
],
'countries' :
[
{
'id' : <Integer> Country id
'iso' : <String> Country ISO code
},
...
],
'phonecodes' :
[
{
'countryid' : <Integer> Country id
'name' : <String> Country name
'phonecode' : <String> Area phone code
},
...
]
}"""
return self.request("GET", "reference").json()
def servertime(self):
"""Get the current time of the API server in UNIX format"""
return self.request('GET', 'servertime').json()['servertime']
def getContacts(self, **kwargs):
"""Returns a list of all contacts.
Optional Parameters:
* limit -- Limits the number of returned contacts to the specified
quantity.
Type: Integer
Default: 100
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
Returned structure:
[
'id' : <Integer> Contact identifier
'name' : <String> Contact name
'email' : <String> Contact email
'cellphone' : <String> Contact telephone
'countryiso' : <String> Cellphone country ISO code
'defaultsmsprovider' : <String> Default SMS provider
'directtwitter' : <Boolean> Send Tweets as direct messages
'twitteruser' : <String> Twitter username
'paused' : <Boolean> True if contact is pasued
'iphonetokens' : <String list> iPhone tokens
'androidtokens' : <String list> android tokens
]
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of getContacts()\n')
return [PingdomContact(self, x) for x in
self.request("GET", "notification_contacts", kwargs).json()['contacts']]
def newContact(self, name, **kwargs):
"""Create a new contact.
Provide new contact name and any optional arguments. Returns new
PingdomContact instance
Optional Parameters:
* email -- Contact email address
Type: String
* cellphone -- Cellphone number, without the country code part. In
some countries you are supposed to exclude leading zeroes.
(Requires countrycode and countryiso)
Type: String
* countrycode -- Cellphone country code (Requires cellphone and
countryiso)
Type: String
* countryiso -- Cellphone country ISO code. For example: US (USA),
GB (Britain) or SE (Sweden) (Requires cellphone and
countrycode)
Type: String
* defaultsmsprovider -- Default SMS provider
Type: String ['clickatell', 'bulksms', 'esendex',
'cellsynt']
* directtwitter -- Send tweets as direct messages
Type: Boolean
Default: True
* twitteruser -- Twitter user
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['email', 'cellphone', 'countrycode', 'countryiso',
'defaultsmsprovider', 'directtwitter',
'twitteruser']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newContact()\n')
kwargs['name'] = name
contactinfo = self.request("POST", "notification_contacts",
kwargs).json()['contact']
return PingdomContact(self, contactinfo)
def modifyContacts(self, contactids, paused):
"""Modifies a list of contacts.
Provide comma separated list of contact ids and desired paused state
Returns status message
"""
response = self.request("PUT", "notification_contacts", {'contactids': contactids,
'paused': paused})
return response.json()['message']
    def deleteContacts(self, contactids):
        """Deletes a list of contacts. CANNOT BE REVERSED!
        Provide a comma-separated list of contactid's to delete
        Returns status message
        """
        # NOTE(review): the request key is 'delcheckids', the same key
        # deleteChecks() sends for checks -- looks copy-pasted; the contacts
        # endpoint presumably expects a contact-specific key.  Confirm
        # against the Pingdom API reference before relying on this.
        return self.request("DELETE", "notification_contacts",
                            {'delcheckids': contactids}).json()['message']
def singleTest(self, host, checktype, **kwargs):
"""Performs a single test using a specified Pingdom probe against a
specified target. Please note that this method is meant to be used
sparingly, not to set up your own monitoring solution.
Provide hostname and check type, followed by any optional arguments.
Types available:
* http
* httpcustom
* tcp
* ping
* dns
* udp
* smtp
* pop3
Optional arguments:
* probeid -- Probe to use for check
Type: Integer
Default: A random probe
See newCheck() docstring for type-specific arguments
Returned structure:
{
'status' : <String> Test result status ['up, 'down']
'responsetime' : <Integer> Response time in milliseconds
'statusdesc' : <String> Short status description
'statusdesclong' : <String> Long status description
'probeid' : <Integer> Probe identifier
'probedesc' : <String> Probe description
}
"""
if checktype == 'http':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata']:
if key.startswith('requestheader') is not True:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'http'\n")
elif checktype == 'httpcustom':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'url',
'encryption', 'port', 'auth', 'additionalurls']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'httpcustom'\n")
elif checktype == 'tcp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtosend', 'stringtoexpect']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'tcp'\n")
elif checktype == 'ping':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'ping'\n")
elif checktype == 'dns':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'expectedip',
'nameserver']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'dns'\n")
elif checktype == 'udp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtosend', 'stringtoexpect']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'udp'\n")
elif checktype == 'smtp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port', 'auth',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'smtp'\n")
elif checktype == 'pop3':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'pop3'\n")
elif checktype == 'imap':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['probeid', 'port',
'stringtoexpect', 'encryption']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of singleTest() for type ' +
"'imap'\n")
else:
raise Exception("Invalid checktype in singleTest()")
parameters = {'host': host, 'type': checktype}
for key, value in kwargs.iteritems():
parameters[key] = value
checkinfo = self.request('GET', "single", parameters)
return checkinfo.json()['result']
def getSettings(self):
"""Returns all account-specific settings.
Returned structure:
{
'firstname' : <String> First name
'lastname' : <String> Last name
'company' : <String> Company
'email' : <String> Email
'phone' : <String> Phone
'phonecountryiso' : <String> Phone country ISO code
'cellphone' : <String> Cellphone
'cellphonecountryiso' : <String> Cellphone country ISO code
'address' : <String> Address line 1
'address2' : <String> Address line 2
'zip' : <String> Zip, postal code or equivalent
'location' : <String> City / location
'state' : <String> State or equivalent
'autologout' : <Boolean> Enable auto-logout
'country' :
{
'name' : <String> Country name
'iso' : <String> Country ISO-code
'countryid' : <Integer> Country identifier
}
'vatcode' : <String> For certain EU countries, VAT-code
'region' : <String> Region
'regionid' : <Integer> Region identifier, see reference
'accountcreated' : <Integer> Account creation timestamp
'timezone' :
{
'id' : <String> Timezone name
'description' : <String> Timezone description
'timezoneid' : <Integer> Timezone identifier
}
'dateformat' : <String> Date format
'timeformat' : <String> Time format
'datetimeformatid' : <Integer> Date/time format identifier
'numberformat' : <String> Number format
'numberformatexample' : <String> Example of number presentation
'numberformatid' : <Integer> Number format identifier
'publicreportscode' : <String> URL code
'settingssaved' : <Boolean> True if user has saved initial
settings in control panel
}
"""
return self.request('GET', 'settings').json()['settings']
def modifySettings(self, **kwargs):
"""Modify account-specific settings.
Returns status message for operation
Optional parameters:
* firstname -- First name
Type: String
* lastname -- Last name
Type: String
* company -- Company
Type: String
* email -- Email (Please note that your email is used for
authentication purposes such as using this API or logging into
the Pingdom Panel)
Type: String
* cellphone -- Cellphone (without country code)
(Requires cellcountrycode and cellcountryiso)
Type: String
* cellcountrycode -- Cellphone country code, for example 1 (USA)
or 46 (Sweden)
Type: Integer
* cellcountryiso -- Cellphone country ISO code, for example
US(USA) or SE (Sweden)
Type: String
* phone -- Phone (without country code) (Requires phonecountrycode
and phonecountryiso)
Type: String
* phonecountrycode -- Phone country code, for example 1 (USA)
or 46 (Sweden)
Type: Integer
* phonecountryiso -- Phone country ISO code, for example US (USA)
or SE (Sweden)
Type: String
* address -- Address line 1
Type: String
* address2 -- Address line 2
Type: String
* zip -- Zip, postal code or equivalent
Type: String
* location -- City / location
Type: String
* state -- State, province or equivalent
Type: String
* countryiso -- Country ISO code, for example US (USA)
or SE (Sweden)
Type: String
* vatcode -- For certain EU countries, VAT-code.
Example: SE123456789
Type: String
* autologout -- Enable auto-logout
Type: Boolean
* regionid -- Region identifier, for localization purposes.
0 for "Custom"/none. See the API resource "Reference" for more
information
Type: Integer
* timezoneid -- Time zone identifier. See the API resource
"Reference" for more information
Type: Integer
* datetimeformatid -- Date/time format identifier. See the API
resource "Reference" for more information
Type: Integer
* numberformatid -- Number format identifier. See the API resource
"Reference" for more information
Type: Integer
* pubrcustomdesign -- Use custom design for public reports
Type: Boolean
* pubrtextcolor -- Public reports, custom text color
(Example: FEFFFE or 99CC00)
Type: String
* pubrbackgroundcolor -- Public reports, background color
(Example: FEFFFE or 99CC00)
Type: String
* pubrlogourl -- Public reports, URL to custom logotype.
This parameter is currently disabled for public use.
(Example: stats.pingdom.com/images/logo.png)
Type: String
* pubrmonths -- Public reports, nuber of months to show
Type: String ['none', 'all', '3']
* pubrshowoverview -- Public reports, enable overview
Type: Boolean
* pubrcustomdomain -- Public reports, custom domain. Must be a DNS
CNAME with target stats.pingdom.com
Type: Boolean
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['firstname', 'lastname', 'company', 'email',
'cellphone', 'cellcountrycode', 'cellcountryiso',
'phone', 'phonecountrycode', 'phonecountryiso',
'address', 'address2', 'zip', 'location', 'state',
'countryiso', 'vatcode', 'autologout', 'regionid',
'timezoneid', 'datetimeformatid', 'numberformatid',
'pubrcustomdesign', 'pubrtextcolor',
'pubrbackgroundcolor', 'pubrlogourl', 'pubrmonths',
'pubrshowoverview', 'pubrcustomdomain']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of modifySettings()\n')
return self.request('PUT', 'settings', kwargs).json()['message']
def getEmailReports(self):
"""Returns a list of PingdomEmailReport instances."""
reports = [PingdomEmailReport(self, x) for x in
self.request('GET',
'reports.email').json()['subscriptions']]
return reports
def newEmailReport(self, name, **kwargs):
"""Creates a new email report
Returns status message for operation
Optional parameters:
* checkid -- Check identifier. If omitted, this will be an
overview report
Type: Integer
* frequency -- Report frequency
Type: String ['monthly', 'weekly', 'daily']
* contactids -- Comma separated list of receiving contact
identifiers
Type: String
* additionalemails -- Comma separated list of additional receiving
emails
Type: String
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['checkid', 'frequency', 'contactids',
'additionalemails']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newEmailReport()\n')
parameters = {'name': name}
for key, value in kwargs.iteritems():
parameters[key] = value
return self.request('POST', 'reports.email',
parameters).json()['message']
def getPublicReports(self):
"""Returns a list of public (web-based) reports
Returned structure:
[
{
'checkid' : <Integer> Check identifier
'checkname' : <String> Check name
'reporturl' : <String> URL to report
},
...
]
"""
return self.request('GET', 'reports.public').json()['public']
def getSharedReports(self):
"""Returns a list of PingdomSharedReport instances"""
response = self.request('GET',
'reports.shared').json()['shared']['banners']
reports = [PingdomSharedReport(self, x) for x in response]
return reports
def newSharedReport(self, checkid, **kwargs):
"""Create a shared report (banner).
Returns status message for operation
Optional parameters:
* auto -- Automatic period (If false, requires: fromyear,
frommonth, fromday, toyear, tomonth, today)
Type: Boolean
* type -- Banner type
Type: String ['uptime', 'response']
* fromyear -- Period start: year
Type: Integer
* frommonth -- Period start: month
Type: Integer
* fromday -- Period start: day
Type: Integer
* toyear -- Period end: year
Type: Integer
* tomonth -- Period end: month
Type: Integer
* today -- Period end: day
Type: Integer
"""
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['auto', 'type', 'fromyear', 'frommonth', 'fromday',
'toyear', 'tomonth', 'today', 'sharedtype']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newSharedReport()\n')
parameters = {'checkid': checkid, 'sharedtype': 'banner'}
for key, value in kwargs.iteritems():
parameters[key] = value
return self.request('POST', 'reports.shared',
parameters).json()['message']
|
KennethWilke/PingdomLib | pingdomlib/pingdom.py | Pingdom.getContacts | python | def getContacts(self, **kwargs):
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['limit', 'offset']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of getContacts()\n')
return [PingdomContact(self, x) for x in
self.request("GET", "notification_contacts", kwargs).json()['contacts']] | Returns a list of all contacts.
Optional Parameters:
* limit -- Limits the number of returned contacts to the specified
quantity.
Type: Integer
Default: 100
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
Returned structure:
[
'id' : <Integer> Contact identifier
'name' : <String> Contact name
'email' : <String> Contact email
'cellphone' : <String> Contact telephone
'countryiso' : <String> Cellphone country ISO code
'defaultsmsprovider' : <String> Default SMS provider
'directtwitter' : <Boolean> Send Tweets as direct messages
'twitteruser' : <String> Twitter username
'paused' : <Boolean> True if contact is pasued
'iphonetokens' : <String list> iPhone tokens
'androidtokens' : <String list> android tokens
] | train | https://github.com/KennethWilke/PingdomLib/blob/3ed1e481f9c9d16b032558d62fb05c2166e162ed/pingdomlib/pingdom.py#L756-L793 | [
"def request(self, method, url, parameters=dict()):\n \"\"\"Requests wrapper function\"\"\"\n\n # The requests library uses urllib, which serializes to \"True\"/\"False\" while Pingdom requires lowercase\n parameters = self._serializeBooleans(parameters)\n\n headers = {'App-Key': self.apikey}\n if se... | class Pingdom(object):
"""Main connection object to interact with pingdom
Attributes:
* pushChanges -- This boolean controls if changes are automatically
pushed to pingdom
* shortlimit -- String containing short api rate limit details
* longlimit -- String containing long api rate limit details
"""
    def __init__(self, username, password, apikey, accountemail=None,
                 pushchanges=True, server=server_address):
        """Store credentials and build the base API URL.

        `accountemail` is sent as the Account-Email header (see request());
        `pushchanges` controls whether edits are pushed to Pingdom
        automatically.
        """
        self.pushChanges = pushchanges
        self.username = username
        self.password = password
        self.apikey = apikey
        self.accountemail = accountemail
        # Base endpoint, '<server>/api/<version>/'; the module-level
        # server_address and api_version supply the defaults.
        self.url = '%s/api/%s/' % (server, api_version)
        # Rate-limit headers from the most recent response, updated by
        # request().
        self.shortlimit = ''
        self.longlimit = ''
@staticmethod
def _serializeBooleans(params):
""""Convert all booleans to lowercase strings"""
serialized = {}
for name, value in params.items():
if value is True:
value = 'true'
elif value is False:
value = 'false'
serialized[name] = value
return serialized
for k, v in params.items():
if isinstance(v, bool):
params[k] = str(v).lower()
def request(self, method, url, parameters=dict()):
"""Requests wrapper function"""
# The requests library uses urllib, which serializes to "True"/"False" while Pingdom requires lowercase
parameters = self._serializeBooleans(parameters)
headers = {'App-Key': self.apikey}
if self.accountemail:
headers.update({'Account-Email': self.accountemail})
# Method selection handling
if method.upper() == 'GET':
response = requests.get(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'POST':
response = requests.post(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'PUT':
response = requests.put(self.url + url, data=parameters,
auth=(self.username, self.password),
headers=headers)
elif method.upper() == 'DELETE':
response = requests.delete(self.url + url, params=parameters,
auth=(self.username, self.password),
headers=headers)
else:
raise Exception("Invalid method in pingdom request")
# Store pingdom api limits
self.shortlimit = response.headers.get(
'Req-Limit-Short',
self.shortlimit)
self.longlimit = response.headers.get(
'Req-Limit-Long',
self.longlimit)
# Verify OK response
if response.status_code != 200:
sys.stderr.write('ERROR from %s: %d' % (response.url,
response.status_code))
sys.stderr.write('Returned data: %s\n' % response.json())
response.raise_for_status()
return response
def actions(self, **parameters):
"""Returns a list of actions (alerts) that have been generated for
your account.
Optional Parameters:
* from -- Only include actions generated later than this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* to -- Only include actions generated prior to this timestamp.
Format is UNIX time.
Type: Integer
Default: None
* limit -- Limits the number of returned results to the specified
quantity.
Type: Integer (max 300)
Default: 100
* offset -- Offset for listing.
Type: Integer
Default: 0
* checkids -- Comma-separated list of check identifiers. Limit
results to actions generated from these checks.
Type: String
Default: All
* contactids -- Comma-separated list of contact identifiers.
Limit results to actions sent to these contacts.
Type: String
Default: All
* status -- Comma-separated list of statuses. Limit results to
actions with these statuses.
Type: String ['sent', 'delivered', 'error',
'not_delivered', 'no_credits']
Default: All
* via -- Comma-separated list of via mediums. Limit results to
actions with these mediums.
Type: String ['email', 'sms', 'twitter', 'iphone',
'android']
Default: All
Returned structure:
{
'alerts' : [
{
'contactname' : <String> Name of alerted contact
'contactid' : <String> Identifier of alerted contact
'checkid' : <String> Identifier of check
'time' : <Integer> Time of alert generation. Format
UNIX time
'via' : <String> Alert medium ['email', 'sms',
'twitter', 'iphone',
'android']
'status' : <String> Alert status ['sent', 'delivered',
'error',
'notdelivered',
'nocredits']
'messageshort': <String> Short description of message
'messagefull' : <String> Full message body
'sentto' : <String> Target address, phone number, etc
'charged' : <Boolean> True if your account was charged
for this message
},
...
]
}
"""
# Warn user about unhandled parameters
for key in parameters:
if key not in ['from', 'to', 'limit', 'offset', 'checkids',
'contactids', 'status', 'via']:
sys.stderr.write('%s not a valid argument for actions()\n'
% key)
response = self.request('GET', 'actions', parameters)
return response.json()['actions']
def alerts(self, **parameters):
"""A short-hand version of 'actions', returns list of alerts.
See parameters for actions()"""
return self.actions(**parameters)['alerts']
def getChecks(self, **parameters):
"""Pulls all checks from pingdom
Optional Parameters:
* limit -- Limits the number of returned probes to the
specified quantity.
Type: Integer (max 25000)
Default: 25000
* offset -- Offset for listing (requires limit.)
Type: Integer
Default: 0
* tags -- Filter listing by tag/s
Type: String
Default: None
"""
# Warn user about unhandled parameters
for key in parameters:
if key not in ['limit', 'offset', 'tags']:
sys.stderr.write('%s not a valid argument for getChecks()\n'
% key)
response = self.request('GET', 'checks', parameters)
return [PingdomCheck(self, x) for x in response.json()['checks']]
def getCheck(self, checkid):
"""Returns a detailed description of a specified check."""
check = PingdomCheck(self, {'id': checkid})
check.getDetails()
return check
def getResults(self, checkid):
""" Returns detailed results for a specified check id."""
response = self.request('GET','results/%s' % checkid)
return response.json()
def newCheck(self, name, host, checktype='http', **kwargs):
"""Creates a new check with settings specified by provided parameters.
Provide new check name, hostname and type along with any additional
optional parameters passed as keywords. Returns new PingdomCheck
instance
Types available:
* http
* httpcustom
* tcp
* ping
* dns
* udp
* smtp
* pop3
Optional parameters:
* paused -- Check should be paused
Type: Boolean
Default: False
* resolution -- Check resolution time (in minutes)
Type: Integer [1, 5, 15, 30, 60]
Default: 5
* contactids -- Comma separated list of contact IDs
Type: String
Default: None
* sendtoemail -- Send alerts as email
Type: Boolean
Default: False
* sendtosms -- Send alerts as SMS
Type: Boolean
Default: False
* sendtotwitter -- Send alerts through Twitter
Type: Boolean
Default: False
* sendtoiphone -- Send alerts to iPhone
Type: Boolean
Default: False
* sendtoandroid -- Send alerts to Android
Type: Boolean
Default: False
* sendnotificationwhendown -- Send notification when check is down
the given number of times
Type: Integer
Default: 2
* notifyagainevery -- Set how many results to wait for in between
notices
Type: Integer
Default: 0
* notifywhenbackup -- Notify when back up again
Type: Boolean
Default: True
* use_legacy_notifications -- Use the old notifications instead of
BeepManager
Type: Boolean
Default: False
HTTP check options:
* url -- Target path on server
Type: String
Default: /
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* shouldcontain -- Target site should contain this string.
Cannot be combined with 'shouldnotcontain'
Type: String
Default: None
* shouldnotcontain -- Target site should not contain this string.
Cannot be combined with 'shouldcontain'
Type: String
Default: None
* postdata -- Data that should be posted to the web page,
for example submission data for a sign-up or login form.
The data needs to be formatted in the same way as a web browser
would send it to the web server
Type: String
Default: None
* requestheader<NAME> -- Custom HTTP header, replace <NAME> with
desired header name. Header in form: Header:Value
Type: String
Default: None
HTTPCustom check options:
* url -- Target path on server
Type: String
Mandatory
* encryption -- Use SSL/TLS
Type: Boolean
Default: False
* port -- Target server port
Type: Integer
Default: 80
* auth -- Username and password for HTTP authentication
Example: user:password
Type: String
Default: None
* additionalurls -- Colon-separated list of additonal URLS with
hostname included
Type: String
Default: None
TCP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
DNS check options:
* expectedip -- Expected IP
Type: String
Mandatory
* nameserver -- Nameserver to check
Type: String
Mandatory
UDP check options:
* port -- Target server port
Type: Integer
Mandatory
* stringtosend -- String to send
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
SMTP check options:
* port -- Target server port
Type: Integer
Default: 25
* auth -- Username and password for target SMTP authentication.
Example: user:password
Type: String
Default: None
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
POP3 check options:
* port -- Target server port
Type: Integer
Default: 110
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
IMAP check options:
* port -- Target server port
Type: Integer
Default: 143
* stringtoexpect -- String to expect in response
Type: String
Default: None
* encryption -- Use connection encryption
Type: Boolean
Default: False
"""
if checktype == 'http':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'shouldcontain',
'shouldnotcontain', 'postdata',
'use_legacy_notifications']:
if key.startswith('requestheader') is not True:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'http'\n")
elif checktype == 'httpcustom':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'url',
'encryption', 'port', 'auth', 'additionalurls',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'httpcustom'\n")
elif checktype == 'tcp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['alert_policy', 'autoresolve', 'paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'tcp'\n")
elif checktype == 'ping':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'ping'\n")
elif checktype == 'dns':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname',
'expectedip', 'nameserver',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'dns'\n")
elif checktype == 'udp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtosend', 'stringtoexpect',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'udp'\n")
elif checktype == 'smtp':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'auth', 'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'smtp'\n")
elif checktype == 'pop3':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'pop3'\n")
elif checktype == 'imap':
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['paused', 'resolution', 'contactids',
'sendtoemail', 'sendtosms', 'sendtotwitter',
'sendtoiphone', 'sendtoandroid',
'sendnotificationwhendown', 'notifyagainevery',
'notifywhenbackup', 'type', 'hostname', 'port',
'stringtoexpect', 'encryption',
'use_legacy_notifications']:
sys.stderr.write("'%s'" % key + ' is not a valid ' +
'argument of newCheck() for type ' +
"'imap'\n")
else:
raise Exception("Invalid checktype in newCheck()")
parameters = {'name': name, 'host': host, 'type': checktype}
for key, value in kwargs.iteritems():
parameters[key] = value
checkinfo = self.request("POST", 'checks', parameters)
return self.getCheck(checkinfo.json()['check']['id'])
def modifyChecks(self, **kwargs):
    """Pause or change resolution for multiple checks in one bulk call.

    Keyword arguments:
        paused -- pause (True) or unpause (False) the checks (Boolean)
        resolution -- check resolution time in minutes, one of
            1, 5, 15, 30, 60 (Integer)
        checkids -- comma-separated list of identifiers for checks to
            modify; invalid identifiers are ignored by the API (String)

    Returns the status message reported by the Pingdom API.
    """
    # Warn user about unhandled parameters.  (Fix: the warning used to
    # name "newCheck()" -- a copy-paste error -- instead of this method.)
    for key in kwargs:
        if key not in ['paused', 'resolution', 'checkids']:
            sys.stderr.write("'%s' is not a valid argument of "
                             "modifyChecks()\n" % key)
    return self.request("PUT", "checks", kwargs).json()['message']
def deleteChecks(self, checkids):
    """Delete the given checks -- THIS CANNOT BE REVERSED!

    checkids -- comma-separated string of check identifiers to delete.

    Returns the status message reported by the Pingdom API.
    """
    response = self.request("DELETE", "checks", {'delcheckids': checkids})
    return response.json()['message']
def credits(self):
    """Return the account's credits list from the Pingdom API."""
    response = self.request("GET", "credits")
    return response.json()['credits']
def probes(self, **kwargs):
    """Return a list of all Pingdom probe servers.

    Keyword arguments (all optional):
        limit -- limit the number of returned probes (Integer)
        offset -- offset for listing; requires limit (Integer, default 0)
        onlyactive -- return only active probes (Boolean, default False)
        includedeleted -- include probes no longer in use
            (Boolean, default False)

    Each probe is a dict with keys 'id', 'country', 'city', 'name',
    'active', 'hostname', 'ip' and 'countryiso'.
    """
    recognized = ('limit', 'offset', 'onlyactive', 'includedeleted')
    for key in kwargs:
        if key not in recognized:
            sys.stderr.write("'%s' is not a valid argument of probes()\n"
                             % key)
    return self.request("GET", "probes", kwargs).json()['probes']
def references(self):
    """Return localization reference data and identifiers.

    The result is a dict with keys 'regions', 'timezones',
    'datetimeformats', 'numberformats', 'countries' and 'phonecodes'.
    Each holds a list of dicts pairing an identifier with its
    description (regions additionally carry the corresponding country,
    date/time-format, number-format and timezone identifiers; countries
    carry ISO codes; phonecodes carry country name and area phone
    code).  See the Pingdom "Reference" API resource.
    """
    response = self.request("GET", "reference")
    return response.json()
def traceroute(self, host, probeid):
    """Run a traceroute to *host* from the Pingdom probe *probeid*.

    Returns a dict with 'result' (the traceroute output), 'probeid'
    and 'probedescription'.
    """
    payload = {'host': host, 'probeid': probeid}
    return self.request('GET', 'traceroute', payload).json()['traceroute']
def servertime(self):
    """Return the API server's current time as a UNIX timestamp."""
    response = self.request('GET', 'servertime')
    return response.json()['servertime']
def newContact(self, name, **kwargs):
    """Create a new notification contact; returns a PingdomContact.

    name -- display name for the new contact.

    Recognized keyword arguments: email, cellphone, countrycode,
    countryiso, defaultsmsprovider, directtwitter, twitteruser
    (see the Pingdom API documentation for their semantics; cellphone
    requires countrycode and countryiso).
    """
    recognized = ('email', 'cellphone', 'countrycode', 'countryiso',
                  'defaultsmsprovider', 'directtwitter', 'twitteruser')
    for key in kwargs:
        if key not in recognized:
            sys.stderr.write("'%s' is not a valid argument of "
                             "newContact()\n" % key)
    kwargs['name'] = name
    response = self.request("POST", "notification_contacts", kwargs)
    return PingdomContact(self, response.json()['contact'])
def modifyContacts(self, contactids, paused):
    """Set the paused state of several contacts in one call.

    contactids -- comma-separated string of contact identifiers
    paused -- desired paused state

    Returns the status message reported by the Pingdom API.
    """
    payload = {'contactids': contactids, 'paused': paused}
    return self.request("PUT", "notification_contacts",
                        payload).json()['message']
def deleteContacts(self, contactids):
    """Deletes a list of contacts. CANNOT BE REVERSED!

    Provide a comma-separated list of contactid's to delete
    Returns status message
    """
    # NOTE(review): the request parameter is named 'delcheckids' even
    # though this deletes *contacts* -- looks like a copy-paste from
    # deleteChecks().  Verify against the Pingdom API docs before
    # changing it.
    return self.request("DELETE", "notification_contacts",
                        {'delcheckids': contactids}).json()['message']
def singleTest(self, host, checktype, **kwargs):
    """Perform a single test from a Pingdom probe against *host*.

    Meant to be used sparingly, not as a monitoring solution.

    host -- hostname to test
    checktype -- one of 'http', 'httpcustom', 'tcp', 'ping', 'dns',
        'udp', 'smtp', 'pop3', 'imap'

    Optional keyword arguments: 'probeid' selects the probe (default:
    a random probe); the remaining type-specific options mirror
    newCheck() (url, port, auth, encryption, stringtosend,
    stringtoexpect, expectedip, nameserver, ...).  Unrecognized
    keywords produce a warning on stderr but are still sent.

    Returns a dict with 'status', 'responsetime', 'statusdesc',
    'statusdesclong', 'probeid' and 'probedesc'.

    Raises Exception for an unknown checktype.
    """
    # Per-type whitelists of recognized keyword arguments; replaces
    # nine near-identical elif branches.
    valid_args = {
        'http': ('probeid', 'url', 'encryption', 'port', 'auth',
                 'shouldcontain', 'shouldnotcontain', 'postdata'),
        'httpcustom': ('probeid', 'url', 'encryption', 'port', 'auth',
                       'additionalurls'),
        'tcp': ('probeid', 'port', 'stringtosend', 'stringtoexpect'),
        'ping': ('probeid',),
        'dns': ('probeid', 'expectedip', 'nameserver'),
        'udp': ('probeid', 'port', 'stringtosend', 'stringtoexpect'),
        'smtp': ('probeid', 'port', 'auth', 'stringtoexpect',
                 'encryption'),
        'pop3': ('probeid', 'port', 'stringtoexpect', 'encryption'),
        'imap': ('probeid', 'port', 'stringtoexpect', 'encryption'),
    }
    if checktype not in valid_args:
        raise Exception("Invalid checktype in singleTest()")
    allowed = valid_args[checktype]
    # Warn user about unhandled parameters
    for key in kwargs:
        if key in allowed:
            continue
        # HTTP checks additionally accept arbitrary requestheader<NAME>
        # arguments for custom headers.
        if checktype == 'http' and key.startswith('requestheader'):
            continue
        sys.stderr.write("'%s' is not a valid argument of singleTest() "
                         "for type '%s'\n" % (key, checktype))
    # dict.items() rather than Python-2-only iteritems() so this also
    # works on Python 3 (items() behaves the same on Python 2).
    parameters = {'host': host, 'type': checktype}
    for key, value in kwargs.items():
        parameters[key] = value
    checkinfo = self.request('GET', "single", parameters)
    return checkinfo.json()['result']
def getSettings(self):
    """Return all account-specific settings as a dict.

    Keys include personal data ('firstname', 'lastname', 'company',
    'email', 'phone', 'cellphone', address fields), locale settings
    ('country', 'region', 'timezone', 'dateformat', 'timeformat',
    'numberformat' and their identifiers), 'vatcode', 'autologout',
    'accountcreated', 'publicreportscode' and 'settingssaved'.
    """
    response = self.request('GET', 'settings')
    return response.json()['settings']
def modifySettings(self, **kwargs):
    """Modify account-specific settings; returns the API status message.

    Recognized keyword arguments (see the Pingdom API documentation
    for details): firstname, lastname, company, email, cellphone,
    cellcountrycode, cellcountryiso, phone, phonecountrycode,
    phonecountryiso, address, address2, zip, location, state,
    countryiso, vatcode, autologout, regionid, timezoneid,
    datetimeformatid, numberformatid, plus the public-report options
    pubrcustomdesign, pubrtextcolor, pubrbackgroundcolor, pubrlogourl,
    pubrmonths, pubrshowoverview and pubrcustomdomain.

    Note that changing 'email' affects authentication for this API and
    the Pingdom panel.
    """
    recognized = ('firstname', 'lastname', 'company', 'email',
                  'cellphone', 'cellcountrycode', 'cellcountryiso',
                  'phone', 'phonecountrycode', 'phonecountryiso',
                  'address', 'address2', 'zip', 'location', 'state',
                  'countryiso', 'vatcode', 'autologout', 'regionid',
                  'timezoneid', 'datetimeformatid', 'numberformatid',
                  'pubrcustomdesign', 'pubrtextcolor',
                  'pubrbackgroundcolor', 'pubrlogourl', 'pubrmonths',
                  'pubrshowoverview', 'pubrcustomdomain')
    for key in kwargs:
        if key not in recognized:
            sys.stderr.write("'%s' is not a valid argument of "
                             "modifySettings()\n" % key)
    return self.request('PUT', 'settings', kwargs).json()['message']
def getEmailReports(self):
    """Return all email report subscriptions as PingdomEmailReport objects."""
    response = self.request('GET', 'reports.email')
    return [PingdomEmailReport(self, sub)
            for sub in response.json()['subscriptions']]
def newEmailReport(self, name, **kwargs):
    """Create a new email report; returns the API status message.

    name -- name of the report.

    Keyword arguments (all optional):
        checkid -- check identifier; omit for an overview report
            (Integer)
        frequency -- 'monthly', 'weekly' or 'daily' (String)
        contactids -- comma-separated receiving contact ids (String)
        additionalemails -- comma-separated extra recipient emails
            (String)
    """
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['checkid', 'frequency', 'contactids',
                       'additionalemails']:
            sys.stderr.write("'%s' is not a valid argument of "
                             "newEmailReport()\n" % key)
    # dict.items() rather than Python-2-only iteritems() so this also
    # works on Python 3.
    parameters = {'name': name}
    for key, value in kwargs.items():
        parameters[key] = value
    return self.request('POST', 'reports.email',
                        parameters).json()['message']
def getPublicReports(self):
    """Return the list of public (web-based) reports.

    Each entry is a dict with 'checkid', 'checkname' and 'reporturl'.
    """
    response = self.request('GET', 'reports.public')
    return response.json()['public']
def getSharedReports(self):
    """Return all shared (banner) reports as PingdomSharedReport objects."""
    banners = self.request('GET',
                           'reports.shared').json()['shared']['banners']
    return [PingdomSharedReport(self, banner) for banner in banners]
def newSharedReport(self, checkid, **kwargs):
    """Create a shared report (banner); returns the API status message.

    checkid -- identifier of the check to report on.

    Keyword arguments (all optional):
        auto -- automatic period; if False the from*/to* fields below
            are required (Boolean)
        type -- banner type, 'uptime' or 'response' (String)
        fromyear, frommonth, fromday -- period start (Integer)
        toyear, tomonth, today -- period end (Integer)
    """
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['auto', 'type', 'fromyear', 'frommonth',
                       'fromday', 'toyear', 'tomonth', 'today',
                       'sharedtype']:
            sys.stderr.write("'%s' is not a valid argument of "
                             "newSharedReport()\n" % key)
    # dict.items() rather than Python-2-only iteritems() so this also
    # works on Python 3.  A caller-supplied 'sharedtype' still
    # overrides the 'banner' default, as before.
    parameters = {'checkid': checkid, 'sharedtype': 'banner'}
    for key, value in kwargs.items():
        parameters[key] = value
    return self.request('POST', 'reports.shared',
                        parameters).json()['message']
|
KennethWilke/PingdomLib | pingdomlib/pingdom.py | Pingdom.newContact | python | def newContact(self, name, **kwargs):
# Warn user about unhandled parameters
for key in kwargs:
if key not in ['email', 'cellphone', 'countrycode', 'countryiso',
'defaultsmsprovider', 'directtwitter',
'twitteruser']:
sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
'of newContact()\n')
kwargs['name'] = name
contactinfo = self.request("POST", "notification_contacts",
kwargs).json()['contact']
return PingdomContact(self, contactinfo) | Create a new contact.
Provide new contact name and any optional arguments. Returns new
PingdomContact instance
Optional Parameters:
* email -- Contact email address
Type: String
* cellphone -- Cellphone number, without the country code part. In
some countries you are supposed to exclude leading zeroes.
(Requires countrycode and countryiso)
Type: String
* countrycode -- Cellphone country code (Requires cellphone and
countryiso)
Type: String
* countryiso -- Cellphone country ISO code. For example: US (USA),
GB (Britain) or SE (Sweden) (Requires cellphone and
countrycode)
Type: String
* defaultsmsprovider -- Default SMS provider
Type: String ['clickatell', 'bulksms', 'esendex',
'cellsynt']
* directtwitter -- Send tweets as direct messages
Type: Boolean
Default: True
* twitteruser -- Twitter user
Type: String | train | https://github.com/KennethWilke/PingdomLib/blob/3ed1e481f9c9d16b032558d62fb05c2166e162ed/pingdomlib/pingdom.py#L795-L844 | [
"def request(self, method, url, parameters=dict()):\n \"\"\"Requests wrapper function\"\"\"\n\n # The requests library uses urllib, which serializes to \"True\"/\"False\" while Pingdom requires lowercase\n parameters = self._serializeBooleans(parameters)\n\n headers = {'App-Key': self.apikey}\n if se... | class Pingdom(object):
"""Main connection object to interact with pingdom
Attributes:
* pushChanges -- This boolean controls if changes are automatically
pushed to pingdom
* shortlimit -- String containing short api rate limit details
* longlimit -- String containing long api rate limit details
"""
def __init__(self, username, password, apikey, accountemail=None,
             pushchanges=True, server=server_address):
    # When True, attribute changes on Pingdom objects are pushed to
    # the service automatically (see the class docstring).
    self.pushChanges = pushchanges
    # Credentials: HTTP basic-auth pair plus the App-Key header value.
    self.username = username
    self.password = password
    self.apikey = apikey
    # Optional account email, sent as the Account-Email header when set.
    self.accountemail = accountemail
    # Base URL of the REST API: <server>/api/<api_version>/.
    self.url = '%s/api/%s/' % (server, api_version)
    # Most recent rate-limit headers, refreshed by request().
    self.shortlimit = ''
    self.longlimit = ''
@staticmethod
def _serializeBooleans(params):
""""Convert all booleans to lowercase strings"""
serialized = {}
for name, value in params.items():
if value is True:
value = 'true'
elif value is False:
value = 'false'
serialized[name] = value
return serialized
for k, v in params.items():
if isinstance(v, bool):
params[k] = str(v).lower()
def request(self, method, url, parameters=None):
    """Perform an authenticated HTTP request against the Pingdom API.

    method -- HTTP verb: 'GET', 'POST', 'PUT' or 'DELETE'
        (case-insensitive)
    url -- resource path relative to the API base URL
    parameters -- dict of query ('GET'/'DELETE') or form
        ('POST'/'PUT') parameters

    Returns the requests.Response object.  Raises Exception for an
    unknown method and (after writing details to stderr) re-raises the
    HTTP error for non-200 responses.
    """
    # Fix: None default instead of the shared mutable dict() default.
    if parameters is None:
        parameters = dict()
    # The requests library uses urllib, which serializes to
    # "True"/"False" while Pingdom requires lowercase.
    parameters = self._serializeBooleans(parameters)

    headers = {'App-Key': self.apikey}
    if self.accountemail:
        headers.update({'Account-Email': self.accountemail})

    # Dispatch table replaces four near-identical branches; GET/DELETE
    # send the parameters in the query string, POST/PUT in the body.
    dispatch = {
        'GET': (requests.get, 'params'),
        'POST': (requests.post, 'data'),
        'PUT': (requests.put, 'data'),
        'DELETE': (requests.delete, 'params'),
    }
    try:
        func, argname = dispatch[method.upper()]
    except KeyError:
        raise Exception("Invalid method in pingdom request")
    call_kwargs = {argname: parameters,
                   'auth': (self.username, self.password),
                   'headers': headers}
    response = func(self.url + url, **call_kwargs)

    # Store pingdom api limits
    self.shortlimit = response.headers.get('Req-Limit-Short',
                                           self.shortlimit)
    self.longlimit = response.headers.get('Req-Limit-Long',
                                          self.longlimit)

    # Verify OK response
    if response.status_code != 200:
        sys.stderr.write('ERROR from %s: %d' % (response.url,
                                                response.status_code))
        sys.stderr.write('Returned data: %s\n' % response.json())
        response.raise_for_status()
    return response
def actions(self, **parameters):
    """Return actions (alerts) generated for this account.

    Keyword arguments (all optional):
        from, to -- UNIX-time bounds on generation time (Integer)
        limit -- max results, up to 300 (Integer, default 100)
        offset -- listing offset (Integer, default 0)
        checkids -- comma-separated check identifiers (String)
        contactids -- comma-separated contact identifiers (String)
        status -- comma-separated statuses: 'sent', 'delivered',
            'error', 'not_delivered', 'no_credits' (String)
        via -- comma-separated mediums: 'email', 'sms', 'twitter',
            'iphone', 'android' (String)

    Returns a dict of the form {'alerts': [...]}, each alert carrying
    contact/check identifiers, timestamp, medium, status, short and
    full message bodies, the target address and a 'charged' flag.
    """
    recognized = ('from', 'to', 'limit', 'offset', 'checkids',
                  'contactids', 'status', 'via')
    for key in parameters:
        if key not in recognized:
            sys.stderr.write('%s not a valid argument for actions()\n'
                             % key)
    response = self.request('GET', 'actions', parameters)
    return response.json()['actions']
def alerts(self, **parameters):
    """A short-hand version of 'actions', returns list of alerts.

    See parameters for actions()"""
    # actions() returns {'alerts': [...]}; unwrap to the bare list.
    return self.actions(**parameters)['alerts']
def getChecks(self, **parameters):
    """Fetch checks from Pingdom as a list of PingdomCheck instances.

    Keyword arguments (all optional):
        limit -- number of checks to return, max 25000 (Integer)
        offset -- listing offset; requires limit (Integer, default 0)
        tags -- filter the listing by tag/s (String)
    """
    recognized = ('limit', 'offset', 'tags')
    for key in parameters:
        if key not in recognized:
            sys.stderr.write('%s not a valid argument for getChecks()\n'
                             % key)
    response = self.request('GET', 'checks', parameters)
    return [PingdomCheck(self, data)
            for data in response.json()['checks']]
def getCheck(self, checkid):
    """Return a PingdomCheck populated with full details for *checkid*."""
    result = PingdomCheck(self, {'id': checkid})
    result.getDetails()
    return result
def getResults(self, checkid):
    """Return the raw detailed results for the check *checkid*."""
    response = self.request('GET', 'results/%s' % checkid)
    return response.json()
def newCheck(self, name, host, checktype='http', **kwargs):
    """Create a new check and return it as a PingdomCheck instance.

    name -- check name
    host -- target hostname
    checktype -- one of 'http', 'httpcustom', 'tcp', 'ping', 'dns',
        'udp', 'smtp', 'pop3', 'imap' (default 'http')

    Common optional keyword arguments: paused, resolution, contactids,
    sendtoemail, sendtosms, sendtotwitter, sendtoiphone, sendtoandroid,
    sendnotificationwhendown, notifyagainevery, notifywhenbackup,
    use_legacy_notifications.  Type-specific options (url, port, auth,
    encryption, stringtosend, stringtoexpect, expectedip, nameserver,
    shouldcontain, shouldnotcontain, postdata, additionalurls,
    requestheader<NAME>, ...) follow the Pingdom API; unrecognized
    keywords produce a warning on stderr but are still sent.

    Raises Exception for an unknown checktype.
    """
    # Arguments accepted by every check type.
    common_args = ('paused', 'resolution', 'contactids', 'sendtoemail',
                   'sendtosms', 'sendtotwitter', 'sendtoiphone',
                   'sendtoandroid', 'sendnotificationwhendown',
                   'notifyagainevery', 'notifywhenbackup', 'type',
                   'hostname', 'use_legacy_notifications')
    # Extra arguments per check type; replaces nine near-identical
    # elif branches.
    type_args = {
        'http': ('alert_policy', 'autoresolve', 'url', 'encryption',
                 'port', 'auth', 'shouldcontain', 'shouldnotcontain',
                 'postdata'),
        'httpcustom': ('url', 'encryption', 'port', 'auth',
                       'additionalurls'),
        'tcp': ('alert_policy', 'autoresolve', 'port', 'stringtosend',
                'stringtoexpect'),
        'ping': (),
        'dns': ('expectedip', 'nameserver'),
        'udp': ('port', 'stringtosend', 'stringtoexpect'),
        'smtp': ('port', 'auth', 'stringtoexpect', 'encryption'),
        'pop3': ('port', 'stringtoexpect', 'encryption'),
        'imap': ('port', 'stringtoexpect', 'encryption'),
    }
    if checktype not in type_args:
        raise Exception("Invalid checktype in newCheck()")
    allowed = set(common_args) | set(type_args[checktype])
    # Warn user about unhandled parameters
    for key in kwargs:
        if key in allowed:
            continue
        # HTTP checks additionally accept arbitrary requestheader<NAME>
        # arguments for custom headers.
        if checktype == 'http' and key.startswith('requestheader'):
            continue
        sys.stderr.write("'%s' is not a valid argument of newCheck() "
                         "for type '%s'\n" % (key, checktype))
    # dict.items() rather than Python-2-only iteritems() so this also
    # works on Python 3.
    parameters = {'name': name, 'host': host, 'type': checktype}
    for key, value in kwargs.items():
        parameters[key] = value
    checkinfo = self.request("POST", 'checks', parameters)
    return self.getCheck(checkinfo.json()['check']['id'])
def modifyChecks(self, **kwargs):
    """Pause or change resolution for multiple checks in one bulk call.

    Keyword parameters:
        * paused -- Check should be paused
            Type: Boolean
        * resolution -- Check resolution time (in minutes)
            Type: Integer [1, 5, 15, 30, 60]
        * checkids -- Comma-separated list of identifiers for checks to be
            modified. Invalid check identifiers will be ignored.
            Type: String

    Returns the API status message.
    """
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['paused', 'resolution', 'checkids']:
            # Bug fix: the warning previously named newCheck() instead of
            # this method.
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of modifyChecks()\n')
    return self.request("PUT", "checks", kwargs).json()['message']
def deleteChecks(self, checkids):
    """Delete one or more checks. THIS CANNOT BE REVERSED!

    checkids -- comma-separated string of check identifiers to delete.
    Returns the API status message.
    """
    parameters = {'delcheckids': checkids}
    response = self.request("DELETE", "checks", parameters)
    return response.json()['message']
def credits(self):
    """Return the account's credits information ('credits' API object)."""
    response = self.request("GET", "credits")
    return response.json()['credits']
def probes(self, **kwargs):
    """Return the list of Pingdom probe servers.

    Optional keyword parameters:
        * limit -- max number of probes to return (Integer)
        * offset -- listing offset, requires limit (Integer, default 0)
        * onlyactive -- return only active probes (Boolean, default False)
        * includedeleted -- include retired probes (Boolean, default False)

    Each returned probe is a dictionary with keys: id, country, city,
    name, active, hostname, ip, countryiso.
    """
    accepted = ('limit', 'offset', 'onlyactive', 'includedeleted')
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in accepted:
            sys.stderr.write("'%s' is not a valid argument "
                             "of probes()\n" % key)
    return self.request("GET", "probes", kwargs).json()['probes']
def references(self):
    """Fetch reference data and identifiers used by other API calls.

    Returns the decoded JSON body: a dictionary with keys 'regions',
    'timezones', 'datetimeformats', 'numberformats', 'countries' and
    'phonecodes', each mapping to a list of descriptor dictionaries
    (identifier plus description/ISO-code fields).
    """
    response = self.request("GET", "reference")
    return response.json()
def traceroute(self, host, probeid):
    """Run a traceroute to *host* from the Pingdom probe *probeid*.

    Returns a dictionary with 'result' (the traceroute output),
    'probeid' and 'probedescription'.
    """
    parameters = {'host': host, 'probeid': probeid}
    response = self.request('GET', 'traceroute', parameters)
    return response.json()['traceroute']
def servertime(self):
    """Return the API server's current time as a UNIX timestamp."""
    response = self.request('GET', 'servertime')
    return response.json()['servertime']
def getContacts(self, **kwargs):
    """Return all notification contacts as PingdomContact instances.

    Optional keyword parameters:
        * limit -- max number of contacts to return (Integer, default 100)
        * offset -- listing offset, requires limit (Integer, default 0)
    """
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ('limit', 'offset'):
            sys.stderr.write("'%s' is not a valid argument "
                             "of getContacts()\n" % key)
    response = self.request("GET", "notification_contacts", kwargs)
    return [PingdomContact(self, entry)
            for entry in response.json()['contacts']]
def modifyContacts(self, contactids, paused):
    """Modify a list of contacts.

    contactids -- comma-separated string of contact identifiers
    paused -- desired paused state for those contacts

    Returns the API status message.
    """
    parameters = {'contactids': contactids, 'paused': paused}
    response = self.request("PUT", "notification_contacts", parameters)
    return response.json()['message']
def deleteContacts(self, contactids):
    """Deletes a list of contacts. CANNOT BE REVERSED!
    Provide a comma-separated list of contactid's to delete
    Returns status message
    """
    # NOTE(review): the request key 'delcheckids' looks copy-pasted from
    # deleteChecks(); the contacts endpoint likely expects a different
    # parameter name -- verify against the Pingdom API reference.
    return self.request("DELETE", "notification_contacts",
                        {'delcheckids': contactids}).json()['message']
def singleTest(self, host, checktype, **kwargs):
    """Perform a single test from a Pingdom probe against *host*.

    Please note that this method is meant to be used sparingly, not to
    set up your own monitoring solution.

    host -- hostname to test
    checktype -- one of 'http', 'httpcustom', 'tcp', 'ping', 'dns',
        'udp', 'smtp', 'pop3', 'imap'

    Optional keyword arguments:
        * probeid -- probe to use for the check
            Type: Integer
            Default: a random probe
        * See the newCheck() docstring for type-specific arguments.

    Raises Exception for an unknown checktype.
    Returns a dictionary with 'status' ('up'/'down'), 'responsetime'
    (milliseconds), 'statusdesc', 'statusdesclong', 'probeid' and
    'probedesc'.
    """
    # Valid optional keyword arguments per check type; replaces the
    # original hand-copied per-type if/elif validation.
    valid_args = {
        'http': ['probeid', 'url', 'encryption', 'port', 'auth',
                 'shouldcontain', 'shouldnotcontain', 'postdata'],
        'httpcustom': ['probeid', 'url', 'encryption', 'port', 'auth',
                       'additionalurls'],
        'tcp': ['probeid', 'port', 'stringtosend', 'stringtoexpect'],
        'ping': ['probeid'],
        'dns': ['probeid', 'expectedip', 'nameserver'],
        'udp': ['probeid', 'port', 'stringtosend', 'stringtoexpect'],
        'smtp': ['probeid', 'port', 'auth', 'stringtoexpect',
                 'encryption'],
        'pop3': ['probeid', 'port', 'stringtoexpect', 'encryption'],
        'imap': ['probeid', 'port', 'stringtoexpect', 'encryption'],
    }
    if checktype not in valid_args:
        raise Exception("Invalid checktype in singleTest()")
    # Warn user about unhandled parameters
    for key in kwargs:
        if key in valid_args[checktype]:
            continue
        # Custom request headers are accepted for plain HTTP checks only.
        if checktype == 'http' and key.startswith('requestheader'):
            continue
        sys.stderr.write("'%s' is not a valid argument of singleTest() "
                         "for type '%s'\n" % (key, checktype))
    parameters = {'host': host, 'type': checktype}
    # Bug fix: dict.iteritems() is Python 2 only; update() is portable.
    parameters.update(kwargs)
    checkinfo = self.request('GET', "single", parameters)
    return checkinfo.json()['result']
def getSettings(self):
    """Return all account-specific settings as a dictionary.

    The dictionary contains contact details (name, company, email,
    phone, address fields), localization settings (country, region,
    timezone, date/time and number formats with their identifiers),
    'accountcreated' (UNIX timestamp), 'publicreportscode' and
    'settingssaved'. See the Pingdom API reference for the full list.
    """
    response = self.request('GET', 'settings')
    return response.json()['settings']
def modifySettings(self, **kwargs):
    """Modify account-specific settings; returns the API status message.

    Accepts the account fields documented by the Pingdom API:
    contact details (firstname, lastname, company, email, phone and
    cellphone with their country code/ISO companions), address fields
    (address, address2, zip, location, state, countryiso, vatcode),
    autologout, localization identifiers (regionid, timezoneid,
    datetimeformatid, numberformatid -- see the "Reference" API
    resource) and public-report options (pubrcustomdesign,
    pubrtextcolor, pubrbackgroundcolor, pubrlogourl, pubrmonths,
    pubrshowoverview, pubrcustomdomain).
    """
    accepted = ('firstname', 'lastname', 'company', 'email',
                'cellphone', 'cellcountrycode', 'cellcountryiso',
                'phone', 'phonecountrycode', 'phonecountryiso',
                'address', 'address2', 'zip', 'location', 'state',
                'countryiso', 'vatcode', 'autologout', 'regionid',
                'timezoneid', 'datetimeformatid', 'numberformatid',
                'pubrcustomdesign', 'pubrtextcolor',
                'pubrbackgroundcolor', 'pubrlogourl', 'pubrmonths',
                'pubrshowoverview', 'pubrcustomdomain')
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in accepted:
            sys.stderr.write("'%s' is not a valid argument "
                             "of modifySettings()\n" % key)
    return self.request('PUT', 'settings', kwargs).json()['message']
def getEmailReports(self):
    """Return all email report subscriptions as PingdomEmailReport
    instances."""
    response = self.request('GET', 'reports.email')
    return [PingdomEmailReport(self, entry)
            for entry in response.json()['subscriptions']]
def newEmailReport(self, name, **kwargs):
    """Create a new email report named *name*.

    Optional keyword parameters:
        * checkid -- check identifier; omit for an overview report
            Type: Integer
        * frequency -- report frequency: 'monthly', 'weekly' or 'daily'
            Type: String
        * contactids -- comma-separated receiving contact identifiers
            Type: String
        * additionalemails -- comma-separated additional receiving emails
            Type: String

    Returns the API status message.
    """
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['checkid', 'frequency', 'contactids',
                       'additionalemails']:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of newEmailReport()\n')
    parameters = {'name': name}
    # Bug fix: dict.iteritems() does not exist on Python 3; update()
    # is equivalent and works on both major versions.
    parameters.update(kwargs)
    return self.request('POST', 'reports.email',
                        parameters).json()['message']
def getPublicReports(self):
    """Return the list of public (web-based) reports.

    Each entry is a dictionary with 'checkid', 'checkname' and
    'reporturl'.
    """
    response = self.request('GET', 'reports.public')
    return response.json()['public']
def getSharedReports(self):
    """Return all shared reports (banners) as PingdomSharedReport
    instances."""
    banners = self.request(
        'GET', 'reports.shared').json()['shared']['banners']
    return [PingdomSharedReport(self, banner) for banner in banners]
def newSharedReport(self, checkid, **kwargs):
    """Create a shared report (banner) for check *checkid*.

    Optional keyword parameters:
        * auto -- automatic period; when False the explicit period
          fields below are required
            Type: Boolean
        * type -- banner type: 'uptime' or 'response'
            Type: String
        * fromyear / frommonth / fromday -- period start
            Type: Integer
        * toyear / tomonth / today -- period end
            Type: Integer

    Returns the API status message.
    """
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['auto', 'type', 'fromyear', 'frommonth', 'fromday',
                       'toyear', 'tomonth', 'today', 'sharedtype']:
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of newSharedReport()\n')
    parameters = {'checkid': checkid, 'sharedtype': 'banner'}
    # Bug fix: dict.iteritems() is Python 2 only; update() is portable.
    parameters.update(kwargs)
    return self.request('POST', 'reports.shared',
                        parameters).json()['message']
|
KennethWilke/PingdomLib | pingdomlib/pingdom.py | Pingdom.modifyContacts | python | def modifyContacts(self, contactids, paused):
response = self.request("PUT", "notification_contacts", {'contactids': contactids,
'paused': paused})
return response.json()['message'] | Modifies a list of contacts.
Provide comma separated list of contact ids and desired paused state
Returns status message | train | https://github.com/KennethWilke/PingdomLib/blob/3ed1e481f9c9d16b032558d62fb05c2166e162ed/pingdomlib/pingdom.py#L846-L856 | [
"def request(self, method, url, parameters=dict()):\n \"\"\"Requests wrapper function\"\"\"\n\n # The requests library uses urllib, which serializes to \"True\"/\"False\" while Pingdom requires lowercase\n parameters = self._serializeBooleans(parameters)\n\n headers = {'App-Key': self.apikey}\n if se... | class Pingdom(object):
"""Main connection object to interact with pingdom
Attributes:
* pushChanges -- This boolean controls if changes are automatically
pushed to pingdom
* shortlimit -- String containing short api rate limit details
* longlimit -- String containing long api rate limit details
"""
def __init__(self, username, password, apikey, accountemail=None,
             pushchanges=True, server=server_address):
    """Set up API credentials, the base URL and rate-limit bookkeeping.

    username/password -- HTTP basic-auth credentials
    apikey -- sent as the App-Key header on every request
    accountemail -- optional multi-account email (Account-Email header)
    pushchanges -- whether changes are pushed to pingdom automatically
    server -- API server address; combined with api_version below
    """
    self.pushChanges = pushchanges
    self.username = username
    self.password = password
    self.apikey = apikey
    self.accountemail = accountemail
    # Base URL all request() calls are made against.
    self.url = '%s/api/%s/' % (server, api_version)
    # Most recent rate-limit headers, updated by request().
    self.shortlimit = ''
    self.longlimit = ''
@staticmethod
def _serializeBooleans(params):
    """Return a copy of *params* with booleans as lowercase strings.

    The Pingdom API requires 'true'/'false', whereas urllib would send
    'True'/'False'. All other values are passed through unchanged; the
    input dictionary is not modified.
    """
    serialized = {}
    for name, value in params.items():
        if value is True:
            value = 'true'
        elif value is False:
            value = 'false'
        serialized[name] = value
    # Bug fix: removed an unreachable leftover loop that followed this
    # return statement in the original implementation.
    return serialized
def request(self, method, url, parameters=dict()):
    """Send an authenticated request to the Pingdom API.

    method -- HTTP verb: 'GET', 'POST', 'PUT' or 'DELETE'
        (case-insensitive; anything else raises Exception)
    url -- resource path appended to the API base URL
    parameters -- request parameters; sent as the query string for
        GET/DELETE and as the form body for POST/PUT. The shared
        default dict is safe here because the name is rebound (not
        mutated) below.

    Side effects: updates self.shortlimit / self.longlimit from the
    rate-limit response headers. Non-200 responses are logged to
    stderr and re-raised via response.raise_for_status().
    Returns the requests.Response object.
    """
    # The requests library uses urllib, which serializes to "True"/"False" while Pingdom requires lowercase
    parameters = self._serializeBooleans(parameters)
    headers = {'App-Key': self.apikey}
    if self.accountemail:
        headers.update({'Account-Email': self.accountemail})
    # Method selection handling
    if method.upper() == 'GET':
        response = requests.get(self.url + url, params=parameters,
                                auth=(self.username, self.password),
                                headers=headers)
    elif method.upper() == 'POST':
        response = requests.post(self.url + url, data=parameters,
                                 auth=(self.username, self.password),
                                 headers=headers)
    elif method.upper() == 'PUT':
        response = requests.put(self.url + url, data=parameters,
                                auth=(self.username, self.password),
                                headers=headers)
    elif method.upper() == 'DELETE':
        response = requests.delete(self.url + url, params=parameters,
                                   auth=(self.username, self.password),
                                   headers=headers)
    else:
        raise Exception("Invalid method in pingdom request")
    # Store pingdom api limits (falls back to the previous value when
    # the header is absent).
    self.shortlimit = response.headers.get(
        'Req-Limit-Short',
        self.shortlimit)
    self.longlimit = response.headers.get(
        'Req-Limit-Long',
        self.longlimit)
    # Verify OK response
    if response.status_code != 200:
        sys.stderr.write('ERROR from %s: %d' % (response.url,
                                                response.status_code))
        sys.stderr.write('Returned data: %s\n' % response.json())
        response.raise_for_status()
    return response
def actions(self, **parameters):
    """Return actions (alerts) generated for your account.

    Optional keyword parameters (all default to unfiltered):
        * from / to -- UNIX timestamps bounding generation time (Integer)
        * limit -- max results, up to 300 (Integer, default 100)
        * offset -- listing offset (Integer, default 0)
        * checkids / contactids -- comma-separated identifier filters
          (String)
        * status -- comma-separated statuses: 'sent', 'delivered',
          'error', 'not_delivered', 'no_credits' (String)
        * via -- comma-separated mediums: 'email', 'sms', 'twitter',
          'iphone', 'android' (String)

    Returns a dictionary {'alerts': [...]}; each alert carries
    'contactname', 'contactid', 'checkid', 'time', 'via', 'status',
    'messageshort', 'messagefull', 'sentto' and 'charged'.
    """
    accepted = ('from', 'to', 'limit', 'offset', 'checkids',
                'contactids', 'status', 'via')
    # Warn user about unhandled parameters
    for key in parameters:
        if key not in accepted:
            sys.stderr.write('%s not a valid argument for actions()\n'
                             % key)
    return self.request('GET', 'actions', parameters).json()['actions']
def alerts(self, **parameters):
    """Shorthand for actions(): returns just the list of alerts.

    Accepts the same keyword parameters as actions().
    """
    action_data = self.actions(**parameters)
    return action_data['alerts']
def getChecks(self, **parameters):
    """Return all checks from pingdom as PingdomCheck instances.

    Optional keyword parameters:
        * limit -- max number of checks, up to 25000 (Integer,
          default 25000)
        * offset -- listing offset, requires limit (Integer, default 0)
        * tags -- filter listing by tag(s) (String)
    """
    # Warn user about unhandled parameters
    for key in parameters:
        if key not in ('limit', 'offset', 'tags'):
            sys.stderr.write('%s not a valid argument for getChecks()\n'
                             % key)
    response = self.request('GET', 'checks', parameters)
    return [PingdomCheck(self, entry)
            for entry in response.json()['checks']]
def getCheck(self, checkid):
    """Return a PingdomCheck with full details fetched for *checkid*."""
    pingdom_check = PingdomCheck(self, {'id': checkid})
    pingdom_check.getDetails()
    return pingdom_check
def getResults(self, checkid):
    """Return detailed raw results (decoded JSON) for check *checkid*."""
    return self.request('GET', 'results/%s' % checkid).json()
def newCheck(self, name, host, checktype='http', **kwargs):
    """Create a new check and return it as a PingdomCheck instance.

    name -- new check name
    host -- target hostname
    checktype -- one of 'http', 'httpcustom', 'tcp', 'ping', 'dns',
        'udp', 'smtp', 'pop3', 'imap' (default 'http')

    Common optional keyword parameters (all types):
        * paused -- create the check paused (Boolean, default False)
        * resolution -- resolution in minutes: 1, 5, 15, 30 or 60
          (Integer, default 5)
        * contactids -- comma-separated contact IDs (String)
        * sendtoemail / sendtosms / sendtotwitter / sendtoiphone /
          sendtoandroid -- alert delivery toggles (Boolean, default
          False)
        * sendnotificationwhendown -- notify after this many down
          results (Integer, default 2)
        * notifyagainevery -- results to wait between notices
          (Integer, default 0)
        * notifywhenbackup -- notify when back up (Boolean, default
          True)
        * use_legacy_notifications -- use the old notifications instead
          of BeepManager (Boolean, default False)

    Type-specific optional keyword parameters:
        * http -- url (default '/'), encryption (default False),
          port (default 80), auth ('user:password'), shouldcontain,
          shouldnotcontain (mutually exclusive), postdata,
          requestheader<NAME> ('Header:Value'), alert_policy,
          autoresolve
        * httpcustom -- url (mandatory), encryption, port, auth,
          additionalurls (colon-separated, hostnames included)
        * tcp -- port (mandatory), stringtosend, stringtoexpect,
          alert_policy, autoresolve
        * dns -- expectedip (mandatory), nameserver (mandatory)
        * udp -- port (mandatory), stringtosend, stringtoexpect
        * smtp -- port (default 25), auth, stringtoexpect, encryption
        * pop3 -- port (default 110), stringtoexpect, encryption
        * imap -- port (default 143), stringtoexpect, encryption

    Raises Exception for an unknown checktype.
    """
    # Optional arguments accepted by every check type.
    common = ['paused', 'resolution', 'contactids', 'sendtoemail',
              'sendtosms', 'sendtotwitter', 'sendtoiphone',
              'sendtoandroid', 'sendnotificationwhendown',
              'notifyagainevery', 'notifywhenbackup', 'type',
              'hostname', 'use_legacy_notifications']
    # Extra arguments accepted per check type; replaces the original
    # hand-copied per-type if/elif validation blocks.
    extras = {
        'http': ['alert_policy', 'autoresolve', 'url', 'encryption',
                 'port', 'auth', 'shouldcontain', 'shouldnotcontain',
                 'postdata'],
        'httpcustom': ['url', 'encryption', 'port', 'auth',
                       'additionalurls'],
        'tcp': ['alert_policy', 'autoresolve', 'port', 'stringtosend',
                'stringtoexpect'],
        'ping': [],
        'dns': ['expectedip', 'nameserver'],
        'udp': ['port', 'stringtosend', 'stringtoexpect'],
        'smtp': ['port', 'auth', 'stringtoexpect', 'encryption'],
        'pop3': ['port', 'stringtoexpect', 'encryption'],
        'imap': ['port', 'stringtoexpect', 'encryption'],
    }
    if checktype not in extras:
        raise Exception("Invalid checktype in newCheck()")
    allowed = common + extras[checktype]
    # Warn user about unhandled parameters
    for key in kwargs:
        if key in allowed:
            continue
        # Custom request headers are only valid for plain HTTP checks.
        if checktype == 'http' and key.startswith('requestheader'):
            continue
        sys.stderr.write("'%s' is not a valid argument of newCheck() "
                         "for type '%s'\n" % (key, checktype))
    parameters = {'name': name, 'host': host, 'type': checktype}
    # Bug fix: dict.iteritems() is Python 2 only; update() is portable.
    parameters.update(kwargs)
    checkinfo = self.request("POST", 'checks', parameters)
    return self.getCheck(checkinfo.json()['check']['id'])
def modifyChecks(self, **kwargs):
    """Pause or change resolution for multiple checks in one bulk call.

    Keyword parameters:
        * paused -- Check should be paused
            Type: Boolean
        * resolution -- Check resolution time (in minutes)
            Type: Integer [1, 5, 15, 30, 60]
        * checkids -- Comma-separated list of identifiers for checks to be
            modified. Invalid check identifiers will be ignored.
            Type: String

    Returns the API status message.
    """
    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['paused', 'resolution', 'checkids']:
            # Bug fix: the warning previously named newCheck() instead of
            # this method.
            sys.stderr.write("'%s'" % key + ' is not a valid argument ' +
                             'of modifyChecks()\n')
    return self.request("PUT", "checks", kwargs).json()['message']
def deleteChecks(self, checkids):
    """Permanently remove a set of checks. THIS CANNOT BE REVERSED!

    checkids is a comma-separated string of check identifiers to delete.
    Returns the API status message.
    """

    response = self.request("DELETE", "checks", {'delcheckids': checkids})
    return response.json()['message']
def credits(self):
    """Return the credits list for this account."""

    response = self.request("GET", "credits")
    return response.json()['credits']
def probes(self, **kwargs):
    """Return a list of all Pingdom probe servers.

    Optional parameters:

        * limit -- Limits the number of returned probes to the
            specified quantity
                Type: Integer
        * offset -- Offset for listing (requires limit)
                Type: Integer, default 0
        * onlyactive -- Return only active probes
                Type: Boolean, default False
        * includedeleted -- Include old probes that are no longer in use
                Type: Boolean, default False

    Each returned entry is a dictionary with the keys 'id', 'country',
    'city', 'name', 'active', 'hostname', 'ip' and 'countryiso'.
    """

    # Warn user about unhandled parameters
    valid = ('limit', 'offset', 'onlyactive', 'includedeleted')
    for key in kwargs:
        if key not in valid:
            sys.stderr.write("'%s' is not a valid argument of "
                             "probes()\n" % key)

    return self.request("GET", "probes", kwargs).json()['probes']
def references(self):
    """Fetch the reference tables of regions, time zones and
    date/time/number formats together with their identifiers.

    Returns a dictionary with the keys 'regions', 'timezones',
    'datetimeformats', 'numberformats', 'countries' and 'phonecodes'.
    Each value is a list of dictionaries carrying an identifier plus a
    description (regions additionally reference their country,
    date/time format, number format and time zone identifiers;
    countries carry an ISO code; phone codes carry a country id, name
    and area phone code).
    """

    return self.request("GET", "reference").json()
def traceroute(self, host, probeid):
    """Run a traceroute to *host* from the specified Pingdom probe.

    Returned structure:
        {
            'result'           : <String>  Traceroute output
            'probeid'          : <Integer> Probe identifier
            'probedescription' : <String>  Probe description
        }
    """

    params = {'host': host, 'probeid': probeid}
    return self.request('GET', 'traceroute', params).json()['traceroute']
def servertime(self):
    """Return the API server's current time as a UNIX timestamp."""

    response = self.request('GET', 'servertime')
    return response.json()['servertime']
def getContacts(self, **kwargs):
    """Return every notification contact as a list of PingdomContact
    instances.

    Optional parameters:

        * limit -- Limits the number of returned contacts to the
            specified quantity
                Type: Integer, default 100
        * offset -- Offset for listing (requires limit)
                Type: Integer, default 0

    Each contact record carries (among others) 'id', 'name', 'email',
    'cellphone', 'countryiso', 'defaultsmsprovider', 'directtwitter',
    'twitteruser', 'paused', 'iphonetokens' and 'androidtokens'.
    """

    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ('limit', 'offset'):
            sys.stderr.write("'%s' is not a valid argument of "
                             "getContacts()\n" % key)

    contacts = self.request("GET", "notification_contacts",
                            kwargs).json()['contacts']
    return [PingdomContact(self, contact) for contact in contacts]
def newContact(self, name, **kwargs):
    """Create a new notification contact.

    Provide the new contact's name plus any optional arguments; a new
    PingdomContact instance is returned.

    Optional parameters:

        * email -- Contact email address
                Type: String
        * cellphone -- Cellphone number without country code; some
            countries expect leading zeroes removed (requires
            countrycode and countryiso)
                Type: String
        * countrycode -- Cellphone country code (requires cellphone and
            countryiso)
                Type: String
        * countryiso -- Cellphone country ISO code, e.g. US, GB or SE
            (requires cellphone and countrycode)
                Type: String
        * defaultsmsprovider -- Default SMS provider
                Type: String ['clickatell', 'bulksms', 'esendex',
                              'cellsynt']
        * directtwitter -- Send tweets as direct messages
                Type: Boolean, default True
        * twitteruser -- Twitter user
                Type: String
    """

    # Warn user about unhandled parameters
    valid = ('email', 'cellphone', 'countrycode', 'countryiso',
             'defaultsmsprovider', 'directtwitter', 'twitteruser')
    for key in kwargs:
        if key not in valid:
            sys.stderr.write("'%s' is not a valid argument of "
                             "newContact()\n" % key)

    kwargs['name'] = name
    response = self.request("POST", "notification_contacts", kwargs)
    return PingdomContact(self, response.json()['contact'])
def deleteContacts(self, contactids):
    """Delete a list of contacts. CANNOT BE REVERSED!

    contactids is a comma-separated string of contact identifiers to
    delete.  Returns the API status message.
    """

    # NOTE(review): the request parameter is named 'delcheckids' even
    # though this deletes contacts -- it looks copy-pasted from
    # deleteChecks(); confirm against the Pingdom API docs before
    # changing it.
    response = self.request("DELETE", "notification_contacts",
                            {'delcheckids': contactids})
    return response.json()['message']
def singleTest(self, host, checktype, **kwargs):
    """Perform a single test from a Pingdom probe against a target.

    Please note that this method is meant to be used sparingly, not to
    set up your own monitoring solution.

    Provide the hostname to test and the check type; see the newCheck()
    docstring for the type-specific optional arguments.  Types
    available: http, httpcustom, tcp, ping, dns, udp, smtp, pop3, imap.

    Optional arguments:

        * probeid -- Probe to use for check
                Type: Integer
                Default: a random probe

    Returned structure:
        {
            'status'         : <String>  Test result ['up', 'down']
            'responsetime'   : <Integer> Response time in milliseconds
            'statusdesc'     : <String>  Short status description
            'statusdesclong' : <String>  Long status description
            'probeid'        : <Integer> Probe identifier
            'probedesc'      : <String>  Probe description
        }

    Raises Exception for an unknown check type.
    """

    # Valid optional keyword arguments per check type.  This table
    # replaces a long if/elif chain that repeated the same warning
    # boilerplate for every type.
    valid_args = {
        'http': ['probeid', 'url', 'encryption', 'port', 'auth',
                 'shouldcontain', 'shouldnotcontain', 'postdata'],
        'httpcustom': ['probeid', 'url', 'encryption', 'port', 'auth',
                       'additionalurls'],
        'tcp': ['probeid', 'port', 'stringtosend', 'stringtoexpect'],
        'ping': ['probeid'],
        'dns': ['probeid', 'expectedip', 'nameserver'],
        'udp': ['probeid', 'port', 'stringtosend', 'stringtoexpect'],
        'smtp': ['probeid', 'port', 'auth', 'stringtoexpect',
                 'encryption'],
        'pop3': ['probeid', 'port', 'stringtoexpect', 'encryption'],
        'imap': ['probeid', 'port', 'stringtoexpect', 'encryption'],
    }
    if checktype not in valid_args:
        raise Exception("Invalid checktype in singleTest()")

    # Warn user about unhandled parameters; 'http' additionally accepts
    # any number of requestheaderX arguments.
    allowed = valid_args[checktype]
    for key in kwargs:
        if key in allowed:
            continue
        if checktype == 'http' and key.startswith('requestheader'):
            continue
        sys.stderr.write("'%s' is not a valid argument of singleTest() "
                         "for type '%s'\n" % (key, checktype))

    parameters = {'host': host, 'type': checktype}
    # dict.items() instead of the Python 2-only iteritems() -- works on
    # both Python 2 and 3.
    for key, value in kwargs.items():
        parameters[key] = value
    return self.request('GET', 'single', parameters).json()['result']
def getSettings(self):
    """Return all account-specific settings as a dictionary.

    The dictionary includes, among other keys: contact details
    ('firstname', 'lastname', 'company', 'email', 'phone', 'cellphone'
    plus country ISO codes and address fields), localization data
    ('region', 'regionid', 'dateformat', 'timeformat',
    'datetimeformatid', 'numberformat', 'numberformatexample',
    'numberformatid'), nested 'country' and 'timezone' dictionaries
    (name/description with identifiers), 'vatcode', 'autologout',
    'accountcreated' (creation timestamp), 'publicreportscode' (URL
    code) and 'settingssaved' (True once the user has saved initial
    settings in the control panel).
    """

    response = self.request('GET', 'settings')
    return response.json()['settings']
def modifySettings(self, **kwargs):
    """Modify account-specific settings; returns the API status message.

    Optional parameters: firstname, lastname, company, email (note:
    your email is also used for authentication, e.g. this API and the
    Pingdom Panel login), cellphone with cellcountrycode and
    cellcountryiso, phone with phonecountrycode and phonecountryiso,
    address, address2, zip, location, state, countryiso, vatcode (for
    certain EU countries, e.g. SE123456789), autologout (Boolean),
    regionid / timezoneid / datetimeformatid / numberformatid (see the
    API resource "Reference"; regionid 0 means custom/none), and the
    public-report options pubrcustomdesign, pubrtextcolor,
    pubrbackgroundcolor, pubrlogourl (currently disabled for public
    use), pubrmonths ('none', 'all', '3'), pubrshowoverview and
    pubrcustomdomain (must be a DNS CNAME targeting
    stats.pingdom.com).
    """

    # Warn user about unhandled parameters
    valid = ('firstname', 'lastname', 'company', 'email', 'cellphone',
             'cellcountrycode', 'cellcountryiso', 'phone',
             'phonecountrycode', 'phonecountryiso', 'address',
             'address2', 'zip', 'location', 'state', 'countryiso',
             'vatcode', 'autologout', 'regionid', 'timezoneid',
             'datetimeformatid', 'numberformatid', 'pubrcustomdesign',
             'pubrtextcolor', 'pubrbackgroundcolor', 'pubrlogourl',
             'pubrmonths', 'pubrshowoverview', 'pubrcustomdomain')
    for key in kwargs:
        if key not in valid:
            sys.stderr.write("'%s' is not a valid argument of "
                             "modifySettings()\n" % key)

    return self.request('PUT', 'settings', kwargs).json()['message']
def getEmailReports(self):
    """Return all email report subscriptions as a list of
    PingdomEmailReport instances."""

    subscriptions = self.request(
        'GET', 'reports.email').json()['subscriptions']
    return [PingdomEmailReport(self, sub) for sub in subscriptions]
def newEmailReport(self, name, **kwargs):
    """Create a new email report and return the API status message.

    Optional parameters:

        * checkid -- Check identifier. If omitted, this will be an
            overview report
                Type: Integer
        * frequency -- Report frequency
                Type: String ['monthly', 'weekly', 'daily']
        * contactids -- Comma separated list of receiving contact
            identifiers
                Type: String
        * additionalemails -- Comma separated list of additional
            receiving emails
                Type: String
    """

    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['checkid', 'frequency', 'contactids',
                       'additionalemails']:
            sys.stderr.write("'%s' is not a valid argument of "
                             "newEmailReport()\n" % key)

    # Merge via dict.update() -- the previous kwargs.iteritems() loop
    # was Python 2 only.
    parameters = {'name': name}
    parameters.update(kwargs)
    return self.request('POST', 'reports.email',
                        parameters).json()['message']
def getPublicReports(self):
    """Return the list of public (web-based) reports.

    Each entry is a dictionary:
        {
            'checkid'   : <Integer> Check identifier
            'checkname' : <String>  Check name
            'reporturl' : <String>  URL to report
        }
    """

    return self.request('GET', 'reports.public').json()['public']
def getSharedReports(self):
    """Return all shared reports (banners) as a list of
    PingdomSharedReport instances."""

    banners = self.request('GET',
                           'reports.shared').json()['shared']['banners']
    return [PingdomSharedReport(self, banner) for banner in banners]
def newSharedReport(self, checkid, **kwargs):
    """Create a shared report (banner) for a check.

    Returns the API status message.

    Optional parameters:

        * auto -- Automatic period (if False, requires fromyear,
            frommonth, fromday, toyear, tomonth, today)
                Type: Boolean
        * type -- Banner type
                Type: String ['uptime', 'response']
        * fromyear / frommonth / fromday -- Period start
                Type: Integer
        * toyear / tomonth / today -- Period end
                Type: Integer
    """

    # Warn user about unhandled parameters
    for key in kwargs:
        if key not in ['auto', 'type', 'fromyear', 'frommonth',
                       'fromday', 'toyear', 'tomonth', 'today',
                       'sharedtype']:
            sys.stderr.write("'%s' is not a valid argument of "
                             "newSharedReport()\n" % key)

    # Merge via dict.update() -- the previous kwargs.iteritems() loop
    # was Python 2 only.
    parameters = {'checkid': checkid, 'sharedtype': 'banner'}
    parameters.update(kwargs)
    return self.request('POST', 'reports.shared',
                        parameters).json()['message']
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.