repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
hehongliang/tensorflow | tensorflow/contrib/timeseries/examples/multivariate_test.py | 91 | 1330 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests that the TensorFlow parts of the multivariate example run."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.timeseries.examples import multivariate
from tensorflow.python.platform import test
class MultivariateExampleTest(test.TestCase):
  """Structural smoke test for the multivariate time-series example."""

  def test_shapes_structural(self):
    # Train only briefly (5 steps): we only verify output shapes here,
    # not model quality.
    times, values = multivariate.multivariate_train_and_sample(
        export_directory=self.get_temp_dir(), training_steps=5)
    self.assertAllEqual([1100], times.shape)
    # Each of the 1100 timesteps carries a 5-dimensional observation.
    self.assertAllEqual([1100, 5], values.shape)
if __name__ == "__main__":
  # Allow running the test file directly with `python multivariate_test.py`.
  test.main()
| apache-2.0 |
g19-hs/personfinder | app/unidecode/x04f.py | 252 | 4607 | data = (
'Zhong ', # 0x00
'Qi ', # 0x01
'Pei ', # 0x02
'Yu ', # 0x03
'Diao ', # 0x04
'Dun ', # 0x05
'Wen ', # 0x06
'Yi ', # 0x07
'Xin ', # 0x08
'Kang ', # 0x09
'Yi ', # 0x0a
'Ji ', # 0x0b
'Ai ', # 0x0c
'Wu ', # 0x0d
'Ji ', # 0x0e
'Fu ', # 0x0f
'Fa ', # 0x10
'Xiu ', # 0x11
'Jin ', # 0x12
'Bei ', # 0x13
'Dan ', # 0x14
'Fu ', # 0x15
'Tang ', # 0x16
'Zhong ', # 0x17
'You ', # 0x18
'Huo ', # 0x19
'Hui ', # 0x1a
'Yu ', # 0x1b
'Cui ', # 0x1c
'Chuan ', # 0x1d
'San ', # 0x1e
'Wei ', # 0x1f
'Chuan ', # 0x20
'Che ', # 0x21
'Ya ', # 0x22
'Xian ', # 0x23
'Shang ', # 0x24
'Chang ', # 0x25
'Lun ', # 0x26
'Cang ', # 0x27
'Xun ', # 0x28
'Xin ', # 0x29
'Wei ', # 0x2a
'Zhu ', # 0x2b
'[?] ', # 0x2c
'Xuan ', # 0x2d
'Nu ', # 0x2e
'Bo ', # 0x2f
'Gu ', # 0x30
'Ni ', # 0x31
'Ni ', # 0x32
'Xie ', # 0x33
'Ban ', # 0x34
'Xu ', # 0x35
'Ling ', # 0x36
'Zhou ', # 0x37
'Shen ', # 0x38
'Qu ', # 0x39
'Si ', # 0x3a
'Beng ', # 0x3b
'Si ', # 0x3c
'Jia ', # 0x3d
'Pi ', # 0x3e
'Yi ', # 0x3f
'Si ', # 0x40
'Ai ', # 0x41
'Zheng ', # 0x42
'Dian ', # 0x43
'Han ', # 0x44
'Mai ', # 0x45
'Dan ', # 0x46
'Zhu ', # 0x47
'Bu ', # 0x48
'Qu ', # 0x49
'Bi ', # 0x4a
'Shao ', # 0x4b
'Ci ', # 0x4c
'Wei ', # 0x4d
'Di ', # 0x4e
'Zhu ', # 0x4f
'Zuo ', # 0x50
'You ', # 0x51
'Yang ', # 0x52
'Ti ', # 0x53
'Zhan ', # 0x54
'He ', # 0x55
'Bi ', # 0x56
'Tuo ', # 0x57
'She ', # 0x58
'Yu ', # 0x59
'Yi ', # 0x5a
'Fo ', # 0x5b
'Zuo ', # 0x5c
'Kou ', # 0x5d
'Ning ', # 0x5e
'Tong ', # 0x5f
'Ni ', # 0x60
'Xuan ', # 0x61
'Qu ', # 0x62
'Yong ', # 0x63
'Wa ', # 0x64
'Qian ', # 0x65
'[?] ', # 0x66
'Ka ', # 0x67
'[?] ', # 0x68
'Pei ', # 0x69
'Huai ', # 0x6a
'He ', # 0x6b
'Lao ', # 0x6c
'Xiang ', # 0x6d
'Ge ', # 0x6e
'Yang ', # 0x6f
'Bai ', # 0x70
'Fa ', # 0x71
'Ming ', # 0x72
'Jia ', # 0x73
'Er ', # 0x74
'Bing ', # 0x75
'Ji ', # 0x76
'Hen ', # 0x77
'Huo ', # 0x78
'Gui ', # 0x79
'Quan ', # 0x7a
'Tiao ', # 0x7b
'Jiao ', # 0x7c
'Ci ', # 0x7d
'Yi ', # 0x7e
'Shi ', # 0x7f
'Xing ', # 0x80
'Shen ', # 0x81
'Tuo ', # 0x82
'Kan ', # 0x83
'Zhi ', # 0x84
'Gai ', # 0x85
'Lai ', # 0x86
'Yi ', # 0x87
'Chi ', # 0x88
'Kua ', # 0x89
'Guang ', # 0x8a
'Li ', # 0x8b
'Yin ', # 0x8c
'Shi ', # 0x8d
'Mi ', # 0x8e
'Zhu ', # 0x8f
'Xu ', # 0x90
'You ', # 0x91
'An ', # 0x92
'Lu ', # 0x93
'Mou ', # 0x94
'Er ', # 0x95
'Lun ', # 0x96
'Tong ', # 0x97
'Cha ', # 0x98
'Chi ', # 0x99
'Xun ', # 0x9a
'Gong ', # 0x9b
'Zhou ', # 0x9c
'Yi ', # 0x9d
'Ru ', # 0x9e
'Jian ', # 0x9f
'Xia ', # 0xa0
'Jia ', # 0xa1
'Zai ', # 0xa2
'Lu ', # 0xa3
'Ko ', # 0xa4
'Jiao ', # 0xa5
'Zhen ', # 0xa6
'Ce ', # 0xa7
'Qiao ', # 0xa8
'Kuai ', # 0xa9
'Chai ', # 0xaa
'Ning ', # 0xab
'Nong ', # 0xac
'Jin ', # 0xad
'Wu ', # 0xae
'Hou ', # 0xaf
'Jiong ', # 0xb0
'Cheng ', # 0xb1
'Zhen ', # 0xb2
'Zuo ', # 0xb3
'Chou ', # 0xb4
'Qin ', # 0xb5
'Lu ', # 0xb6
'Ju ', # 0xb7
'Shu ', # 0xb8
'Ting ', # 0xb9
'Shen ', # 0xba
'Tuo ', # 0xbb
'Bo ', # 0xbc
'Nan ', # 0xbd
'Hao ', # 0xbe
'Bian ', # 0xbf
'Tui ', # 0xc0
'Yu ', # 0xc1
'Xi ', # 0xc2
'Cu ', # 0xc3
'E ', # 0xc4
'Qiu ', # 0xc5
'Xu ', # 0xc6
'Kuang ', # 0xc7
'Ku ', # 0xc8
'Wu ', # 0xc9
'Jun ', # 0xca
'Yi ', # 0xcb
'Fu ', # 0xcc
'Lang ', # 0xcd
'Zu ', # 0xce
'Qiao ', # 0xcf
'Li ', # 0xd0
'Yong ', # 0xd1
'Hun ', # 0xd2
'Jing ', # 0xd3
'Xian ', # 0xd4
'San ', # 0xd5
'Pai ', # 0xd6
'Su ', # 0xd7
'Fu ', # 0xd8
'Xi ', # 0xd9
'Li ', # 0xda
'Fu ', # 0xdb
'Ping ', # 0xdc
'Bao ', # 0xdd
'Yu ', # 0xde
'Si ', # 0xdf
'Xia ', # 0xe0
'Xin ', # 0xe1
'Xiu ', # 0xe2
'Yu ', # 0xe3
'Ti ', # 0xe4
'Che ', # 0xe5
'Chou ', # 0xe6
'[?] ', # 0xe7
'Yan ', # 0xe8
'Lia ', # 0xe9
'Li ', # 0xea
'Lai ', # 0xeb
'[?] ', # 0xec
'Jian ', # 0xed
'Xiu ', # 0xee
'Fu ', # 0xef
'He ', # 0xf0
'Ju ', # 0xf1
'Xiao ', # 0xf2
'Pai ', # 0xf3
'Jian ', # 0xf4
'Biao ', # 0xf5
'Chu ', # 0xf6
'Fei ', # 0xf7
'Feng ', # 0xf8
'Ya ', # 0xf9
'An ', # 0xfa
'Bei ', # 0xfb
'Yu ', # 0xfc
'Xin ', # 0xfd
'Bi ', # 0xfe
'Jian ', # 0xff
)
| apache-2.0 |
mayankcu/Django-social | venv/Lib/encodings/iso8859_9.py | 593 | 13412 | """ Python Character Mapping Codec iso8859_9 generated from 'MAPPINGS/ISO8859/8859-9.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless ISO 8859-9 codec backed by this module's charmap tables."""

    def encode(self, input, errors='strict'):
        # Text -> bytes through the encoding table derived from the
        # decoding table defined below.
        encoded = codecs.charmap_encode(input, errors, encoding_table)
        return encoded

    def decode(self, input, errors='strict'):
        # Bytes -> text through the 256-entry decoding table.
        decoded = codecs.charmap_decode(input, errors, decoding_table)
        return decoded
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; the charmap is stateless so chunks are independent."""

    def encode(self, input, final=False):
        # charmap_encode returns (output, length); only the bytes are needed.
        data, _consumed = codecs.charmap_encode(input, self.errors, encoding_table)
        return data
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; each byte maps independently, so no state is kept."""

    def decode(self, input, final=False):
        # charmap_decode returns (output, length); only the text is needed.
        text, _consumed = codecs.charmap_decode(input, self.errors, decoding_table)
        return text
class StreamWriter(Codec, codecs.StreamWriter):
    # All encoding behavior is inherited from Codec.
    pass
class StreamReader(Codec, codecs.StreamReader):
    # All decoding behavior is inherited from Codec.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record the codecs registry uses for 'iso8859-9'."""
    # Codec holds no state, so one instance can serve both directions.
    codec = Codec()
    return codecs.CodecInfo(
        name='iso8859-9',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\xa5' # 0xA5 -> YEN SIGN
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\xaf' # 0xAF -> MACRON
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\xb4' # 0xB4 -> ACUTE ACCENT
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\xb8' # 0xB8 -> CEDILLA
u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
u'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
u'\xbf' # 0xBF -> INVERTED QUESTION MARK
u'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
u'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\u011e' # 0xD0 -> LATIN CAPITAL LETTER G WITH BREVE
u'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u0130' # 0xDD -> LATIN CAPITAL LETTER I WITH DOT ABOVE
u'\u015e' # 0xDE -> LATIN CAPITAL LETTER S WITH CEDILLA
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
u'\u011f' # 0xF0 -> LATIN SMALL LETTER G WITH BREVE
u'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
u'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u0131' # 0xFD -> LATIN SMALL LETTER DOTLESS I
u'\u015f' # 0xFE -> LATIN SMALL LETTER S WITH CEDILLA
u'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
# Inverse mapping (unicode -> byte) built from the decoding table above.
encoding_table = codecs.charmap_build(decoding_table)
| bsd-3-clause |
Brainiq7/Ananse | ananse_dl/extractor/franceinter.py | 139 | 1686 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import int_or_none
class FranceInterIE(InfoExtractor):
    """Extractor for replay (reecouter) audio from franceinter.fr player pages."""
    _VALID_URL = r'http://(?:www\.)?franceinter\.fr/player/reecouter\?play=(?P<id>[0-9]+)'
    _TEST = {
        'url': 'http://www.franceinter.fr/player/reecouter?play=793962',
        'md5': '4764932e466e6f6c79c317d2e74f6884',
        "info_dict": {
            'id': '793962',
            'ext': 'mp3',
            'title': 'L’Histoire dans les jeux vidéo',
            'description': 'md5:7e93ddb4451e7530022792240a3049c7',
            'timestamp': 1387369800,
            'upload_date': '20131218',
        },
    }

    def _real_extract(self, url):
        # The numeric broadcast id is the only capture group in _VALID_URL.
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')

        webpage = self._download_webpage(url, video_id)

        # The site-relative path to the MP3 sits on the player anchor tag.
        path = self._search_regex(
            r'<a id="player".+?href="([^"]+)"', webpage, 'video url')
        video_url = 'http://www.franceinter.fr/' + path

        title = self._html_search_regex(
            r'<span class="title">(.+?)</span>', webpage, 'title')
        description = self._html_search_regex(
            r'<span class="description">(.*?)</span>',
            webpage, 'description', fatal=False)
        # data-date carries a Unix timestamp; tolerate its absence.
        timestamp = int_or_none(self._search_regex(
            r'data-date="(\d+)"', webpage, 'upload date', fatal=False))

        return {
            'id': video_id,
            'title': title,
            'description': description,
            'timestamp': timestamp,
            'formats': [{
                'url': video_url,
                'vcodec': 'none',  # audio-only stream
            }],
        }
| unlicense |
cparawhore/ProyectoSubastas | site-packages/django/views/generic/list.py | 77 | 7223 | from __future__ import unicode_literals
from django.core.paginator import Paginator, InvalidPage
from django.core.exceptions import ImproperlyConfigured
from django.db.models.query import QuerySet
from django.http import Http404
from django.utils.translation import ugettext as _
from django.views.generic.base import TemplateResponseMixin, ContextMixin, View
class MultipleObjectMixin(ContextMixin):
    """
    A mixin for views manipulating multiple objects.
    """
    allow_empty = True
    queryset = None
    model = None
    paginate_by = None
    paginate_orphans = 0
    context_object_name = None
    paginator_class = Paginator
    page_kwarg = 'page'

    def get_queryset(self):
        """
        Return the list of items for this view.

        The return value must be an iterable and may be an instance of
        `QuerySet` in which case `QuerySet` specific behavior will be enabled.
        """
        if self.queryset is not None:
            queryset = self.queryset
            if isinstance(queryset, QuerySet):
                # .all() returns a fresh queryset so results cached on the
                # class-level attribute are not shared between requests.
                queryset = queryset.all()
        elif self.model is not None:
            queryset = self.model._default_manager.all()
        else:
            raise ImproperlyConfigured(
                "%(cls)s is missing a QuerySet. Define "
                "%(cls)s.model, %(cls)s.queryset, or override "
                "%(cls)s.get_queryset()." % {
                    'cls': self.__class__.__name__
                }
            )
        return queryset

    def paginate_queryset(self, queryset, page_size):
        """
        Paginate the queryset, if needed.

        Returns a 4-tuple: (paginator, page, object_list, has_other_pages).
        Raises Http404 for page numbers that are invalid or out of range.
        """
        paginator = self.get_paginator(
            queryset, page_size, orphans=self.get_paginate_orphans(),
            allow_empty_first_page=self.get_allow_empty())
        page_kwarg = self.page_kwarg
        # The page number may come from the URLconf kwargs or the query
        # string; default to the first page.
        page = self.kwargs.get(page_kwarg) or self.request.GET.get(page_kwarg) or 1
        try:
            page_number = int(page)
        except ValueError:
            if page == 'last':
                # Special-cased token for jumping to the final page.
                page_number = paginator.num_pages
            else:
                raise Http404(_("Page is not 'last', nor can it be converted to an int."))
        try:
            page = paginator.page(page_number)
            return (paginator, page, page.object_list, page.has_other_pages())
        except InvalidPage as e:
            raise Http404(_('Invalid page (%(page_number)s): %(message)s') % {
                'page_number': page_number,
                'message': str(e)
            })

    def get_paginate_by(self, queryset):
        """
        Get the number of items to paginate by, or ``None`` for no pagination.
        """
        return self.paginate_by

    def get_paginator(self, queryset, per_page, orphans=0,
                      allow_empty_first_page=True, **kwargs):
        """
        Return an instance of the paginator for this view.
        """
        return self.paginator_class(
            queryset, per_page, orphans=orphans,
            allow_empty_first_page=allow_empty_first_page, **kwargs)

    def get_paginate_orphans(self):
        """
        Returns the maximum number of orphans extend the last page by when
        paginating.
        """
        return self.paginate_orphans

    def get_allow_empty(self):
        """
        Returns ``True`` if the view should display empty lists, and ``False``
        if a 404 should be raised instead.
        """
        return self.allow_empty

    def get_context_object_name(self, object_list):
        """
        Get the name of the item to be used in the context.
        """
        if self.context_object_name:
            return self.context_object_name
        elif hasattr(object_list, 'model'):
            # Derive e.g. "author_list" from the queryset's model name.
            return '%s_list' % object_list.model._meta.model_name
        else:
            return None

    def get_context_data(self, **kwargs):
        """
        Get the context for this view.
        """
        queryset = kwargs.pop('object_list', self.object_list)
        page_size = self.get_paginate_by(queryset)
        context_object_name = self.get_context_object_name(queryset)
        if page_size:
            paginator, page, queryset, is_paginated = self.paginate_queryset(queryset, page_size)
            context = {
                'paginator': paginator,
                'page_obj': page,
                'is_paginated': is_paginated,
                'object_list': queryset
            }
        else:
            context = {
                'paginator': None,
                'page_obj': None,
                'is_paginated': False,
                'object_list': queryset
            }
        if context_object_name is not None:
            # Expose the list under a model-derived name as well as the
            # generic 'object_list' key.
            context[context_object_name] = queryset
        context.update(kwargs)
        return super(MultipleObjectMixin, self).get_context_data(**context)
class BaseListView(MultipleObjectMixin, View):
    """
    A base view for displaying a list of objects.
    """
    def get(self, request, *args, **kwargs):
        self.object_list = self.get_queryset()
        allow_empty = self.get_allow_empty()

        if not allow_empty:
            # When pagination is enabled and object_list is a queryset,
            # it's better to do a cheap query than to load the unpaginated
            # queryset in memory.
            if (self.get_paginate_by(self.object_list) is not None
                    and hasattr(self.object_list, 'exists')):
                is_empty = not self.object_list.exists()
            else:
                is_empty = len(self.object_list) == 0
            if is_empty:
                raise Http404(_("Empty list and '%(class_name)s.allow_empty' is False.")
                        % {'class_name': self.__class__.__name__})
        context = self.get_context_data()
        return self.render_to_response(context)
class MultipleObjectTemplateResponseMixin(TemplateResponseMixin):
    """
    Mixin for responding with a template and list of objects.
    """
    template_name_suffix = '_list'

    def get_template_names(self):
        """
        Return a list of template names to be used for the request. Must return
        a list. May not be called if render_to_response is overridden.
        """
        try:
            names = super(MultipleObjectTemplateResponseMixin, self).get_template_names()
        except ImproperlyConfigured:
            # If template_name isn't specified, it's not a problem --
            # we just start with an empty list.
            names = []

        # If the list is a queryset, we'll invent a template name based on the
        # app and model name. This name gets put at the end of the template
        # name list so that user-supplied names override the automatically-
        # generated ones.
        if hasattr(self.object_list, 'model'):
            opts = self.object_list.model._meta
            names.append("%s/%s%s.html" % (opts.app_label, opts.model_name, self.template_name_suffix))

        return names
class ListView(MultipleObjectTemplateResponseMixin, BaseListView):
    """
    Render some list of objects, set by `self.model` or `self.queryset`.
    `self.queryset` can actually be any iterable of items, not just a queryset.
    """
    # All behavior is composed from the two base classes.
| mit |
Ali-aqrabawi/ezclinic | lib/django/contrib/gis/db/backends/spatialite/models.py | 510 | 2946 | """
The GeometryColumns and SpatialRefSys models for the SpatiaLite backend.
"""
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
from django.contrib.gis.db.backends.spatialite.base import DatabaseWrapper
from django.db import connection, models
from django.db.backends.signals import connection_created
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class SpatialiteGeometryColumns(models.Model):
    """
    The 'geometry_columns' table from SpatiaLite.
    """
    f_table_name = models.CharField(max_length=256)
    f_geometry_column = models.CharField(max_length=256)
    coord_dimension = models.IntegerField()
    srid = models.IntegerField(primary_key=True)
    spatial_index_enabled = models.IntegerField()

    class Meta:
        app_label = 'gis'
        db_table = 'geometry_columns'
        managed = False  # the table is created and owned by SpatiaLite itself

    @classmethod
    def table_name_col(cls):
        """
        Returns the name of the metadata column used to store the feature table
        name.
        """
        return 'f_table_name'

    @classmethod
    def geom_col_name(cls):
        """
        Returns the name of the metadata column used to store the feature
        geometry column.
        """
        return 'f_geometry_column'

    def __str__(self):
        # NOTE: self.type is attached dynamically by
        # add_spatial_version_related_fields() further down in this module.
        return "%s.%s - %dD %s field (SRID: %d)" % \
            (self.f_table_name, self.f_geometry_column,
             self.coord_dimension, self.type, self.srid)
class SpatialiteSpatialRefSys(models.Model, SpatialRefSysMixin):
    """
    The 'spatial_ref_sys' table from SpatiaLite.
    """
    srid = models.IntegerField(primary_key=True)
    auth_name = models.CharField(max_length=256)
    auth_srid = models.IntegerField()
    ref_sys_name = models.CharField(max_length=256)
    proj4text = models.CharField(max_length=2048)

    @property
    def wkt(self):
        # 'srtext' is only added dynamically for SpatiaLite >= 4 (see
        # add_spatial_version_related_fields below); otherwise derive the
        # WKT from the proj4 definition.
        if hasattr(self, 'srtext'):
            return self.srtext
        from django.contrib.gis.gdal import SpatialReference
        return SpatialReference(self.proj4text).wkt

    class Meta:
        app_label = 'gis'
        db_table = 'spatial_ref_sys'
        managed = False
def add_spatial_version_related_fields(sender, **kwargs):
    """
    Adds fields after establishing a database connection to prevent database
    operations at compile time.
    """
    # disconnect() returns True only for the first fired connection, so the
    # version-dependent fields are added exactly once per process.
    if connection_created.disconnect(add_spatial_version_related_fields, sender=DatabaseWrapper):
        spatial_version = connection.ops.spatial_version[0]
        if spatial_version >= 4:
            # SpatiaLite 4+ stores WKT srtext and an integer geometry type.
            SpatialiteSpatialRefSys.add_to_class('srtext', models.CharField(max_length=2048))
            SpatialiteGeometryColumns.add_to_class('type', models.IntegerField(db_column='geometry_type'))
        else:
            # Older versions store the geometry type as text.
            SpatialiteGeometryColumns.add_to_class('type', models.CharField(max_length=30))


# Defer field setup until the first database connection is made.
connection_created.connect(add_spatial_version_related_fields, sender=DatabaseWrapper)
utkarsh-goswami/erpnext | erpnext/stock/doctype/delivery_note/delivery_note.py | 3 | 16249 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt, cint
from frappe import msgprint, _
import frappe.defaults
from frappe.model.mapper import get_mapped_doc
from erpnext.controllers.selling_controller import SellingController
from frappe.desk.notifications import clear_doctype_notifications
from erpnext.stock.doctype.batch.batch import set_batch_nos
# Custom grid template used to render the child items table in the form view.
form_grid_templates = {
    "items": "templates/form_grid/item_grid.html"
}
class DeliveryNote(SellingController):
    def __init__(self, arg1, arg2=None):
        super(DeliveryNote, self).__init__(arg1, arg2)
        # status_updater drives frappe's generic mechanism that keeps
        # delivered/returned quantities and percent-complete fields on the
        # referenced Sales Orders / Sales Invoices in sync with this note.
        self.status_updater = [{
            'source_dt': 'Delivery Note Item',
            'target_dt': 'Sales Order Item',
            'join_field': 'so_detail',
            'target_field': 'delivered_qty',
            'target_parent_dt': 'Sales Order',
            'target_parent_field': 'per_delivered',
            'target_ref_field': 'qty',
            'source_field': 'qty',
            'percent_join_field': 'against_sales_order',
            'status_field': 'delivery_status',
            'keyword': 'Delivered',
            # Stock-updating Sales Invoices also count towards delivered qty.
            'second_source_dt': 'Sales Invoice Item',
            'second_source_field': 'qty',
            'second_join_field': 'so_detail',
            'overflow_type': 'delivery',
            'second_source_extra_cond': """ and exists(select name from `tabSales Invoice`
            where name=`tabSales Invoice Item`.parent and update_stock = 1)"""
        },
        {
            'source_dt': 'Delivery Note Item',
            'target_dt': 'Sales Invoice Item',
            'join_field': 'si_detail',
            'target_field': 'delivered_qty',
            'target_parent_dt': 'Sales Invoice',
            'target_ref_field': 'qty',
            'source_field': 'qty',
            'percent_join_field': 'against_sales_invoice',
            'overflow_type': 'delivery',
            'no_tolerance': 1
        },
        {
            'source_dt': 'Delivery Note Item',
            'target_dt': 'Sales Order Item',
            'join_field': 'so_detail',
            'target_field': 'returned_qty',
            'target_parent_dt': 'Sales Order',
            # Negative source: a return reduces delivered qty via returned_qty.
            'source_field': '-1 * qty',
            'extra_cond': """ and exists (select name from `tabDelivery Note` where name=`tabDelivery Note Item`.parent and is_return=1)"""
        }]
    def before_print(self):
        """Hide amount-related fields in print when 'print_without_amount' is set."""
        def toggle_print_hide(meta, fieldname):
            df = meta.get_field(fieldname)
            if self.get("print_without_amount"):
                df.set("__print_hide", 1)
            else:
                # Remove the flag so the field prints normally again.
                df.delete_key("__print_hide")

        item_meta = frappe.get_meta("Delivery Note Item")
        # Fields to toggle, grouped by parent doc vs. child items table.
        print_hide_fields = {
            "parent": ["grand_total", "rounded_total", "in_words", "currency", "total", "taxes"],
            "items": ["rate", "amount", "price_list_rate", "discount_percentage"]
        }

        for key, fieldname in print_hide_fields.items():
            for f in fieldname:
                toggle_print_hide(self.meta if key == "parent" else item_meta, f)
    def set_actual_qty(self):
        """Populate each row's actual_qty from the warehouse-wise Bin record."""
        for d in self.get('items'):
            if d.item_code and d.warehouse:
                actual_qty = frappe.db.sql("""select actual_qty from `tabBin`
                    where item_code = %s and warehouse = %s""", (d.item_code, d.warehouse))
                # Default to 0 when no Bin row exists for this item/warehouse.
                d.actual_qty = actual_qty and flt(actual_qty[0][0]) or 0
    def so_required(self):
        """check in manage account if sales order required or not"""
        if frappe.db.get_value("Selling Settings", None, 'so_required') == 'Yes':
            for d in self.get('items'):
                if not d.against_sales_order:
                    frappe.throw(_("Sales Order required for Item {0}").format(d.item_code))
    def validate(self):
        """Document-level validation hook run by frappe before save/submit."""
        self.validate_posting_time()
        super(DeliveryNote, self).validate()
        self.set_status()
        self.so_required()
        self.validate_proj_cust()
        self.check_close_sales_order("against_sales_order")
        self.validate_for_items()
        self.validate_warehouse()
        self.validate_uom_is_integer("stock_uom", "stock_qty")
        self.validate_uom_is_integer("uom", "qty")
        self.validate_with_previous_doc()

        # Auto-assign batch numbers only before submission and never for
        # return documents.
        if self._action != 'submit' and not self.is_return:
            set_batch_nos(self, 'warehouse', True)

        from erpnext.stock.doctype.packed_item.packed_item import make_packing_list
        make_packing_list(self)

        self.update_current_stock()

        if not self.installation_status: self.installation_status = 'Not Installed'
    def validate_with_previous_doc(self):
        """Cross-check key fields against the referenced Sales Order / Sales Invoice."""
        super(DeliveryNote, self).validate_with_previous_doc({
            "Sales Order": {
                "ref_dn_field": "against_sales_order",
                "compare_fields": [["customer", "="], ["company", "="], ["project", "="], ["currency", "="]]
            },
            "Sales Order Item": {
                "ref_dn_field": "so_detail",
                "compare_fields": [["item_code", "="], ["uom", "="], ["conversion_factor", "="]],
                "is_child_table": True,
                "allow_duplicate_prev_row_id": True
            },
            "Sales Invoice": {
                "ref_dn_field": "against_sales_invoice",
                "compare_fields": [["customer", "="], ["company", "="], ["project", "="], ["currency", "="]]
            },
            "Sales Invoice Item": {
                "ref_dn_field": "si_detail",
                "compare_fields": [["item_code", "="], ["uom", "="], ["conversion_factor", "="]],
                "is_child_table": True,
                "allow_duplicate_prev_row_id": True
            },
        })

        # Enforce identical selling rates against the referenced documents
        # when the 'maintain_same_sales_rate' setting is on; returns are exempt.
        if cint(frappe.db.get_single_value('Selling Settings', 'maintain_same_sales_rate')) \
                and not self.is_return:
            self.validate_rate_with_reference_doc([["Sales Order", "against_sales_order", "so_detail"],
                ["Sales Invoice", "against_sales_invoice", "si_detail"]])
    def validate_proj_cust(self):
        """check for does customer belong to same project as entered.."""
        if self.project and self.customer:
            # A project with no customer set is treated as matching any customer.
            res = frappe.db.sql("""select name from `tabProject`
                where name = %s and (customer = %s or
                ifnull(customer,'')='')""", (self.project, self.customer))
            if not res:
                frappe.throw(_("Customer {0} does not belong to project {1}").format(self.customer, self.project))
def validate_for_items(self):
check_list, chk_dupl_itm = [], []
if cint(frappe.db.get_single_value("Selling Settings", "allow_multiple_items")):
return
for d in self.get('items'):
e = [d.item_code, d.description, d.warehouse, d.against_sales_order or d.against_sales_invoice, d.batch_no or '']
f = [d.item_code, d.description, d.against_sales_order or d.against_sales_invoice]
if frappe.db.get_value("Item", d.item_code, "is_stock_item") == 1:
if e in check_list:
msgprint(_("Note: Item {0} entered multiple times").format(d.item_code))
else:
check_list.append(e)
else:
if f in chk_dupl_itm:
msgprint(_("Note: Item {0} entered multiple times").format(d.item_code))
else:
chk_dupl_itm.append(f)
    def validate_warehouse(self):
        """Extend the base warehouse validation: every stock item row on a
        Delivery Note must name a source warehouse."""
        super(DeliveryNote, self).validate_warehouse()

        for d in self.get_item_list():
            if frappe.db.get_value("Item", d['item_code'], "is_stock_item") == 1:
                if not d['warehouse']:
                    frappe.throw(_("Warehouse required for stock Item {0}").format(d["item_code"]))
    def update_current_stock(self):
        """Refresh actual/projected quantities on item rows from Bin.

        Skipped while saving after submit ("update_after_submit") so that
        the figures captured at submission time are not overwritten.
        """
        if self.get("_action") and self._action != "update_after_submit":
            for d in self.get('items'):
                d.actual_qty = frappe.db.get_value("Bin", {"item_code": d.item_code,
                    "warehouse": d.warehouse}, "actual_qty")

            for d in self.get('packed_items'):
                bin_qty = frappe.db.get_value("Bin", {"item_code": d.item_code,
                    "warehouse": d.warehouse}, ["actual_qty", "projected_qty"], as_dict=True)
                if bin_qty:
                    d.actual_qty = flt(bin_qty.actual_qty)
                    d.projected_qty = flt(bin_qty.projected_qty)
    def on_submit(self):
        """Submission hook: validate, sync linked documents, then post
        stock-ledger and GL entries (in that order -- see note below)."""
        self.validate_packed_qty()

        # Check for Approving Authority
        frappe.get_doc('Authorization Control').validate_approving_authority(self.doctype, self.company, self.base_grand_total, self)

        # update delivered qty in sales order
        self.update_prevdoc_status()
        self.update_billing_status()

        # Credit limit applies only to outward deliveries, not returns.
        if not self.is_return:
            self.check_credit_limit()

        # Updating stock ledger should always be called after updating prevdoc status,
        # because updating reserved qty in bin depends upon updated delivered qty in SO
        self.update_stock_ledger()
        self.make_gl_entries()
    def on_cancel(self):
        """Cancellation hook: check downstream documents, roll back linked
        statuses, then reverse stock-ledger and GL entries."""
        self.check_close_sales_order("against_sales_order")
        self.check_next_docstatus()

        self.update_prevdoc_status()
        self.update_billing_status()

        # Updating stock ledger should always be called after updating prevdoc status,
        # because updating reserved qty in bin depends upon updated delivered qty in SO
        self.update_stock_ledger()

        self.cancel_packing_slips()

        self.make_gl_entries_on_cancel()
def check_credit_limit(self):
from erpnext.selling.doctype.customer.customer import check_credit_limit
validate_against_credit_limit = False
for d in self.get("items"):
if not (d.against_sales_order or d.against_sales_invoice):
validate_against_credit_limit = True
break
if validate_against_credit_limit:
check_credit_limit(self.customer, self.company)
def validate_packed_qty(self):
"""
Validate that if packed qty exists, it should be equal to qty
"""
if not any([flt(d.get('packed_qty')) for d in self.get("items")]):
return
has_error = False
for d in self.get("items"):
if flt(d.get('qty')) != flt(d.get('packed_qty')):
frappe.msgprint(_("Packed quantity must equal quantity for Item {0} in row {1}").format(d.item_code, d.idx))
has_error = True
if has_error:
raise frappe.ValidationError
    def check_next_docstatus(self):
        """Block cancellation while a submitted downstream document
        (Sales Invoice or Installation Note) still references this note."""
        submit_rv = frappe.db.sql("""select t1.name
            from `tabSales Invoice` t1,`tabSales Invoice Item` t2
            where t1.name = t2.parent and t2.delivery_note = %s and t1.docstatus = 1""",
            (self.name))
        if submit_rv:
            frappe.throw(_("Sales Invoice {0} has already been submitted").format(submit_rv[0][0]))

        submit_in = frappe.db.sql("""select t1.name
            from `tabInstallation Note` t1, `tabInstallation Note Item` t2
            where t1.name = t2.parent and t2.prevdoc_docname = %s and t1.docstatus = 1""",
            (self.name))
        if submit_in:
            frappe.throw(_("Installation Note {0} has already been submitted").format(submit_in[0][0]))
    def cancel_packing_slips(self):
        """
        Cancel submitted packing slips related to this delivery note
        """
        res = frappe.db.sql("""SELECT name FROM `tabPacking Slip` WHERE delivery_note = %s
            AND docstatus = 1""", self.name)

        if res:
            # Each result row is (name,); cancel the linked Packing Slip doc.
            for r in res:
                ps = frappe.get_doc('Packing Slip', r[0])
                ps.cancel()
            frappe.msgprint(_("Packing Slip(s) cancelled"))
    def update_status(self, status):
        """Set a new workflow status, push a realtime update to open views,
        and clear any pending notifications for this document."""
        self.set_status(update=True, status=status)
        self.notify_update()
        clear_doctype_notifications(self)
    def update_billing_status(self, update_modified=True):
        """Recompute per-row billed amounts and the billing percentage.

        Rows invoiced directly (si_detail set, no so_detail) take their own
        amount.  Rows linked to a Sales Order share the SO row's invoiced
        amount across all delivery notes that delivered it (see
        update_billed_amount_based_on_so), so sibling delivery notes may
        need their billing percentage refreshed as well.
        """
        updated_delivery_notes = [self.name]
        for d in self.get("items"):
            if d.si_detail and not d.so_detail:
                d.db_set('billed_amt', d.amount, update_modified=update_modified)
            elif d.so_detail:
                updated_delivery_notes += update_billed_amount_based_on_so(d.so_detail, update_modified)

        for dn in set(updated_delivery_notes):
            dn_doc = self if (dn == self.name) else frappe.get_doc("Delivery Note", dn)
            dn_doc.update_billing_percentage(update_modified=update_modified)

        # Reload: db_set above bypassed the in-memory document.
        self.load_from_db()
def update_billed_amount_based_on_so(so_detail, update_modified=True):
    """Distribute invoiced amounts for one Sales Order Item row across the
    Delivery Note rows that delivered it, persisting each row's billed_amt.

    Amounts invoiced directly against a Delivery Note row stay on that row;
    amounts invoiced directly against the Sales Order are spread over the
    remaining under-billed DN rows in posting-date order (FIFO).

    Returns the list of Delivery Note names whose rows were updated.
    """
    # Billed against Sales Order directly
    billed_against_so = frappe.db.sql("""select sum(amount) from `tabSales Invoice Item`
        where so_detail=%s and (dn_detail is null or dn_detail = '') and docstatus=1""", so_detail)
    billed_against_so = billed_against_so and billed_against_so[0][0] or 0

    # Get all Delivery Note Item rows against the Sales Order Item row
    dn_details = frappe.db.sql("""select dn_item.name, dn_item.amount, dn_item.si_detail, dn_item.parent
        from `tabDelivery Note Item` dn_item, `tabDelivery Note` dn
        where dn.name=dn_item.parent and dn_item.so_detail=%s
            and dn.docstatus=1 and dn.is_return = 0
        order by dn.posting_date asc, dn.posting_time asc, dn.name asc""", so_detail, as_dict=1)

    updated_dn = []
    for dnd in dn_details:
        billed_amt_agianst_dn = 0

        # If delivered against Sales Invoice
        if dnd.si_detail:
            # Row was created from an invoice: it is billed in full, and that
            # amount no longer counts toward the SO-level pool.
            billed_amt_agianst_dn = flt(dnd.amount)
            billed_against_so -= billed_amt_agianst_dn
        else:
            # Get billed amount directly against Delivery Note
            billed_amt_agianst_dn = frappe.db.sql("""select sum(amount) from `tabSales Invoice Item`
                where dn_detail=%s and docstatus=1""", dnd.name)
            billed_amt_agianst_dn = billed_amt_agianst_dn and billed_amt_agianst_dn[0][0] or 0

        # Distribute billed amount directly against SO between DNs based on FIFO
        if billed_against_so and billed_amt_agianst_dn < dnd.amount:
            pending_to_bill = flt(dnd.amount) - billed_amt_agianst_dn
            if pending_to_bill <= billed_against_so:
                # Pool covers this row fully.
                billed_amt_agianst_dn += pending_to_bill
                billed_against_so -= pending_to_bill
            else:
                # Pool exhausted: give this row whatever remains.
                billed_amt_agianst_dn += billed_against_so
                billed_against_so = 0

        frappe.db.set_value("Delivery Note Item", dnd.name, "billed_amt", billed_amt_agianst_dn, update_modified=update_modified)

        updated_dn.append(dnd.parent)

    return updated_dn
def get_list_context(context=None):
    """Website list-view settings for the customer-portal 'Shipments' page."""
    from erpnext.controllers.website_list_for_contact import get_list_context
    portal_context = get_list_context(context)
    portal_context.update(
        show_sidebar=True,
        show_search=True,
        no_breadcrumbs=True,
        title=_('Shipments'),
    )
    return portal_context
def get_invoiced_qty_map(delivery_note):
    """returns a map: {dn_detail: invoiced_qty}"""
    invoiced_qty_map = {}

    rows = frappe.db.sql("""select dn_detail, qty from `tabSales Invoice Item`
        where delivery_note=%s and docstatus=1""", delivery_note)
    for dn_detail, qty in rows:
        # Several invoice rows may bill the same DN row; accumulate them.
        invoiced_qty_map[dn_detail] = invoiced_qty_map.get(dn_detail, 0) + qty

    return invoiced_qty_map
@frappe.whitelist()
def make_sales_invoice(source_name, target_doc=None):
    """Map a submitted Delivery Note into a draft Sales Invoice.

    Rows that are already fully invoiced are filtered out; partially
    invoiced rows carry only their remaining quantity.
    """
    # Quantities already invoiced, keyed by Delivery Note Item row name.
    invoiced_qty_map = get_invoiced_qty_map(source_name)

    def update_accounts(source, target):
        # Whole-document postprocess for the mapped invoice.
        target.is_pos = 0
        target.ignore_pricing_rule = 1
        target.run_method("set_missing_values")

        if len(target.get("items")) == 0:
            frappe.throw(_("All these items have already been invoiced"))

        target.run_method("calculate_taxes_and_totals")

    def update_item(source_doc, target_doc, source_parent):
        # Invoice only the quantity not yet billed for this row.
        target_doc.qty = source_doc.qty - invoiced_qty_map.get(source_doc.name, 0)

    doc = get_mapped_doc("Delivery Note", source_name, {
        "Delivery Note": {
            "doctype": "Sales Invoice",
            "validation": {
                # Only submitted delivery notes may be invoiced.
                "docstatus": ["=", 1]
            }
        },
        "Delivery Note Item": {
            "doctype": "Sales Invoice Item",
            "field_map": {
                "name": "dn_detail",
                "parent": "delivery_note",
                "so_detail": "so_detail",
                "against_sales_order": "sales_order",
                "serial_no": "serial_no",
                "cost_center": "cost_center"
            },
            "postprocess": update_item,
            # Skip rows with no remaining (un-invoiced) quantity.
            "filter": lambda d: abs(d.qty) - abs(invoiced_qty_map.get(d.name, 0))<=0
        },
        "Sales Taxes and Charges": {
            "doctype": "Sales Taxes and Charges",
            "add_if_empty": True
        },
        "Sales Team": {
            "doctype": "Sales Team",
            "field_map": {
                "incentives": "incentives"
            },
            "add_if_empty": True
        }
    }, target_doc, update_accounts)

    return doc
@frappe.whitelist()
def make_installation_note(source_name, target_doc=None):
    """Map a submitted Delivery Note into a draft Installation Note.

    Only rows with quantity still to install are carried over, each with
    its remaining (qty - installed_qty) quantity.
    """
    def update_item(obj, target, source_parent):
        # Install only what has not been installed yet.
        target.qty = flt(obj.qty) - flt(obj.installed_qty)
        target.serial_no = obj.serial_no

    doclist = get_mapped_doc("Delivery Note", source_name, {
        "Delivery Note": {
            "doctype": "Installation Note",
            "validation": {
                # Only submitted delivery notes can be installed against.
                "docstatus": ["=", 1]
            }
        },
        "Delivery Note Item": {
            "doctype": "Installation Note Item",
            "field_map": {
                "name": "prevdoc_detail_docname",
                "parent": "prevdoc_docname",
                "parenttype": "prevdoc_doctype",
            },
            "postprocess": update_item,
            # Drop rows that are already fully installed.
            "condition": lambda doc: doc.installed_qty < doc.qty
        }
    }, target_doc)

    return doclist
@frappe.whitelist()
def make_packing_slip(source_name, target_doc=None):
    """Map a draft Delivery Note into a new Packing Slip.

    Unlike the invoice/installation mappers this requires docstatus 0:
    packing happens before the delivery note is submitted.
    """
    doclist = get_mapped_doc("Delivery Note", source_name, {
        "Delivery Note": {
            "doctype": "Packing Slip",
            "field_map": {
                "name": "delivery_note",
                "letter_head": "letter_head"
            },
            "validation": {
                "docstatus": ["=", 0]
            }
        }
    }, target_doc)

    return doclist
@frappe.whitelist()
def make_sales_return(source_name, target_doc=None):
    """Create a return (negative-qty) Delivery Note against ``source_name``."""
    from erpnext.controllers.sales_and_purchase_return import make_return_doc
    return make_return_doc("Delivery Note", source_name, target_doc)
@frappe.whitelist()
def update_delivery_note_status(docname, status):
    """Whitelisted endpoint: set the workflow status of a Delivery Note."""
    dn = frappe.get_doc("Delivery Note", docname)
    dn.update_status(status)
| gpl-3.0 |
insop/sched-deadline2 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Core.py | 11088 | 3246 | # Core.py - Python extension for perf script, core functions
#
# Copyright (C) 2010 by Tom Zanussi <tzanussi@gmail.com>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
from collections import defaultdict
def autodict():
    """Return a dict that creates missing keys on the fly, recursively.

    Accessing a missing key yields another autodict, so arbitrarily deep
    paths like d[event][field]['values'][v] can be assigned without setup.
    """
    return defaultdict(autodict)

flag_fields = autodict()        # per-event, per-field flag definitions
symbolic_fields = autodict()    # per-event, per-field symbolic value definitions
def define_flag_field(event_name, field_name, delim):
    """Register the delimiter used when joining flag names for a field."""
    flag_fields[event_name][field_name]['delim'] = delim
def define_flag_value(event_name, field_name, value, field_str):
    """Register the human-readable name for one flag bit of a field."""
    flag_fields[event_name][field_name]['values'][value] = field_str
def define_symbolic_field(event_name, field_name):
    # nothing to do, really -- symbolic fields need no per-field metadata,
    # only per-value names (see define_symbolic_value)
    pass
def define_symbolic_value(event_name, field_name, value, field_str):
    """Register the human-readable name for one symbolic value of a field."""
    symbolic_fields[event_name][field_name]['values'][value] = field_str
def flag_str(event_name, field_name, value):
    """Decode bitmask ``value`` using the flag names registered for
    event_name/field_name, joined by the field's delimiter.

    A zero value maps to the name registered for 0, if any.  Matched bits
    are cleared from ``value`` so overlapping masks are not reported twice.
    """
    string = ""

    if flag_fields[event_name][field_name]:
        print_delim = 0
        # sorted() instead of list.sort(): dict.keys() returns a view on
        # Python 3, which has no .sort() method; sorted() works on both 2 and 3.
        keys = sorted(flag_fields[event_name][field_name]['values'].keys())
        for idx in keys:
            if not value and not idx:
                string += flag_fields[event_name][field_name]['values'][idx]
                break
            if idx and (value & idx) == idx:
                if print_delim and flag_fields[event_name][field_name]['delim']:
                    string += " " + flag_fields[event_name][field_name]['delim'] + " "
                string += flag_fields[event_name][field_name]['values'][idx]
                print_delim = 1
                value &= ~idx

    return string
def symbol_str(event_name, field_name, value):
    """Return the registered name for symbolic ``value`` of a field, or ""
    if no name was registered for it."""
    string = ""

    if symbolic_fields[event_name][field_name]:
        # sorted() instead of list.sort(): dict.keys() returns a view on
        # Python 3, which has no .sort() method; sorted() works on both 2 and 3.
        keys = sorted(symbolic_fields[event_name][field_name]['values'].keys())
        for idx in keys:
            if not value and not idx:
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break
            if (value == idx):
                string = symbolic_fields[event_name][field_name]['values'][idx]
                break

    return string
# Trace flag bits and their human-readable names.
trace_flags = {
    0x00: "NONE",
    0x01: "IRQS_OFF",
    0x02: "IRQS_NOSUPPORT",
    0x04: "NEED_RESCHED",
    0x08: "HARDIRQ",
    0x10: "SOFTIRQ",
}

def trace_flag_str(value):
    """Render a trace-flags bitmask as 'NAME | NAME | ...' ('NONE' for 0)."""
    names = []
    for bit, name in trace_flags.items():
        if not value and not bit:
            names.append(name)
            break
        if bit and (value & bit) == bit:
            names.append(name)
            value &= ~bit
    return " | ".join(names)
def taskState(state):
    """Map a scheduler task-state number to its single-letter code."""
    states = {
        0: "R",      # running
        1: "S",      # interruptible sleep
        2: "D",      # uninterruptible sleep
        64: "DEAD",
    }
    return states.get(state, "Unknown")
class EventHeaders:
    """Common fields shared by every perf trace event."""

    def __init__(self, common_cpu, common_secs, common_nsecs,
                 common_pid, common_comm):
        self.cpu = common_cpu
        self.secs = common_secs
        self.nsecs = common_nsecs
        self.pid = common_pid
        self.comm = common_comm

    def ts(self):
        """Full event timestamp in nanoseconds."""
        return self.secs * (10 ** 9) + self.nsecs

    def ts_format(self):
        """Timestamp formatted as 'seconds.microseconds'."""
        return "%d.%d" % (self.secs, int(self.nsecs / 1000))
| gpl-2.0 |
massakam/pulsar | dashboard/django/stats/templatetags/stats_extras.py | 4 | 2075 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from django import template
from django.utils import formats
from django.contrib.humanize.templatetags.humanize import intcomma
register = template.Library()
# Binary size thresholds (IEC powers of two).
KB = 1 << 10
MB = 1 << 20
GB = 1 << 30
TB = 1 << 40
PB = 1 << 50

def fmt(x):
    """Format x rounded to one decimal place using Django locale rules."""
    return str(formats.number_format(round(x, 1), 1))
@register.filter(name='file_size_value')
def file_size_value(bytes_):
    """Scale a byte count to the largest fitting unit; pair with
    file_size_unit for the matching label."""
    size = float(bytes_)
    if size < KB:
        return str(size)
    for ceiling, divisor in ((MB, KB), (GB, MB), (TB, GB), (PB, TB)):
        if size < ceiling:
            return fmt(size / divisor)
    return fmt(size / PB)
@register.filter(name='file_size_unit')
def file_size_unit(bytes_):
    """Unit label matching the scaling done by file_size_value."""
    for ceiling, unit in ((KB, 'bytes'), (MB, 'KB'), (GB, 'MB'),
                          (TB, 'GB'), (PB, 'TB')):
        if bytes_ < ceiling:
            return unit
    return 'PB'
@register.filter(name='mbps')
def mbps(bytes_per_seconds):
    """Convert a bytes/second rate to megabits/second (0.0 for falsy input)."""
    if not bytes_per_seconds:
        return 0.0
    return float(bytes_per_seconds) * 8 / 1024 / 1024
@register.filter(name='safe_intcomma')
def safe_intcomma(n):
    """intcomma that tolerates None/0 (returns 0 instead of raising)."""
    if not n: return 0
    else: return intcomma(n)
@register.filter(name='times')
def times(number):
    """The sequence 1..number inclusive, for template for-loops."""
    return range(1, number + 1)
indictranstech/phr-frappe | frappe/website/doctype/web_form/web_form.py | 16 | 3187 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe, json
from frappe.website.website_generator import WebsiteGenerator
from frappe import _
from frappe.utils.file_manager import save_file, remove_file_by_url
class WebForm(WebsiteGenerator):
    # WebsiteGenerator configuration: rendering template, the field that
    # gates publishing, the field used as page title, and no page caching.
    template = "templates/generators/web_form.html"
    condition_field = "published"
    page_title_field = "title"
    no_cache = 1

    def get_context(self, context):
        """Build the template context for rendering this web form.

        For a logged-in user on an editable form: list all of the user's
        existing documents (allow_multiple) or point at their single
        document so the form opens in edit mode.
        """
        context.params = frappe.form_dict

        if self.login_required and frappe.session.user != "Guest":
            if self.allow_edit:
                if self.allow_multiple:
                    meta = frappe.get_meta(self.doc_type)
                    # The user's own drafts, newest first, titled by the
                    # doctype's title field (falling back to name).
                    context.items = frappe.db.sql("""select name,
                        {0} as title, creation
                        from `tab{1}`
                        where owner=%s and docstatus = 0
                        order by creation desc""".format(meta.title_field or "name",
                            self.doc_type), frappe.session.user, as_dict=True)
                else:
                    name = frappe.db.get_value(self.doc_type, {"owner": frappe.session.user},
                        "name")
                    if name:
                        context.doc_name = name

        if frappe.form_dict.name:
            context.doc = frappe.get_doc(self.doc_type, frappe.form_dict.name)

        # Field types drive per-field widget rendering in the template.
        context.types = [f.fieldtype for f in self.web_form_fields]

        return context

    def get_parents(self, context):
        """Breadcrumb trail, stored on the document as a JSON list."""
        if self.breadcrumbs:
            return json.loads(self.breadcrumbs)
@frappe.whitelist(allow_guest=True)
def accept():
    """Web-form submission endpoint: insert or update a document of the
    form's doctype from the posted values.

    File-attachment fields arrive as JSON payloads carrying the
    ``__file_attachment`` marker; they are collected first and saved only
    after the document exists, replacing any previously attached file.
    """
    args = frappe.form_dict
    files = []

    web_form = frappe.get_doc("Web Form", args.web_form)
    # Reject posts that name a doctype other than the form's configured one.
    if args.doctype != web_form.doc_type:
        frappe.throw(_("Invalid Request"))

    if args.name:
        # update
        doc = frappe.get_doc(args.doctype, args.name)
    else:
        # insert
        doc = frappe.new_doc(args.doctype)

    # set values
    for fieldname, value in args.iteritems():
        if fieldname not in ("web_form", "cmd", "owner"):
            if value and value.startswith("{"):
                # Likely a JSON-encoded file attachment; defer saving it
                # until the document exists.
                try:
                    filedata = json.loads(value)
                    if "__file_attachment" in filedata:
                        files.append((fieldname, filedata))
                        continue

                except ValueError:
                    # Not JSON after all -- treat as a plain field value.
                    pass

            doc.set(fieldname, value)

    if args.name:
        if doc.owner==frappe.session.user:
            # Owners may always edit their own document.
            doc.save(ignore_permissions=True)
        else:
            # only if permissions are present
            doc.save()
    else:
        # insert
        if web_form.login_required and frappe.session.user=="Guest":
            frappe.throw(_("You must login to submit this form"))

        doc.insert(ignore_permissions = True)

    # add files
    if files:
        for f in files:
            fieldname, filedata = f

            # remove earlier attached file (if exists)
            if doc.get(fieldname):
                remove_file_by_url(doc.get(fieldname), doc.doctype, doc.name)

            # save new file
            filedoc = save_file(filedata["filename"], filedata["dataurl"],
                doc.doctype, doc.name, decode=True)

            # update values
            doc.set(fieldname, filedoc.file_url)

        doc.save()
@frappe.whitelist()
def delete(web_form, name):
    """Delete document ``name`` of the web form's doctype.

    Allowed only when the logged-in user owns the document and the Web
    Form has deletion enabled.

    Raises:
        frappe.PermissionError: owner mismatch or deletion not permitted.
    """
    web_form = frappe.get_doc("Web Form", web_form)

    owner = frappe.db.get_value(web_form.doc_type, name, "owner")
    if frappe.session.user == owner and web_form.allow_delete:
        frappe.delete_doc(web_form.doc_type, name, ignore_permissions=True)
    else:
        # Instantiate the exception instead of the Python 2-only
        # ``raise Exc, "msg"`` statement form, so the module also parses
        # under Python 3 (the call form is valid on both versions).
        raise frappe.PermissionError("Not Allowed")
| mit |
barnsnake351/nova | nova/tests/unit/scheduler/filters/test_exact_core_filter.py | 41 | 1954 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.scheduler.filters import exact_core_filter
from nova import test
from nova.tests.unit.scheduler import fakes
class TestExactCoreFilter(test.NoDBTestCase):
    """Unit tests for ExactCoreFilter.host_passes."""

    def setUp(self):
        super(TestExactCoreFilter, self).setUp()
        self.filt_cls = exact_core_filter.ExactCoreFilter()

    def _get_host(self, host_attributes):
        return fakes.FakeHostState('host1', 'node1', host_attributes)

    def test_exact_core_filter_passes(self):
        # 3 total - 2 used == exactly the 1 vcpu requested.
        props = {'instance_type': {'vcpus': 1}}
        host = self._get_host({'vcpus_total': 3, 'vcpus_used': 2})
        self.assertTrue(self.filt_cls.host_passes(host, props))

    def test_exact_core_filter_fails(self):
        # Only 1 vcpu free but 2 requested.
        props = {'instance_type': {'vcpus': 2}}
        host = self._get_host({'vcpus_total': 3, 'vcpus_used': 2})
        self.assertFalse(self.filt_cls.host_passes(host, props))

    def test_exact_core_filter_passes_no_instance_type(self):
        # Without an instance_type there is nothing to check.
        host = self._get_host({'vcpus_total': 3, 'vcpus_used': 2})
        self.assertTrue(self.filt_cls.host_passes(host, {}))

    def test_exact_core_filter_fails_host_vcpus_not_set(self):
        # Hosts that never reported vcpu data must be rejected.
        props = {'instance_type': {'vcpus': 1}}
        self.assertFalse(self.filt_cls.host_passes(self._get_host({}), props))
| apache-2.0 |
snnn/tensorflow | tensorflow/python/autograph/pyct/templates.py | 5 | 10477 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""AST conversion templates.
Adapted from Tangent.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ast
import textwrap
import gast
from tensorflow.python.autograph.pyct import anno
from tensorflow.python.autograph.pyct import ast_util
from tensorflow.python.autograph.pyct import parser
from tensorflow.python.autograph.pyct import qual_names
class ReplaceTransformer(gast.NodeTransformer):
  """Replace AST nodes.

  Placeholders may appear as plain names, attribute names, keyword-argument
  names or function names; each visit_* method below handles one of those
  positions.
  """

  def __init__(self, replacements):
    """Create a new ReplaceTransformer.

    Args:
      replacements: A mapping from placeholder names to (lists of) AST nodes
          that these placeholders will be replaced by.
    """
    self.replacements = replacements
    self.in_replacements = False

    # Annotations copied onto the replacement nodes; all other annotations
    # are stripped when the replacement is cloned.
    self.preserved_annos = {
        anno.Basic.ORIGIN,
        anno.Basic.SKIP_PROCESSING,
        anno.Static.ORIG_DEFINITIONS,
    }

  def _prepare_replacement(self, replaced, key):
    """Prepares a replacement AST that's safe to swap in for a node.

    Args:
      replaced: ast.AST, the node being replaced
      key: Hashable, the key of the replacement AST
    Returns:
      ast.AST, the replacement AST
    """
    repl = self.replacements[key]

    # Deep-copy so the same template value can be spliced in more than once.
    new_nodes = ast_util.copy_clean(repl, preserve_annos=self.preserved_annos)
    if isinstance(new_nodes, gast.AST):
      new_nodes = [new_nodes]

    return new_nodes

  def visit_Expr(self, node):
    # When replacing a placeholder with an entire statement, the replacement
    # must stand on its own and not be wrapped in an Expr.
    new_value = self.visit(node.value)
    if new_value is node.value:
      return node
    return new_value

  def visit_keyword(self, node):
    # Placeholder as a keyword-argument name: the replacement must itself be
    # a keyword (or a non-empty list of keywords).
    if node.arg not in self.replacements:
      return self.generic_visit(node)

    repl = self._prepare_replacement(node, node.arg)
    if isinstance(repl, gast.keyword):
      return repl
    elif (repl and isinstance(repl, (list, tuple)) and
          all(isinstance(r, gast.keyword) for r in repl)):
      return repl
    # TODO(mdan): We may allow replacing with a string as well.
    # For example, if one wanted to replace foo with bar in foo=baz, then
    # we could allow changing just node arg, so that we end up with bar=baz.
    raise ValueError(
        'a keyword argument may only be replaced by another keyword or a '
        'non-empty list of keywords. Found: %s' % repl)

  def visit_FunctionDef(self, node):
    # Placeholder as a function name: only a Name node may replace it.
    node = self.generic_visit(node)
    if node.name not in self.replacements:
      return node

    repl = self.replacements[node.name]
    if not isinstance(repl, (gast.Name, ast.Name)):
      raise ValueError(
          'a function name can only be replaced by a Name node. Found: %s' %
          repl)
    node.name = repl.id
    return node

  def _check_has_context(self, node):
    if not node.ctx:
      raise ValueError('node %s is missing ctx value' % node)

  # TODO(mdan): Rewrite _check and _set using a separate transformer.
  def _check_inner_children_have_context(self, node):
    # Recursively asserts that every descendant that carries a ctx slot has
    # it populated; used for subtrees whose context we do not override.
    if isinstance(node, gast.Attribute):
      self._check_inner_children_have_context(node.value)
      self._check_has_context(node)
    elif isinstance(node, (gast.Tuple, gast.List)):
      for e in node.elts:
        self._check_inner_children_have_context(e)
      self._check_has_context(node)
    elif isinstance(node, gast.Dict):
      for e in node.keys:
        self._check_inner_children_have_context(e)
      for e in node.values:
        self._check_inner_children_have_context(e)
    elif isinstance(node, gast.Index):
      self._check_inner_children_have_context(node.value)
    elif isinstance(node, gast.Subscript):
      self._check_inner_children_have_context(node.value)
      self._check_inner_children_have_context(node.slice)
    elif isinstance(node, gast.Slice):
      self._check_inner_children_have_context(node.lower)
      if node.upper:
        self._check_inner_children_have_context(node.upper)
      if node.step:
        self._check_inner_children_have_context(node.step)
    elif isinstance(node, gast.BinOp):
      self._check_inner_children_have_context(node.left)
      self._check_inner_children_have_context(node.right)
    elif isinstance(node, gast.UnaryOp):
      self._check_inner_children_have_context(node.operand)
    elif isinstance(node, gast.Name):
      self._check_has_context(node)
    elif isinstance(node, (gast.Str, gast.Num)):
      pass
    else:
      raise ValueError('unexpected node type "%s"' % node)

  def _set_inner_child_context(self, node, ctx):
    # Recursively assigns ctx to node and the descendants that should share
    # it (e.g. every element of an assignment-target tuple).
    if isinstance(node, gast.Attribute):
      # Only the outermost attribute takes ctx; its value is always a Load.
      self._set_inner_child_context(node.value, gast.Load())
      node.ctx = ctx
    elif isinstance(node, (gast.Tuple, gast.List)):
      for e in node.elts:
        self._set_inner_child_context(e, ctx)
      node.ctx = ctx
    elif isinstance(node, gast.Name):
      node.ctx = ctx
    elif isinstance(node, gast.Call):
      self._set_inner_child_context(node.func, ctx)
      # We may be able to override these to Load(), but for now it's simpler
      # to just assert that they're set.
      for a in node.args:
        self._check_inner_children_have_context(a)
      for k in node.keywords:
        self._check_inner_children_have_context(k.value)
    elif isinstance(node, gast.Dict):
      # We may be able to override these to Load(), but for now it's simpler
      # to just assert that they're set.
      for e in node.keys:
        self._check_inner_children_have_context(e)
      for e in node.values:
        self._check_inner_children_have_context(e)
    elif isinstance(node, gast.Subscript):
      self._set_inner_child_context(node.value, ctx)
      self._check_inner_children_have_context(node.slice)
    elif isinstance(node, gast.BinOp):
      self._check_inner_children_have_context(node.left)
      self._check_inner_children_have_context(node.right)
    elif isinstance(node, gast.UnaryOp):
      self._check_inner_children_have_context(node.operand)
    elif isinstance(node, (gast.Str, gast.Num)):
      pass
    else:
      raise ValueError('unexpected node type "%s"' % node)

  def visit_Attribute(self, node):
    # Placeholder as an attribute name: only a Name node may replace it.
    node = self.generic_visit(node)
    if node.attr not in self.replacements:
      return node

    repl = self.replacements[node.attr]
    if not isinstance(repl, gast.Name):
      raise ValueError(
          'An attribute can only be replaced by a Name node. Found: %s' % repl)
    node.attr = repl.id
    return node

  def visit_Name(self, node):
    if node.id not in self.replacements:
      return node

    new_nodes = self._prepare_replacement(node, node.id)

    # Preserve the target context.
    for n in new_nodes:
      if isinstance(n, (gast.Tuple, gast.List)):
        for e in n.elts:
          self._set_inner_child_context(e, node.ctx)
      if isinstance(n, gast.Attribute):
        # For attributes, the inner Name node receives the context, while the
        # outer ones have it set to Load.
        self._set_inner_child_context(n, node.ctx)
      else:
        n.ctx = node.ctx

    if len(new_nodes) == 1:
      new_nodes, = new_nodes

    return new_nodes
return new_nodes
def _convert_to_ast(n):
  """Convert a known data type (str, QN, list or tuple) into AST node(s)."""
  if isinstance(n, str):
    # The node receives its ctx from the template; see
    # ReplaceTransformer.visit_Name.
    return gast.Name(id=n, ctx=None, annotation=None)
  elif isinstance(n, qual_names.QN):
    return n.ast()
  elif isinstance(n, (list, tuple)):
    converted = [_convert_to_ast(e) for e in n]
    return converted if isinstance(n, list) else tuple(converted)
  return n
def replace(template, **replacements):
  """Replaces placeholders in a Python template.

  AST Name and Tuple nodes always receive the context that inferred from
  the template. However, when replacing more complex nodes (that can
  potentially contain Name children), the caller is responsible for setting
  the appropriate context.

  Args:
    template: A string of Python code. Any symbol name appearing in the
        template code can be used as a placeholder.
    **replacements: A mapping from placeholder names to (lists of) AST nodes
        that these placeholders will be replaced by. String values are also
        supported as a shorthand for AST Name nodes with the respective ID.

  Returns:
    An AST node or list of AST nodes with the replacements made. If the
    template was a function, a list will be returned. If the template was a
    node, the same node will be returned. If the template was a string, an
    AST node will be returned (a `Module` node in the case of a multi-line
    string, an `Expr` node otherwise).

  Raises:
    ValueError: if the arguments are incorrect.
  """
  if not isinstance(template, str):
    raise ValueError('Expected string template, got %s' % type(template))

  tree = parser.parse_str(textwrap.dedent(template))
  converted = {k: _convert_to_ast(v) for k, v in replacements.items()}
  results = ReplaceTransformer(converted).visit(tree).body
  if isinstance(results, list):
    return [qual_names.resolve(r) for r in results]
  return qual_names.resolve(results)
def replace_as_expression(template, **replacements):
  """Variant of replace that generates expressions, instead of code blocks."""
  replacement = replace(template, **replacements)
  if len(replacement) != 1:
    raise ValueError(
        'single expression expected; for more general templates use replace')

  node = qual_names.resolve(replacement[0])
  if isinstance(node, gast.Expr):
    return node.value
  if isinstance(node, gast.Name):
    return node
  raise ValueError(
      'the template is expected to generate an expression or a name node;'
      ' instead found %s' % node)
| apache-2.0 |
0ps/wfuzz | src/wfuzz/mixins.py | 1 | 1871 | from .plugin_api.urlutils import parse_url
from .exception import FuzzExceptBadInstall
# python 2 and 3
import sys
if sys.version_info >= (3, 0):
from urllib.parse import urljoin
else:
from urlparse import urljoin
class FuzzRequestSoupMixing(object):
    def get_soup(self):
        """Parse the response body with BeautifulSoup (html.parser backend)."""
        try:
            from bs4 import BeautifulSoup
        except ImportError:
            raise FuzzExceptBadInstall("You need to install beautifulsoup4 first!")

        return BeautifulSoup(self.content, 'html.parser')
class FuzzRequestUrlMixing(object):
    # urlparse functions
    @property
    def urlparse(self):
        return parse_url(self.url)

    @property
    def urlp(self):
        return parse_url(self.url)

    @property
    def pstrip(self):
        return self.to_cache_key()

    @property
    def is_path(self):
        """True when the response suggests the URL is a directory: a 200 or
        401 on a slash-terminated URL, or a 3xx redirecting to one."""
        if self.code == 200 and self.url[-1] == '/':
            return True
        elif self.code >= 300 and self.code < 400:
            if "Location" in self.headers.response and self.headers.response["Location"][-1] == '/':
                return True
        elif self.code == 401:
            if self.url[-1] == '/':
                return True

        return False

    @property
    def recursive_url(self):
        """Base URL (ending in FUZZ) for recursing into a found directory."""
        if self.code >= 300 and self.code < 400 and "Location" in self.headers.response:
            new_url = self.headers.response["Location"]
            if not new_url[-1] == '/':
                new_url += "/"
            # taking into consideration redirections to /xxx/ without full URL
            new_url = urljoin(self.url, new_url)
        elif self.code == 401 or self.code == 200:
            new_url = self.url
            if not new_url[-1] == '/':
                # Bug fix: append the trailing slash instead of replacing the
                # whole URL with "/", which discarded scheme, host and path
                # and produced "/FUZZ" as the recursion target.
                new_url += "/"
        else:
            raise Exception("Error generating recursive url")

        return new_url + "FUZZ"
| gpl-2.0 |
adlnet-archive/edx-platform | common/djangoapps/lang_pref/tests/test_api.py | 42 | 1052 | # -*- coding: utf-8 -*-
""" Tests for the language API. """
from django.test import TestCase
import ddt
from lang_pref import api as language_api
@ddt.ddt
class LanguageApiTest(TestCase):
    """Sanity checks for the language preference API."""

    INVALID_LANGUAGE_CODES = ['', 'foo']

    def test_released_languages(self):
        # At least one language (the default) must always be released.
        self.assertGreaterEqual(len(language_api.released_languages()), 1)

    def test_preferred_language(self):
        lang = language_api.preferred_language('fr')
        self.assertEqual(lang, language_api.Language('fr', u'Français'))

    @ddt.data(*INVALID_LANGUAGE_CODES)
    def test_invalid_preferred_language(self, language_code):
        # Unknown codes fall back to English.
        lang = language_api.preferred_language(language_code)
        self.assertEqual(lang, language_api.Language('en', u'English'))

    def test_no_preferred_language(self):
        # No preference at all also falls back to English.
        lang = language_api.preferred_language(None)
        self.assertEqual(lang, language_api.Language('en', u'English'))
bigzz/autotest | client/shared/distro.py | 2 | 11636 | """
This module provides the client facilities to detect the Linux Distribution
it's running under.
This is a replacement for the get_os_vendor() function from the utils module.
"""
import os
import re
import platform
__all__ = ['LinuxDistro',
'UNKNOWN_DISTRO_NAME',
'UNKNOWN_DISTRO_VERSION',
'UNKNOWN_DISTRO_RELEASE',
'UNKNOWN_DISTRO_ARCH',
'Probe',
'register_probe',
'detect']
# pylint: disable=R0903
class LinuxDistro(object):
'''
Simple collection of information for a Linux Distribution
'''
def __init__(self, name, version, release, arch):
'''
Initializes a new Linux Distro
:param name: a short name that precisely distinguishes this Linux
Distribution among all others.
:type name: str
:param version: the major version of the distribution. Usually this
is a single number that denotes a large development
cycle and support file.
:type version: str
:param release: the release or minor version of the distribution.
Usually this is also a single number, that is often
omitted or starts with a 0 when the major version
is initially release. It's ofter associated with a
shorter development cycle that contains incremental
a collection of improvements and fixes.
:type release: str
:param arch: the main target for this Linux Distribution. It's common
for some architectures to ship with packages for
previous and still compatible architectures, such as it's
the case with Intel/AMD 64 bit architecture that support
32 bit code. In cases like this, this should be set to
the 64 bit architecture name.
:type arch: str
'''
self.name = name
self.version = version
self.release = release
self.arch = arch
def __repr__(self):
return '<LinuxDistro: name=%s, version=%s, release=%s, arch=%s>' % (
self.name, self.version, self.release, self.arch)
# Sentinel values used when a probe cannot determine a given field.
UNKNOWN_DISTRO_NAME = 'unknown'
UNKNOWN_DISTRO_VERSION = 0
UNKNOWN_DISTRO_RELEASE = 0
UNKNOWN_DISTRO_ARCH = 'unknown'


#: The distribution that is used when the exact one could not be found
#: (detect() compares against this object by identity, so it must remain
#: a module-level singleton).
UNKNOWN_DISTRO = LinuxDistro(UNKNOWN_DISTRO_NAME,
                             UNKNOWN_DISTRO_VERSION,
                             UNKNOWN_DISTRO_RELEASE,
                             UNKNOWN_DISTRO_ARCH)
class Probe(object):

    '''
    Probes the machine and does it best to confirm it's the right distro
    '''
    #: Points to a file that can determine if this machine is running a given
    #: Linux Distribution. This serves as a first check that enables the extra
    #: checks to carry on.
    CHECK_FILE = None

    #: Sets the content that should be checked on the file pointed to by
    #: :attr:`CHECK_FILE`. Leave it set to `None` (its default)
    #: to check only if the file exists, and not check its contents
    CHECK_FILE_CONTAINS = None

    #: The name of the Linux Distribution to be returned if the file defined
    #: by :attr:`CHECK_FILE` exists.
    CHECK_FILE_DISTRO_NAME = None

    #: A regular expression that will be run on the file pointed to by
    #: :attr:`CHECK_FILE`
    CHECK_VERSION_REGEX = None

    def __init__(self):
        # Number of successful checks performed by get_distro(); detect()
        # prefers the probe with the highest score.
        self.score = 0

    def check_name_for_file(self):
        '''
        Checks if this class will look for a file and return a distro

        The conditions that must be true include the file that identifies the
        distro file being set (:attr:`CHECK_FILE`) and the name of the
        distro to be returned (:attr:`CHECK_FILE_DISTRO_NAME`)
        '''
        if self.CHECK_FILE is None:
            return False

        if self.CHECK_FILE_DISTRO_NAME is None:
            return False

        return True

    def name_for_file(self):
        '''
        Get the distro name if the :attr:`CHECK_FILE` is set and exists
        '''
        if self.check_name_for_file():
            if os.path.exists(self.CHECK_FILE):
                return self.CHECK_FILE_DISTRO_NAME

    def check_name_for_file_contains(self):
        '''
        Checks if this class will look for text on a file and return a distro

        The conditions that must be true include the file that identifies the
        distro file being set (:attr:`CHECK_FILE`), the text to look for
        inside the distro file (:attr:`CHECK_FILE_CONTAINS`) and the name
        of the distro to be returned (:attr:`CHECK_FILE_DISTRO_NAME`)
        '''
        if self.CHECK_FILE is None:
            return False

        if self.CHECK_FILE_CONTAINS is None:
            return False

        if self.CHECK_FILE_DISTRO_NAME is None:
            return False

        return True

    def name_for_file_contains(self):
        '''
        Get the distro if the :attr:`CHECK_FILE` is set and has content
        '''
        if self.check_name_for_file_contains():
            if os.path.exists(self.CHECK_FILE):
                # Use a context manager so the file handle is always closed
                # (the previous code leaked it on non-refcounting VMs).
                with open(self.CHECK_FILE) as check_file:
                    for line in check_file:
                        if self.CHECK_FILE_CONTAINS in line:
                            return self.CHECK_FILE_DISTRO_NAME

    def check_version(self):
        '''
        Checks if this class will look for a regex in file and return a distro
        '''
        if self.CHECK_FILE is None:
            return False

        if self.CHECK_VERSION_REGEX is None:
            return False

        return True

    def _get_version_match(self):
        '''
        Returns the match result for the version regex on the file content,
        or None when the version check is not configured or the file is gone.
        '''
        if self.check_version():
            if os.path.exists(self.CHECK_FILE):
                with open(self.CHECK_FILE) as version_file:
                    version_file_content = version_file.read()
            else:
                return None
            return self.CHECK_VERSION_REGEX.match(version_file_content)
        return None

    def version(self):
        '''
        Returns the version of the distro
        '''
        version = UNKNOWN_DISTRO_VERSION
        match = self._get_version_match()
        if match is not None:
            # BUGFIX: match.groups() is a tuple, so the previous
            # ``match.groups() > 0`` only "worked" through Python 2's
            # arbitrary cross-type ordering (and raises TypeError on
            # Python 3). Compare the number of capture groups instead.
            if len(match.groups()) > 0:
                version = match.groups()[0]
        return version

    def check_release(self):
        '''
        Checks if this has the conditions met to look for the release number
        '''
        # Pattern.groups is an int: a release requires a 2nd capture group.
        return (self.check_version() and
                self.CHECK_VERSION_REGEX.groups > 1)

    def release(self):
        '''
        Returns the release of the distro
        '''
        release = UNKNOWN_DISTRO_RELEASE
        match = self._get_version_match()
        if match is not None:
            # BUGFIX: same tuple-vs-int comparison as in version().
            if len(match.groups()) > 1:
                release = match.groups()[1]
        return release

    def get_distro(self):
        '''
        Returns the :class:`LinuxDistro` this probe detected
        '''
        name = None
        version = UNKNOWN_DISTRO_VERSION
        release = UNKNOWN_DISTRO_RELEASE
        arch = UNKNOWN_DISTRO_ARCH

        if self.check_name_for_file():
            name = self.name_for_file()
            self.score += 1

        if self.check_name_for_file_contains():
            name = self.name_for_file_contains()
            self.score += 1

        if self.check_version():
            version = self.version()
            self.score += 1

        if self.check_release():
            release = self.release()
            self.score += 1

        # can't think of a better way to do this
        arch = os.uname()[4]

        # name is the first thing that should be identified. If we don't know
        # the distro name, we don't bother checking for versions
        if name is not None:
            distro = LinuxDistro(name, version, release, arch)
        else:
            distro = UNKNOWN_DISTRO

        return distro
class StdLibProbe(Probe):

    '''
    Probe that uses the Python standard library builtin detection

    This Probe has a lower score on purpose, serving as a fallback
    if no explicit (and hopefully more accurate) probe exists.
    '''
    def get_distro(self):
        # Unlike Probe.get_distro(), self.score is never incremented here, so
        # any specialized probe that matched anything outranks this fallback
        # in detect()'s score-based sort.
        # NOTE: platform.dist() was deprecated in Python 3.5 and removed in
        # Python 3.8, so this fallback only functions on older interpreters.
        name = None
        version = UNKNOWN_DISTRO_VERSION
        release = UNKNOWN_DISTRO_RELEASE
        arch = UNKNOWN_DISTRO_ARCH
        # d_codename is unpacked but intentionally unused.
        d_name, d_version_release, d_codename = platform.dist()
        if d_name:
            name = d_name
        # "X.Y" splits into major version and release; a bare "X" is
        # treated as the version with the release left unknown.
        if '.' in d_version_release:
            d_version, d_release = d_version_release.split('.', 1)
            version = d_version
            release = d_release
        else:
            version = d_version_release
        arch = os.uname()[4]
        if name is not None:
            distro = LinuxDistro(name, version, release, arch)
        else:
            distro = UNKNOWN_DISTRO
        return distro
class RedHatProbe(Probe):

    '''
    Probe with version checks for Red Hat Enterprise Linux systems
    '''
    CHECK_FILE = '/etc/redhat-release'
    CHECK_FILE_CONTAINS = 'Red Hat'
    CHECK_FILE_DISTRO_NAME = 'redhat'
    # Two capture groups: group 1 feeds Probe.version(), group 2 feeds
    # Probe.release().
    CHECK_VERSION_REGEX = re.compile(
        r'Red Hat Enterprise Linux Server release (\d{1,2})\.(\d{1,2}).*')
class CentosProbe(RedHatProbe):

    '''
    Probe with version checks for CentOS systems
    '''
    # Inherits RedHatProbe's behavior but overrides every class attribute,
    # so only CentOS-branded release files match.
    CHECK_FILE = '/etc/redhat-release'
    CHECK_FILE_CONTAINS = 'CentOS'
    CHECK_FILE_DISTRO_NAME = 'centos'
    CHECK_VERSION_REGEX = re.compile(r'CentOS release (\d{1,2})\.(\d{1,2}).*')
class FedoraProbe(RedHatProbe):

    '''
    Probe with version checks for Fedora systems
    '''
    CHECK_FILE = '/etc/fedora-release'
    CHECK_FILE_CONTAINS = 'Fedora'
    CHECK_FILE_DISTRO_NAME = 'fedora'
    # Only one capture group, so Probe.check_release() is False and the
    # release field stays at UNKNOWN_DISTRO_RELEASE.
    CHECK_VERSION_REGEX = re.compile(r'Fedora release (\d{1,2}).*')
class DebianProbe(Probe):

    '''
    Simple probe with file checks for Debian systems
    '''
    # BUGFIX: Debian ships its version marker as /etc/debian_version
    # (underscore). The previous '/etc/debian-version' path does not exist
    # on Debian systems, so this probe could never report one.
    CHECK_FILE = '/etc/debian_version'
    CHECK_FILE_DISTRO_NAME = 'debian'
class SuseProbe(Probe):

    '''
    Probe with file and version checks for SUSE Linux Enterprise systems
    '''
    CHECK_FILE = '/etc/SuSE-release'
    CHECK_FILE_DISTRO_NAME = 'sles'
    # group 1 -> the VERSION line, group 2 -> the PATCHLEVEL line
    CHECK_VERSION_REGEX = re.compile(r'SUSE.*\nVERSION = (.*)\nPATCHLEVEL = (.*)')
#: the complete list of probes that have been registered
REGISTERED_PROBES = []


def register_probe(probe_class):
    '''
    Register a probe to be run during autodetection
    '''
    # Guard against double registration so detect() runs each probe once.
    already_registered = probe_class in REGISTERED_PROBES
    if not already_registered:
        REGISTERED_PROBES.append(probe_class)
# Register the built-in probes. Registration order matters only for
# tie-breaking in detect(): with equal scores, the probe registered
# later wins (stable ascending sort, last element taken).
register_probe(RedHatProbe)
register_probe(CentosProbe)
register_probe(FedoraProbe)
register_probe(DebianProbe)
register_probe(StdLibProbe)
register_probe(SuseProbe)
def detect():
    '''
    Attempts to detect the Linux Distribution running on this machine

    :returns: the detected :class:`LinuxDistro` or :data:`UNKNOWN_DISTRO`
    :rtype: :class:`LinuxDistro`
    '''
    candidates = []
    for probe_class in REGISTERED_PROBES:
        probe = probe_class()
        found = probe.get_distro()
        if found is not UNKNOWN_DISTRO:
            candidates.append((found, probe))
    # Highest score wins; on ties the probe registered last is preferred,
    # which the stable ascending sort plus taking the final entry preserves.
    candidates.sort(key=lambda pair: pair[1].score)
    if candidates:
        return candidates[-1][0]
    return UNKNOWN_DISTRO
class Spec(object):

    '''
    Describes a distro, usually for setting minimum distro requirements
    '''
    def __init__(self, name, min_version=None, min_release=None, arch=None):
        # Plain value holder; presumably a None field means "no constraint"
        # for that attribute -- no consumer is visible in this module.
        self.name = name
        self.min_version = min_version
        self.min_release = min_release
        self.arch = arch
| gpl-2.0 |
Bago213/materia | qa/rpc-tests/getblocktemplate_proposals.py | 2 | 5726 | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import MateriaTestFramework
from test_framework.util import *
from binascii import a2b_hex, b2a_hex
from hashlib import sha256
from struct import pack
def b2x(b):
    """Return the hex encoding of *b* as a native (ASCII) string."""
    hex_bytes = b2a_hex(b)
    return hex_bytes.decode('ascii')
# NOTE: This does not work for signed numbers (set the high bit) or zero (use b'\0')
def encodeUNum(n):
    """Encode the positive integer *n* as a script number push: one length
    byte followed by the little-endian payload bytes."""
    payload = bytearray()
    while n > 127:
        payload.append(n % 256)
        n //= 256
    payload.append(n)
    return bytes(bytearray([len(payload)]) + payload)
def varlenEncode(n):
    """Encode *n* in Bitcoin's CompactSize ("varint") format: a single
    byte below 0xfd, otherwise a marker byte followed by a 2/4/8-byte
    little-endian integer."""
    if n < 0xfd:
        return pack('<B', n)
    for marker, fmt, limit in ((b'\xfd', '<H', 0xffff),
                               (b'\xfe', '<L', 0xffffffff)):
        if n <= limit:
            return marker + pack(fmt, n)
    return b'\xff' + pack('<Q', n)
def dblsha(b):
    """Return SHA256(SHA256(b)) -- Bitcoin's double-SHA256 hash."""
    inner = sha256(b).digest()
    return sha256(inner).digest()


def genmrklroot(leaflist):
    """Compute the merkle root of *leaflist*, a list of 32-byte hashes.

    An odd-length level is handled by pairing the last hash with itself.
    BUGFIX: the previous implementation appended that duplicate directly
    to the caller's list on the first level, mutating the argument; we
    now work on a copy so the caller's list is left untouched.
    """
    cur = list(leaflist)
    while len(cur) > 1:
        if len(cur) & 1:
            cur.append(cur[-1])
        nxt = []
        for i in range(0, len(cur), 2):
            nxt.append(dblsha(cur[i] + cur[i + 1]))
        cur = nxt
    return cur[0]
def template_to_bytearray(tmpl, txlist):
    """Serialize a getblocktemplate result plus raw transactions into a
    full block: 80-byte header, CompactSize tx count, then each raw tx."""
    txid_leaves = [dblsha(raw) for raw in txlist]
    header = b''.join([
        pack('<L', tmpl['version']),
        a2b_hex(tmpl['previousblockhash'])[::-1],  # hashes serialize reversed
        genmrklroot(txid_leaves),
        pack('<L', tmpl['curtime']),
        a2b_hex(tmpl['bits'])[::-1],
        b'\0\0\0\0',  # nonce left at zero for proposals
    ])
    body = varlenEncode(len(txlist)) + b''.join(bytes(tx) for tx in txlist)
    return bytearray(header + body)
def template_to_hex(tmpl, txlist):
    """Hex-encoded form of template_to_bytearray(), as getblocktemplate's
    proposal mode expects."""
    raw_block = template_to_bytearray(tmpl, txlist)
    return b2x(raw_block)
def assert_template(node, tmpl, txlist, expect):
    """Submit the assembled block to *node* in proposal mode and require
    that the node answers with *expect* (None means "accepted")."""
    proposal = {'data': template_to_hex(tmpl, txlist), 'mode': 'proposal'}
    rsp = node.getblocktemplate(proposal)
    if rsp != expect:
        raise AssertionError('unexpected: %s' % (rsp,))
class GetBlockTemplateProposalTest(MateriaTestFramework):
    '''
    Test block proposals with getblocktemplate.
    '''

    def __init__(self):
        # Two-node network reusing the cached regtest chain.
        super().__init__()
        self.num_nodes = 2
        self.setup_clean_chain = False

    def setup_network(self):
        self.nodes = self.nodes = self.setup_nodes() if False else self.setup_nodes()
        connect_nodes_bi(self.nodes, 0, 1)

    def run_test(self):
        node = self.nodes[0]
        node.generate(1) # Mine a block to leave initial block download
        tmpl = node.getblocktemplate()
        if 'coinbasetxn' not in tmpl:
            # Build a minimal coinbase by hand: one null-prevout input whose
            # scriptSig pushes the block height plus one filler byte, and a
            # single zero-length-script output carrying the coinbase value.
            rawcoinbase = encodeUNum(tmpl['height'])
            rawcoinbase += b'\x01-'
            hexcoinbase = b2x(rawcoinbase)
            hexoutval = b2x(pack('<Q', tmpl['coinbasevalue']))
            tmpl['coinbasetxn'] = {'data': '01000000' + '01' + '0000000000000000000000000000000000000000000000000000000000000000ffffffff' + ('%02x' % (len(rawcoinbase),)) + hexcoinbase + 'fffffffe' + '01' + hexoutval + '00' + '00000000'}
        # txlist[0] is always the coinbase, followed by the template's txs.
        txlist = list(bytearray(a2b_hex(a['data'])) for a in (tmpl['coinbasetxn'],) + tuple(tmpl['transactions']))

        # Test 0: Capability advertised
        assert('proposal' in tmpl['capabilities'])

        # NOTE: This test currently FAILS (regtest mode doesn't enforce block height in coinbase)
        ## Test 1: Bad height in coinbase
        #txlist[0][4+1+36+1+1] += 1
        #assert_template(node, tmpl, txlist, 'FIXME')
        #txlist[0][4+1+36+1+1] -= 1

        # Test 2: Bad input hash for gen tx
        # Offset 4+1 is the first byte of the coinbase input's previous-output
        # hash (4-byte version + 1-byte input count precede it); each mutation
        # is reverted right after the assertion so later tests start clean.
        txlist[0][4+1] += 1
        assert_template(node, tmpl, txlist, 'bad-cb-missing')
        txlist[0][4+1] -= 1

        # Test 3: Truncated final tx
        lastbyte = txlist[-1].pop()
        assert_raises(JSONRPCException, assert_template, node, tmpl, txlist, 'n/a')
        txlist[-1].append(lastbyte)

        # Test 4: Add an invalid tx to the end (duplicate of gen tx)
        txlist.append(txlist[0])
        assert_template(node, tmpl, txlist, 'bad-txns-duplicate')
        txlist.pop()

        # Test 5: Add an invalid tx to the end (non-duplicate)
        txlist.append(bytearray(txlist[0]))
        txlist[-1][4+1] = 0xff
        assert_template(node, tmpl, txlist, 'bad-txns-inputs-missingorspent')
        txlist.pop()

        # Test 6: Future tx lock time
        # The final four bytes of a serialized transaction are its nLockTime.
        txlist[0][-4:] = b'\xff\xff\xff\xff'
        assert_template(node, tmpl, txlist, 'bad-txns-nonfinal')
        txlist[0][-4:] = b'\0\0\0\0'

        # Test 7: Bad tx count
        txlist.append(b'')
        assert_raises(JSONRPCException, assert_template, node, tmpl, txlist, 'n/a')
        txlist.pop()

        # Test 8: Bad bits
        realbits = tmpl['bits']
        tmpl['bits'] = '1c0000ff' # impossible in the real world
        assert_template(node, tmpl, txlist, 'bad-diffbits')
        tmpl['bits'] = realbits

        # Test 9: Bad merkle root
        # Byte 4+32 lies inside the header's merkle-root field (after the
        # 4-byte version and 32-byte previous-block hash).
        rawtmpl = template_to_bytearray(tmpl, txlist)
        rawtmpl[4+32] = (rawtmpl[4+32] + 1) % 0x100
        rsp = node.getblocktemplate({'data':b2x(rawtmpl),'mode':'proposal'})
        if rsp != 'bad-txnmrklroot':
            raise AssertionError('unexpected: %s' % (rsp,))

        # Test 10: Bad timestamps
        realtime = tmpl['curtime']
        tmpl['curtime'] = 0x7fffffff
        assert_template(node, tmpl, txlist, 'time-too-new')
        tmpl['curtime'] = 0
        assert_template(node, tmpl, txlist, 'time-too-old')
        tmpl['curtime'] = realtime

        # Test 11: Valid block
        assert_template(node, tmpl, txlist, None)

        # Test 12: Orphan block
        tmpl['previousblockhash'] = 'ff00' * 16
        assert_template(node, tmpl, txlist, 'inconclusive-not-best-prevblk')
# Allow running this test directly, outside the rpc-tests.py harness.
if __name__ == '__main__':
    GetBlockTemplateProposalTest().main()
| mit |
okwow123/djangol2 | example/env/lib/python2.7/site-packages/requests/packages/__init__.py | 838 | 1384 | '''
Debian and other distributions "unbundle" requests' vendored dependencies, and
rewrite all imports to use the global versions of ``urllib3`` and ``chardet``.
The problem with this is that not only requests itself imports those
dependencies, but third-party code outside of the distros' control too.
In reaction to these problems, the distro maintainers replaced
``requests.packages`` with a magical "stub module" that imports the correct
modules. The implementations were varying in quality and all had severe
problems. For example, a symlink (or hardlink) that links the correct modules
into place introduces problems regarding object identity, since you now have
two modules in `sys.modules` with the same API, but different identities::
requests.packages.urllib3 is not urllib3
With version ``2.5.2``, requests started to maintain its own stub, so that
distro-specific breakage would be reduced to a minimum, even though the whole
issue is not requests' fault in the first place. See
https://github.com/kennethreitz/requests/pull/2375 for the corresponding pull
request.
'''
from __future__ import absolute_import
import sys
# Prefer the vendored copies; when a distro has unbundled them, fall back
# to the system-wide modules and alias them under requests.packages.* so
# that e.g. ``requests.packages.urllib3 is urllib3`` holds (see the module
# docstring for the history behind this stub).
try:
    from . import urllib3
except ImportError:
    import urllib3
    sys.modules['%s.urllib3' % __name__] = urllib3
try:
    from . import chardet
except ImportError:
    import chardet
    sys.modules['%s.chardet' % __name__] = chardet
| mit |
westinedu/similarinterest | django/core/management/commands/diffsettings.py | 411 | 1296 | from django.core.management.base import NoArgsCommand
def module_to_dict(module, omittable=lambda k: k.startswith('_')):
    "Converts a module namespace to a Python dictionary. Used by get_settings_diff."
    # Values are stored as their repr() so they can be compared and printed
    # without importing anything; names accepted by omittable() are skipped.
    converted = {}
    for name, value in module.__dict__.items():
        if omittable(name):
            continue
        converted[name] = repr(value)
    return converted
class Command(NoArgsCommand):
    help = """Displays differences between the current settings.py and Django's
    default settings. Settings that don't appear in the defaults are
    followed by "###"."""

    # This command only reads settings; no model validation is required.
    requires_model_validation = False

    def handle_noargs(self, **options):
        """Return one "NAME = repr(value)" line per setting that differs
        from Django's global defaults, sorted by setting name."""
        # Inspired by Postfix's "postconf -n".
        from django.conf import settings, global_settings

        # Because settings are imported lazily, we need to explicitly load them.
        settings._setup()

        user_settings = module_to_dict(settings._wrapped)
        default_settings = module_to_dict(global_settings)

        output = []
        # BUGFIX: ``keys = user_settings.keys(); keys.sort()`` only works on
        # Python 2 -- dict views have no sort() method. sorted() behaves the
        # same on both Python 2 and 3.
        for key in sorted(user_settings):
            if key not in default_settings:
                output.append("%s = %s ###" % (key, user_settings[key]))
            elif user_settings[key] != default_settings[key]:
                output.append("%s = %s" % (key, user_settings[key]))
        return '\n'.join(output)
| bsd-3-clause |
madan96/sympy | sympy/core/exprtools.py | 9 | 49411 | """Tools for manipulating of large commutative expressions. """
from __future__ import print_function, division
from sympy.core.add import Add
from sympy.core.compatibility import iterable, is_sequence, SYMPY_INTS, range
from sympy.core.mul import Mul, _keep_coeff
from sympy.core.power import Pow
from sympy.core.basic import Basic, preorder_traversal
from sympy.core.expr import Expr
from sympy.core.sympify import sympify
from sympy.core.numbers import Rational, Integer, Number, I
from sympy.core.singleton import S
from sympy.core.symbol import Dummy
from sympy.core.coreerrors import NonCommutativeExpression
from sympy.core.containers import Tuple, Dict
from sympy.utilities import default_sort_key
from sympy.utilities.iterables import (common_prefix, common_suffix,
variations, ordered)
from collections import defaultdict
_eps = Dummy(positive=True)
def _isnumber(i):
    """True for plain Python ints/floats and for objects whose ``is_Number``
    attribute is truthy (SymPy Number instances)."""
    if isinstance(i, (SYMPY_INTS, float)):
        return True
    return i.is_Number
def _monotonic_sign(self):
    """Return the value closest to 0 that ``self`` may have if all symbols
    are signed and the result is uniformly the same sign for all values of symbols.

    If a symbol is only signed but not known to be an
    integer or the result is 0 then a symbol representative of the sign of self
    will be returned. Otherwise, None is returned if a) the sign could be positive
    or negative or b) self is not in one of the following forms:

    - L(x, y, ...) + A: a function linear in all symbols x, y, ... with an
      additive constant; if A is zero then the function can be a monomial whose
      sign is monotonic over the range of the variables, e.g. (x + 1)**3 if x is
      nonnegative.
    - A/L(x, y, ...) + B: the inverse of a function linear in all symbols x, y, ...
      that does not have a sign change from positive to negative for any set
      of values for the variables.
    - M(x, y, ...) + A: a monomial M whose factors are all signed and a constant, A.
    - A/M(x, y, ...) + B: the inverse of a monomial and constants A and B.
    - P(x): a univariate polynomial

    Examples
    ========

    >>> from sympy.core.exprtools import _monotonic_sign as F
    >>> from sympy import Dummy, S
    >>> nn = Dummy(integer=True, nonnegative=True)
    >>> p = Dummy(integer=True, positive=True)
    >>> p2 = Dummy(integer=True, positive=True)
    >>> F(nn + 1)
    1
    >>> F(p - 1)
    _nneg
    >>> F(nn*p + 1)
    1
    >>> F(p2*p + 1)
    2
    >>> F(nn - 1)  # could be negative, zero or positive
    """
    if not self.is_real:
        return

    # A negated symbol: recurse on the positive form and flip the sign.
    if (-self).is_Symbol:
        rv = _monotonic_sign(-self)
        return rv if rv is None else -rv

    # Symbol/number-like leaf (not a sum, numeric denominator): classify
    # directly from its assumptions.
    if not self.is_Add and self.as_numer_denom()[1].is_number:
        s = self
        if s.is_prime:
            if s.is_odd:
                return S(3)
            else:
                return S(2)
        elif s.is_positive:
            if s.is_even:
                return S(2)
            elif s.is_integer:
                return S.One
            else:
                # positive but possibly non-integer: a positive infinitesimal
                return _eps
        elif s.is_negative:
            if s.is_even:
                return S(-2)
            elif s.is_integer:
                return S.NegativeOne
            else:
                return -_eps
        if s.is_zero or s.is_nonpositive or s.is_nonnegative:
            return S.Zero
        return None

    # univariate polynomial
    free = self.free_symbols
    if len(free) == 1:
        if self.is_polynomial():
            from sympy.polys.polytools import real_roots
            from sympy.polys.polyroots import roots
            from sympy.polys.polyerrors import PolynomialError
            x = free.pop()
            x0 = _monotonic_sign(x)
            if x0 == _eps or x0 == -_eps:
                x0 = S.Zero
            if x0 is not None:
                d = self.diff(x)
                if d.is_number:
                    roots = []
                else:
                    try:
                        roots = real_roots(d)
                    except (PolynomialError, NotImplementedError):
                        # NOTE: ``roots`` (the local) shadows the imported
                        # ``roots`` function after the first assignment above.
                        roots = [r for r in roots(d, x) if r.is_real]
                y = self.subs(x, x0)
                # The sign is uniform when the derivative never changes sign
                # over the symbol's range and the boundary value agrees.
                if x.is_nonnegative and all(r <= x0 for r in roots):
                    if y.is_nonnegative and d.is_positive:
                        if y:
                            return y if y.is_positive else Dummy('pos', positive=True)
                        else:
                            return Dummy('nneg', nonnegative=True)
                    if y.is_nonpositive and d.is_negative:
                        if y:
                            return y if y.is_negative else Dummy('neg', negative=True)
                        else:
                            return Dummy('npos', nonpositive=True)
                elif x.is_nonpositive and all(r >= x0 for r in roots):
                    if y.is_nonnegative and d.is_negative:
                        if y:
                            return Dummy('pos', positive=True)
                        else:
                            return Dummy('nneg', nonnegative=True)
                    if y.is_nonpositive and d.is_positive:
                        if y:
                            return Dummy('neg', negative=True)
                        else:
                            return Dummy('npos', nonpositive=True)
        else:
            # Univariate non-polynomial: analyze as numerator/denominator.
            n, d = self.as_numer_denom()
            den = None
            if n.is_number:
                den = _monotonic_sign(d)
            elif not d.is_number:
                if _monotonic_sign(n) is not None:
                    den = _monotonic_sign(d)
            if den is not None and (den.is_positive or den.is_negative):
                v = n*den
                if v.is_positive:
                    return Dummy('pos', positive=True)
                elif v.is_nonnegative:
                    return Dummy('nneg', nonnegative=True)
                elif v.is_negative:
                    return Dummy('neg', negative=True)
                elif v.is_nonpositive:
                    return Dummy('npos', nonpositive=True)
        return None

    # multivariate
    c, a = self.as_coeff_Add()
    v = None
    if not a.is_polynomial():
        # F/A or A/F where A is a number and F is a signed, rational monomial
        n, d = a.as_numer_denom()
        if not (n.is_number or d.is_number):
            return
        if (
                a.is_Mul or a.is_Pow) and \
                a.is_rational and \
                all(p.exp.is_Integer for p in a.atoms(Pow) if p.is_Pow) and \
                (a.is_positive or a.is_negative):
            v = S(1)
            for ai in Mul.make_args(a):
                if ai.is_number:
                    v *= ai
                    continue
                reps = {}
                for x in ai.free_symbols:
                    reps[x] = _monotonic_sign(x)
                    if reps[x] is None:
                        return
                v *= ai.subs(reps)
    elif c:
        # signed linear expression
        if not any(p for p in a.atoms(Pow) if not p.is_number) and (a.is_nonpositive or a.is_nonnegative):
            free = list(a.free_symbols)
            p = {}
            for i in free:
                v = _monotonic_sign(i)
                if v is None:
                    return
                # replace a sign-only symbol with an infinitesimal of the
                # appropriate sign so the bound can be evaluated
                p[i] = v or (_eps if i.is_nonnegative else -_eps)
            v = a.xreplace(p)
    if v is not None:
        rv = v + c
        if v.is_nonnegative and rv.is_positive:
            return rv.subs(_eps, 0)
        if v.is_nonpositive and rv.is_negative:
            return rv.subs(_eps, 0)
def decompose_power(expr):
    """
    Decompose power into symbolic base and integer exponent.

    This is strictly only valid if the exponent from which
    the integer is extracted is itself an integer or the
    base is positive. These conditions are assumed and not
    checked here.

    Examples
    ========

    >>> from sympy.core.exprtools import decompose_power
    >>> from sympy.abc import x, y

    >>> decompose_power(x)
    (x, 1)
    >>> decompose_power(x**2)
    (x, 2)
    >>> decompose_power(x**(2*y))
    (x**y, 2)
    >>> decompose_power(x**(2*y/3))
    (x**(y/3), 2)

    """
    base, exp = expr.as_base_exp()
    if exp.is_Number:
        if exp.is_Rational:
            # Rational exponent p/q: fold 1/q into the base so the returned
            # integer exponent is the numerator p.
            if not exp.is_Integer:
                base = Pow(base, Rational(1, exp.q))
            exp = exp.p
        else:
            # Irrational/float exponent: nothing integral to extract.
            base, exp = expr, 1
    else:
        # Symbolic exponent: pull out its rational coefficient.
        exp, tail = exp.as_coeff_Mul(rational=True)
        if exp is S.NegativeOne:
            base, exp = Pow(base, tail), -1
        elif exp is not S.One:
            # Keep the 1/q part of the coefficient with the symbolic tail.
            tail = _keep_coeff(Rational(1, exp.q), tail)
            base, exp = Pow(base, tail), exp.p
        else:
            base, exp = expr, 1
    return base, exp
def decompose_power_rat(expr):
    """
    Decompose power into symbolic base and rational exponent.
    """
    base, exp = expr.as_base_exp()
    if exp.is_Number:
        # Unlike decompose_power(), a rational exponent is kept as-is;
        # only non-rational numeric exponents collapse to (expr, 1).
        if not exp.is_Rational:
            base, exp = expr, 1
    else:
        # Symbolic exponent: same coefficient extraction as decompose_power.
        exp, tail = exp.as_coeff_Mul(rational=True)
        if exp is S.NegativeOne:
            base, exp = Pow(base, tail), -1
        elif exp is not S.One:
            tail = _keep_coeff(Rational(1, exp.q), tail)
            base, exp = Pow(base, tail), exp.p
        else:
            base, exp = expr, 1
    return base, exp
class Factors(object):
"""Efficient representation of ``f_1*f_2*...*f_n``."""
__slots__ = ['factors', 'gens']
def __init__(self, factors=None): # Factors
"""Initialize Factors from dict or expr.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x
>>> from sympy import I
>>> e = 2*x**3
>>> Factors(e)
Factors({2: 1, x: 3})
>>> Factors(e.as_powers_dict())
Factors({2: 1, x: 3})
>>> f = _
>>> f.factors # underlying dictionary
{2: 1, x: 3}
>>> f.gens # base of each factor
frozenset({2, x})
>>> Factors(0)
Factors({0: 1})
>>> Factors(I)
Factors({I: 1})
Notes
=====
Although a dictionary can be passed, only minimal checking is
performed: powers of -1 and I are made canonical.
"""
if isinstance(factors, (SYMPY_INTS, float)):
factors = S(factors)
if isinstance(factors, Factors):
factors = factors.factors.copy()
elif factors is None or factors is S.One:
factors = {}
elif factors is S.Zero or factors == 0:
factors = {S.Zero: S.One}
elif isinstance(factors, Number):
n = factors
factors = {}
if n < 0:
factors[S.NegativeOne] = S.One
n = -n
if n is not S.One:
if n.is_Float or n.is_Integer or n is S.Infinity:
factors[n] = S.One
elif n.is_Rational:
# since we're processing Numbers, the denominator is
# stored with a negative exponent; all other factors
# are left .
if n.p != 1:
factors[Integer(n.p)] = S.One
factors[Integer(n.q)] = S.NegativeOne
else:
raise ValueError('Expected Float|Rational|Integer, not %s' % n)
elif isinstance(factors, Basic) and not factors.args:
factors = {factors: S.One}
elif isinstance(factors, Expr):
c, nc = factors.args_cnc()
i = c.count(I)
for _ in range(i):
c.remove(I)
factors = dict(Mul._from_args(c).as_powers_dict())
if i:
factors[I] = S.One*i
if nc:
factors[Mul(*nc, evaluate=False)] = S.One
else:
factors = factors.copy() # /!\ should be dict-like
# tidy up -/+1 and I exponents if Rational
handle = []
for k in factors:
if k is I or k in (-1, 1):
handle.append(k)
if handle:
i1 = S.One
for k in handle:
if not _isnumber(factors[k]):
continue
i1 *= k**factors.pop(k)
if i1 is not S.One:
for a in i1.args if i1.is_Mul else [i1]: # at worst, -1.0*I*(-1)**e
if a is S.NegativeOne:
factors[a] = S.One
elif a is I:
factors[I] = S.One
elif a.is_Pow:
if S.NegativeOne not in factors:
factors[S.NegativeOne] = S.Zero
factors[S.NegativeOne] += a.exp
elif a == 1:
factors[a] = S.One
elif a == -1:
factors[-a] = S.One
factors[S.NegativeOne] = S.One
else:
raise ValueError('unexpected factor in i1: %s' % a)
self.factors = factors
try:
self.gens = frozenset(factors.keys())
except AttributeError:
raise TypeError('expecting Expr or dictionary')
def __hash__(self): # Factors
keys = tuple(ordered(self.factors.keys()))
values = [self.factors[k] for k in keys]
return hash((keys, values))
def __repr__(self): # Factors
return "Factors({%s})" % ', '.join(
['%s: %s' % (k, v) for k, v in ordered(self.factors.items())])
@property
def is_zero(self): # Factors
"""
>>> from sympy.core.exprtools import Factors
>>> Factors(0).is_zero
True
"""
f = self.factors
return len(f) == 1 and S.Zero in f
@property
def is_one(self): # Factors
"""
>>> from sympy.core.exprtools import Factors
>>> Factors(1).is_one
True
"""
return not self.factors
def as_expr(self): # Factors
"""Return the underlying expression.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y
>>> Factors((x*y**2).as_powers_dict()).as_expr()
x*y**2
"""
args = []
for factor, exp in self.factors.items():
if exp != 1:
b, e = factor.as_base_exp()
if isinstance(exp, int):
e = _keep_coeff(Integer(exp), e)
elif isinstance(exp, Rational):
e = _keep_coeff(exp, e)
else:
e *= exp
args.append(b**e)
else:
args.append(factor)
return Mul(*args)
def mul(self, other): # Factors
"""Return Factors of ``self * other``.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> a = Factors((x*y**2).as_powers_dict())
>>> b = Factors((x*y/z).as_powers_dict())
>>> a.mul(b)
Factors({x: 2, y: 3, z: -1})
>>> a*b
Factors({x: 2, y: 3, z: -1})
"""
if not isinstance(other, Factors):
other = Factors(other)
if any(f.is_zero for f in (self, other)):
return Factors(S.Zero)
factors = dict(self.factors)
for factor, exp in other.factors.items():
if factor in factors:
exp = factors[factor] + exp
if not exp:
del factors[factor]
continue
factors[factor] = exp
return Factors(factors)
def normal(self, other):
"""Return ``self`` and ``other`` with ``gcd`` removed from each.
The only differences between this and method ``div`` is that this
is 1) optimized for the case when there are few factors in common and
2) this does not raise an error if ``other`` is zero.
See Also
========
div
"""
if not isinstance(other, Factors):
other = Factors(other)
if other.is_zero:
return (Factors(), Factors(S.Zero))
if self.is_zero:
return (Factors(S.Zero), Factors())
self_factors = dict(self.factors)
other_factors = dict(other.factors)
for factor, self_exp in self.factors.items():
try:
other_exp = other.factors[factor]
except KeyError:
continue
exp = self_exp - other_exp
if not exp:
del self_factors[factor]
del other_factors[factor]
elif _isnumber(exp):
if exp > 0:
self_factors[factor] = exp
del other_factors[factor]
else:
del self_factors[factor]
other_factors[factor] = -exp
else:
r = self_exp.extract_additively(other_exp)
if r is not None:
if r:
self_factors[factor] = r
del other_factors[factor]
else: # should be handled already
del self_factors[factor]
del other_factors[factor]
else:
sc, sa = self_exp.as_coeff_Add()
if sc:
oc, oa = other_exp.as_coeff_Add()
diff = sc - oc
if diff > 0:
self_factors[factor] -= oc
other_exp = oa
elif diff < 0:
self_factors[factor] -= sc
other_factors[factor] -= sc
other_exp = oa - diff
else:
self_factors[factor] = sa
other_exp = oa
if other_exp:
other_factors[factor] = other_exp
else:
del other_factors[factor]
return Factors(self_factors), Factors(other_factors)
def div(self, other): # Factors
"""Return ``self`` and ``other`` with ``gcd`` removed from each.
This is optimized for the case when there are many factors in common.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> from sympy import S
>>> a = Factors((x*y**2).as_powers_dict())
>>> a.div(a)
(Factors({}), Factors({}))
>>> a.div(x*z)
(Factors({y: 2}), Factors({z: 1}))
The ``/`` operator only gives ``quo``:
>>> a/x
Factors({y: 2})
Factors treats its factors as though they are all in the numerator, so
if you violate this assumption the results will be correct but will
not strictly correspond to the numerator and denominator of the ratio:
>>> a.div(x/z)
(Factors({y: 2}), Factors({z: -1}))
Factors is also naive about bases: it does not attempt any denesting
of Rational-base terms, for example the following does not become
2**(2*x)/2.
>>> Factors(2**(2*x + 2)).div(S(8))
(Factors({2: 2*x + 2}), Factors({8: 1}))
factor_terms can clean up such Rational-bases powers:
>>> from sympy.core.exprtools import factor_terms
>>> n, d = Factors(2**(2*x + 2)).div(S(8))
>>> n.as_expr()/d.as_expr()
2**(2*x + 2)/8
>>> factor_terms(_)
2**(2*x)/2
"""
quo, rem = dict(self.factors), {}
if not isinstance(other, Factors):
other = Factors(other)
if other.is_zero:
raise ZeroDivisionError
if self.is_zero:
return (Factors(S.Zero), Factors())
for factor, exp in other.factors.items():
if factor in quo:
d = quo[factor] - exp
if _isnumber(d):
if d <= 0:
del quo[factor]
if d >= 0:
if d:
quo[factor] = d
continue
exp = -d
else:
r = quo[factor].extract_additively(exp)
if r is not None:
if r:
quo[factor] = r
else: # should be handled already
del quo[factor]
else:
other_exp = exp
sc, sa = quo[factor].as_coeff_Add()
if sc:
oc, oa = other_exp.as_coeff_Add()
diff = sc - oc
if diff > 0:
quo[factor] -= oc
other_exp = oa
elif diff < 0:
quo[factor] -= sc
other_exp = oa - diff
else:
quo[factor] = sa
other_exp = oa
if other_exp:
rem[factor] = other_exp
else:
assert factor not in rem
continue
rem[factor] = exp
return Factors(quo), Factors(rem)
def quo(self, other): # Factors
"""Return numerator Factor of ``self / other``.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> a = Factors((x*y**2).as_powers_dict())
>>> b = Factors((x*y/z).as_powers_dict())
>>> a.quo(b) # same as a/b
Factors({y: 1})
"""
return self.div(other)[0]
def rem(self, other): # Factors
"""Return denominator Factors of ``self / other``.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> a = Factors((x*y**2).as_powers_dict())
>>> b = Factors((x*y/z).as_powers_dict())
>>> a.rem(b)
Factors({z: -1})
>>> a.rem(a)
Factors({})
"""
return self.div(other)[1]
def pow(self, other): # Factors
"""Return self raised to a non-negative integer power.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y
>>> a = Factors((x*y**2).as_powers_dict())
>>> a**2
Factors({x: 2, y: 4})
"""
if isinstance(other, Factors):
other = other.as_expr()
if other.is_Integer:
other = int(other)
if isinstance(other, SYMPY_INTS) and other >= 0:
factors = {}
if other:
for factor, exp in self.factors.items():
factors[factor] = exp*other
return Factors(factors)
else:
raise ValueError("expected non-negative integer, got %s" % other)
def gcd(self, other): # Factors
"""Return Factors of ``gcd(self, other)``. The keys are
the intersection of factors with the minimum exponent for
each factor.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> a = Factors((x*y**2).as_powers_dict())
>>> b = Factors((x*y/z).as_powers_dict())
>>> a.gcd(b)
Factors({x: 1, y: 1})
"""
if not isinstance(other, Factors):
other = Factors(other)
if other.is_zero:
return Factors(self.factors)
factors = {}
for factor, exp in self.factors.items():
factor, exp = sympify(factor), sympify(exp)
if factor in other.factors:
lt = (exp - other.factors[factor]).is_negative
if lt == True:
factors[factor] = exp
elif lt == False:
factors[factor] = other.factors[factor]
return Factors(factors)
def lcm(self, other): # Factors
"""Return Factors of ``lcm(self, other)`` which are
the union of factors with the maximum exponent for
each factor.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> a = Factors((x*y**2).as_powers_dict())
>>> b = Factors((x*y/z).as_powers_dict())
>>> a.lcm(b)
Factors({x: 1, y: 2, z: -1})
"""
if not isinstance(other, Factors):
other = Factors(other)
if any(f.is_zero for f in (self, other)):
return Factors(S.Zero)
factors = dict(self.factors)
for factor, exp in other.factors.items():
if factor in factors:
exp = max(exp, factors[factor])
factors[factor] = exp
return Factors(factors)
    def __mul__(self, other): # Factors
        """``self*other`` -- delegates to :meth:`mul`."""
        return self.mul(other)
    def __divmod__(self, other): # Factors
        """``divmod(self, other)`` -- (quotient, remainder) via :meth:`div`."""
        return self.div(other)
    def __div__(self, other): # Factors
        """``self/other`` (Python 2 division) -- delegates to :meth:`quo`."""
        return self.quo(other)
    __truediv__ = __div__  # Python 3 true division uses the same implementation
    def __mod__(self, other): # Factors
        """``self % other`` -- delegates to :meth:`rem`."""
        return self.rem(other)
    def __pow__(self, other): # Factors
        """``self**other`` -- delegates to :meth:`pow`."""
        return self.pow(other)
    def __eq__(self, other): # Factors
        """Compare by the underlying factors dict; non-Factors operands
        are first converted via the ``Factors`` constructor."""
        if not isinstance(other, Factors):
            other = Factors(other)
        return self.factors == other.factors
    def __ne__(self, other): # Factors
        """Negation of :meth:`__eq__` (needed explicitly on Python 2)."""
        return not self.__eq__(other)
class Term(object):
    """Efficient representation of ``coeff*(numer/denom)``. """
    # __slots__ keeps per-instance memory low; Terms are created in bulk by
    # _gcd_terms for every summand of an Add.
    __slots__ = ['coeff', 'numer', 'denom']
    def __init__(self, term, numer=None, denom=None): # Term
        # Two construction modes:
        #   Term(expr)                 -- decompose a commutative expression
        #   Term(coeff, numer, denom)  -- rebuild from known Factors parts
        if numer is None and denom is None:
            if not term.is_commutative:
                raise NonCommutativeExpression(
                    'commutative expression expected')
            coeff, factors = term.as_coeff_mul()
            numer, denom = defaultdict(int), defaultdict(int)
            for factor in factors:
                base, exp = decompose_power(factor)
                if base.is_Add:
                    # Pull the content out of an Add base so e.g. (2*x + 2)**3
                    # contributes 2**3 to the coefficient.
                    cont, base = base.primitive()
                    coeff *= cont**exp
                # Positive powers go to the numerator, negative (negated)
                # powers to the denominator.
                if exp > 0:
                    numer[base] += exp
                else:
                    denom[base] += -exp
            numer = Factors(numer)
            denom = Factors(denom)
        else:
            coeff = term
            if numer is None:
                numer = Factors()
            if denom is None:
                denom = Factors()
        self.coeff = coeff
        self.numer = numer
        self.denom = denom
    def __hash__(self): # Term
        return hash((self.coeff, self.numer, self.denom))
    def __repr__(self): # Term
        return "Term(%s, %s, %s)" % (self.coeff, self.numer, self.denom)
    def as_expr(self): # Term
        """Recombine the three parts into an ordinary Expr."""
        return self.coeff*(self.numer.as_expr()/self.denom.as_expr())
    def mul(self, other): # Term
        """Return the product of two Terms, with numer/denom normalized
        so common factors are cancelled."""
        coeff = self.coeff*other.coeff
        numer = self.numer.mul(other.numer)
        denom = self.denom.mul(other.denom)
        numer, denom = numer.normal(denom)
        return Term(coeff, numer, denom)
    def inv(self): # Term
        """Return the reciprocal: numer and denom swap, coeff inverts."""
        return Term(1/self.coeff, self.denom, self.numer)
    def quo(self, other): # Term
        """Return ``self / other`` as multiplication by the inverse."""
        return self.mul(other.inv())
    def pow(self, other): # Term
        """Raise to an integer power; a negative power inverts first."""
        if other < 0:
            return self.inv().pow(-other)
        else:
            return Term(self.coeff ** other,
                        self.numer.pow(other),
                        self.denom.pow(other))
    def gcd(self, other): # Term
        """Return the componentwise gcd of two Terms."""
        return Term(self.coeff.gcd(other.coeff),
                    self.numer.gcd(other.numer),
                    self.denom.gcd(other.denom))
    def lcm(self, other): # Term
        """Return the componentwise lcm of two Terms."""
        return Term(self.coeff.lcm(other.coeff),
                    self.numer.lcm(other.numer),
                    self.denom.lcm(other.denom))
    def __mul__(self, other): # Term
        if isinstance(other, Term):
            return self.mul(other)
        else:
            return NotImplemented
    def __div__(self, other): # Term
        if isinstance(other, Term):
            return self.quo(other)
        else:
            return NotImplemented
    __truediv__ = __div__
    def __pow__(self, other): # Term
        if isinstance(other, SYMPY_INTS):
            return self.pow(other)
        else:
            return NotImplemented
    def __eq__(self, other): # Term
        return (self.coeff == other.coeff and
                self.numer == other.numer and
                self.denom == other.denom)
    def __ne__(self, other): # Term
        return not self.__eq__(other)
def _gcd_terms(terms, isprimitive=False, fraction=True):
    """Helper function for :func:`gcd_terms`.

    Returns a ``(cont, numer, denom)`` triple such that the input sum equals
    ``cont*numer/denom``.

    If ``isprimitive`` is True then the call to primitive
    for an Add will be skipped. This is useful when the
    content has already been extracted.

    If ``fraction`` is True then the expression will appear over a common
    denominator, the lcm of all term denominators.
    """
    if isinstance(terms, Basic) and not isinstance(terms, Tuple):
        terms = Add.make_args(terms)
    # Drop zero terms and wrap each remaining one in a Term.
    terms = list(map(Term, [t for t in terms if t]))
    # there is some simplification that may happen if we leave this
    # here rather than duplicate it before the mapping of Term onto
    # the terms
    if len(terms) == 0:
        return S.Zero, S.Zero, S.One
    if len(terms) == 1:
        cont = terms[0].coeff
        numer = terms[0].numer.as_expr()
        denom = terms[0].denom.as_expr()
    else:
        # Divide every term by the running gcd of all terms.
        cont = terms[0]
        for term in terms[1:]:
            cont = cont.gcd(term)
        for i, term in enumerate(terms):
            terms[i] = term.quo(cont)
        if fraction:
            # Put everything over the lcm of the term denominators.
            denom = terms[0].denom
            for term in terms[1:]:
                denom = denom.lcm(term.denom)
            numers = []
            for term in terms:
                numer = term.numer.mul(denom.quo(term.denom))
                numers.append(term.coeff*numer.as_expr())
        else:
            numers = [t.as_expr() for t in terms]
            denom = Term(S(1)).numer
        cont = cont.as_expr()
        numer = Add(*numers)
        denom = denom.as_expr()
    # Unless told not to, pull any remaining rational content out of the
    # numerator and fold it into cont.
    if not isprimitive and numer.is_Add:
        _cont, numer = numer.primitive()
        cont *= _cont
    return cont, numer, denom
def gcd_terms(terms, isprimitive=False, clear=True, fraction=True):
    """Compute the GCD of ``terms`` and put them together.

    ``terms`` can be an expression or a non-Basic sequence of expressions
    which will be handled as though they are terms from a sum.

    If ``isprimitive`` is True the _gcd_terms will not run the primitive
    method on the terms.

    ``clear`` controls the removal of integers from the denominator of an Add
    expression. When True (default), all numerical denominator will be cleared;
    when False the denominators will be cleared only if all terms had numerical
    denominators other than 1.

    ``fraction``, when True (default), will put the expression over a common
    denominator.

    Examples
    ========

    >>> from sympy.core import gcd_terms
    >>> from sympy.abc import x, y
    >>> gcd_terms((x + 1)**2*y + (x + 1)*y**2)
    y*(x + 1)*(x + y + 1)
    >>> gcd_terms(x/2 + 1)
    (x + 2)/2
    >>> gcd_terms(x/2 + 1, clear=False)
    x/2 + 1
    >>> gcd_terms(x/2 + y/2, clear=False)
    (x + y)/2
    >>> gcd_terms(x/2 + 1/x)
    (x**2 + 2)/(2*x)
    >>> gcd_terms(x/2 + 1/x, fraction=False)
    (x + 2/x)/2
    >>> gcd_terms(x/2 + 1/x, fraction=False, clear=False)
    x/2 + 1/x
    >>> gcd_terms(x/2/y + 1/x/y)
    (x**2 + 2)/(2*x*y)
    >>> gcd_terms(x/2/y + 1/x/y, clear=False)
    (x**2/2 + 1)/(x*y)
    >>> gcd_terms(x/2/y + 1/x/y, clear=False, fraction=False)
    (x/2 + 1/x)/y

    The ``clear`` flag was ignored in this case because the returned
    expression was a rational expression, not a simple sum.

    See Also
    ========
    factor_terms, sympy.polys.polytools.terms_gcd
    """
    def mask(terms):
        """replace nc portions of each term with a unique Dummy symbols
        and return the replacements to restore them"""
        args = [(a, []) if a.is_commutative else a.args_cnc() for a in terms]
        reps = []
        for i, (c, nc) in enumerate(args):
            if nc:
                nc = Mul._from_args(nc)
                d = Dummy()
                reps.append((d, nc))
                c.append(d)
                args[i] = Mul._from_args(c)
            else:
                args[i] = c
        return args, dict(reps)
    isadd = isinstance(terms, Add)
    # An Add, or a non-Basic sequence (but not a Dict), is treated as a sum
    # of terms.
    addlike = isadd or not isinstance(terms, Basic) and \
        is_sequence(terms, include=set) and \
        not isinstance(terms, Dict)
    if addlike:
        if isadd: # i.e. an Add
            terms = list(terms.args)
        else:
            terms = sympify(terms)
        # Hide noncommutative parts behind Dummys so _gcd_terms only sees
        # commutative expressions; restore them afterwards.
        terms, reps = mask(terms)
        cont, numer, denom = _gcd_terms(terms, isprimitive, fraction)
        numer = numer.xreplace(reps)
        coeff, factors = cont.as_coeff_Mul()
        if not clear:
            # Try to push a rational denominator back into the Add so at
            # least one term keeps an integer coefficient.
            c, _coeff = coeff.as_coeff_Mul()
            if not c.is_Integer and not clear and numer.is_Add:
                n, d = c.as_numer_denom()
                _numer = numer/d
                if any(a.as_coeff_Mul()[0].is_Integer
                        for a in _numer.args):
                    numer = _numer
                    coeff = n*_coeff
        return _keep_coeff(coeff, factors*numer/denom, clear=clear)
    if not isinstance(terms, Basic):
        return terms
    if terms.is_Atom:
        return terms
    if terms.is_Mul:
        # Recurse into each multiplicative factor.
        c, args = terms.as_coeff_mul()
        return _keep_coeff(c, Mul(*[gcd_terms(i, isprimitive, clear, fraction)
                                    for i in args]), clear=clear)
    def handle(a):
        # don't treat internal args like terms of an Add
        if not isinstance(a, Expr):
            if isinstance(a, Basic):
                return a.func(*[handle(i) for i in a.args])
            return type(a)([handle(i) for i in a])
        return gcd_terms(a, isprimitive, clear, fraction)
    if isinstance(terms, Dict):
        return Dict(*[(k, handle(v)) for k, v in terms.args])
    return terms.func(*[handle(i) for i in terms.args])
def factor_terms(expr, radical=False, clear=False, fraction=False, sign=True):
    """Remove common factors from terms in all arguments without
    changing the underlying structure of the expr. No expansion or
    simplification (and no processing of non-commutatives) is performed.

    If radical=True then a radical common to all terms will be factored
    out of any Add sub-expressions of the expr.

    If clear=False (default) then coefficients will not be separated
    from a single Add if they can be distributed to leave one or more
    terms with integer coefficients.

    If fraction=True (default is False) then a common denominator will be
    constructed for the expression.

    If sign=True (default) then even if the only factor in common is a -1,
    it will be factored out of the expression.

    Examples
    ========

    >>> from sympy import factor_terms, Symbol
    >>> from sympy.abc import x, y
    >>> factor_terms(x + x*(2 + 4*y)**3)
    x*(8*(2*y + 1)**3 + 1)
    >>> A = Symbol('A', commutative=False)
    >>> factor_terms(x*A + x*A + x*y*A)
    x*(y*A + 2*A)

    When ``clear`` is False, a rational will only be factored out of an
    Add expression if all terms of the Add have coefficients that are
    fractions:

    >>> factor_terms(x/2 + 1, clear=False)
    x/2 + 1
    >>> factor_terms(x/2 + 1, clear=True)
    (x + 2)/2

    If a -1 is all that can be factored out, to *not* factor it out, the
    flag ``sign`` must be False:

    >>> factor_terms(-x - y)
    -(x + y)
    >>> factor_terms(-x - y, sign=False)
    -x - y
    >>> factor_terms(-2*x - 2*y, sign=False)
    -2*(x + y)

    See Also
    ========
    gcd_terms, sympy.polys.polytools.terms_gcd
    """
    def do(expr):
        # Recursive worker: factors each node bottom-up.
        from sympy.concrete.summations import Sum
        from sympy.simplify.simplify import factor_sum
        is_iterable = iterable(expr)
        if not isinstance(expr, Basic) or expr.is_Atom:
            if is_iterable:
                return type(expr)([do(i) for i in expr])
            return expr
        if expr.is_Pow or expr.is_Function or \
                is_iterable or not hasattr(expr, 'args_cnc'):
            # Recurse into args but keep the node itself untouched.
            args = expr.args
            newargs = tuple([do(i) for i in args])
            if newargs == args:
                return expr
            return expr.func(*newargs)
        if isinstance(expr, Sum):
            return factor_sum(expr, radical=radical, clear=clear, fraction=fraction, sign=sign)
        cont, p = expr.as_content_primitive(radical=radical, clear=clear)
        if p.is_Add:
            list_args = [do(a) for a in Add.make_args(p)]
            # get a common negative (if there) which gcd_terms does not remove
            if all(a.as_coeff_Mul()[0] < 0 for a in list_args):
                cont = -cont
                list_args = [-a for a in list_args]
            # watch out for exp(-(x+2)) which gcd_terms will change to exp(-x-2)
            special = {}
            for i, a in enumerate(list_args):
                b, e = a.as_base_exp()
                if e.is_Mul and e != Mul(*e.args):
                    list_args[i] = Dummy()
                    special[list_args[i]] = a
            # rebuild p not worrying about the order which gcd_terms will fix
            p = Add._from_args(list_args)
            p = gcd_terms(p,
                isprimitive=True,
                clear=clear,
                fraction=fraction).xreplace(special)
        elif p.args:
            p = p.func(
                *[do(a) for a in p.args])
        rv = _keep_coeff(cont, p, clear=clear, sign=sign)
        return rv
    expr = sympify(expr)
    return do(expr)
def _mask_nc(eq, name=None):
    """
    Return ``eq`` with non-commutative objects replaced with Dummy
    symbols. A dictionary that can be used to restore the original
    values is returned: if it is None, the expression is noncommutative
    and cannot be made commutative. The third value returned is a list
    of any non-commutative symbols that appear in the returned equation.

    ``name``, if given, is the name that will be used with numbered Dummy
    variables that will replace the non-commutative objects and is mainly
    used for doctesting purposes.

    Notes
    =====
    All non-commutative objects other than Symbols are replaced with
    a non-commutative Symbol. Identical objects will be identified
    by identical symbols.

    If there is only 1 non-commutative object in an expression it will
    be replaced with a commutative symbol. Otherwise, the non-commutative
    entities are retained and the calling routine should handle
    replacements in this case since some care must be taken to keep
    track of the ordering of symbols when they occur within Muls.

    Examples
    ========

    >>> from sympy.physics.secondquant import Commutator, NO, F, Fd
    >>> from sympy import symbols, Mul
    >>> from sympy.core.exprtools import _mask_nc
    >>> from sympy.abc import x, y
    >>> A, B, C = symbols('A,B,C', commutative=False)

    One nc-symbol:

    >>> _mask_nc(A**2 - x**2, 'd')
    (_d0**2 - x**2, {_d0: A}, [])

    Multiple nc-symbols:

    >>> _mask_nc(A**2 - B**2, 'd')
    (A**2 - B**2, None, [A, B])

    An nc-object with nc-symbols but no others outside of it:

    >>> _mask_nc(1 + x*Commutator(A, B), 'd')
    (_d0*x + 1, {_d0: Commutator(A, B)}, [])
    >>> _mask_nc(NO(Fd(x)*F(y)), 'd')
    (_d0, {_d0: NO(CreateFermion(x)*AnnihilateFermion(y))}, [])

    Multiple nc-objects:

    >>> eq = x*Commutator(A, B) + x*Commutator(A, C)*Commutator(A, B)
    >>> _mask_nc(eq, 'd')
    (x*_d0 + x*_d1*_d0, {_d0: Commutator(A, B), _d1: Commutator(A, C)}, [_d0, _d1])

    Multiple nc-objects and nc-symbols:

    >>> eq = A*Commutator(A, B) + B*Commutator(A, C)
    >>> _mask_nc(eq, 'd')
    (A*_d0 + B*_d1, {_d0: Commutator(A, B), _d1: Commutator(A, C)}, [_d0, _d1, A, B])

    If there is an object that:

    - doesn't contain nc-symbols
    - but has arguments which derive from Basic, not Expr
    - and doesn't define an _eval_is_commutative routine

    then it will give False (or None?) for the is_commutative test. Such
    objects are also removed by this routine:

    >>> from sympy import Basic
    >>> eq = (1 + Mul(Basic(), Basic(), evaluate=False))
    >>> eq.is_commutative
    False
    >>> _mask_nc(eq, 'd')
    (_d0**2 + 1, {_d0: Basic()}, [])
    """
    name = name or 'mask'
    # Make Dummy() append sequential numbers to the name
    def numbered_names():
        i = 0
        while True:
            yield name + str(i)
            i += 1
    names = numbered_names()
    def Dummy(*args, **kwargs):
        # Shadows sympy.Dummy so every Dummy created here gets the next
        # sequential name.
        from sympy import Dummy
        return Dummy(next(names), *args, **kwargs)
    expr = eq
    if expr.is_commutative:
        # Nothing to mask.
        return eq, {}, []
    # identify nc-objects; symbols and other
    rep = []
    nc_obj = set()
    nc_syms = set()
    pot = preorder_traversal(expr, keys=default_sort_key)
    for i, a in enumerate(pot):
        if any(a == r[0] for r in rep):
            # Already scheduled for replacement; don't descend into it.
            pot.skip()
        elif not a.is_commutative:
            if a.is_Symbol:
                nc_syms.add(a)
            elif not (a.is_Add or a.is_Mul or a.is_Pow):
                if all(s.is_commutative for s in a.free_symbols):
                    rep.append((a, Dummy()))
                else:
                    nc_obj.add(a)
                pot.skip()
    # If there is only one nc symbol or object, it can be factored regularly
    # but polys is going to complain, so replace it with a Dummy.
    if len(nc_obj) == 1 and not nc_syms:
        rep.append((nc_obj.pop(), Dummy()))
    elif len(nc_syms) == 1 and not nc_obj:
        rep.append((nc_syms.pop(), Dummy()))
    # Any remaining nc-objects will be replaced with an nc-Dummy and
    # identified as an nc-Symbol to watch out for
    nc_obj = sorted(nc_obj, key=default_sort_key)
    for n in nc_obj:
        nc = Dummy(commutative=False)
        rep.append((n, nc))
        nc_syms.add(nc)
    expr = expr.subs(rep)
    nc_syms = list(nc_syms)
    nc_syms.sort(key=default_sort_key)
    return expr, {v: k for k, v in rep} or None, nc_syms
def factor_nc(expr):
    """Return the factored form of ``expr`` while handling non-commutative
    expressions.

    Examples
    ========

    >>> from sympy.core.exprtools import factor_nc
    >>> from sympy import Symbol
    >>> from sympy.abc import x
    >>> A = Symbol('A', commutative=False)
    >>> B = Symbol('B', commutative=False)
    >>> factor_nc((x**2 + 2*A*x + A**2).expand())
    (x + A)**2
    >>> factor_nc(((x + A)*(x + B)).expand())
    (x + A)*(x + B)
    """
    from sympy.simplify.simplify import powsimp
    from sympy.polys import gcd, factor
    def _pemexpand(expr):
        "Expand with the minimal set of hints necessary to check the result."
        return expr.expand(deep=True, mul=True, power_exp=True,
            power_base=False, basic=False, multinomial=True, log=False)
    expr = sympify(expr)
    if not isinstance(expr, Expr) or not expr.args:
        return expr
    if not expr.is_Add:
        # Factor each argument independently.
        return expr.func(*[factor_nc(a) for a in expr.args])
    expr, rep, nc_symbols = _mask_nc(expr)
    if rep:
        # All nc parts could be masked commutatively; factor and restore.
        return factor(expr).subs(rep)
    else:
        # Each term split into (commutative factors, noncommutative factors).
        args = [a.args_cnc() for a in Add.make_args(expr)]
        # c: commutative content, g: commutative gcd (non-numeric part),
        # l/r: noncommutative left/right common factors.
        c = g = l = r = S.One
        hit = False
        # find any commutative gcd term
        for i, a in enumerate(args):
            if i == 0:
                c = Mul._from_args(a[0])
            elif a[0]:
                c = gcd(c, Mul._from_args(a[0]))
            else:
                c = S.One
        if c is not S.One:
            hit = True
            c, g = c.as_coeff_Mul()
            if g is not S.One:
                # Divide the symbolic gcd out of each term's c-factors.
                for i, (cc, _) in enumerate(args):
                    cc = list(Mul.make_args(Mul._from_args(list(cc))/g))
                    args[i][0] = cc
            # Divide the numeric content out of each term.
            for i, (cc, _) in enumerate(args):
                cc[0] = cc[0]/c
                args[i][0] = cc
        # find any noncommutative common prefix
        for i, a in enumerate(args):
            if i == 0:
                n = a[1][:]
            else:
                n = common_prefix(n, a[1])
            if not n:
                # is there a power that can be extracted?
                if not args[0][1]:
                    break
                b, e = args[0][1][0].as_base_exp()
                ok = False
                if e.is_Integer:
                    for t in args:
                        if not t[1]:
                            break
                        bt, et = t[1][0].as_base_exp()
                        if et.is_Integer and bt == b:
                            e = min(e, et)
                        else:
                            break
                    else:
                        # Every term starts with b**(>=e): extract b**e.
                        ok = hit = True
                        l = b**e
                        il = b**-e
                        for i, a in enumerate(args):
                            args[i][1][0] = il*args[i][1][0]
                        break
                if not ok:
                    break
        else:
            # A full common prefix n was found in every term.
            hit = True
            lenn = len(n)
            l = Mul(*n)
            for i, a in enumerate(args):
                args[i][1] = args[i][1][lenn:]
        # find any noncommutative common suffix
        for i, a in enumerate(args):
            if i == 0:
                n = a[1][:]
            else:
                n = common_suffix(n, a[1])
            if not n:
                # is there a power that can be extracted?
                if not args[0][1]:
                    break
                b, e = args[0][1][-1].as_base_exp()
                ok = False
                if e.is_Integer:
                    for t in args:
                        if not t[1]:
                            break
                        bt, et = t[1][-1].as_base_exp()
                        if et.is_Integer and bt == b:
                            e = min(e, et)
                        else:
                            break
                    else:
                        # Every term ends with b**(>=e): extract b**e.
                        ok = hit = True
                        r = b**e
                        il = b**-e
                        for i, a in enumerate(args):
                            args[i][1][-1] = args[i][1][-1]*il
                        break
                if not ok:
                    break
        else:
            # A full common suffix n was found in every term.
            hit = True
            lenn = len(n)
            r = Mul(*n)
            for i, a in enumerate(args):
                args[i][1] = a[1][:len(a[1]) - lenn]
        if hit:
            mid = Add(*[Mul(*cc)*Mul(*nc) for cc, nc in args])
        else:
            mid = expr
        # sort the symbols so the Dummys would appear in the same
        # order as the original symbols, otherwise you may introduce
        # a factor of -1, e.g. A**2 - B**2) -- {A:y, B:x} --> y**2 - x**2
        # and the former factors into two terms, (A - B)*(A + B) while the
        # latter factors into 3 terms, (-1)*(x - y)*(x + y)
        rep1 = [(n, Dummy()) for n in sorted(nc_symbols, key=default_sort_key)]
        unrep1 = [(v, k) for k, v in rep1]
        unrep1.reverse()
        new_mid, r2, _ = _mask_nc(mid.subs(rep1))
        new_mid = powsimp(factor(new_mid))
        new_mid = new_mid.subs(r2).subs(unrep1)
        if new_mid.is_Pow:
            return _keep_coeff(c, g*l*new_mid*r)
        if new_mid.is_Mul:
            # XXX TODO there should be a way to inspect what order the terms
            # must be in and just select the plausible ordering without
            # checking permutations
            cfac = []
            ncfac = []
            for f in new_mid.args:
                if f.is_commutative:
                    cfac.append(f)
                else:
                    b, e = f.as_base_exp()
                    if e.is_Integer:
                        ncfac.extend([b]*e)
                    else:
                        ncfac.append(f)
            pre_mid = g*Mul(*cfac)*l
            target = _pemexpand(expr/c)
            # Try every ordering of the nc factors until one expands back
            # to the original expression.
            for s in variations(ncfac, len(ncfac)):
                ok = pre_mid*Mul(*s)*r
                if _pemexpand(ok) == target:
                    return _keep_coeff(c, ok)
        # mid was an Add that didn't factor successfully
        return _keep_coeff(c, g*l*mid*r)
| bsd-3-clause |
wemanuel/smry | smry/oauth2client/crypt.py | 20 | 10913 | # -*- coding: utf-8 -*-
#
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Crypto-related routines for oauth2client."""
import base64
import json
import logging
import time
# Timestamp tolerances used when validating JWT 'iat'/'exp' claims.
CLOCK_SKEW_SECS = 300 # 5 minutes in seconds
AUTH_TOKEN_LIFETIME_SECS = 300 # 5 minutes in seconds
# Upper bound on an acceptable token lifetime when checking 'exp'.
MAX_TOKEN_LIFETIME_SECS = 86400 # 1 day in seconds
logger = logging.getLogger(__name__)
class AppIdentityError(Exception):
  """Raised when a JWT fails validation (signature, timestamps or audience)."""
  pass
try:
  from OpenSSL import crypto

  class OpenSSLVerifier(object):
    """Verifies the signature on a message."""

    def __init__(self, pubkey):
      """Constructor.

      Args:
        pubkey, OpenSSL.crypto.PKey, The public key to verify with.
      """
      self._pubkey = pubkey

    def verify(self, message, signature):
      """Verifies a message against a signature.

      Args:
        message: string, The message to verify.
        signature: string, The signature on the message.

      Returns:
        True if message was signed by the private key associated with the public
        key that this object was constructed with.
      """
      try:
        crypto.verify(self._pubkey, signature, message, 'sha256')
        return True
      except crypto.Error:
        # Only swallow OpenSSL verification failures; the previous bare
        # ``except:`` would also hide KeyboardInterrupt/SystemExit and
        # genuine programming errors.
        return False

    @staticmethod
    def from_string(key_pem, is_x509_cert):
      """Construct a Verifier instance from a string.

      Args:
        key_pem: string, public key in PEM format.
        is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it is
          expected to be an RSA key in PEM format.

      Returns:
        Verifier instance.

      Raises:
        OpenSSL.crypto.Error if the key_pem can't be parsed.
      """
      if is_x509_cert:
        pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, key_pem)
      else:
        pubkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key_pem)
      return OpenSSLVerifier(pubkey)

  class OpenSSLSigner(object):
    """Signs messages with a private key."""

    def __init__(self, pkey):
      """Constructor.

      Args:
        pkey, OpenSSL.crypto.PKey (or equiv), The private key to sign with.
      """
      self._key = pkey

    def sign(self, message):
      """Signs a message.

      Args:
        message: string, Message to be signed.

      Returns:
        string, The signature of the message for the given key.
      """
      return crypto.sign(self._key, message, 'sha256')

    @staticmethod
    def from_string(key, password='notasecret'):
      """Construct a Signer instance from a string.

      Args:
        key: string, private key in PKCS12 or PEM format.
        password: string, password for the private key file.

      Returns:
        Signer instance.

      Raises:
        OpenSSL.crypto.Error if the key can't be parsed.
      """
      parsed_pem_key = _parse_pem_key(key)
      if parsed_pem_key:
        pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, parsed_pem_key)
      else:
        pkey = crypto.load_pkcs12(key, password.encode('utf8')).get_privatekey()
      return OpenSSLSigner(pkey)

except ImportError:
  # PyOpenSSL is optional; the PyCrypto fallback below may still be available.
  OpenSSLVerifier = None
  OpenSSLSigner = None
try:
  from Crypto.PublicKey import RSA
  from Crypto.Hash import SHA256
  from Crypto.Signature import PKCS1_v1_5
  from Crypto.Util.asn1 import DerSequence

  class PyCryptoVerifier(object):
    """Verifies the signature on a message."""

    def __init__(self, pubkey):
      """Constructor.

      Args:
        pubkey, OpenSSL.crypto.PKey (or equiv), The public key to verify with.
      """
      self._pubkey = pubkey

    def verify(self, message, signature):
      """Verifies a message against a signature.

      Args:
        message: string, The message to verify.
        signature: string, The signature on the message.

      Returns:
        True if message was signed by the private key associated with the public
        key that this object was constructed with.
      """
      try:
        return PKCS1_v1_5.new(self._pubkey).verify(
            SHA256.new(message), signature)
      except Exception:
        # Any verification error means "not verified", but unlike the
        # previous bare ``except:`` this does not trap
        # KeyboardInterrupt/SystemExit.
        return False

    @staticmethod
    def from_string(key_pem, is_x509_cert):
      """Construct a Verifier instance from a string.

      Args:
        key_pem: string, public key in PEM format.
        is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it is
          expected to be an RSA key in PEM format.

      Returns:
        Verifier instance.
      """
      if is_x509_cert:
        # Strip the PEM armor, then walk the DER certificate structure to
        # reach the subjectPublicKeyInfo (7th field of the TBSCertificate).
        pemLines = key_pem.replace(' ', '').split()
        certDer = _urlsafe_b64decode(''.join(pemLines[1:-1]))
        certSeq = DerSequence()
        certSeq.decode(certDer)
        tbsSeq = DerSequence()
        tbsSeq.decode(certSeq[0])
        pubkey = RSA.importKey(tbsSeq[6])
      else:
        pubkey = RSA.importKey(key_pem)
      return PyCryptoVerifier(pubkey)

  class PyCryptoSigner(object):
    """Signs messages with a private key."""

    def __init__(self, pkey):
      """Constructor.

      Args:
        pkey, OpenSSL.crypto.PKey (or equiv), The private key to sign with.
      """
      self._key = pkey

    def sign(self, message):
      """Signs a message.

      Args:
        message: string, Message to be signed.

      Returns:
        string, The signature of the message for the given key.
      """
      return PKCS1_v1_5.new(self._key).sign(SHA256.new(message))

    @staticmethod
    def from_string(key, password='notasecret'):
      """Construct a Signer instance from a string.

      Args:
        key: string, private key in PEM format.
        password: string, password for private key file. Unused for PEM files.

      Returns:
        Signer instance.

      Raises:
        NotImplementedError if they key isn't in PEM format.
      """
      parsed_pem_key = _parse_pem_key(key)
      if parsed_pem_key:
        pkey = RSA.importKey(parsed_pem_key)
      else:
        raise NotImplementedError(
            'PKCS12 format is not supported by the PyCrypto library. '
            'Try converting to a "PEM" '
            '(openssl pkcs12 -in xxxxx.p12 -nodes -nocerts > privatekey.pem) '
            'or using PyOpenSSL if native code is an option.')
      return PyCryptoSigner(pkey)

except ImportError:
  # PyCrypto is optional; module import fails later only if no backend exists.
  PyCryptoVerifier = None
  PyCryptoSigner = None
# Pick the best available crypto backend: prefer PyOpenSSL, fall back to
# PyCrypto; at least one must be installed for this module to import.
if OpenSSLSigner:
  Signer = OpenSSLSigner
  Verifier = OpenSSLVerifier
elif PyCryptoSigner:
  Signer = PyCryptoSigner
  Verifier = PyCryptoVerifier
else:
  raise ImportError('No encryption library found. Please install either '
                    'PyOpenSSL, or PyCrypto 2.6 or later')
def _parse_pem_key(raw_key_input):
"""Identify and extract PEM keys.
Determines whether the given key is in the format of PEM key, and extracts
the relevant part of the key if it is.
Args:
raw_key_input: The contents of a private key file (either PEM or PKCS12).
Returns:
string, The actual key if the contents are from a PEM file, or else None.
"""
offset = raw_key_input.find('-----BEGIN ')
if offset != -1:
return raw_key_input[offset:]
def _urlsafe_b64encode(raw_bytes):
return base64.urlsafe_b64encode(raw_bytes).rstrip('=')
def _urlsafe_b64decode(b64string):
# Guard against unicode strings, which base64 can't handle.
b64string = b64string.encode('ascii')
padded = b64string + '=' * (4 - len(b64string) % 4)
return base64.urlsafe_b64decode(padded)
def _json_encode(data):
return json.dumps(data, separators=(',', ':'))
def make_signed_jwt(signer, payload):
  """Make a signed JWT.

  See http://self-issued.info/docs/draft-jones-json-web-token.html.

  Args:
    signer: crypt.Signer, Cryptographic signer.
    payload: dict, Dictionary of data to convert to JSON and then sign.

  Returns:
    string, The JWT for the payload.
  """
  header = {'typ': 'JWT', 'alg': 'RS256'}
  # JWT = b64(header) + '.' + b64(payload) + '.' + b64(signature), where the
  # signature covers the first two segments joined by '.'.
  segments = [
      _urlsafe_b64encode(_json_encode(header)),
      _urlsafe_b64encode(_json_encode(payload)),
  ]
  signing_input = '.'.join(segments)
  signature = signer.sign(signing_input)
  segments.append(_urlsafe_b64encode(signature))
  # NOTE(review): assumes Python 2 str semantics; under Python 3 the b64
  # helpers yield bytes and this str join would fail -- confirm before porting.
  logger.debug(str(segments))
  return '.'.join(segments)
def verify_signed_jwt_with_certs(jwt, certs, audience):
  """Verify a JWT against public certs.

  See http://self-issued.info/docs/draft-jones-json-web-token.html.

  Args:
    jwt: string, A JWT.
    certs: dict, Dictionary where values of public keys in PEM format.
    audience: string, The audience, 'aud', that this JWT should contain. If
      None then the JWT's 'aud' parameter is not verified.

  Returns:
    dict, The deserialized JSON payload in the JWT.

  Raises:
    AppIdentityError if any checks are failed.
  """
  segments = jwt.split('.')
  if len(segments) != 3:
    raise AppIdentityError('Wrong number of segments in token: %s' % jwt)
  # The signature covers "b64(header).b64(payload)".
  signed = '%s.%s' % (segments[0], segments[1])
  signature = _urlsafe_b64decode(segments[2])
  # Parse token.
  json_body = _urlsafe_b64decode(segments[1])
  try:
    parsed = json.loads(json_body)
  except ValueError:
    # json.loads raises ValueError (JSONDecodeError) on malformed input; the
    # previous bare ``except:`` also masked unrelated errors.
    raise AppIdentityError('Can\'t parse token: %s' % json_body)
  # Check signature: the token is accepted if any of the provided certs
  # verifies it (Google rotates keys, so several may be current).
  verified = False
  for _, pem in certs.items():
    verifier = Verifier.from_string(pem, True)
    if verifier.verify(signed, signature):
      verified = True
      break
  if not verified:
    raise AppIdentityError('Invalid token signature: %s' % jwt)
  # Check creation timestamp.
  iat = parsed.get('iat')
  if iat is None:
    raise AppIdentityError('No iat field in token: %s' % json_body)
  earliest = iat - CLOCK_SKEW_SECS
  # Check expiration timestamp. ``int`` replaces the Python-2-only ``long``;
  # Python 2 ints auto-promote, so the semantics are unchanged.
  now = int(time.time())
  exp = parsed.get('exp')
  if exp is None:
    raise AppIdentityError('No exp field in token: %s' % json_body)
  if exp >= now + MAX_TOKEN_LIFETIME_SECS:
    raise AppIdentityError('exp field too far in future: %s' % json_body)
  latest = exp + CLOCK_SKEW_SECS
  if now < earliest:
    raise AppIdentityError('Token used too early, %d < %d: %s' %
                           (now, earliest, json_body))
  if now > latest:
    raise AppIdentityError('Token used too late, %d > %d: %s' %
                           (now, latest, json_body))
  # Check audience.
  if audience is not None:
    aud = parsed.get('aud')
    if aud is None:
      raise AppIdentityError('No aud field in token: %s' % json_body)
    if aud != audience:
      raise AppIdentityError('Wrong recipient, %s != %s: %s' %
                             (aud, audience, json_body))
  return parsed
| apache-2.0 |
ovnicraft/edx-platform | openedx/core/lib/block_cache/tests/test_block_structure_factory.py | 33 | 4070 | """
Tests for block_structure_factory.py
"""
# pylint: disable=protected-access
from mock import patch
from unittest import TestCase
from ..block_structure_factory import BlockStructureFactory
from .test_utils import (
MockCache, MockModulestoreFactory, MockTransformer, ChildrenMapTestMixin
)
class TestBlockStructureFactory(TestCase, ChildrenMapTestMixin):
    """
    Tests for BlockStructureFactory
    """
    def setUp(self):
        super(TestBlockStructureFactory, self).setUp()
        self.children_map = self.SIMPLE_CHILDREN_MAP
        self.modulestore = MockModulestoreFactory.create(self.children_map)
        self.block_structure = BlockStructureFactory.create_from_modulestore(
            root_block_usage_key=0, modulestore=self.modulestore
        )
        self.transformers = [MockTransformer]
        mock_registry = patch(
            'openedx.core.lib.block_cache.transformer_registry.TransformerRegistry.get_available_plugins'
        )
        # Bug fix: the registry dict must be configured on the mock object
        # returned by start(); setting ``return_value`` on the patcher itself
        # (as the previous code did) has no effect on the active mock, so the
        # patched get_available_plugins never returned the transformers.
        mocked_get_available_plugins = mock_registry.start()
        mocked_get_available_plugins.return_value = {
            transformer.name(): transformer for transformer in self.transformers
        }
        self.addCleanup(mock_registry.stop)

    def add_transformers(self):
        """
        Add each registered transformer to the block structure.
        Mimic collection by setting test transformer block data.
        """
        for transformer in self.transformers:
            self.block_structure._add_transformer(transformer)
            self.block_structure.set_transformer_block_field(
                usage_key=0, transformer=transformer, key='test', value='{} val'.format(transformer.name())
            )

    def test_create_from_modulestore(self):
        self.assert_block_structure(self.block_structure, self.children_map)

    def test_not_in_cache(self):
        cache = MockCache()
        self.assertIsNone(
            BlockStructureFactory.create_from_cache(
                root_block_usage_key=0,
                cache=cache,
                transformers=self.transformers,
            )
        )

    def test_uncollected_transformers(self):
        cache = MockCache()
        # serialize the structure to cache, but without collecting any transformer data
        BlockStructureFactory.serialize_to_cache(self.block_structure, cache)
        with patch('openedx.core.lib.block_cache.block_structure_factory.logger.info') as mock_logger:
            # cached data does not have collected information for all registered transformers
            self.assertIsNone(
                BlockStructureFactory.create_from_cache(
                    root_block_usage_key=0,
                    cache=cache,
                    transformers=self.transformers,
                )
            )
            self.assertTrue(mock_logger.called)

    def test_cache(self):
        cache = MockCache()
        # collect transformer data
        self.add_transformers()
        # serialize to cache
        BlockStructureFactory.serialize_to_cache(self.block_structure, cache)
        # test re-create from cache; the modulestore must not be queried again
        self.modulestore.get_items_call_count = 0
        from_cache_block_structure = BlockStructureFactory.create_from_cache(
            root_block_usage_key=0,
            cache=cache,
            transformers=self.transformers,
        )
        self.assertIsNotNone(from_cache_block_structure)
        self.assert_block_structure(from_cache_block_structure, self.children_map)
        self.assertEquals(self.modulestore.get_items_call_count, 0)

    def test_remove_from_cache(self):
        cache = MockCache()
        # collect transformer data
        self.add_transformers()
        # serialize to cache
        BlockStructureFactory.serialize_to_cache(self.block_structure, cache)
        # remove from cache; a subsequent cache read must miss
        BlockStructureFactory.remove_from_cache(root_block_usage_key=0, cache=cache)
        self.assertIsNone(
            BlockStructureFactory.create_from_cache(
                root_block_usage_key=0,
                cache=cache,
                transformers=self.transformers
            )
        )
endlessm/chromium-browser | tools/swarming_client/third_party/oauth2client/_pycrypto_crypt.py | 6 | 4295 | # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""pyCrypto Crypto-related routines for oauth2client."""
from Crypto.PublicKey import RSA
from Crypto.Hash import SHA256
from Crypto.Signature import PKCS1_v1_5
from Crypto.Util.asn1 import DerSequence
from oauth2client._helpers import _parse_pem_key
from oauth2client._helpers import _to_bytes
from oauth2client._helpers import _urlsafe_b64decode
class PyCryptoVerifier(object):
    """Verifies the signature on a message (PKCS#1 v1.5 over SHA-256)."""
    def __init__(self, pubkey):
        """Constructor.
        Args:
            pubkey: OpenSSL.crypto.PKey (or equiv), The public key to verify
                with.
        """
        self._pubkey = pubkey
    def verify(self, message, signature):
        """Verifies a message against a signature.
        Args:
            message: string or bytes, The message to verify. If string, will be
                encoded to bytes as utf-8.
            signature: string or bytes, The signature on the message.
        Returns:
            True if message was signed by the private key associated with the
            public key that this object was constructed with.
        """
        message = _to_bytes(message, encoding='utf-8')
        # PKCS1_v1_5.verify returns a boolean; it never raises on mismatch.
        return PKCS1_v1_5.new(self._pubkey).verify(
            SHA256.new(message), signature)
    @staticmethod
    def from_string(key_pem, is_x509_cert):
        """Construct a Verifier instance from a string.
        Args:
            key_pem: string, public key in PEM format.
            is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it
                is expected to be an RSA key in PEM format.
        Returns:
            Verifier instance.
        """
        if is_x509_cert:
            # Strip the PEM armor lines and whitespace, then base64-decode
            # the body to obtain the raw DER-encoded certificate.
            key_pem = _to_bytes(key_pem)
            pemLines = key_pem.replace(b' ', b'').split()
            certDer = _urlsafe_b64decode(b''.join(pemLines[1:-1]))
            # Certificate ::= SEQUENCE { tbsCertificate, signatureAlgorithm,
            # signatureValue }; element [0] is the TBSCertificate.
            certSeq = DerSequence()
            certSeq.decode(certDer)
            tbsSeq = DerSequence()
            tbsSeq.decode(certSeq[0])
            # NOTE(review): index 6 of TBSCertificate is subjectPublicKeyInfo
            # only when the optional version field is present (v2/v3 certs) --
            # presumably all inputs here are v3; confirm for v1 certificates.
            pubkey = RSA.importKey(tbsSeq[6])
        else:
            pubkey = RSA.importKey(key_pem)
        return PyCryptoVerifier(pubkey)
class PyCryptoSigner(object):
    """Signs messages with a private key (PKCS#1 v1.5 over SHA-256)."""

    def __init__(self, pkey):
        """Constructor.

        Args:
            pkey, OpenSSL.crypto.PKey (or equiv), The private key to sign
                with.
        """
        self._key = pkey

    def sign(self, message):
        """Signs a message.

        Args:
            message: string, Message to be signed.

        Returns:
            string, The signature of the message for the given key.
        """
        digest = SHA256.new(_to_bytes(message, encoding='utf-8'))
        return PKCS1_v1_5.new(self._key).sign(digest)

    @staticmethod
    def from_string(key, password='notasecret'):
        """Construct a Signer instance from a string.

        Args:
            key: string, private key in PEM format.
            password: string, password for private key file. Unused for PEM
                files.

        Returns:
            Signer instance.

        Raises:
            NotImplementedError if the key isn't in PEM format.
        """
        pem_contents = _parse_pem_key(_to_bytes(key))
        if not pem_contents:
            # Only PEM keys can be handled by PyCrypto; reject PKCS12 input.
            raise NotImplementedError(
                'PKCS12 format is not supported by the PyCrypto library. '
                'Try converting to a "PEM" '
                '(openssl pkcs12 -in xxxxx.p12 -nodes -nocerts > '
                'privatekey.pem) '
                'or using PyOpenSSL if native code is an option.')
        return PyCryptoSigner(RSA.importKey(pem_contents))
edmundgentle/schoolscript | SchoolScript/bin/Debug/pythonlib/Lib/test/test_optparse.py | 3 | 62704 | #
# Test suite for Optik. Supplied by Johannes Gijsbers
# (taradino@softhome.net) -- translated from the original Optik
# test suite to this PyUnit-based version.
#
# $Id$
#
import sys
import os
import re
import copy
import unittest
from io import StringIO
from test import support
from optparse import make_option, Option, \
TitledHelpFormatter, OptionParser, OptionGroup, \
SUPPRESS_USAGE, OptionError, OptionConflictError, \
BadOptionError, OptionValueError, Values
from optparse import _match_abbrev
from optparse import _parse_num
retype = type(re.compile(''))
class InterceptedError(Exception):
    """Raised by InterceptingOptionParser in place of printing an error or
    exiting, so tests can observe parser failures as exceptions."""

    def __init__(self, error_message=None, exit_status=None, exit_message=None):
        self.error_message = error_message
        self.exit_status = exit_status
        self.exit_message = exit_message

    def __str__(self):
        # Prefer the error message, then the exit message, then a stock text.
        for text in (self.error_message, self.exit_message):
            if text:
                return text
        return "intercepted error"
class InterceptingOptionParser(OptionParser):
    """OptionParser variant that raises InterceptedError instead of writing
    to stderr / calling sys.exit(), so tests can catch parse failures."""
    def exit(self, status=0, msg=None):
        # Mirror OptionParser.exit's signature but never terminate.
        raise InterceptedError(exit_status=status, exit_message=msg)
    def error(self, msg):
        # Mirror OptionParser.error's signature but never print.
        raise InterceptedError(error_message=msg)
class BaseTest(unittest.TestCase):
    """Shared assertion helpers for the optparse test cases.

    NOTE: several failure-message templates below are rendered with
    ``% locals()``, so the local variable names in these helpers are part
    of the behavior -- do not rename them.
    """
    def assertParseOK(self, args, expected_opts, expected_positional_args):
        """Assert the options are what we expected when parsing arguments.
        Otherwise, fail with a nicely formatted message.
        Keyword arguments:
        args -- A list of arguments to parse with OptionParser.
        expected_opts -- The options expected.
        expected_positional_args -- The positional arguments expected.
        Returns the options and positional args for further testing.
        """
        (options, positional_args) = self.parser.parse_args(args)
        optdict = vars(options)
        self.assertEqual(optdict, expected_opts,
                         """
Options are %(optdict)s.
Should be %(expected_opts)s.
Args were %(args)s.""" % locals())
        self.assertEqual(positional_args, expected_positional_args,
                         """
Positional arguments are %(positional_args)s.
Should be %(expected_positional_args)s.
Args were %(args)s.""" % locals ())
        return (options, positional_args)
    # Deliberately *overrides* unittest.TestCase.assertRaises with an
    # incompatible signature; every subclass in this file uses this variant.
    def assertRaises(self,
                     func,
                     args,
                     kwargs,
                     expected_exception,
                     expected_message):
        """
        Assert that the expected exception is raised when calling a
        function, and that the right error message is included with
        that exception.
        Arguments:
          func -- the function to call
          args -- positional arguments to `func`
          kwargs -- keyword arguments to `func`
          expected_exception -- exception that should be raised
          expected_message -- expected exception message (or pattern
                              if a compiled regex object)
        Returns the exception raised for further testing.
        """
        if args is None:
            args = ()
        if kwargs is None:
            kwargs = {}
        try:
            func(*args, **kwargs)
        except expected_exception as err:
            actual_message = str(err)
            # A compiled regex is matched with search(); a plain string
            # must compare equal to the message exactly.
            if isinstance(expected_message, retype):
                self.assertTrue(expected_message.search(actual_message),
                             """\
expected exception message pattern:
/%s/
actual exception message:
'''%s'''
""" % (expected_message.pattern, actual_message))
            else:
                self.assertEqual(actual_message,
                                 expected_message,
                                 """\
expected exception message:
'''%s'''
actual exception message:
'''%s'''
""" % (expected_message, actual_message))
            return err
        else:
            self.fail("""expected exception %(expected_exception)s not raised
called %(func)r
with args %(args)r
and kwargs %(kwargs)r
""" % locals ())
    # -- Assertions used in more than one class --------------------
    def assertParseFail(self, cmdline_args, expected_output):
        """
        Assert the parser fails with the expected message.  Caller
        must ensure that self.parser is an InterceptingOptionParser.
        """
        try:
            self.parser.parse_args(cmdline_args)
        except InterceptedError as err:
            self.assertEqual(err.error_message, expected_output)
        else:
            self.assertFalse("expected parse failure")
    def assertOutput(self,
                     cmdline_args,
                     expected_output,
                     expected_status=0,
                     expected_error=None):
        """Assert the parser prints the expected output on stdout."""
        save_stdout = sys.stdout
        try:
            try:
                # Capture stdout while parsing; always restore it, even if
                # the parser raises before producing output.
                sys.stdout = StringIO()
                self.parser.parse_args(cmdline_args)
            finally:
                output = sys.stdout.getvalue()
                sys.stdout = save_stdout
        except InterceptedError as err:
            self.assertTrue(
                isinstance(output, str),
                "expected output to be an ordinary string, not %r"
                % type(output))
            if output != expected_output:
                self.fail("expected: \n'''\n" + expected_output +
                          "'''\nbut got \n'''\n" + output + "'''")
            self.assertEqual(err.exit_status, expected_status)
            self.assertEqual(err.exit_message, expected_error)
        else:
            self.assertFalse("expected parser.exit()")
    def assertTypeError(self, func, expected_message, *args):
        """Assert that TypeError is raised when executing func."""
        self.assertRaises(func, args, None, TypeError, expected_message)
    def assertHelp(self, parser, expected_help):
        """Assert that parser.format_help() produces exactly expected_help."""
        actual_help = parser.format_help()
        if actual_help != expected_help:
            raise self.failureException(
                'help text failure; expected:\n"' +
                expected_help + '"; got:\n"' +
                actual_help + '"\n')
# -- Test make_option() aka Option -------------------------------------
# It's not necessary to test correct options here. All the tests in the
# parser.parse_args() section deal with those, because they're needed
# there.
class TestOptionChecks(BaseTest):
    """Verify that make_option() / Option reject ill-formed option
    definitions with the documented OptionError messages."""

    def setUp(self):
        self.parser = OptionParser(usage=SUPPRESS_USAGE)

    def assertOptionError(self, expected_message, args=None, kwargs=None):
        """Assert make_option(*args, **kwargs) raises OptionError.

        Defaults are None rather than the mutable literals [] / {} the
        previous version used (a shared-mutable-default hazard);
        BaseTest.assertRaises substitutes fresh empty containers for None.
        """
        self.assertRaises(make_option, args, kwargs,
                          OptionError, expected_message)

    def test_opt_string_empty(self):
        self.assertTypeError(make_option,
                             "at least one option string must be supplied")

    def test_opt_string_too_short(self):
        self.assertOptionError(
            "invalid option string 'b': must be at least two characters long",
            ["b"])

    def test_opt_string_short_invalid(self):
        self.assertOptionError(
            "invalid short option string '--': must be "
            "of the form -x, (x any non-dash char)",
            ["--"])

    def test_opt_string_long_invalid(self):
        self.assertOptionError(
            "invalid long option string '---': "
            "must start with --, followed by non-dash",
            ["---"])

    def test_attr_invalid(self):
        self.assertOptionError(
            "option -b: invalid keyword arguments: bar, foo",
            ["-b"], {'foo': None, 'bar': None})

    def test_action_invalid(self):
        self.assertOptionError(
            "option -b: invalid action: 'foo'",
            ["-b"], {'action': 'foo'})

    def test_type_invalid(self):
        # Neither an unknown type name nor a non-type object is accepted.
        self.assertOptionError(
            "option -b: invalid option type: 'foo'",
            ["-b"], {'type': 'foo'})
        self.assertOptionError(
            "option -b: invalid option type: 'tuple'",
            ["-b"], {'type': tuple})

    def test_no_type_for_action(self):
        self.assertOptionError(
            "option -b: must not supply a type for action 'count'",
            ["-b"], {'action': 'count', 'type': 'int'})

    def test_no_choices_list(self):
        self.assertOptionError(
            "option -b/--bad: must supply a list of "
            "choices for type 'choice'",
            ["-b", "--bad"], {'type': "choice"})

    def test_bad_choices_list(self):
        typename = type('').__name__
        self.assertOptionError(
            "option -b/--bad: choices must be a list of "
            "strings ('%s' supplied)" % typename,
            ["-b", "--bad"],
            {'type': "choice", 'choices':"bad choices"})

    def test_no_choices_for_type(self):
        self.assertOptionError(
            "option -b: must not supply choices for type 'int'",
            ["-b"], {'type': 'int', 'choices':"bad"})

    def test_no_const_for_action(self):
        self.assertOptionError(
            "option -b: 'const' must not be supplied for action 'store'",
            ["-b"], {'action': 'store', 'const': 1})

    def test_no_nargs_for_action(self):
        self.assertOptionError(
            "option -b: 'nargs' must not be supplied for action 'count'",
            ["-b"], {'action': 'count', 'nargs': 2})

    def test_callback_not_callable(self):
        self.assertOptionError(
            "option -b: callback not callable: 'foo'",
            ["-b"], {'action': 'callback',
                     'callback': 'foo'})

    def dummy(self):
        """Placeholder callable used as a syntactically valid callback."""
        pass

    def test_callback_args_no_tuple(self):
        self.assertOptionError(
            "option -b: callback_args, if supplied, "
            "must be a tuple: not 'foo'",
            ["-b"], {'action': 'callback',
                     'callback': self.dummy,
                     'callback_args': 'foo'})

    def test_callback_kwargs_no_dict(self):
        self.assertOptionError(
            "option -b: callback_kwargs, if supplied, "
            "must be a dict: not 'foo'",
            ["-b"], {'action': 'callback',
                     'callback': self.dummy,
                     'callback_kwargs': 'foo'})

    def test_no_callback_for_action(self):
        self.assertOptionError(
            "option -b: callback supplied ('foo') for non-callback option",
            ["-b"], {'action': 'store',
                     'callback': 'foo'})

    def test_no_callback_args_for_action(self):
        self.assertOptionError(
            "option -b: callback_args supplied for non-callback option",
            ["-b"], {'action': 'store',
                     'callback_args': 'foo'})

    def test_no_callback_kwargs_for_action(self):
        self.assertOptionError(
            "option -b: callback_kwargs supplied for non-callback option",
            ["-b"], {'action': 'store',
                     'callback_kwargs': 'foo'})
class TestOptionParser(BaseTest):
    """Option lookup, removal, and parser teardown (destroy())."""
    def setUp(self):
        self.parser = OptionParser()
        self.parser.add_option("-v", "--verbose", "-n", "--noisy",
                               action="store_true", dest="verbose")
        self.parser.add_option("-q", "--quiet", "--silent",
                               action="store_false", dest="verbose")
    def test_add_option_no_Option(self):
        self.assertTypeError(self.parser.add_option,
                             "not an Option instance: None", None)
    def test_add_option_invalid_arguments(self):
        self.assertTypeError(self.parser.add_option,
                             "invalid arguments", None, None)
    def test_get_option(self):
        opt1 = self.parser.get_option("-v")
        self.assertIsInstance(opt1, Option)
        self.assertEqual(opt1._short_opts, ["-v", "-n"])
        self.assertEqual(opt1._long_opts, ["--verbose", "--noisy"])
        self.assertEqual(opt1.action, "store_true")
        self.assertEqual(opt1.dest, "verbose")
    def test_get_option_equals(self):
        # All four option strings must resolve to the *same* Option object.
        opt1 = self.parser.get_option("-v")
        opt2 = self.parser.get_option("--verbose")
        opt3 = self.parser.get_option("-n")
        opt4 = self.parser.get_option("--noisy")
        self.assertTrue(opt1 is opt2 is opt3 is opt4)
    def test_has_option(self):
        self.assertTrue(self.parser.has_option("-v"))
        self.assertTrue(self.parser.has_option("--verbose"))
    # Helper (not a test itself): removing any alias of the -v option must
    # remove *all* its aliases, while leaving the -q option intact.
    def assertTrueremoved(self):
        self.assertTrue(self.parser.get_option("-v") is None)
        self.assertTrue(self.parser.get_option("--verbose") is None)
        self.assertTrue(self.parser.get_option("-n") is None)
        self.assertTrue(self.parser.get_option("--noisy") is None)
        self.assertFalse(self.parser.has_option("-v"))
        self.assertFalse(self.parser.has_option("--verbose"))
        self.assertFalse(self.parser.has_option("-n"))
        self.assertFalse(self.parser.has_option("--noisy"))
        self.assertTrue(self.parser.has_option("-q"))
        self.assertTrue(self.parser.has_option("--silent"))
    def test_remove_short_opt(self):
        self.parser.remove_option("-n")
        self.assertTrueremoved()
    def test_remove_long_opt(self):
        self.parser.remove_option("--verbose")
        self.assertTrueremoved()
    def test_remove_nonexistent(self):
        self.assertRaises(self.parser.remove_option, ('foo',), None,
                          ValueError, "no such option 'foo'")
    def test_refleak(self):
        # If an OptionParser is carrying around a reference to a large
        # object, various cycles can prevent it from being GC'd in
        # a timely fashion. destroy() breaks the cycles to ensure stuff
        # can be cleaned up.
        big_thing = [42]
        refcount = sys.getrefcount(big_thing)
        parser = OptionParser()
        parser.add_option("-a", "--aaarggh")
        parser.big_thing = big_thing
        parser.destroy()
        #self.assertEqual(refcount, sys.getrefcount(big_thing))
        del parser
        # After destroy() + del, the refcount must return to its baseline.
        self.assertEqual(refcount, sys.getrefcount(big_thing))
class TestOptionValues(BaseTest):
    """Values container semantics: equality against dicts and defaults."""
    def setUp(self):
        pass
    def test_basics(self):
        values = Values()
        self.assertEqual(vars(values), {})
        self.assertEqual(values, {})
        self.assertNotEqual(values, {"foo": "bar"})
        self.assertNotEqual(values, "")
        # NOTE: 'dict' shadows the builtin here; kept as-is for fidelity.
        dict = {"foo": "bar", "baz": 42}
        values = Values(defaults=dict)
        self.assertEqual(vars(values), dict)
        self.assertEqual(values, dict)
        self.assertNotEqual(values, {"foo": "bar"})
        self.assertNotEqual(values, {})
        self.assertNotEqual(values, "")
        self.assertNotEqual(values, [])
class TestTypeAliases(BaseTest):
    """Type arguments given as str/int objects or 'str' map to canonical
    optparse type names."""
    def setUp(self):
        self.parser = OptionParser()
    def test_str_aliases_string(self):
        self.parser.add_option("-s", type="str")
        self.assertEqual(self.parser.get_option("-s").type, "string")
    def test_type_object(self):
        # Passing the type objects themselves is normalized to type names.
        self.parser.add_option("-s", type=str)
        self.assertEqual(self.parser.get_option("-s").type, "string")
        self.parser.add_option("-x", type=int)
        self.assertEqual(self.parser.get_option("-x").type, "int")
# Custom type for testing processing of default values.
_time_units = { 's' : 1, 'm' : 60, 'h' : 60*60, 'd' : 60*60*24 }
def _check_duration(option, opt, value):
try:
if value[-1].isdigit():
return int(value)
else:
return int(value[:-1]) * _time_units[value[-1]]
except (ValueError, IndexError):
raise OptionValueError(
'option %s: invalid duration: %r' % (opt, value))
class DurationOption(Option):
    """Option subclass that registers the custom 'duration' type, backed by
    the _check_duration checker."""

    TYPES = Option.TYPES + ('duration',)
    # Copy the base checker table so the parent class is left untouched,
    # then register the new type's checker.
    TYPE_CHECKER = dict(Option.TYPE_CHECKER)
    TYPE_CHECKER['duration'] = _check_duration
class TestDefaultValues(BaseTest):
    """Default values: per-option defaults, set_defaults(), and processing
    of string defaults through the option's type checker."""
    def setUp(self):
        self.parser = OptionParser()
        self.parser.add_option("-v", "--verbose", default=True)
        self.parser.add_option("-q", "--quiet", dest='verbose')
        self.parser.add_option("-n", type="int", default=37)
        self.parser.add_option("-m", type="int")
        self.parser.add_option("-s", default="foo")
        self.parser.add_option("-t")
        self.parser.add_option("-u", default=None)
        self.expected = { 'verbose': True,
                          'n': 37,
                          'm': None,
                          's': "foo",
                          't': None,
                          'u': None }
    def test_basic_defaults(self):
        self.assertEqual(self.parser.get_default_values(), self.expected)
    def test_mixed_defaults_post(self):
        # set_defaults() after add_option() overrides per-option defaults.
        self.parser.set_defaults(n=42, m=-100)
        self.expected.update({'n': 42, 'm': -100})
        self.assertEqual(self.parser.get_default_values(), self.expected)
    def test_mixed_defaults_pre(self):
        # A default supplied via add_option() beats a prior set_defaults().
        self.parser.set_defaults(x="barf", y="blah")
        self.parser.add_option("-x", default="frob")
        self.parser.add_option("-y")
        self.expected.update({'x': "frob", 'y': "blah"})
        self.assertEqual(self.parser.get_default_values(), self.expected)
        self.parser.remove_option("-y")
        self.parser.add_option("-y", default=None)
        self.expected.update({'y': None})
        self.assertEqual(self.parser.get_default_values(), self.expected)
    def test_process_default(self):
        # With default-processing on, string defaults ("6m", "42") are run
        # through the option's type checker; with it off they stay raw.
        self.parser.option_class = DurationOption
        self.parser.add_option("-d", type="duration", default=300)
        self.parser.add_option("-e", type="duration", default="6m")
        self.parser.set_defaults(n="42")
        self.expected.update({'d': 300, 'e': 360, 'n': 42})
        self.assertEqual(self.parser.get_default_values(), self.expected)
        self.parser.set_process_default_values(False)
        self.expected.update({'d': 300, 'e': "6m", 'n': "42"})
        self.assertEqual(self.parser.get_default_values(), self.expected)
class TestProgName(BaseTest):
    """
    Test that %prog expands to the right thing in usage, version,
    and help strings.
    """
    def assertUsage(self, parser, expected_usage):
        self.assertEqual(parser.get_usage(), expected_usage)
    def assertVersion(self, parser, expected_version):
        self.assertEqual(parser.get_version(), expected_version)
    def test_default_progname(self):
        # Make sure that program name taken from sys.argv[0] by default.
        save_argv = sys.argv[:]
        try:
            sys.argv[0] = os.path.join("foo", "bar", "baz.py")
            parser = OptionParser("%prog ...", version="%prog 1.2")
            expected_usage = "Usage: baz.py ...\n"
            self.assertUsage(parser, expected_usage)
            self.assertVersion(parser, "baz.py 1.2")
            self.assertHelp(parser,
                            expected_usage + "\n" +
                            "Options:\n"
                            "  --version   show program's version number and exit\n"
                            "  -h, --help  show this help message and exit\n")
        finally:
            # Restore argv even if an assertion above fails.
            sys.argv[:] = save_argv
    def test_custom_progname(self):
        # An explicit prog= overrides sys.argv[0] in %prog expansion.
        parser = OptionParser(prog="thingy",
                              version="%prog 0.1",
                              usage="%prog arg arg")
        parser.remove_option("-h")
        parser.remove_option("--version")
        expected_usage = "Usage: thingy arg arg\n"
        self.assertUsage(parser, expected_usage)
        self.assertVersion(parser, "thingy 0.1")
        self.assertHelp(parser, expected_usage + "\n")
class TestExpandDefaults(BaseTest):
    """Expansion of the %default tag (and custom default tags) in help."""
    def setUp(self):
        self.parser = OptionParser(prog="test")
        self.help_prefix = """\
Usage: test [options]
Options:
  -h, --help show this help message and exit
"""
        self.file_help = "read from FILE [default: %default]"
        self.expected_help_file = self.help_prefix + \
                                  "  -f FILE, --file=FILE read from FILE [default: foo.txt]\n"
        self.expected_help_none = self.help_prefix + \
                                  "  -f FILE, --file=FILE read from FILE [default: none]\n"
    def test_option_default(self):
        self.parser.add_option("-f", "--file",
                               default="foo.txt",
                               help=self.file_help)
        self.assertHelp(self.parser, self.expected_help_file)
    def test_parser_default_1(self):
        self.parser.add_option("-f", "--file",
                               help=self.file_help)
        self.parser.set_default('file', "foo.txt")
        self.assertHelp(self.parser, self.expected_help_file)
    def test_parser_default_2(self):
        self.parser.add_option("-f", "--file",
                               help=self.file_help)
        self.parser.set_defaults(file="foo.txt")
        self.assertHelp(self.parser, self.expected_help_file)
    def test_no_default(self):
        # No default at all renders as "none" (lowercased None).
        self.parser.add_option("-f", "--file",
                               help=self.file_help)
        self.assertHelp(self.parser, self.expected_help_none)
    def test_default_none_1(self):
        self.parser.add_option("-f", "--file",
                               default=None,
                               help=self.file_help)
        self.assertHelp(self.parser, self.expected_help_none)
    def test_default_none_2(self):
        self.parser.add_option("-f", "--file",
                               help=self.file_help)
        self.parser.set_defaults(file=None)
        self.assertHelp(self.parser, self.expected_help_none)
    def test_float_default(self):
        self.parser.add_option(
            "-p", "--prob",
            help="blow up with probability PROB [default: %default]")
        self.parser.set_defaults(prob=0.43)
        expected_help = self.help_prefix + \
            "  -p PROB, --prob=PROB blow up with probability PROB [default: 0.43]\n"
        self.assertHelp(self.parser, expected_help)
    def test_alt_expand(self):
        # A custom default_tag replaces %default as the expansion marker.
        self.parser.add_option("-f", "--file",
                               default="foo.txt",
                               help="read from FILE [default: *DEFAULT*]")
        self.parser.formatter.default_tag = "*DEFAULT*"
        self.assertHelp(self.parser, self.expected_help_file)
    def test_no_expand(self):
        # default_tag = None disables expansion; "%default" appears verbatim.
        self.parser.add_option("-f", "--file",
                               default="foo.txt",
                               help="read from %default file")
        self.parser.formatter.default_tag = None
        expected_help = self.help_prefix + \
                        "  -f FILE, --file=FILE read from %default file\n"
        self.assertHelp(self.parser, expected_help)
# -- Test parser.parse_args() ------------------------------------------
class TestStandard(BaseTest):
    """Core parse_args() behavior: joined/split arguments, abbreviations,
    the "--" separator, and standard error messages."""
    def setUp(self):
        options = [make_option("-a", type="string"),
                   make_option("-b", "--boo", type="int", dest='boo'),
                   make_option("--foo", action="append")]
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE,
                                               option_list=options)
    def test_required_value(self):
        self.assertParseFail(["-a"], "-a option requires an argument")
    def test_invalid_integer(self):
        self.assertParseFail(["-b", "5x"],
                             "option -b: invalid integer value: '5x'")
    def test_no_such_option(self):
        self.assertParseFail(["--boo13"], "no such option: --boo13")
    def test_long_invalid_integer(self):
        self.assertParseFail(["--boo=x5"],
                             "option --boo: invalid integer value: 'x5'")
    def test_empty(self):
        self.assertParseOK([], {'a': None, 'boo': None, 'foo': None}, [])
    def test_shortopt_empty_longopt_append(self):
        self.assertParseOK(["-a", "", "--foo=blah", "--foo="],
                           {'a': "", 'boo': None, 'foo': ["blah", ""]},
                           [])
    def test_long_option_append(self):
        self.assertParseOK(["--foo", "bar", "--foo", "", "--foo=x"],
                           {'a': None,
                            'boo': None,
                            'foo': ["bar", "", "x"]},
                           [])
    def test_option_argument_joined(self):
        self.assertParseOK(["-abc"],
                           {'a': "bc", 'boo': None, 'foo': None},
                           [])
    def test_option_argument_split(self):
        self.assertParseOK(["-a", "34"],
                           {'a': "34", 'boo': None, 'foo': None},
                           [])
    def test_option_argument_joined_integer(self):
        self.assertParseOK(["-b34"],
                           {'a': None, 'boo': 34, 'foo': None},
                           [])
    def test_option_argument_split_negative_integer(self):
        self.assertParseOK(["-b", "-5"],
                           {'a': None, 'boo': -5, 'foo': None},
                           [])
    def test_long_option_argument_joined(self):
        self.assertParseOK(["--boo=13"],
                           {'a': None, 'boo': 13, 'foo': None},
                           [])
    def test_long_option_argument_split(self):
        self.assertParseOK(["--boo", "111"],
                           {'a': None, 'boo': 111, 'foo': None},
                           [])
    def test_long_option_short_option(self):
        self.assertParseOK(["--foo=bar", "-axyz"],
                           {'a': 'xyz', 'boo': None, 'foo': ["bar"]},
                           [])
    def test_abbrev_long_option(self):
        # An unambiguous prefix ("--f") matches the full long option.
        self.assertParseOK(["--f=bar", "-axyz"],
                           {'a': 'xyz', 'boo': None, 'foo': ["bar"]},
                           [])
    def test_defaults(self):
        (options, args) = self.parser.parse_args([])
        defaults = self.parser.get_default_values()
        self.assertEqual(vars(defaults), vars(options))
    def test_ambiguous_option(self):
        self.parser.add_option("--foz", action="store",
                               type="string", dest="foo")
        self.assertParseFail(["--f=bar"],
                             "ambiguous option: --f (--foo, --foz?)")
    def test_short_and_long_option_split(self):
        self.assertParseOK(["-a", "xyz", "--foo", "bar"],
                           {'a': 'xyz', 'boo': None, 'foo': ["bar"]},
                           []),
    def test_short_option_split_long_option_append(self):
        self.assertParseOK(["--foo=bar", "-b", "123", "--foo", "baz"],
                           {'a': None, 'boo': 123, 'foo': ["bar", "baz"]},
                           [])
    def test_short_option_split_one_positional_arg(self):
        self.assertParseOK(["-a", "foo", "bar"],
                           {'a': "foo", 'boo': None, 'foo': None},
                           ["bar"]),
    def test_short_option_consumes_separator(self):
        # "--" right after an option expecting a value becomes that value,
        # so it does not act as the end-of-options separator here.
        self.assertParseOK(["-a", "--", "foo", "bar"],
                           {'a': "--", 'boo': None, 'foo': None},
                           ["foo", "bar"]),
        self.assertParseOK(["-a", "--", "--foo", "bar"],
                           {'a': "--", 'boo': None, 'foo': ["bar"]},
                           []),
    def test_short_option_joined_and_separator(self):
        self.assertParseOK(["-ab", "--", "--foo", "bar"],
                           {'a': "b", 'boo': None, 'foo': None},
                           ["--foo", "bar"]),
    def test_hyphen_becomes_positional_arg(self):
        self.assertParseOK(["-ab", "-", "--foo", "bar"],
                           {'a': "b", 'boo': None, 'foo': ["bar"]},
                           ["-"])
    def test_no_append_versus_append(self):
        self.assertParseOK(["-b3", "-b", "5", "--foo=bar", "--foo", "baz"],
                           {'a': None, 'boo': 5, 'foo': ["bar", "baz"]},
                           [])
    def test_option_consumes_optionlike_string(self):
        self.assertParseOK(["-a", "-b3"],
                           {'a': "-b3", 'boo': None, 'foo': None},
                           [])
    def test_combined_single_invalid_option(self):
        # "-test" bundles -t, then hits the unknown -e.
        self.parser.add_option("-t", action="store_true")
        self.assertParseFail(["-test"],
                             "no such option: -e")
class TestBool(BaseTest):
    """store_true / store_false sharing one dest, with a falsy-but-not-False
    default ('') to distinguish 'unset' from an explicit False."""
    def setUp(self):
        options = [make_option("-v",
                               "--verbose",
                               action="store_true",
                               dest="verbose",
                               default=''),
                   make_option("-q",
                               "--quiet",
                               action="store_false",
                               dest="verbose")]
        self.parser = OptionParser(option_list = options)
    def test_bool_default(self):
        self.assertParseOK([],
                           {'verbose': ''},
                           [])
    def test_bool_false(self):
        (options, args) = self.assertParseOK(["-q"],
                                             {'verbose': 0},
                                             [])
        # Must be the actual False singleton, not merely falsy.
        self.assertTrue(options.verbose is False)
    def test_bool_true(self):
        (options, args) = self.assertParseOK(["-v"],
                                             {'verbose': 1},
                                             [])
        # Must be the actual True singleton, not merely truthy.
        self.assertTrue(options.verbose is True)
    def test_bool_flicker_on_and_off(self):
        # The last occurrence on the command line wins.
        self.assertParseOK(["-qvq", "-q", "-v"],
                           {'verbose': 1},
                           [])
class TestChoice(BaseTest):
    """The 'choice' type: valid/invalid values and implicit type inference
    when only choices= is supplied."""
    def setUp(self):
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE)
        self.parser.add_option("-c", action="store", type="choice",
                               dest="choice", choices=["one", "two", "three"])
    def test_valid_choice(self):
        self.assertParseOK(["-c", "one", "xyz"],
                           {'choice': 'one'},
                           ["xyz"])
    def test_invalid_choice(self):
        self.assertParseFail(["-c", "four", "abc"],
                             "option -c: invalid choice: 'four' "
                             "(choose from 'one', 'two', 'three')")
    def test_add_choice_option(self):
        # Supplying choices= alone implies type="choice" and action="store".
        self.parser.add_option("-d", "--default",
                               choices=["four", "five", "six"])
        opt = self.parser.get_option("-d")
        self.assertEqual(opt.type, "choice")
        self.assertEqual(opt.action, "store")
class TestCount(BaseTest):
    """The 'count' action interacting with an int-typed long option and a
    store_const quiet option, all sharing dest='verbose'."""
    def setUp(self):
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE)
        self.v_opt = make_option("-v", action="count", dest="verbose")
        self.parser.add_option(self.v_opt)
        self.parser.add_option("--verbose", type="int", dest="verbose")
        self.parser.add_option("-q", "--quiet",
                               action="store_const", dest="verbose", const=0)
    def test_empty(self):
        self.assertParseOK([], {'verbose': None}, [])
    def test_count_one(self):
        self.assertParseOK(["-v"], {'verbose': 1}, [])
    def test_count_three(self):
        self.assertParseOK(["-vvv"], {'verbose': 3}, [])
    def test_count_three_apart(self):
        self.assertParseOK(["-v", "-v", "-v"], {'verbose': 3}, [])
    def test_count_override_amount(self):
        # --verbose=N overwrites whatever -v counted so far.
        self.assertParseOK(["-vvv", "--verbose=2"], {'verbose': 2}, [])
    def test_count_override_quiet(self):
        self.assertParseOK(["-vvv", "--verbose=2", "-q"], {'verbose': 0}, [])
    def test_count_overriding(self):
        # Counting resumes from the last stored value (0 from -q, then +1).
        self.assertParseOK(["-vvv", "--verbose=2", "-q", "-v"],
                           {'verbose': 1}, [])
    def test_count_interspersed_args(self):
        self.assertParseOK(["--quiet", "3", "-v"],
                           {'verbose': 1},
                           ["3"])
    def test_count_no_interspersed_args(self):
        # With interspersed args disabled, parsing stops at the first
        # positional argument; the trailing -v stays unparsed.
        self.parser.disable_interspersed_args()
        self.assertParseOK(["--quiet", "3", "-v"],
                           {'verbose': 0},
                           ["3", "-v"])
    def test_count_no_such_option(self):
        self.assertParseFail(["-q3", "-v"], "no such option: -3")
    def test_count_option_no_value(self):
        self.assertParseFail(["--quiet=3", "-v"],
                             "--quiet option does not take a value")
    def test_count_with_default(self):
        self.parser.set_default('verbose', 0)
        self.assertParseOK([], {'verbose':0}, [])
    def test_count_overriding_default(self):
        self.parser.set_default('verbose', 0)
        self.assertParseOK(["-vvv", "--verbose=2", "-q", "-v"],
                           {'verbose': 1}, [])
class TestMultipleArgs(BaseTest):
    """nargs=3 with type='float': values are collected into a tuple."""
    def setUp(self):
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE)
        self.parser.add_option("-p", "--point",
                               action="store", nargs=3, type="float", dest="point")
    def test_nargs_with_positional_args(self):
        self.assertParseOK(["foo", "-p", "1", "2.5", "-4.3", "xyz"],
                           {'point': (1.0, 2.5, -4.3)},
                           ["foo", "xyz"])
    def test_nargs_long_opt(self):
        self.assertParseOK(["--point", "-1", "2.5", "-0", "xyz"],
                           {'point': (-1.0, 2.5, -0.0)},
                           ["xyz"])
    def test_nargs_invalid_float_value(self):
        self.assertParseFail(["-p", "1.0", "2x", "3.5"],
                             "option -p: "
                             "invalid floating-point value: '2x'")
    def test_nargs_required_values(self):
        self.assertParseFail(["--point", "1.0", "3.5"],
                             "--point option requires 3 arguments")
class TestMultipleArgsAppend(BaseTest):
    """nargs combined with append / append_const: each occurrence appends a
    tuple (or the const) to the dest list."""
    def setUp(self):
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE)
        self.parser.add_option("-p", "--point", action="store", nargs=3,
                               type="float", dest="point")
        self.parser.add_option("-f", "--foo", action="append", nargs=2,
                               type="int", dest="foo")
        self.parser.add_option("-z", "--zero", action="append_const",
                               dest="foo", const=(0, 0))
    def test_nargs_append(self):
        self.assertParseOK(["-f", "4", "-3", "blah", "--foo", "1", "666"],
                           {'point': None, 'foo': [(4, -3), (1, 666)]},
                           ["blah"])
    def test_nargs_append_required_values(self):
        # "4,3" is a single token; nargs=2 still requires two arguments.
        self.assertParseFail(["-f4,3"],
                             "-f option requires 2 arguments")
    def test_nargs_append_simple(self):
        self.assertParseOK(["--foo=3", "4"],
                           {'point': None, 'foo':[(3, 4)]},
                           [])
    def test_nargs_append_const(self):
        self.assertParseOK(["--zero", "--foo", "3", "4", "-z"],
                           {'point': None, 'foo':[(0, 0), (3, 4), (0, 0)]},
                           [])
class TestVersion(BaseTest):
    """--version output (with %prog expansion) and its absence when no
    version string was supplied."""
    def test_version(self):
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE,
                                               version="%prog 0.1")
        save_argv = sys.argv[:]
        try:
            sys.argv[0] = os.path.join(os.curdir, "foo", "bar")
            self.assertOutput(["--version"], "bar 0.1\n")
        finally:
            # Restore argv even if the assertion fails.
            sys.argv[:] = save_argv
    def test_no_version(self):
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE)
        self.assertParseFail(["--version"],
                             "no such option: --version")
# -- Test conflicting default values and parser.parse_args() -----------
class TestConflictingDefaults(BaseTest):
    """Conflicting default values: the last one should win."""
    def setUp(self):
        self.parser = OptionParser(option_list=[
            make_option("-v", action="store_true", dest="verbose", default=1)])
    def test_conflict_default(self):
        # Second option for the same dest: its default (0) replaces 1.
        self.parser.add_option("-q", action="store_false", dest="verbose",
                               default=0)
        self.assertParseOK([], {'verbose': 0}, [])
    def test_conflict_default_none(self):
        # An explicit None default also overrides the earlier default.
        self.parser.add_option("-q", action="store_false", dest="verbose",
                               default=None)
        self.assertParseOK([], {'verbose': None}, [])
class TestOptionGroup(BaseTest):
    """Creating, populating, and querying OptionGroup objects."""
    def setUp(self):
        self.parser = OptionParser(usage=SUPPRESS_USAGE)
    def test_option_group_create_instance(self):
        # Options may be added to a group after the group is attached.
        group = OptionGroup(self.parser, "Spam")
        self.parser.add_option_group(group)
        group.add_option("--spam", action="store_true",
                         help="spam spam spam spam")
        self.assertParseOK(["--spam"], {'spam': 1}, [])
    def test_add_group_no_group(self):
        self.assertTypeError(self.parser.add_option_group,
                             "not an OptionGroup instance: None", None)
    def test_add_group_invalid_arguments(self):
        self.assertTypeError(self.parser.add_option_group,
                             "invalid arguments", None, None)
    def test_add_group_wrong_parser(self):
        # A group whose .parser is a different OptionParser must be rejected.
        group = OptionGroup(self.parser, "Spam")
        group.parser = OptionParser()
        self.assertRaises(self.parser.add_option_group, (group,), None,
                          ValueError, "invalid OptionGroup (wrong parser)")
    def test_group_manipulate(self):
        group = self.parser.add_option_group("Group 2",
                                             description="Some more options")
        group.set_title("Bacon")
        group.add_option("--bacon", type="int")
        # BUG FIX: the original used assertTrue(x, group), which treats
        # `group` as the failure *message* and therefore never compares
        # the two values.  assertEqual performs the intended check.
        self.assertEqual(self.parser.get_option_group("--bacon"), group)
# -- Test extending and parser.parse_args() ----------------------------
class TestExtendAddTypes(BaseTest):
    """An Option subclass can define a new option type ("file")."""
    def setUp(self):
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE,
                                               option_class=self.MyOption)
        self.parser.add_option("-a", None, type="string", dest="a")
        self.parser.add_option("-f", "--file", type="file", dest="file")
    def tearDown(self):
        # Clean up whatever the test_filetype_* tests left at support.TESTFN.
        if os.path.isdir(support.TESTFN):
            os.rmdir(support.TESTFN)
        elif os.path.isfile(support.TESTFN):
            os.unlink(support.TESTFN)
    class MyOption (Option):
        # NOTE: no 'self' -- entries in TYPE_CHECKER are called as plain
        # functions: checker(option, opt, value).
        def check_file(option, opt, value):
            if not os.path.exists(value):
                raise OptionValueError("%s: file does not exist" % value)
            elif not os.path.isfile(value):
                raise OptionValueError("%s: not a regular file" % value)
            return value
        TYPES = Option.TYPES + ("file",)
        # Copy so the base class's checker table is not mutated.
        TYPE_CHECKER = copy.copy(Option.TYPE_CHECKER)
        TYPE_CHECKER["file"] = check_file
    def test_filetype_ok(self):
        open(support.TESTFN, "w").close()
        self.assertParseOK(["--file", support.TESTFN, "-afoo"],
                           {'file': support.TESTFN, 'a': 'foo'},
                           [])
    def test_filetype_noexist(self):
        self.assertParseFail(["--file", support.TESTFN, "-afoo"],
                             "%s: file does not exist" %
                             support.TESTFN)
    def test_filetype_notfile(self):
        os.mkdir(support.TESTFN)
        self.assertParseFail(["--file", support.TESTFN, "-afoo"],
                             "%s: not a regular file" %
                             support.TESTFN)
class TestExtendAddActions(BaseTest):
    """An Option subclass can define a new "extend" action."""
    def setUp(self):
        options = [self.MyOption("-a", "--apple", action="extend",
                                 type="string", dest="apple")]
        self.parser = OptionParser(option_list=options)
    class MyOption (Option):
        ACTIONS = Option.ACTIONS + ("extend",)
        STORE_ACTIONS = Option.STORE_ACTIONS + ("extend",)
        TYPED_ACTIONS = Option.TYPED_ACTIONS + ("extend",)
        def take_action(self, action, dest, opt, value, values, parser):
            if action == "extend":
                # Split the comma-separated value and extend the dest list.
                lvalue = value.split(",")
                values.ensure_value(dest, []).extend(lvalue)
            else:
                # BUG FIX: Option.take_action's signature is
                # (action, dest, opt, value, values, parser); the original
                # passed (..., parser, value, values), scrambling the
                # arguments for any non-"extend" action.
                Option.take_action(self, action, dest, opt, value, values,
                                   parser)
    def test_extend_add_action(self):
        self.assertParseOK(["-afoo,bar", "--apple=blah"],
                           {'apple': ["foo", "bar", "blah"]},
                           [])
    def test_extend_add_action_normal(self):
        self.assertParseOK(["-a", "foo", "-abar", "--apple=x,y"],
                           {'apple': ["foo", "bar", "x", "y"]},
                           [])
# -- Test callbacks and parser.parse_args() ----------------------------
class TestCallback(BaseTest):
    """Basic action="callback" handling, with and without a typed value."""
    def setUp(self):
        options = [make_option("-x",
                               None,
                               action="callback",
                               callback=self.process_opt),
                   make_option("-f",
                               "--file",
                               action="callback",
                               callback=self.process_opt,
                               type="string",
                               dest="filename")]
        self.parser = OptionParser(option_list=options)
    def process_opt(self, option, opt, value, parser_):
        if opt == "-x":
            self.assertEqual(option._short_opts, ["-x"])
            self.assertEqual(option._long_opts, [])
            self.assertTrue(parser_ is self.parser)
            # -x takes no value, so the callback receives None.
            self.assertTrue(value is None)
            self.assertEqual(vars(parser_.values), {'filename': None})
            parser_.values.x = 42
        elif opt == "--file":
            self.assertEqual(option._short_opts, ["-f"])
            self.assertEqual(option._long_opts, ["--file"])
            self.assertTrue(parser_ is self.parser)
            self.assertEqual(value, "foo")
            # -x was processed first, so its side effect is visible here.
            self.assertEqual(vars(parser_.values), {'filename': None, 'x': 42})
            setattr(parser_.values, option.dest, value)
        else:
            self.fail("Unknown option %r in process_opt." % opt)
    def test_callback(self):
        self.assertParseOK(["-x", "--file=foo"],
                           {'filename': "foo", 'x': 42},
                           [])
    def test_callback_help(self):
        # This test was prompted by SF bug #960515 -- the point is
        # not to inspect the help text, just to make sure that
        # format_help() doesn't crash.
        parser = OptionParser(usage=SUPPRESS_USAGE)
        parser.remove_option("-h")
        parser.add_option("-t", "--test", action="callback",
                          callback=lambda: None, type="string",
                          help="foo")
        expected_help = ("Options:\n"
                         "  -t TEST, --test=TEST  foo\n")
        self.assertHelp(parser, expected_help)
class TestCallbackExtraArgs(BaseTest):
    """callback_args are passed positionally to the callback."""
    def setUp(self):
        options = [make_option("-p", "--point", action="callback",
                               callback=self.process_tuple,
                               callback_args=(3, int), type="string",
                               dest="points", default=[])]
        self.parser = OptionParser(option_list=options)
    def process_tuple(self, option, opt, value, parser_, nargs, converter):
        # Renamed from len/type: don't shadow builtins.  The extra args
        # come positionally from callback_args, so the names are local.
        self.assertEqual(nargs, 3)
        self.assertTrue(converter is int)
        if opt == "-p":
            self.assertEqual(value, "1,2,3")
        elif opt == "--point":
            self.assertEqual(value, "4,5,6")
        # Convert "a,b,c" into a tuple of ints and append it to the dest.
        value = tuple(map(converter, value.split(",")))
        getattr(parser_.values, option.dest).append(value)
    def test_callback_extra_args(self):
        self.assertParseOK(["-p1,2,3", "--point", "4,5,6"],
                           {'points': [(1,2,3), (4,5,6)]},
                           [])
class TestCallbackMeddleArgs(BaseTest):
    """Callbacks may consume extra arguments by editing parser.rargs."""
    def setUp(self):
        # Creates options -1 through -5; each consumes that many arguments.
        options = [make_option(str(x), action="callback",
                               callback=self.process_n, dest='things')
                   for x in range(-1, -6, -1)]
        self.parser = OptionParser(option_list=options)
    # Callback that meddles in rargs, largs
    def process_n(self, option, opt, value, parser_):
        # option is -3, -5, etc.
        nargs = int(opt[1:])
        rargs = parser_.rargs
        if len(rargs) < nargs:
            self.fail("Expected %d arguments for %s option." % (nargs, opt))
        dest = parser_.values.ensure_value(option.dest, [])
        dest.append(tuple(rargs[0:nargs]))
        # Record the count as a leftover arg, then strip the consumed args.
        parser_.largs.append(nargs)
        del rargs[0:nargs]
    def test_callback_meddle_args(self):
        self.assertParseOK(["-1", "foo", "-3", "bar", "baz", "qux"],
                           {'things': [("foo",), ("bar", "baz", "qux")]},
                           [1, 3])
    def test_callback_meddle_args_separator(self):
        # The callback grabs the "--" too, since it consumes rargs blindly.
        self.assertParseOK(["-2", "foo", "--"],
                           {'things': [('foo', '--')]},
                           [2])
class TestCallbackManyArgs(BaseTest):
    """nargs > 1 with action="callback": value arrives as a typed tuple."""
    def setUp(self):
        options = [make_option("-a", "--apple", action="callback", nargs=2,
                               callback=self.process_many, type="string"),
                   make_option("-b", "--bob", action="callback", nargs=3,
                               callback=self.process_many, type="int")]
        self.parser = OptionParser(option_list=options)
    def process_many(self, option, opt, value, parser_):
        if opt == "-a":
            self.assertEqual(value, ("foo", "bar"))
        elif opt == "--apple":
            self.assertEqual(value, ("ding", "dong"))
        elif opt == "-b":
            self.assertEqual(value, (1, 2, 3))
        elif opt == "--bob":
            # Negative numbers are accepted as values for an int option.
            self.assertEqual(value, (-666, 42, 0))
    def test_many_args(self):
        # No callback stores anything, so both dests stay at None.
        self.assertParseOK(["-a", "foo", "bar", "--apple", "ding", "dong",
                            "-b", "1", "2", "3", "--bob", "-666", "42",
                            "0"],
                           {"apple": None, "bob": None},
                           [])
class TestCallbackCheckAbbrev(BaseTest):
    """Callbacks must receive the expanded, not abbreviated, option string."""
    def setUp(self):
        parser = OptionParser()
        parser.add_option("--foo-bar", action="callback",
                          callback=self.check_abbrev)
        self.parser = parser
    def check_abbrev(self, option, opt, value, parser):
        # optparse should hand us the full "--foo-bar", never "--foo".
        self.assertEqual(opt, "--foo-bar")
    def test_abbrev_callback_expansion(self):
        self.assertParseOK(["--foo"], {}, [])
class TestCallbackVarArgs(BaseTest):
    """A callback that slurps a variable number of arguments from rargs."""
    def setUp(self):
        options = [make_option("-a", type="int", nargs=2, dest="a"),
                   make_option("-b", action="store_true", dest="b"),
                   make_option("-c", "--callback", action="callback",
                               callback=self.variable_args, dest="c")]
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE,
                                               option_list=options)
    def variable_args(self, option, opt, value, parser):
        self.assertTrue(value is None)
        value = []
        rargs = parser.rargs
        while rargs:
            arg = rargs[0]
            # Stop on anything that looks like an option ("--foo" or "-f"),
            # but keep consuming bare "-" and "--" as ordinary values.
            if ((arg[:2] == "--" and len(arg) > 2) or
                (arg[:1] == "-" and len(arg) > 1 and arg[1] != "-")):
                break
            else:
                value.append(arg)
                del rargs[0]
        setattr(parser.values, option.dest, value)
    def test_variable_args(self):
        self.assertParseOK(["-a3", "-5", "--callback", "foo", "bar"],
                           {'a': (3, -5), 'b': None, 'c': ["foo", "bar"]},
                           [])
    def test_consume_separator_stop_at_option(self):
        # The callback swallows "--"; "-b" is what stops the slurping.
        self.assertParseOK(["-c", "37", "--", "xxx", "-b", "hello"],
                           {'a': None,
                            'b': True,
                            'c': ["37", "--", "xxx"]},
                           ["hello"])
    def test_positional_arg_and_variable_args(self):
        # A lone "-" counts as a value, not an option.
        self.assertParseOK(["hello", "-c", "foo", "-", "bar"],
                           {'a': None,
                            'b': None,
                            'c':["foo", "-", "bar"]},
                           ["hello"])
    def test_stop_at_option(self):
        self.assertParseOK(["-c", "foo", "-b"],
                           {'a': None, 'b': True, 'c': ["foo"]},
                           [])
    def test_stop_at_invalid_option(self):
        self.assertParseFail(["-c", "3", "-5", "-a"], "no such option: -5")
# -- Test conflict handling and parser.parse_args() --------------------
class ConflictBase(BaseTest):
    """Shared fixture for the option-conflict tests: a parser with -v."""
    def setUp(self):
        options = [make_option("-v", "--verbose", action="count",
                               dest="verbose", help="increment verbosity")]
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE,
                                               option_list=options)
    def show_version(self, option, opt, value, parser):
        # Callback used by subclasses when adding a conflicting -v/--version.
        parser.values.show_version = 1
class TestConflict(ConflictBase):
    """Use the default conflict resolution for Optik 1.2: error."""
    # Renamed from the mangled "assertTrueconflict_error" (an artifact of a
    # bad assert_ -> assertTrue mass rename); both call sites are below.
    def assert_conflict_error(self, func):
        """Adding -v/--version via *func* must raise OptionConflictError."""
        err = self.assertRaises(
            func, ("-v", "--version"), {'action' : "callback",
                                        'callback' : self.show_version,
                                        'help' : "show version"},
            OptionConflictError,
            "option -v/--version: conflicting option string(s): -v")
        self.assertEqual(err.msg, "conflicting option string(s): -v")
        self.assertEqual(err.option_id, "-v/--version")
    def test_conflict_error(self):
        self.assert_conflict_error(self.parser.add_option)
    def test_conflict_error_group(self):
        # The conflict is detected even when adding through an OptionGroup.
        group = OptionGroup(self.parser, "Group 1")
        self.assert_conflict_error(group.add_option)
    def test_no_such_conflict_handler(self):
        self.assertRaises(
            self.parser.set_conflict_handler, ('foo',), None,
            ValueError, "invalid conflict_resolution value 'foo'")
class TestConflictResolve(ConflictBase):
    """conflict_handler="resolve": newer options silently steal strings."""
    def setUp(self):
        ConflictBase.setUp(self)
        self.parser.set_conflict_handler("resolve")
        self.parser.add_option("-v", "--version", action="callback",
                               callback=self.show_version, help="show version")
    def test_conflict_resolve(self):
        v_opt = self.parser.get_option("-v")
        verbose_opt = self.parser.get_option("--verbose")
        version_opt = self.parser.get_option("--version")
        # "-v" now belongs to the newer --version option ...
        self.assertTrue(v_opt is version_opt)
        self.assertTrue(v_opt is not verbose_opt)
        self.assertEqual(v_opt._long_opts, ["--version"])
        self.assertEqual(version_opt._short_opts, ["-v"])
        self.assertEqual(version_opt._long_opts, ["--version"])
        # ... and --verbose has lost its short form.
        self.assertEqual(verbose_opt._short_opts, [])
        self.assertEqual(verbose_opt._long_opts, ["--verbose"])
    def test_conflict_resolve_help(self):
        self.assertOutput(["-h"], """\
Options:
  --verbose      increment verbosity
  -h, --help     show this help message and exit
  -v, --version  show version
""")
    def test_conflict_resolve_short_opt(self):
        # -v triggers the version callback, not the verbosity counter.
        self.assertParseOK(["-v"],
                           {'verbose': None, 'show_version': 1},
                           [])
    def test_conflict_resolve_long_opt(self):
        self.assertParseOK(["--verbose"],
                           {'verbose': 1},
                           [])
    def test_conflict_resolve_long_opts(self):
        self.assertParseOK(["--verbose", "--version"],
                           {'verbose': 1, 'show_version': 1},
                           [])
class TestConflictOverride(BaseTest):
    """With "resolve", re-adding the same strings fully replaces an option."""
    def setUp(self):
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE)
        self.parser.set_conflict_handler("resolve")
        self.parser.add_option("-n", "--dry-run",
                               action="store_true", dest="dry_run",
                               help="don't do anything")
        # Both option strings conflict, so this definition replaces the
        # first one entirely.
        self.parser.add_option("--dry-run", "-n",
                               action="store_const", const=42, dest="dry_run",
                               help="dry run mode")
    def test_conflict_override_opts(self):
        opt = self.parser.get_option("--dry-run")
        self.assertEqual(opt._short_opts, ["-n"])
        self.assertEqual(opt._long_opts, ["--dry-run"])
    def test_conflict_override_help(self):
        self.assertOutput(["-h"], """\
Options:
  -h, --help     show this help message and exit
  -n, --dry-run  dry run mode
""")
    def test_conflict_override_args(self):
        # The surviving option is store_const, so -n stores 42, not True.
        self.assertParseOK(["-n"],
                           {'dry_run': 42},
                           [])
# -- Other testing. ----------------------------------------------------
_expected_help_basic = """\
Usage: bar.py [options]
Options:
-a APPLE throw APPLEs at basket
-b NUM, --boo=NUM shout "boo!" NUM times (in order to frighten away all the
evil spirits that cause trouble and mayhem)
--foo=FOO store FOO in the foo list for later fooing
-h, --help show this help message and exit
"""
_expected_help_long_opts_first = """\
Usage: bar.py [options]
Options:
-a APPLE throw APPLEs at basket
--boo=NUM, -b NUM shout "boo!" NUM times (in order to frighten away all the
evil spirits that cause trouble and mayhem)
--foo=FOO store FOO in the foo list for later fooing
--help, -h show this help message and exit
"""
_expected_help_title_formatter = """\
Usage
=====
bar.py [options]
Options
=======
-a APPLE throw APPLEs at basket
--boo=NUM, -b NUM shout "boo!" NUM times (in order to frighten away all the
evil spirits that cause trouble and mayhem)
--foo=FOO store FOO in the foo list for later fooing
--help, -h show this help message and exit
"""
_expected_help_short_lines = """\
Usage: bar.py [options]
Options:
-a APPLE throw APPLEs at basket
-b NUM, --boo=NUM shout "boo!" NUM times (in order to
frighten away all the evil spirits
that cause trouble and mayhem)
--foo=FOO store FOO in the foo list for later
fooing
-h, --help show this help message and exit
"""
class TestHelp(BaseTest):
    """Full --help output under various formatters and terminal widths."""
    def setUp(self):
        self.parser = self.make_parser(80)
    def make_parser(self, columns):
        """Build the standard three-option parser with $COLUMNS = columns."""
        options = [
            make_option("-a", type="string", dest='a',
                        metavar="APPLE", help="throw APPLEs at basket"),
            make_option("-b", "--boo", type="int", dest='boo',
                        metavar="NUM",
                        help=
                        "shout \"boo!\" NUM times (in order to frighten away "
                        "all the evil spirits that cause trouble and mayhem)"),
            make_option("--foo", action="append", type="string", dest='foo',
                        help="store FOO in the foo list for later fooing"),
            ]
        # We need to set COLUMNS for the OptionParser constructor, but
        # we must restore its original value -- otherwise, this test
        # screws things up for other tests when it's part of the Python
        # test suite.
        with support.EnvironmentVarGuard() as env:
            env['COLUMNS'] = str(columns)
            return InterceptingOptionParser(option_list=options)
    def assertHelpEquals(self, expected_output):
        save_argv = sys.argv[:]
        try:
            # Make optparse believe bar.py is being executed.
            sys.argv[0] = os.path.join("foo", "bar.py")
            self.assertOutput(["-h"], expected_output)
        finally:
            sys.argv[:] = save_argv
    def test_help(self):
        self.assertHelpEquals(_expected_help_basic)
    def test_help_old_usage(self):
        # An explicit "Usage: " prefix must not be duplicated in the output.
        self.parser.set_usage("Usage: %prog [options]")
        self.assertHelpEquals(_expected_help_basic)
    def test_help_long_opts_first(self):
        self.parser.formatter.short_first = 0
        self.assertHelpEquals(_expected_help_long_opts_first)
    def test_help_title_formatter(self):
        with support.EnvironmentVarGuard() as env:
            env["COLUMNS"] = "80"
            self.parser.formatter = TitledHelpFormatter()
            self.assertHelpEquals(_expected_help_title_formatter)
    def test_wrap_columns(self):
        # Ensure that wrapping respects $COLUMNS environment variable.
        # Need to reconstruct the parser, since that's the only time
        # we look at $COLUMNS.
        self.parser = self.make_parser(60)
        self.assertHelpEquals(_expected_help_short_lines)
    def test_help_unicode(self):
        # Non-ASCII help strings must survive help formatting.
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE)
        self.parser.add_option("-a", action="store_true", help="ol\u00E9!")
        expect = """\
Options:
  -h, --help  show this help message and exit
  -a          ol\u00E9!
"""
        self.assertHelpEquals(expect)
    def test_help_unicode_description(self):
        self.parser = InterceptingOptionParser(usage=SUPPRESS_USAGE,
                                               description="ol\u00E9!")
        expect = """\
ol\u00E9!
Options:
  -h, --help  show this help message and exit
"""
        self.assertHelpEquals(expect)
    def test_help_description_groups(self):
        self.parser.set_description(
            "This is the program description for %prog. %prog has "
            "an option group as well as single options.")
        group = OptionGroup(
            self.parser, "Dangerous Options",
            "Caution: use of these options is at your own risk. "
            "It is believed that some of them bite.")
        group.add_option("-g", action="store_true", help="Group option.")
        self.parser.add_option_group(group)
        expect = """\
Usage: bar.py [options]
This is the program description for bar.py. bar.py has an option group as
well as single options.
Options:
  -a APPLE           throw APPLEs at basket
  -b NUM, --boo=NUM  shout "boo!" NUM times (in order to frighten away all the
                     evil spirits that cause trouble and mayhem)
  --foo=FOO          store FOO in the foo list for later fooing
  -h, --help         show this help message and exit
  Dangerous Options:
    Caution: use of these options is at your own risk. It is believed
    that some of them bite.
    -g               Group option.
"""
        self.assertHelpEquals(expect)
        # An epilog is appended after the option listing.
        self.parser.epilog = "Please report bugs to /dev/null."
        self.assertHelpEquals(expect + "\nPlease report bugs to /dev/null.\n")
class TestMatchAbbrev(BaseTest):
    """Tests for optparse's long-option abbreviation matcher."""
    def test_match_abbrev(self):
        # An exact match wins even when other words share the prefix.
        self.assertEqual(_match_abbrev("--f",
                                       {"--foz": None,
                                        "--foo": None,
                                        "--fie": None,
                                        "--f": None}),
                         "--f")
    def test_match_abbrev_error(self):
        # With no exact match, an ambiguous prefix raises BadOptionError.
        s = "--f"
        wordmap = {"--foz": None, "--foo": None, "--fie": None}
        self.assertRaises(
            _match_abbrev, (s, wordmap), None,
            BadOptionError, "ambiguous option: --f (--fie, --foo, --foz?)")
class TestParseNumber(BaseTest):
    """Integer options accept 0x/0b/0-prefixed literals via _parse_num."""
    def setUp(self):
        self.parser = InterceptingOptionParser()
        self.parser.add_option("-n", type=int)
        self.parser.add_option("-l", type=int)
    def test_parse_num_fail(self):
        # Empty strings and malformed literals raise ValueError; the regex
        # tolerates message differences across implementations.
        self.assertRaises(
            _parse_num, ("", int), {},
            ValueError,
            re.compile(r"invalid literal for int().*: '?'?"))
        self.assertRaises(
            _parse_num, ("0xOoops", int), {},
            ValueError,
            re.compile(r"invalid literal for int().*: s?'?0xOoops'?"))
    def test_parse_num_ok(self):
        self.assertEqual(_parse_num("0", int), 0)
        self.assertEqual(_parse_num("0x10", int), 16)
        self.assertEqual(_parse_num("0XA", int), 10)
        # A leading zero selects octal (radix 8).
        self.assertEqual(_parse_num("010", int), 8)
        self.assertEqual(_parse_num("0b11", int), 3)
        # "0b" alone is treated as binary zero (empty digits default to "0").
        self.assertEqual(_parse_num("0b", int), 0)
    def test_numeric_options(self):
        self.assertParseOK(["-n", "42", "-l", "0x20"],
                           { "n": 42, "l": 0x20 }, [])
        self.assertParseOK(["-n", "0b0101", "-l010"],
                           { "n": 5, "l": 8 }, [])
        # Digits invalid for the detected radix must be rejected.
        self.assertParseFail(["-n008"],
                             "option -n: invalid integer value: '008'")
        self.assertParseFail(["-l0b0123"],
                             "option -l: invalid integer value: '0b0123'")
        self.assertParseFail(["-l", "0x12x"],
                             "option -l: invalid integer value: '0x12x'")
def test_main():
    """Entry point for regrtest: run every TestCase in this module."""
    support.run_unittest(__name__)
if __name__ == '__main__':
    test_main()
""" Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP857.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp857 codec built on this module's charmap tables."""
    def encode(self, input, errors='strict'):
        # Translate each character through the module-level encoding_map.
        return codecs.charmap_encode(input, errors, encoding_map)
    def decode(self, input, errors='strict'):
        # Translate each byte through the module-level decoding_table.
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; cp857 keeps no state between calls."""
    def encode(self, input, final=False):
        encoded, _consumed = codecs.charmap_encode(input, self.errors,
                                                   encoding_map)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; cp857 keeps no state between calls."""
    def decode(self, input, final=False):
        decoded, _consumed = codecs.charmap_decode(input, self.errors,
                                                   decoding_table)
        return decoded
class StreamWriter(Codec,codecs.StreamWriter):
    # encode() comes from Codec; codecs.StreamWriter adds the stream API.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # decode() comes from Codec; codecs.StreamReader adds the stream API.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry the codec registry uses for 'cp857'."""
    codec = Codec()
    return codecs.CodecInfo(
        name='cp857',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
# Start from the identity mapping (byte i -> U+00i) and override the
# positions where cp857 differs from Latin-1.  A value of None marks a
# code point that is undefined in cp857.
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
    0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
    0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
    0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
    0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
    0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
    0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
    0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
    0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
    0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
    0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
    0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
    0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
    0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
    0x008d: 0x0131, # LATIN SMALL LETTER DOTLESS I
    0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
    0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
    0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
    0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
    0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
    0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
    0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
    0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
    0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
    0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
    0x0098: 0x0130, # LATIN CAPITAL LETTER I WITH DOT ABOVE
    0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
    0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
    0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
    0x009c: 0x00a3, # POUND SIGN
    0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
    0x009e: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
    0x009f: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
    0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
    0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
    0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
    0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
    0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
    0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
    0x00a6: 0x011e, # LATIN CAPITAL LETTER G WITH BREVE
    0x00a7: 0x011f, # LATIN SMALL LETTER G WITH BREVE
    0x00a8: 0x00bf, # INVERTED QUESTION MARK
    0x00a9: 0x00ae, # REGISTERED SIGN
    0x00aa: 0x00ac, # NOT SIGN
    0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
    0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
    0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
    0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00b0: 0x2591, # LIGHT SHADE
    0x00b1: 0x2592, # MEDIUM SHADE
    0x00b2: 0x2593, # DARK SHADE
    0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
    0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
    0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    0x00b7: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
    0x00b8: 0x00a9, # COPYRIGHT SIGN
    0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
    0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
    0x00bd: 0x00a2, # CENT SIGN
    0x00be: 0x00a5, # YEN SIGN
    0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
    0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
    0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
    0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x00c6: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
    0x00c7: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
    0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
    0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
    0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x00cf: 0x00a4, # CURRENCY SIGN
    0x00d0: 0x00ba, # MASCULINE ORDINAL INDICATOR
    0x00d1: 0x00aa, # FEMININE ORDINAL INDICATOR
    0x00d2: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
    0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
    0x00d4: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
    0x00d5: None, # UNDEFINED
    0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
    0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    0x00d8: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
    0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
    0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x00db: 0x2588, # FULL BLOCK
    0x00dc: 0x2584, # LOWER HALF BLOCK
    0x00dd: 0x00a6, # BROKEN BAR
    0x00de: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
    0x00df: 0x2580, # UPPER HALF BLOCK
    0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
    0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
    0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    0x00e3: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
    0x00e4: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
    0x00e5: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
    0x00e6: 0x00b5, # MICRO SIGN
    0x00e7: None, # UNDEFINED
    0x00e8: 0x00d7, # MULTIPLICATION SIGN
    0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
    0x00ea: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
    0x00eb: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
    # NOTE(review): 0x00ec has no entry here, so it keeps the identity
    # mapping (0xec -> U+00EC) -- verify against the CP857.TXT source table.
    0x00ed: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
    0x00ee: 0x00af, # MACRON
    0x00ef: 0x00b4, # ACUTE ACCENT
    0x00f0: 0x00ad, # SOFT HYPHEN
    0x00f1: 0x00b1, # PLUS-MINUS SIGN
    0x00f2: None, # UNDEFINED
    0x00f3: 0x00be, # VULGAR FRACTION THREE QUARTERS
    0x00f4: 0x00b6, # PILCROW SIGN
    0x00f5: 0x00a7, # SECTION SIGN
    0x00f6: 0x00f7, # DIVISION SIGN
    0x00f7: 0x00b8, # CEDILLA
    0x00f8: 0x00b0, # DEGREE SIGN
    0x00f9: 0x00a8, # DIAERESIS
    0x00fa: 0x00b7, # MIDDLE DOT
    0x00fb: 0x00b9, # SUPERSCRIPT ONE
    0x00fc: 0x00b3, # SUPERSCRIPT THREE
    0x00fd: 0x00b2, # SUPERSCRIPT TWO
    0x00fe: 0x25a0, # BLACK SQUARE
    0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
u'\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe0' # 0x0085 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe5' # 0x0086 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xea' # 0x0088 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xe8' # 0x008a -> LATIN SMALL LETTER E WITH GRAVE
u'\xef' # 0x008b -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xee' # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\u0131' # 0x008d -> LATIN SMALL LETTER DOTLESS I
u'\xc4' # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x008f -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xe6' # 0x0091 -> LATIN SMALL LIGATURE AE
u'\xc6' # 0x0092 -> LATIN CAPITAL LIGATURE AE
u'\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf2' # 0x0095 -> LATIN SMALL LETTER O WITH GRAVE
u'\xfb' # 0x0096 -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xf9' # 0x0097 -> LATIN SMALL LETTER U WITH GRAVE
u'\u0130' # 0x0098 -> LATIN CAPITAL LETTER I WITH DOT ABOVE
u'\xd6' # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xf8' # 0x009b -> LATIN SMALL LETTER O WITH STROKE
u'\xa3' # 0x009c -> POUND SIGN
u'\xd8' # 0x009d -> LATIN CAPITAL LETTER O WITH STROKE
u'\u015e' # 0x009e -> LATIN CAPITAL LETTER S WITH CEDILLA
u'\u015f' # 0x009f -> LATIN SMALL LETTER S WITH CEDILLA
u'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
u'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
u'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
u'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
u'\xf1' # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE
u'\xd1' # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE
u'\u011e' # 0x00a6 -> LATIN CAPITAL LETTER G WITH BREVE
u'\u011f' # 0x00a7 -> LATIN SMALL LETTER G WITH BREVE
u'\xbf' # 0x00a8 -> INVERTED QUESTION MARK
u'\xae' # 0x00a9 -> REGISTERED SIGN
u'\xac' # 0x00aa -> NOT SIGN
u'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF
u'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER
u'\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK
u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\xc1' # 0x00b5 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0x00b6 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc0' # 0x00b7 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xa9' # 0x00b8 -> COPYRIGHT SIGN
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\xa2' # 0x00bd -> CENT SIGN
u'\xa5' # 0x00be -> YEN SIGN
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\xe3' # 0x00c6 -> LATIN SMALL LETTER A WITH TILDE
u'\xc3' # 0x00c7 -> LATIN CAPITAL LETTER A WITH TILDE
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\xa4' # 0x00cf -> CURRENCY SIGN
u'\xba' # 0x00d0 -> MASCULINE ORDINAL INDICATOR
u'\xaa' # 0x00d1 -> FEMININE ORDINAL INDICATOR
u'\xca' # 0x00d2 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0x00d3 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0x00d4 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\ufffe' # 0x00d5 -> UNDEFINED
u'\xcd' # 0x00d6 -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0x00d7 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0x00d8 -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\xa6' # 0x00dd -> BROKEN BAR
u'\xcc' # 0x00de -> LATIN CAPITAL LETTER I WITH GRAVE
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\xd3' # 0x00e0 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
u'\xd4' # 0x00e2 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd2' # 0x00e3 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xf5' # 0x00e4 -> LATIN SMALL LETTER O WITH TILDE
u'\xd5' # 0x00e5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xb5' # 0x00e6 -> MICRO SIGN
u'\ufffe' # 0x00e7 -> UNDEFINED
u'\xd7' # 0x00e8 -> MULTIPLICATION SIGN
u'\xda' # 0x00e9 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0x00ea -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0x00eb -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xec' # 0x00ec -> LATIN SMALL LETTER I WITH GRAVE
u'\xff' # 0x00ed -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\xaf' # 0x00ee -> MACRON
u'\xb4' # 0x00ef -> ACUTE ACCENT
u'\xad' # 0x00f0 -> SOFT HYPHEN
u'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
u'\ufffe' # 0x00f2 -> UNDEFINED
u'\xbe' # 0x00f3 -> VULGAR FRACTION THREE QUARTERS
u'\xb6' # 0x00f4 -> PILCROW SIGN
u'\xa7' # 0x00f5 -> SECTION SIGN
u'\xf7' # 0x00f6 -> DIVISION SIGN
u'\xb8' # 0x00f7 -> CEDILLA
u'\xb0' # 0x00f8 -> DEGREE SIGN
u'\xa8' # 0x00f9 -> DIAERESIS
u'\xb7' # 0x00fa -> MIDDLE DOT
u'\xb9' # 0x00fb -> SUPERSCRIPT ONE
u'\xb3' # 0x00fc -> SUPERSCRIPT THREE
u'\xb2' # 0x00fd -> SUPERSCRIPT TWO
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
# Maps each encodable Unicode code point to its single-byte value in this
# code page; it is the inverse of the decoding table defined above. Code
# points that do not appear here cannot be encoded and raise an error
# under the 'strict' error handler.
# NOTE(review): the character repertoire (G with breve, dotless i,
# S with cedilla, plus the DOS box-drawing set) matches the DOS Turkish
# code page (cp857) -- confirm against the codec's declared name, which is
# outside this excerpt.
encoding_map = {
    0x0000: 0x0000, # NULL
    0x0001: 0x0001, # START OF HEADING
    0x0002: 0x0002, # START OF TEXT
    0x0003: 0x0003, # END OF TEXT
    0x0004: 0x0004, # END OF TRANSMISSION
    0x0005: 0x0005, # ENQUIRY
    0x0006: 0x0006, # ACKNOWLEDGE
    0x0007: 0x0007, # BELL
    0x0008: 0x0008, # BACKSPACE
    0x0009: 0x0009, # HORIZONTAL TABULATION
    0x000a: 0x000a, # LINE FEED
    0x000b: 0x000b, # VERTICAL TABULATION
    0x000c: 0x000c, # FORM FEED
    0x000d: 0x000d, # CARRIAGE RETURN
    0x000e: 0x000e, # SHIFT OUT
    0x000f: 0x000f, # SHIFT IN
    0x0010: 0x0010, # DATA LINK ESCAPE
    0x0011: 0x0011, # DEVICE CONTROL ONE
    0x0012: 0x0012, # DEVICE CONTROL TWO
    0x0013: 0x0013, # DEVICE CONTROL THREE
    0x0014: 0x0014, # DEVICE CONTROL FOUR
    0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
    0x0016: 0x0016, # SYNCHRONOUS IDLE
    0x0017: 0x0017, # END OF TRANSMISSION BLOCK
    0x0018: 0x0018, # CANCEL
    0x0019: 0x0019, # END OF MEDIUM
    0x001a: 0x001a, # SUBSTITUTE
    0x001b: 0x001b, # ESCAPE
    0x001c: 0x001c, # FILE SEPARATOR
    0x001d: 0x001d, # GROUP SEPARATOR
    0x001e: 0x001e, # RECORD SEPARATOR
    0x001f: 0x001f, # UNIT SEPARATOR
    0x0020: 0x0020, # SPACE
    0x0021: 0x0021, # EXCLAMATION MARK
    0x0022: 0x0022, # QUOTATION MARK
    0x0023: 0x0023, # NUMBER SIGN
    0x0024: 0x0024, # DOLLAR SIGN
    0x0025: 0x0025, # PERCENT SIGN
    0x0026: 0x0026, # AMPERSAND
    0x0027: 0x0027, # APOSTROPHE
    0x0028: 0x0028, # LEFT PARENTHESIS
    0x0029: 0x0029, # RIGHT PARENTHESIS
    0x002a: 0x002a, # ASTERISK
    0x002b: 0x002b, # PLUS SIGN
    0x002c: 0x002c, # COMMA
    0x002d: 0x002d, # HYPHEN-MINUS
    0x002e: 0x002e, # FULL STOP
    0x002f: 0x002f, # SOLIDUS
    0x0030: 0x0030, # DIGIT ZERO
    0x0031: 0x0031, # DIGIT ONE
    0x0032: 0x0032, # DIGIT TWO
    0x0033: 0x0033, # DIGIT THREE
    0x0034: 0x0034, # DIGIT FOUR
    0x0035: 0x0035, # DIGIT FIVE
    0x0036: 0x0036, # DIGIT SIX
    0x0037: 0x0037, # DIGIT SEVEN
    0x0038: 0x0038, # DIGIT EIGHT
    0x0039: 0x0039, # DIGIT NINE
    0x003a: 0x003a, # COLON
    0x003b: 0x003b, # SEMICOLON
    0x003c: 0x003c, # LESS-THAN SIGN
    0x003d: 0x003d, # EQUALS SIGN
    0x003e: 0x003e, # GREATER-THAN SIGN
    0x003f: 0x003f, # QUESTION MARK
    0x0040: 0x0040, # COMMERCIAL AT
    0x0041: 0x0041, # LATIN CAPITAL LETTER A
    0x0042: 0x0042, # LATIN CAPITAL LETTER B
    0x0043: 0x0043, # LATIN CAPITAL LETTER C
    0x0044: 0x0044, # LATIN CAPITAL LETTER D
    0x0045: 0x0045, # LATIN CAPITAL LETTER E
    0x0046: 0x0046, # LATIN CAPITAL LETTER F
    0x0047: 0x0047, # LATIN CAPITAL LETTER G
    0x0048: 0x0048, # LATIN CAPITAL LETTER H
    0x0049: 0x0049, # LATIN CAPITAL LETTER I
    0x004a: 0x004a, # LATIN CAPITAL LETTER J
    0x004b: 0x004b, # LATIN CAPITAL LETTER K
    0x004c: 0x004c, # LATIN CAPITAL LETTER L
    0x004d: 0x004d, # LATIN CAPITAL LETTER M
    0x004e: 0x004e, # LATIN CAPITAL LETTER N
    0x004f: 0x004f, # LATIN CAPITAL LETTER O
    0x0050: 0x0050, # LATIN CAPITAL LETTER P
    0x0051: 0x0051, # LATIN CAPITAL LETTER Q
    0x0052: 0x0052, # LATIN CAPITAL LETTER R
    0x0053: 0x0053, # LATIN CAPITAL LETTER S
    0x0054: 0x0054, # LATIN CAPITAL LETTER T
    0x0055: 0x0055, # LATIN CAPITAL LETTER U
    0x0056: 0x0056, # LATIN CAPITAL LETTER V
    0x0057: 0x0057, # LATIN CAPITAL LETTER W
    0x0058: 0x0058, # LATIN CAPITAL LETTER X
    0x0059: 0x0059, # LATIN CAPITAL LETTER Y
    0x005a: 0x005a, # LATIN CAPITAL LETTER Z
    0x005b: 0x005b, # LEFT SQUARE BRACKET
    0x005c: 0x005c, # REVERSE SOLIDUS
    0x005d: 0x005d, # RIGHT SQUARE BRACKET
    0x005e: 0x005e, # CIRCUMFLEX ACCENT
    0x005f: 0x005f, # LOW LINE
    0x0060: 0x0060, # GRAVE ACCENT
    0x0061: 0x0061, # LATIN SMALL LETTER A
    0x0062: 0x0062, # LATIN SMALL LETTER B
    0x0063: 0x0063, # LATIN SMALL LETTER C
    0x0064: 0x0064, # LATIN SMALL LETTER D
    0x0065: 0x0065, # LATIN SMALL LETTER E
    0x0066: 0x0066, # LATIN SMALL LETTER F
    0x0067: 0x0067, # LATIN SMALL LETTER G
    0x0068: 0x0068, # LATIN SMALL LETTER H
    0x0069: 0x0069, # LATIN SMALL LETTER I
    0x006a: 0x006a, # LATIN SMALL LETTER J
    0x006b: 0x006b, # LATIN SMALL LETTER K
    0x006c: 0x006c, # LATIN SMALL LETTER L
    0x006d: 0x006d, # LATIN SMALL LETTER M
    0x006e: 0x006e, # LATIN SMALL LETTER N
    0x006f: 0x006f, # LATIN SMALL LETTER O
    0x0070: 0x0070, # LATIN SMALL LETTER P
    0x0071: 0x0071, # LATIN SMALL LETTER Q
    0x0072: 0x0072, # LATIN SMALL LETTER R
    0x0073: 0x0073, # LATIN SMALL LETTER S
    0x0074: 0x0074, # LATIN SMALL LETTER T
    0x0075: 0x0075, # LATIN SMALL LETTER U
    0x0076: 0x0076, # LATIN SMALL LETTER V
    0x0077: 0x0077, # LATIN SMALL LETTER W
    0x0078: 0x0078, # LATIN SMALL LETTER X
    0x0079: 0x0079, # LATIN SMALL LETTER Y
    0x007a: 0x007a, # LATIN SMALL LETTER Z
    0x007b: 0x007b, # LEFT CURLY BRACKET
    0x007c: 0x007c, # VERTICAL LINE
    0x007d: 0x007d, # RIGHT CURLY BRACKET
    0x007e: 0x007e, # TILDE
    0x007f: 0x007f, # DELETE
    0x00a0: 0x00ff, # NO-BREAK SPACE
    0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK
    0x00a2: 0x00bd, # CENT SIGN
    0x00a3: 0x009c, # POUND SIGN
    0x00a4: 0x00cf, # CURRENCY SIGN
    0x00a5: 0x00be, # YEN SIGN
    0x00a6: 0x00dd, # BROKEN BAR
    0x00a7: 0x00f5, # SECTION SIGN
    0x00a8: 0x00f9, # DIAERESIS
    0x00a9: 0x00b8, # COPYRIGHT SIGN
    0x00aa: 0x00d1, # FEMININE ORDINAL INDICATOR
    0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00ac: 0x00aa, # NOT SIGN
    0x00ad: 0x00f0, # SOFT HYPHEN
    0x00ae: 0x00a9, # REGISTERED SIGN
    0x00af: 0x00ee, # MACRON
    0x00b0: 0x00f8, # DEGREE SIGN
    0x00b1: 0x00f1, # PLUS-MINUS SIGN
    0x00b2: 0x00fd, # SUPERSCRIPT TWO
    0x00b3: 0x00fc, # SUPERSCRIPT THREE
    0x00b4: 0x00ef, # ACUTE ACCENT
    0x00b5: 0x00e6, # MICRO SIGN
    0x00b6: 0x00f4, # PILCROW SIGN
    0x00b7: 0x00fa, # MIDDLE DOT
    0x00b8: 0x00f7, # CEDILLA
    0x00b9: 0x00fb, # SUPERSCRIPT ONE
    0x00ba: 0x00d0, # MASCULINE ORDINAL INDICATOR
    0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER
    0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF
    0x00be: 0x00f3, # VULGAR FRACTION THREE QUARTERS
    0x00bf: 0x00a8, # INVERTED QUESTION MARK
    0x00c0: 0x00b7, # LATIN CAPITAL LETTER A WITH GRAVE
    0x00c1: 0x00b5, # LATIN CAPITAL LETTER A WITH ACUTE
    0x00c2: 0x00b6, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
    0x00c3: 0x00c7, # LATIN CAPITAL LETTER A WITH TILDE
    0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
    0x00c5: 0x008f, # LATIN CAPITAL LETTER A WITH RING ABOVE
    0x00c6: 0x0092, # LATIN CAPITAL LIGATURE AE
    0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
    0x00c8: 0x00d4, # LATIN CAPITAL LETTER E WITH GRAVE
    0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
    0x00ca: 0x00d2, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
    0x00cb: 0x00d3, # LATIN CAPITAL LETTER E WITH DIAERESIS
    0x00cc: 0x00de, # LATIN CAPITAL LETTER I WITH GRAVE
    0x00cd: 0x00d6, # LATIN CAPITAL LETTER I WITH ACUTE
    0x00ce: 0x00d7, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
    0x00cf: 0x00d8, # LATIN CAPITAL LETTER I WITH DIAERESIS
    0x00d1: 0x00a5, # LATIN CAPITAL LETTER N WITH TILDE
    0x00d2: 0x00e3, # LATIN CAPITAL LETTER O WITH GRAVE
    0x00d3: 0x00e0, # LATIN CAPITAL LETTER O WITH ACUTE
    0x00d4: 0x00e2, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
    0x00d5: 0x00e5, # LATIN CAPITAL LETTER O WITH TILDE
    0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
    0x00d7: 0x00e8, # MULTIPLICATION SIGN
    0x00d8: 0x009d, # LATIN CAPITAL LETTER O WITH STROKE
    0x00d9: 0x00eb, # LATIN CAPITAL LETTER U WITH GRAVE
    0x00da: 0x00e9, # LATIN CAPITAL LETTER U WITH ACUTE
    0x00db: 0x00ea, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
    0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
    0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
    0x00e0: 0x0085, # LATIN SMALL LETTER A WITH GRAVE
    0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
    0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
    0x00e3: 0x00c6, # LATIN SMALL LETTER A WITH TILDE
    0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
    0x00e5: 0x0086, # LATIN SMALL LETTER A WITH RING ABOVE
    0x00e6: 0x0091, # LATIN SMALL LIGATURE AE
    0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
    0x00e8: 0x008a, # LATIN SMALL LETTER E WITH GRAVE
    0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
    0x00ea: 0x0088, # LATIN SMALL LETTER E WITH CIRCUMFLEX
    0x00eb: 0x0089, # LATIN SMALL LETTER E WITH DIAERESIS
    0x00ec: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
    0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
    0x00ee: 0x008c, # LATIN SMALL LETTER I WITH CIRCUMFLEX
    0x00ef: 0x008b, # LATIN SMALL LETTER I WITH DIAERESIS
    0x00f1: 0x00a4, # LATIN SMALL LETTER N WITH TILDE
    0x00f2: 0x0095, # LATIN SMALL LETTER O WITH GRAVE
    0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
    0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
    0x00f5: 0x00e4, # LATIN SMALL LETTER O WITH TILDE
    0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
    0x00f7: 0x00f6, # DIVISION SIGN
    0x00f8: 0x009b, # LATIN SMALL LETTER O WITH STROKE
    0x00f9: 0x0097, # LATIN SMALL LETTER U WITH GRAVE
    0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
    0x00fb: 0x0096, # LATIN SMALL LETTER U WITH CIRCUMFLEX
    0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
    0x00ff: 0x00ed, # LATIN SMALL LETTER Y WITH DIAERESIS
    0x011e: 0x00a6, # LATIN CAPITAL LETTER G WITH BREVE
    0x011f: 0x00a7, # LATIN SMALL LETTER G WITH BREVE
    0x0130: 0x0098, # LATIN CAPITAL LETTER I WITH DOT ABOVE
    0x0131: 0x008d, # LATIN SMALL LETTER DOTLESS I
    0x015e: 0x009e, # LATIN CAPITAL LETTER S WITH CEDILLA
    0x015f: 0x009f, # LATIN SMALL LETTER S WITH CEDILLA
    0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
    0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
    0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
    0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
    0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
    0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
    0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
    0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
    0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
    0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
    0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
    0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
    0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
    0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
    0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    0x2580: 0x00df, # UPPER HALF BLOCK
    0x2584: 0x00dc, # LOWER HALF BLOCK
    0x2588: 0x00db, # FULL BLOCK
    0x2591: 0x00b0, # LIGHT SHADE
    0x2592: 0x00b1, # MEDIUM SHADE
    0x2593: 0x00b2, # DARK SHADE
    0x25a0: 0x00fe, # BLACK SQUARE
}
| apache-2.0 |
directorlive/oppia | core/domain/exp_domain_test.py | 2 | 41807 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for exploration domain objects and methods defined on them."""
__author__ = 'Sean Lip'
import os
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import param_domain
from core.tests import test_utils
import feconf
import utils
# Dictionary-like data structures within sample YAML must be formatted
# alphabetically to match string equivalence with the YAML generation
# methods tested below.
#
# If evaluating differences in YAML, conversion to dict form via
# utils.dict_from_yaml can isolate differences quickly.
SAMPLE_YAML_CONTENT = (
"""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: %s
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 6
skin_customizations:
panels_contents: {}
states:
%s:
content:
- type: text
value: ''
interaction:
customization_args: {}
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: %s
feedback: []
param_changes: []
id: null
triggers: []
param_changes: []
New state:
content:
- type: text
value: ''
interaction:
customization_args: {}
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: New state
feedback: []
param_changes: []
id: null
triggers: []
param_changes: []
states_schema_version: 3
tags: []
""") % (
feconf.DEFAULT_INIT_STATE_NAME, feconf.DEFAULT_INIT_STATE_NAME,
feconf.DEFAULT_INIT_STATE_NAME)
SAMPLE_YAML_CONTENT_WITH_GADGETS = (
"""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: %s
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 6
skin_customizations:
panels_contents:
bottom: []
left:
- customization_args:
characters:
value: 2
floors:
value: 1
title:
value: The Test Gadget!
gadget_id: TestGadget
visible_in_states:
- New state
- Second state
right: []
states:
%s:
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: %s
feedback: []
param_changes: []
id: TextInput
triggers: []
param_changes: []
New state:
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: New state
feedback: []
param_changes: []
id: TextInput
triggers: []
param_changes: []
Second state:
content:
- type: text
value: ''
interaction:
customization_args:
placeholder:
value: ''
rows:
value: 1
handlers:
- name: submit
rule_specs:
- definition:
rule_type: default
dest: Second state
feedback: []
param_changes: []
id: TextInput
triggers: []
param_changes: []
states_schema_version: 3
tags: []
""") % (
feconf.DEFAULT_INIT_STATE_NAME, feconf.DEFAULT_INIT_STATE_NAME,
feconf.DEFAULT_INIT_STATE_NAME)
# Minimal gadget registry swapped in for feconf.ALLOWED_GADGETS by tests
# that need the TestGadget definition to be resolvable.
TEST_GADGETS = {
    'TestGadget': {
        'dir': os.path.join(feconf.GADGETS_DIR, 'TestGadget')
    }
}
class ExplorationDomainUnitTests(test_utils.GenericTestBase):
    """Test the exploration domain object."""
    def test_validation(self):
        """Test validation of explorations."""
        # Build an exploration with an empty title, category and state
        # dict; each assignment below trips (or clears) one specific
        # validation check, in the order validate() performs them.
        exploration = exp_domain.Exploration.create_default_exploration(
            'exp_id', '', '')
        exploration.init_state_name = ''
        exploration.states = {}
        with self.assertRaisesRegexp(
            utils.ValidationError, 'between 1 and 50 characters'):
            exploration.validate()
        exploration.title = 'Hello #'
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Invalid character #'):
            exploration.validate()
        exploration.title = 'Title'
        with self.assertRaisesRegexp(
            utils.ValidationError, 'between 1 and 50 characters'):
            exploration.validate()
        exploration.category = 'Category'
        # Note: If '/' ever becomes a valid state name, ensure that the rule
        # editor frontend template is fixed -- it currently uses '/' as a
        # sentinel for an invalid state name.
        bad_state = exp_domain.State.create_default_state('/')
        exploration.states = {'/': bad_state}
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Invalid character / in a state name'):
            exploration.validate()
        new_state = exp_domain.State.create_default_state('ABC')
        new_state.update_interaction_id('TextInput')
        # The 'states' property must be a non-empty dict of states.
        exploration.states = {}
        with self.assertRaisesRegexp(
            utils.ValidationError, 'exploration has no states'):
            exploration.validate()
        exploration.states = {'A string #': new_state}
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Invalid character # in a state name'):
            exploration.validate()
        exploration.states = {'A string _': new_state}
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Invalid character _ in a state name'):
            exploration.validate()
        exploration.states = {'ABC': new_state}
        with self.assertRaisesRegexp(
            utils.ValidationError, 'has no initial state name'):
            exploration.validate()
        exploration.init_state_name = 'initname'
        with self.assertRaisesRegexp(
            utils.ValidationError,
            r'There is no state in \[\'ABC\'\] corresponding to '
            'the exploration\'s initial state name initname.'):
            exploration.validate()
        # 'new_state' still has its default rule pointing at 'ABC', which
        # is no longer a state of the exploration.
        exploration.states = {exploration.init_state_name: new_state}
        with self.assertRaisesRegexp(
            utils.ValidationError, 'destination ABC is not a valid'):
            exploration.validate()
        exploration.states = {
            exploration.init_state_name: exp_domain.State.create_default_state(
                exploration.init_state_name)
        }
        exploration.states[exploration.init_state_name].update_interaction_id(
            'TextInput')
        exploration.validate()
        # 'en' is accepted; unknown codes and full language names are not.
        exploration.language_code = 'fake_code'
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Invalid language_code'):
            exploration.validate()
        exploration.language_code = 'English'
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Invalid language_code'):
            exploration.validate()
        exploration.language_code = 'en'
        exploration.validate()
        # param_specs must be a dict of ParamSpecs keyed by names built
        # from the permitted character set.
        exploration.param_specs = 'A string'
        with self.assertRaisesRegexp(
            utils.ValidationError, 'param_specs to be a dict'):
            exploration.validate()
        exploration.param_specs = {
            '@': param_domain.ParamSpec.from_dict({'obj_type': 'Int'})
        }
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Only parameter names with characters'):
            exploration.validate()
        exploration.param_specs = {
            'notAParamSpec': param_domain.ParamSpec.from_dict(
                {'obj_type': 'Int'})
        }
        exploration.validate()
        # An interaction's triggers list must be empty to validate.
        init_state = exploration.states[exploration.init_state_name]
        init_state.interaction.triggers = ['element']
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Expected empty triggers list.'):
            exploration.validate()
        init_state.interaction.triggers = []
        exploration.validate()
    def test_tag_validation(self):
        """Test validation of exploration tags."""
        exploration = exp_domain.Exploration.create_default_exploration(
            'exp_id', 'Title', 'Category')
        exploration.objective = 'Objective'
        exploration.states[exploration.init_state_name].update_interaction_id(
            'EndExploration')
        exploration.validate()
        exploration.tags = 'this should be a list'
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Expected \'tags\' to be a list'):
            exploration.validate()
        exploration.tags = [123]
        with self.assertRaisesRegexp(
            utils.ValidationError, 'to be a string'):
            exploration.validate()
        exploration.tags = ['abc', 123]
        with self.assertRaisesRegexp(
            utils.ValidationError, 'to be a string'):
            exploration.validate()
        exploration.tags = ['']
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Tags should be non-empty'):
            exploration.validate()
        exploration.tags = ['123']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'should only contain lowercase letters and spaces'):
            exploration.validate()
        exploration.tags = ['ABC']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'should only contain lowercase letters and spaces'):
            exploration.validate()
        exploration.tags = [' a b']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Tags should not start or end with whitespace'):
            exploration.validate()
        exploration.tags = ['a b ']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Tags should not start or end with whitespace'):
            exploration.validate()
        exploration.tags = ['a b']
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Adjacent whitespace in tags should be collapsed'):
            exploration.validate()
        exploration.tags = ['abc', 'abc']
        with self.assertRaisesRegexp(
            utils.ValidationError, 'Some tags duplicate each other'):
            exploration.validate()
        # A well-formed list of distinct tags validates cleanly.
        exploration.tags = ['computer science', 'analysis', 'a b c']
        exploration.validate()
    def test_exploration_skin_and_gadget_validation(self):
        """Test that Explorations including gadgets validate properly."""
        exploration = exp_domain.Exploration.from_yaml(
            'exp1', 'Title', 'Category', SAMPLE_YAML_CONTENT_WITH_GADGETS)
        invalid_gadget_instance = exp_domain.GadgetInstance('bad_ID', [], {})
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'Unknown gadget with ID bad_ID is not in the registry.'):
            invalid_gadget_instance.validate()
        with self.swap(feconf, 'ALLOWED_GADGETS', TEST_GADGETS):
            gadget_instance = exploration.skin_instance.panel_contents_dict[
                'left'][0]
            # Force a GadgetInstance to require certain state names.
            gadget_instance.visible_in_states.extend(['DEF', 'GHI'])
            with self.assertRaisesRegexp(
                utils.ValidationError,
                'Exploration missing required states: DEF, GHI'):
                exploration.validate()
            def_state = exp_domain.State.create_default_state('DEF')
            def_state.update_interaction_id('TextInput')
            exploration.states['DEF'] = def_state
            with self.assertRaisesRegexp(
                utils.ValidationError,
                'Exploration missing required state: GHI'):
                exploration.validate()
            ghi_state = exp_domain.State.create_default_state('GHI')
            ghi_state.update_interaction_id('TextInput')
            exploration.states['GHI'] = ghi_state
            exploration.validate()
            # Declaring visibility twice for the same state is an error.
            gadget_instance.visible_in_states.extend(['GHI'])
            with self.assertRaisesRegexp(
                utils.ValidationError,
                'TestGadget specifies visibility repeatedly for state: GHI'):
                exploration.validate()
            # Remove duplicate state.
            gadget_instance.visible_in_states.pop()
            # Adding a panel that doesn't exist in the skin.
            exploration.skin_instance.panel_contents_dict[
                'non_existent_panel'] = []
            with self.assertRaisesRegexp(
                utils.ValidationError,
                'non_existent_panel panel not found in skin conversation_v1'):
                exploration.validate()
    def test_exploration_get_gadget_ids(self):
        """Test that Exploration.get_gadget_ids returns apt results."""
        exploration_without_gadgets = exp_domain.Exploration.from_yaml(
            'An Exploration ID', 'A title', 'Category', SAMPLE_YAML_CONTENT)
        self.assertEqual(exploration_without_gadgets.get_gadget_ids(), [])
        exploration_with_gadgets = exp_domain.Exploration.from_yaml(
            'exp1', 'Title', 'Category', SAMPLE_YAML_CONTENT_WITH_GADGETS)
        self.assertEqual(
            exploration_with_gadgets.get_gadget_ids(),
            ['TestGadget']
        )
        # The returned IDs are gathered across all panels (the new gadget
        # is in 'right', the original in 'left') and sorted.
        another_gadget = exp_domain.GadgetInstance('AnotherGadget', [], {})
        exploration_with_gadgets.skin_instance.panel_contents_dict[
            'right'].append(another_gadget)
        self.assertEqual(
            exploration_with_gadgets.get_gadget_ids(),
            ['AnotherGadget', 'TestGadget']
        )
    def test_objective_validation(self):
        """Test that objectives are validated only in 'strict' mode."""
        self.save_new_valid_exploration(
            'exp_id', 'user@example.com', title='Title', category='Category',
            objective='', end_state_name='End')
        exploration = exp_services.get_exploration_by_id('exp_id')
        # An empty objective passes non-strict validation but fails strict.
        exploration.validate()
        with self.assertRaisesRegexp(
            utils.ValidationError, 'objective must be specified'):
            exploration.validate(strict=True)
        exploration.objective = 'An objective'
        exploration.validate(strict=True)
    def test_is_demo_property(self):
        """Test the is_demo property."""
        demo = exp_domain.Exploration.create_default_exploration(
            '0', 'title', 'category')
        self.assertEqual(demo.is_demo, True)
        notdemo1 = exp_domain.Exploration.create_default_exploration(
            'a', 'title', 'category')
        self.assertEqual(notdemo1.is_demo, False)
        notdemo2 = exp_domain.Exploration.create_default_exploration(
            'abcd', 'title', 'category')
        self.assertEqual(notdemo2.is_demo, False)
    def test_exploration_export_import(self):
        """Test that to_dict and from_dict preserve all data within an
        exploration.
        """
        demo = exp_domain.Exploration.create_default_exploration(
            '0', 'title', 'category')
        demo_dict = demo.to_dict()
        exp_from_dict = exp_domain.Exploration.create_exploration_from_dict(
            demo_dict)
        self.assertEqual(exp_from_dict.to_dict(), demo_dict)
    def test_interaction_with_none_id_is_not_terminal(self):
        """Test that an interaction with an id of None leads to is_terminal
        being false.
        """
        # Default exploration has a default interaction with an ID of None.
        demo = exp_domain.Exploration.create_default_exploration(
            '0', 'title', 'category')
        init_state = demo.states[feconf.DEFAULT_INIT_STATE_NAME]
        self.assertFalse(init_state.interaction.is_terminal)
class StateExportUnitTests(test_utils.GenericTestBase):
    """Tests serialization of State domain objects to dicts."""
    def test_export_state_to_dict(self):
        """A newly-added default state exports to the expected dict."""
        # A fresh state carries exactly one 'submit' handler holding the
        # default rule, which points back at the state itself.
        default_rule_spec_dict = {
            'definition': {
                u'rule_type': u'default'
            },
            'dest': 'New state',
            'feedback': [],
            'param_changes': [],
        }
        # A default state also has empty text content, no interaction id
        # and no triggers or parameter changes.
        expected_dict = {
            'content': [{
                'type': 'text',
                'value': u''
            }],
            'interaction': {
                'customization_args': {},
                'handlers': [{
                    'name': u'submit',
                    'rule_specs': [default_rule_spec_dict]
                }],
                'id': None,
                'triggers': [],
            },
            'param_changes': [],
        }
        exploration = exp_domain.Exploration.create_default_exploration(
            'A different exploration_id', 'A title', 'A category')
        exploration.add_states(['New state'])
        self.assertEqual(
            exploration.states['New state'].to_dict(), expected_dict)
class YamlCreationUnitTests(test_utils.GenericTestBase):
    """Tests conversion of explorations to and from YAML."""
    def test_yaml_import_and_export(self):
        """An exploration survives a to_yaml()/from_yaml() round trip."""
        exploration = exp_domain.Exploration.create_default_exploration(
            'An exploration_id', 'A title', 'A category')
        exploration.add_states(['New state'])
        self.assertEqual(len(exploration.states), 2)
        generated_yaml = exploration.to_yaml()
        self.assertEqual(generated_yaml, SAMPLE_YAML_CONTENT)
        # Re-importing the generated YAML yields an exploration that
        # serializes back to the same string.
        reimported = exp_domain.Exploration.from_yaml(
            'exp2', 'Title', 'Category', generated_yaml)
        self.assertEqual(len(reimported.states), 2)
        self.assertEqual(reimported.to_yaml(), generated_yaml)
        # Malformed YAML, or YAML missing an initial state name, must be
        # rejected on import.
        bad_imports = [
            ('exp3', 'No_initial_state_name'),
            ('exp4', 'Invalid\ninit_state_name:\nMore stuff'),
            ('exp4', 'State1:\n(\nInvalid yaml'),
        ]
        for bad_exp_id, bad_yaml_str in bad_imports:
            with self.assertRaises(Exception):
                exp_domain.Exploration.from_yaml(
                    bad_exp_id, 'Title', 'Category', bad_yaml_str)
    def test_yaml_import_and_export_without_gadgets(self):
        """Gadget-free YAML re-exports to the identical string."""
        exploration = exp_domain.Exploration.from_yaml(
            'An exploration_id', 'A title', 'Category', SAMPLE_YAML_CONTENT)
        self.assertEqual(exploration.to_yaml(), SAMPLE_YAML_CONTENT)
    def test_yaml_import_and_export_with_gadgets(self):
        """YAML with gadgets re-exports to semantically equal content."""
        exploration = exp_domain.Exploration.from_yaml(
            'An exploration_id', 'A title', 'Category',
            SAMPLE_YAML_CONTENT_WITH_GADGETS)
        # Compare in dict form to ignore incidental formatting differences.
        self.assertEqual(
            utils.dict_from_yaml(exploration.to_yaml()),
            utils.dict_from_yaml(SAMPLE_YAML_CONTENT_WITH_GADGETS))
class SchemaMigrationUnitTests(test_utils.GenericTestBase):
    """Test migration methods for yaml content."""

    # Each YAML_CONTENT_Vn constant below encodes the same two-state
    # exploration serialized under schema version n. The migration tests
    # compare serialized output byte-for-byte, so the exact formatting of
    # these strings (including indentation) is significant — do not reflow.

    # v1: states are a list of dicts with an inline 'name' key; interactions
    # are called 'widget' and carry a 'sticky' flag.
    YAML_CONTENT_V1 = (
"""default_skin: conversation_v1
param_changes: []
param_specs: {}
schema_version: 1
states:
- content:
  - type: text
    value: ''
  name: (untitled state)
  param_changes: []
  widget:
    customization_args: {}
    handlers:
    - name: submit
      rule_specs:
      - definition:
          rule_type: default
        dest: (untitled state)
        feedback: []
        param_changes: []
    sticky: false
    widget_id: TextInput
- content:
  - type: text
    value: ''
  name: New state
  param_changes: []
  widget:
    customization_args: {}
    handlers:
    - name: submit
      rule_specs:
      - definition:
          rule_type: default
        dest: END
        feedback: []
        param_changes: []
    sticky: false
    widget_id: TextInput
""")

    # v2: states become a dict keyed by state name, and the initial state is
    # recorded explicitly via init_state_name.
    YAML_CONTENT_V2 = (
"""default_skin: conversation_v1
init_state_name: (untitled state)
param_changes: []
param_specs: {}
schema_version: 2
states:
  (untitled state):
    content:
    - type: text
      value: ''
    param_changes: []
    widget:
      customization_args: {}
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: (untitled state)
          feedback: []
          param_changes: []
      sticky: false
      widget_id: TextInput
  New state:
    content:
    - type: text
      value: ''
    param_changes: []
    widget:
      customization_args: {}
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      sticky: false
      widget_id: TextInput
""")

    # v3: adds exploration-level metadata (author_notes, blurb, language_code,
    # objective, skill_tags) and populated widget customization_args.
    YAML_CONTENT_V3 = (
"""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 3
skill_tags: []
states:
  (untitled state):
    content:
    - type: text
      value: ''
    param_changes: []
    widget:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: (untitled state)
          feedback: []
          param_changes: []
      sticky: false
      widget_id: TextInput
  New state:
    content:
    - type: text
      value: ''
    param_changes: []
    widget:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      sticky: false
      widget_id: TextInput
""")

    # v4: 'widget' is renamed to 'interaction', 'widget_id' to 'id', and the
    # 'sticky' flag is dropped.
    YAML_CONTENT_V4 = (
"""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 4
skill_tags: []
states:
  (untitled state):
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: (untitled state)
          feedback: []
          param_changes: []
      id: TextInput
    param_changes: []
  New state:
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      id: TextInput
    param_changes: []
""")

    # v5: 'skill_tags' is replaced by 'tags', and skin_customizations are
    # introduced at the exploration level.
    YAML_CONTENT_V5 = (
"""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 5
skin_customizations:
  panels_contents: {}
states:
  (untitled state):
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: (untitled state)
          feedback: []
          param_changes: []
      id: TextInput
    param_changes: []
  New state:
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      id: TextInput
    param_changes: []
tags: []
""")

    # v6: interactions gain a 'triggers' list, the terminal END pseudo-state
    # becomes a real state with an EndExploration interaction, and
    # states_schema_version is tracked separately.
    YAML_CONTENT_V6 = (
"""author_notes: ''
blurb: ''
default_skin: conversation_v1
init_state_name: (untitled state)
language_code: en
objective: ''
param_changes: []
param_specs: {}
schema_version: 6
skin_customizations:
  panels_contents: {}
states:
  (untitled state):
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: (untitled state)
          feedback: []
          param_changes: []
      id: TextInput
      triggers: []
    param_changes: []
  END:
    content:
    - type: text
      value: Congratulations, you have finished!
    interaction:
      customization_args:
        recommendedExplorationIds:
          value: []
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      id: EndExploration
      triggers: []
    param_changes: []
  New state:
    content:
    - type: text
      value: ''
    interaction:
      customization_args:
        placeholder:
          value: ''
        rows:
          value: 1
      handlers:
      - name: submit
        rule_specs:
        - definition:
            rule_type: default
          dest: END
          feedback: []
          param_changes: []
      id: TextInput
      triggers: []
    param_changes: []
states_schema_version: 3
tags: []
""")

    # Every test below migrates an old version and expects the latest form.
    _LATEST_YAML_CONTENT = YAML_CONTENT_V6

    def test_load_from_v1(self):
        """Test direct loading from a v1 yaml file."""
        exploration = exp_domain.Exploration.from_yaml(
            'eid', 'A title', 'A category', self.YAML_CONTENT_V1)
        self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)

    def test_load_from_v2(self):
        """Test direct loading from a v2 yaml file."""
        exploration = exp_domain.Exploration.from_yaml(
            'eid', 'A title', 'A category', self.YAML_CONTENT_V2)
        self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)

    def test_load_from_v3(self):
        """Test direct loading from a v3 yaml file."""
        exploration = exp_domain.Exploration.from_yaml(
            'eid', 'A title', 'A category', self.YAML_CONTENT_V3)
        self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)

    def test_load_from_v4(self):
        """Test direct loading from a v4 yaml file."""
        exploration = exp_domain.Exploration.from_yaml(
            'eid', 'A title', 'A category', self.YAML_CONTENT_V4)
        self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)

    def test_load_from_v5(self):
        """Test direct loading from a v5 yaml file."""
        exploration = exp_domain.Exploration.from_yaml(
            'eid', 'A title', 'A category', self.YAML_CONTENT_V5)
        self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)

    def test_load_from_v6(self):
        """Test direct loading from a v6 yaml file."""
        exploration = exp_domain.Exploration.from_yaml(
            'eid', 'A title', 'A category', self.YAML_CONTENT_V6)
        self.assertEqual(exploration.to_yaml(), self._LATEST_YAML_CONTENT)
class ConversionUnitTests(test_utils.GenericTestBase):
    """Test conversion methods."""

    def test_convert_exploration_to_player_dict(self):
        """Test that to_player_dict() emits the expected player view."""
        EXP_TITLE = 'A title'
        SECOND_STATE_NAME = 'first state'

        exploration = exp_domain.Exploration.create_default_exploration(
            'eid', EXP_TITLE, 'A category')
        exploration.add_states([SECOND_STATE_NAME])

        def _get_default_state_dict(content_str, dest_name):
            # Expected player-view dict for a pristine state whose single
            # default rule routes back to `dest_name`.
            return {
                'content': [{
                    'type': 'text',
                    'value': content_str,
                }],
                'interaction': {
                    'customization_args': {},
                    'handlers': [{
                        'name': 'submit',
                        'rule_specs': [{
                            'definition': {
                                'rule_type': 'default',
                            },
                            'dest': dest_name,
                            'feedback': [],
                            'param_changes': [],
                        }],
                    }],
                    'id': None,
                    'triggers': [],
                },
                'param_changes': [],
            }

        self.assertEqual(exploration.to_player_dict(), {
            'init_state_name': feconf.DEFAULT_INIT_STATE_NAME,
            'title': EXP_TITLE,
            'states': {
                feconf.DEFAULT_INIT_STATE_NAME: _get_default_state_dict(
                    feconf.DEFAULT_INIT_STATE_CONTENT_STR,
                    feconf.DEFAULT_INIT_STATE_NAME),
                SECOND_STATE_NAME: _get_default_state_dict(
                    '', SECOND_STATE_NAME),
            },
            'param_changes': [],
            'param_specs': {},
            'skin_customizations': feconf.DEFAULT_SKIN_CUSTOMIZATIONS,
        })
class StateOperationsUnitTests(test_utils.GenericTestBase):
    """Test methods operating on states."""

    def test_delete_state(self):
        """Test deletion of states."""
        exploration = exp_domain.Exploration.create_default_exploration(
            'eid', 'A title', 'A category')
        exploration.add_states(['first state'])

        # The initial state may never be deleted.
        with self.assertRaisesRegexp(
            ValueError, 'Cannot delete initial state'):
            exploration.delete_state(exploration.init_state_name)

        exploration.add_states(['second state'])
        exploration.delete_state('second state')

        # Deleting an unknown state is an error.
        with self.assertRaisesRegexp(ValueError, 'fake state does not exist'):
            exploration.delete_state('fake state')

    def test_state_operations(self):
        """Test adding, updating and checking existence of states."""
        exploration = exp_domain.Exploration.create_default_exploration(
            'eid', 'A title', 'A category')
        with self.assertRaises(KeyError):
            exploration.states['invalid_state_name']

        self.assertEqual(len(exploration.states), 1)

        default_state_name = exploration.init_state_name
        exploration.rename_state(default_state_name, 'Renamed state')
        self.assertEqual(len(exploration.states), 1)
        self.assertEqual(exploration.init_state_name, 'Renamed state')

        # Add a new state.
        exploration.add_states(['State 2'])
        self.assertEqual(len(exploration.states), 2)

        # It is OK to rename a state to the same name.
        exploration.rename_state('State 2', 'State 2')

        # But it is not OK to add or rename a state using a name that already
        # exists.
        with self.assertRaisesRegexp(ValueError, 'Duplicate state name'):
            exploration.add_states(['State 2'])
        with self.assertRaisesRegexp(ValueError, 'Duplicate state name'):
            exploration.rename_state('State 2', 'Renamed state')

        # And it is OK to rename a state to 'END' (old terminal pseudostate). It
        # is tested throughout this test because a lot of old behavior used to
        # be specific to states named 'END'. These tests validate that is no
        # longer the situation.
        exploration.rename_state('State 2', 'END')

        # Should successfully be able to name it back.
        exploration.rename_state('END', 'State 2')

        # The exploration now has exactly two states.
        self.assertNotIn(default_state_name, exploration.states)
        self.assertIn('Renamed state', exploration.states)
        self.assertIn('State 2', exploration.states)

        # Can successfully add 'END' state
        exploration.add_states(['END'])

        # Should fail to rename like any other state
        with self.assertRaisesRegexp(ValueError, 'Duplicate state name'):
            exploration.rename_state('State 2', 'END')

        # Ensure the other states are connected to END
        exploration.states['Renamed state'].interaction.handlers[
            0].rule_specs[0].dest = 'State 2'
        exploration.states['State 2'].interaction.handlers[
            0].rule_specs[0].dest = 'END'

        # Ensure the other states have interactions
        exploration.states['Renamed state'].update_interaction_id('TextInput')
        exploration.states['State 2'].update_interaction_id('TextInput')

        # Other miscellaneous requirements for validation
        exploration.objective = 'Objective'

        # The exploration should NOT be terminable even though it has a state
        # called 'END' and everything else is connected to it.
        with self.assertRaises(Exception):
            exploration.validate(strict=True)

        # Renaming the node to something other than 'END' and giving it an
        # EndExploration is enough to validate it
        exploration.rename_state('END', 'AnotherEnd')
        exploration.states['AnotherEnd'].update_interaction_id('EndExploration')
        exploration.validate(strict=True)

        # Name it back for final tests
        exploration.rename_state('AnotherEnd', 'END')

        # Should be able to successfully delete it
        exploration.delete_state('END')
        self.assertNotIn('END', exploration.states)
class SkinInstanceUnitTests(test_utils.GenericTestBase):
    """Test methods for SkinInstance."""

    # Expected dict form of the skin instance that is defined by
    # SAMPLE_YAML_CONTENT_WITH_GADGETS: one TestGadget in the left panel.
    _SAMPLE_SKIN_INSTANCE_DICT = {
        'skin_id': 'conversation_v1',
        'skin_customizations': {
            'panels_contents': {
                'bottom': [],
                'left': [
                    {
                        'customization_args': {
                            'characters': {'value': 2},
                            'floors': {'value': 1},
                            'title': {'value': 'The Test Gadget!'}},
                        'gadget_id': 'TestGadget',
                        'visible_in_states': ['New state', 'Second state']
                    }
                ],
                'right': []
            }
        }
    }

    def test_get_state_names_required_by_gadgets(self):
        """Test accurate computation of state_names_required_by_gadgets."""
        skin_instance = exp_domain.SkinInstance(
            'conversation_v1',
            self._SAMPLE_SKIN_INSTANCE_DICT['skin_customizations'])
        self.assertEqual(
            skin_instance.get_state_names_required_by_gadgets(),
            ['New state', 'Second state'])

    def test_conversion_of_skin_to_and_from_dict(self):
        """Tests conversion of SkinInstance to and from dict representations."""
        exploration = exp_domain.Exploration.from_yaml(
            'exp1', 'Title', 'Category', SAMPLE_YAML_CONTENT_WITH_GADGETS)
        skin_instance = exploration.skin_instance

        skin_instance_as_dict = skin_instance.to_dict()
        self.assertEqual(
            skin_instance_as_dict,
            self._SAMPLE_SKIN_INSTANCE_DICT)

        skin_instance_as_instance = exp_domain.SkinInstance.from_dict(
            skin_instance_as_dict)
        self.assertEqual(skin_instance_as_instance.skin_id, 'conversation_v1')
        self.assertEqual(
            sorted(skin_instance_as_instance.panel_contents_dict.keys()),
            ['bottom', 'left', 'right'])
class GadgetInstanceUnitTests(test_utils.GenericTestBase):
    """Tests methods instantiating and validating GadgetInstances."""

    def test_gadget_instantiation(self):
        """Test instantiation of GadgetInstances."""
        exploration = exp_domain.Exploration.from_yaml(
            'exp1', 'Title', 'Category', SAMPLE_YAML_CONTENT_WITH_GADGETS)

        # Assert the left panel has 1 GadgetInstance; bottom and right have 0.
        self.assertEqual(len(exploration.skin_instance.panel_contents_dict[
            'left']), 1)
        self.assertEqual(len(exploration.skin_instance.panel_contents_dict[
            'bottom']), 0)
        self.assertEqual(len(exploration.skin_instance.panel_contents_dict[
            'right']), 0)

    def test_gadget_instance_properties(self):
        """Test accurate representation of gadget properties."""
        exploration = exp_domain.Exploration.from_yaml(
            'exp1', 'Title', 'Category', SAMPLE_YAML_CONTENT_WITH_GADGETS)
        panel_contents_dict = exploration.skin_instance.panel_contents_dict

        # Swap in the test gadget registry so TestGadget can be resolved.
        with self.swap(feconf, 'ALLOWED_GADGETS', TEST_GADGETS):
            test_gadget_instance = panel_contents_dict['left'][0]

        self.assertEqual(test_gadget_instance.height, 50)
        self.assertEqual(test_gadget_instance.width, 60)
        self.assertEqual(
            test_gadget_instance.customization_args['title']['value'],
            'The Test Gadget!')
        self.assertIn('New state', test_gadget_instance.visible_in_states)

    def test_gadget_instance_validation(self):
        """Test validation of GadgetInstance."""
        exploration = exp_domain.Exploration.from_yaml(
            'exp1', 'Title', 'Category', SAMPLE_YAML_CONTENT_WITH_GADGETS)
        panel_contents_dict = exploration.skin_instance.panel_contents_dict

        with self.swap(feconf, 'ALLOWED_GADGETS', TEST_GADGETS):
            test_gadget_instance = panel_contents_dict['left'][0]

        # Validation against sample YAML should pass without error.
        exploration.validate()

        # Assert size exceeded error triggers when a gadget's size exceeds
        # a panel's capacity.
        with self.swap(
            test_gadget_instance.gadget,
            '_PIXEL_WIDTH_PER_CHARACTER',
            2300):
            with self.assertRaisesRegexp(
                utils.ValidationError,
                'Size exceeded: left panel width of 4600 exceeds limit of 100'):
                exploration.validate()

        # Assert internal validation against CustomizationArgSpecs.
        test_gadget_instance.customization_args['floors']['value'] = 5
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'TestGadgets are limited to 3 floors, found 5.'):
            test_gadget_instance.validate()
        # Restore a valid value so later checks are not affected.
        test_gadget_instance.customization_args['floors']['value'] = 1

        # Assert that too many gadgets in a panel raise a ValidationError.
        panel_contents_dict['left'].append(test_gadget_instance)
        with self.assertRaisesRegexp(
            utils.ValidationError,
            "'left' panel expected at most 1 gadget, but 2 gadgets are visible in state 'New state'."):
            exploration.validate()

        # Assert that an error is raised when a gadget is not visible in any
        # states.
        test_gadget_instance.visible_in_states = []
        with self.assertRaisesRegexp(
            utils.ValidationError,
            'TestGadget gadget not visible in any states.'):
            test_gadget_instance.validate()

    def test_conversion_of_gadget_instance_to_and_from_dict(self):
        """Test conversion of GadgetInstance to and from dict. """
        exploration = exp_domain.Exploration.from_yaml(
            'exp1', 'Title', 'Category', SAMPLE_YAML_CONTENT_WITH_GADGETS)
        panel_contents_dict = exploration.skin_instance.panel_contents_dict
        test_gadget_instance = panel_contents_dict['left'][0]

        test_gadget_as_dict = test_gadget_instance.to_dict()

        self.assertEqual(
            test_gadget_as_dict,
            {
                'gadget_id': 'TestGadget',
                'visible_in_states': ['New state', 'Second state'],
                'customization_args': {
                    'title': {
                        'value': 'The Test Gadget!'
                    },
                    'characters': {
                        'value': 2
                    },
                    'floors': {
                        'value': 1
                    }
                }
            }
        )

        test_gadget_as_instance = exp_domain.GadgetInstance.from_dict(
            test_gadget_as_dict)

        self.assertEqual(test_gadget_as_instance.width, 60)
        self.assertEqual(test_gadget_as_instance.height, 50)
        self.assertEqual(
            test_gadget_as_instance.customization_args['title']['value'],
            'The Test Gadget!'
        )
| apache-2.0 |
hakatashi/youtube-dl | youtube_dl/extractor/phoenix.py | 61 | 1506 | from __future__ import unicode_literals
from .dreisat import DreiSatIE
class PhoenixIE(DreiSatIE):
    """Extractor for videos on phoenix.de.

    Inherits from DreiSatIE and delegates the actual media extraction to its
    extract_from_xml_url() helper; only the page scraping differs.
    """
    IE_NAME = 'phoenix.de'
    # Matches both plain /content/<id> URLs and the longer
    # /content/phoenix/die_sendungen/... variants.
    _VALID_URL = r'''(?x)https?://(?:www\.)?phoenix\.de/content/
        (?:
            phoenix/die_sendungen/(?:[^/]+/)?
        )?
        (?P<id>[0-9]+)'''
    _TESTS = [
        {
            'url': 'http://www.phoenix.de/content/884301',
            'md5': 'ed249f045256150c92e72dbb70eadec6',
            'info_dict': {
                'id': '884301',
                'ext': 'mp4',
                'title': 'Michael Krons mit Hans-Werner Sinn',
                'description': 'Im Dialog - Sa. 25.10.14, 00.00 - 00.35 Uhr',
                'upload_date': '20141025',
                'uploader': 'Im Dialog',
            }
        },
        {
            'url': 'http://www.phoenix.de/content/phoenix/die_sendungen/869815',
            'only_matching': True,
        },
        {
            'url': 'http://www.phoenix.de/content/phoenix/die_sendungen/diskussionen/928234',
            'only_matching': True,
        },
    ]

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        # The page embeds the player with an internal id that is used by the
        # media-player API and may differ from the public content id.
        internal_id = self._search_regex(
            r'<div class="phx_vod" id="phx_vod_([0-9]+)"',
            webpage, 'internal video ID')

        api_url = 'http://www.phoenix.de/php/mediaplayer/data/beitrags_details.php?ak=web&id=%s' % internal_id
        return self.extract_from_xml_url(video_id, api_url)
| unlicense |
google-research/google-research | summae/human_and_extractive.py | 1 | 8614 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Human and extractive baseline evaluation.
human_and_extractive \
--data_dir=$ROCSTORIES_DATA \
--eval_subset=test
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
from absl import logging
import numpy as np
import six
from six.moves import range
import tensorflow.compat.v1 as tf
from rouge import rouge_scorer
from rouge import scoring
from summae import p2s_eval
from summae import util
FLAGS = flags.FLAGS

flags.DEFINE_string('data_dir', '.', 'Data directory.')
flags.DEFINE_string('eval_subset', 'test',
                    'which subset (valid/test) to eval/decode.')
flags.DEFINE_string('output_dir', '/tmp/12342',
                    'local directory to save extractive oracle')
flags.DEFINE_string('vocab_file', '',
                    'Subword vocab file.')  # for detok first sentence

# Module-level scorer shared by every baseline in this script; stemming is
# enabled so candidates and references are compared on word stems.
my_rouge_scorer = rouge_scorer.RougeScorer(['rouge1', 'rouge2', 'rougeL'],
                                           use_stemmer=True)
def get_extracts(s):
  """Return the five story sentences used as the extractive baselines.

  Args:
    s: A tf.SequenceExample whose 'untokenized_sentences' feature list holds
      exactly five single-valued bytes features, one per story sentence.

  Returns:
    A 5-tuple of byte strings, one per story sentence, in order.
  """
  sents = s.feature_lists.feature_list['untokenized_sentences'].feature
  # The ROC Stories data stores each story as exactly five sentences.
  assert len(sents) == 5
  # Iterate directly instead of indexing with range(5).
  return tuple(sent.bytes_list.value[0] for sent in sents)
def human_ave(summ_list):
  """Average pairwise rouge between two human summaries.

  For each unordered pair, the second summary is truncated (first sentence,
  at most TRUNC_LEN words) and scored as the candidate against the first
  summary as the reference.

  Args:
    summ_list: List of human summary strings for one example.

  Returns:
    A (score_ave, nwords_ave) tuple: mean rouge scores over all pairs, and
    the mean word count over all summaries.
  """
  agg = scoring.BootstrapAggregator()
  # Iterate over unordered pairs (i < j) directly, instead of a double loop
  # over indices with a `continue` guard.
  for s1_id, s1 in enumerate(summ_list):
    for s2 in summ_list[s1_id + 1:]:
      s2_trunc = p2s_eval.get_summary_truncated(
          p2s_eval.get_summary_first_sentence(s2), p2s_eval.TRUNC_LEN)
      agg.add_scores(my_rouge_scorer.score(s1, s2_trunc))
  agg_ave = agg.aggregate()
  score_ave = {
      rouge_type: agg_ave[rouge_type].mid for rouge_type in agg_ave  # mid=mean
  }
  nwords_ave = np.mean([p2s_eval.count_words(s) for s in summ_list])
  return (score_ave, nwords_ave)
def human_max(summ_list):
  """Maximum pairwise rouge between any two human summaries.

  Returns the score dict of the best pair (by truncated ROUGE-1 recall) and
  the maximum word count over all summaries.
  """
  best_score = None
  best_rouge_1r_trunc = 0
  for first_idx, reference in enumerate(summ_list):
    for candidate in summ_list[first_idx + 1:]:
      candidate_trunc = p2s_eval.get_summary_truncated(
          p2s_eval.get_summary_first_sentence(candidate), p2s_eval.TRUNC_LEN)
      pair_score = my_rouge_scorer.score(reference, candidate_trunc)
      # `>=` keeps the later of tied pairs, matching the pair iteration order.
      if pair_score['rouge1'].recall >= best_rouge_1r_trunc:
        best_score = pair_score
        best_rouge_1r_trunc = pair_score['rouge1'].recall
  nwords_max = np.max([p2s_eval.count_words(s) for s in summ_list])
  return (best_score, nwords_max)
def extract_ave(e, summ_list):
  """Average rouge between an extract sentence and the human summaries."""
  # Truncate once up front; get_summary_first_sentence may not be necessary.
  truncated = p2s_eval.get_summary_truncated(
      p2s_eval.get_summary_first_sentence(e), p2s_eval.TRUNC_LEN)
  aggregator = scoring.BootstrapAggregator()
  for summary in summ_list:
    aggregator.add_scores(my_rouge_scorer.score(summary, truncated))
  aggregated = aggregator.aggregate()
  # mid is the mean over the added scores.
  mean_scores = {name: aggregated[name].mid for name in aggregated}
  nwords_e = p2s_eval.count_words(e)
  return (mean_scores, nwords_e)
def extract_oracle(extract_list, summ_list):
  """Choose the extract sentence with maximum accumulated ROUGE-1 recall.

  Args:
    extract_list: Candidate extract sentences (strings).
    summ_list: Human summary strings for the same example.

  Returns:
    A (score_o, nwords_o, e_o) tuple: mean rouge scores of the oracle
    sentence, its word count, and the oracle sentence itself.
  """
  # Score every candidate once and cache the per-summary scores, so the
  # winning candidate is not re-truncated and re-scored a second time.
  candidate_scores = []
  score_accum = []
  for e in extract_list:
    e_trunc = p2s_eval.get_summary_truncated(
        p2s_eval.get_summary_first_sentence(e),
        p2s_eval.TRUNC_LEN)  # get_summary_first_sentence may not be necessary
    scores = [my_rouge_scorer.score(s, e_trunc) for s in summ_list]
    candidate_scores.append(scores)
    score_accum.append(sum(sc['rouge1'].recall for sc in scores))
  # np.argmax keeps the first candidate on ties, as before.
  e_id_o = np.argmax(score_accum)
  e_o = extract_list[e_id_o]

  # Compute average rouge for the oracle sentence from the cached scores.
  agg = scoring.BootstrapAggregator()
  for sc in candidate_scores[e_id_o]:
    agg.add_scores(sc)
  agg_o = agg.aggregate()
  score_o = {
      rouge_type: agg_o[rouge_type].mid for rouge_type in agg_o  # mid=mean
  }
  nwords_o = p2s_eval.count_words(e_o)
  return (score_o, nwords_o, e_o)
def print_agg_score(label, agg, nwords):
  """Print mean truncated rouge recalls and mean summary length.

  Args:
    label: Name of the baseline being reported.
    agg: A scoring.BootstrapAggregator with accumulated rouge scores.
    nwords: Iterable of per-example word counts.
  """
  # Aggregate once; the previous version called agg.aggregate() twice, which
  # re-ran the (expensive) bootstrap aggregation per rouge type.
  aggregated = agg.aggregate()
  print(
      '%s: \n\t rouge-1r-trunc20=%.3f \t rouge-Lr-trunc20=%.3f \t nwords=%.1f' %
      (label, aggregated['rouge1'].mid.recall,
       aggregated['rougeL'].mid.recall, np.mean(nwords)))
def main(argv):
  """Computes human and extractive rouge baselines over the eval subset."""
  if len(argv) > 1:
    raise app.UsageError('Too many command-line arguments.')
  tf.io.gfile.mkdir(FLAGS.output_dir)
  data_file = os.path.join(
      FLAGS.data_dir,
      'rocstories_gt.' + six.ensure_str(FLAGS.eval_subset) + '.tfrecord')
  seq_ex_list = util.get_seq_exs(data_file)
  print('Input data %s' % data_file)

  # Human summary baselines.
  # We have 3 human summaries for each example, and
  # 2 human performance variants:
  #   1. 'a': average pairwise rouge between two summaries
  #   2. 'm': maximum pairwise rouge between any two summaries
  agg_human = {}
  nwords_human = {}
  for h in ['a', 'm']:
    agg_human[h] = scoring.BootstrapAggregator()
    nwords_human[h] = []

  # Extractive baselines
  #   1. '1','2','3','4','5': rouge between ith sentence and human summary
  #   2. 'o': for each example, choose sentence with maximum average rouge
  agg_extract = {}
  nwords_extract = {}
  for e in [str(x) for x in list(range(5))] + ['o']:
    agg_extract[e] = scoring.BootstrapAggregator()
    nwords_extract[e] = []

  # human performance
  sent2oracle = {}
  for ex in seq_ex_list:
    summ_list = p2s_eval.get_summaries(ex)
    summ_list = [x.decode('utf-8') for x in summ_list]

    # human eval
    score, nwords = human_ave(summ_list)
    agg_human['a'].add_scores(score)
    nwords_human['a'].append(nwords)

    score, nwords = human_max(summ_list)
    agg_human['m'].add_scores(score)
    nwords_human['m'].append(nwords)

    # extractive eval
    extract_list = get_extracts(ex)
    extract_list = [x.decode('utf-8') for x in extract_list]
    for e_id, e in enumerate(extract_list):
      score, nwords = extract_ave(e, summ_list)
      agg_extract[str(e_id)].add_scores(score)
      nwords_extract[str(e_id)].append(nwords)

    score, nwords, e_o = extract_oracle(extract_list, summ_list)
    agg_extract['o'].add_scores(score)
    nwords_extract['o'].append(nwords)

    # save story and oracle sentence for future use
    first = p2s_eval.get_first_sentence(ex)
    if first in sent2oracle:
      logging.fatal('duplicate first sentence: %s', str(first))
    sent2oracle[first] = (' '.join(extract_list), e_o)  # (story, oracle)

  # write each example and the corresponding oracle to disk
  tk, _ = util.get_tokenizer_with_special(FLAGS.vocab_file, [])

  def detok(s):
    # Decode subword ids back to text, dropping everything after EOS.
    return tk.decode(util.strip_after_eos(s))

  # Sort by the detokenized first sentence so the output order is stable.
  keys_sorted = sorted(sent2oracle.keys(), key=detok)

  out_file = os.path.join(
      FLAGS.output_dir, 'rocstories_gt.' + six.ensure_str(FLAGS.eval_subset) +
      '.firstsent2oracle.txt')
  with tf.gfile.Open(out_file, 'w') as f:
    for k in keys_sorted:
      f.write('%s\n' % (sent2oracle[k][1]))

  # print out rouge scores for human performance
  print_agg_score('human average', agg_human['a'], nwords_human['a'])
  print_agg_score('human max', agg_human['m'], nwords_human['m'])
  for e_id in range(5):
    print_agg_score('extractive baseline{}'.format(e_id),
                    agg_extract[str(e_id)], nwords_extract[str(e_id)])
  print_agg_score('extractive oracle', agg_extract['o'], nwords_extract['o'])


if __name__ == '__main__':
  app.run(main)
| apache-2.0 |
fivejjs/PTVS | Python/Product/PythonTools/Templates/Projects/WebRoleBottle/app.py | 36 | 1223 | """
This script runs the application using a development server.
It contains the definition of routes and views for the application.
"""
import os
import sys
from bottle import default_app, redirect, route, template
if '--debug' in sys.argv[1:] or 'SERVER_DEBUG' in os.environ:
    # Debug mode will enable more verbose output in the console window.
    # It must be set at the beginning of the script.
    # The import lives inside the guard so the flag is only touched when
    # debugging is explicitly requested.
    import bottle
    bottle.debug(True)
@route('/')
def hello():
    """Redirects the site root to the sample /hello/world page."""
    redirect('/hello/world')
@route('/hello/<name>')
def example(name):
    """Renders a sample greeting with the name captured from the URL."""
    return template('<b>Hello {{name}}</b>!', name=name)
def wsgi_app():
    """Returns the default bottle application, to make it available through
    wfastcgi. This is used when the site is published to Microsoft Azure."""
    return default_app()
if __name__ == '__main__':
    # Starts a local test server.
    HOST = os.environ.get('SERVER_HOST', 'localhost')
    try:
        PORT = int(os.environ.get('SERVER_PORT', '5555'))
    except ValueError:
        # A non-numeric SERVER_PORT falls back to the default port.
        PORT = 5555

    import bottle
    bottle.run(server='wsgiref', host=HOST, port=PORT)
| apache-2.0 |
jpvanhal/cloudsizzle | cloudsizzle/asi/server.py | 1 | 11139 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2009-2010 CloudSizzle Team
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import logging
from cloudsizzle.kp import SIBConnection, Triple, bnode, uri
from cloudsizzle import settings
from cloudsizzle.asi.importer import user_to_rdf
from cloudsizzle.asi.service import AbstractService, \
ASIServiceKnowledgeProcessor
from cloudsizzle.asi.asi_friends_connection import \
ASIFriendsConnection as ASIConnection
# Module-level logger shared by all server-side ASI services below.
LOG = logging.getLogger('cloudsizzle.asi.server')

# Base URI for person resources in the SIB; user/friend URIs are built as
# PEOPLE_BASE_URI + 'ID#' + <ASI user id>.
PEOPLE_BASE_URI = 'http://cos.alpha.sizl.org/people/'
class SessionStore(object):
    """Registry of open ASI connections, keyed by ASI user id."""

    def __init__(self):
        # Maps user_id -> open ASIConnection.
        self._sessions = {}

    def __del__(self):
        # Best-effort cleanup: close any connections still open when the
        # store is garbage-collected.
        for ac in self._sessions.itervalues():
            ac.close()

    def __getitem__(self, key):
        """Return the open connection for user id `key`.

        Raises KeyError if the user is not logged in.
        """
        return self._sessions[key]

    def login(self, username, password):
        """Open an ASI session for `username` and register it.

        Returns the session entry dict on success, or the raw ASI response
        (containing 'messages') on failure.
        """
        # SECURITY: log only the username — credentials must never end up in
        # log files.
        msg = "Logging in to ASI with username '{0}'."
        LOG.debug(msg.format(username))
        ac = ASIConnection(
            base_url=settings.ASI_BASE_URL,
            app_name=settings.ASI_APP_NAME,
            app_password=settings.ASI_APP_PASSWORD,
            username=username,
            password=password)
        response = ac.open()

        try:
            user_id = response['entry']['user_id']
            self._sessions[user_id] = ac
            LOG.debug("Logged in with user_id {0}!".format(user_id))
            return ac.session['entry']
        except KeyError:
            # No user_id in the response: the login was rejected by ASI.
            ac.close()
            LOG.warning("Logging in failed: {0}".format(response['messages']))
            return response

    def logout(self, user_id):
        """Close and forget the session of `user_id`, if any."""
        LOG.debug('Logging out user with user_id {0}.'.format(user_id))
        try:
            ac = self._sessions[user_id]
            ac.close()
            del self._sessions[user_id]
        except KeyError:
            msg = 'Logging out failed: user {0} was not logged in.'
            LOG.warning(msg.format(user_id))
class AbstractServer(AbstractService):
    """Abstract base class for the server side of a request-response service.

    Subclasses subscribe to service requests (matched by query_triple) and
    answer them with respond().
    """

    def __init__(self, sc):
        super(AbstractServer, self).__init__(sc)

    @property
    def query_triple(self):
        # Matches every instance of this service's request type.
        return Triple(None, 'rdf:type', self.request_type)

    def respond(self, request_id, response):
        """Respond to a service request.

        request_id -- The ID of the service request.
        response -- A dict containing the response data.
        """
        response['rdf:type'] = self.response_type
        response['response_to'] = uri(request_id)
        LOG.debug(
            'Responding to request {0} with {1}.'.format(request_id, response))
        triples = []
        for predicate, values in response.iteritems():
            if not isinstance(values, list):
                values = [values]
            triples.extend(
                Triple(bnode('id'), predicate, value) for value in values)
        self.sc.insert(triples)
class LoginServer(AbstractServer):
    """Serves Login requests by authenticating the user against ASI."""

    def __init__(self, sc, session_store):
        super(LoginServer, self).__init__(sc)
        self.session_store = session_store

    @property
    def name(self):
        return 'Login'

    def process(self, id_, data):
        # The session store returns either the session entry or an ASI error
        # response; both are forwarded to the requester verbatim.
        session = self.session_store.login(data['username'], data['password'])
        self.respond(id_, session)
class LogoutServer(AbstractServer):
    """Serves Logout requests by closing the user's ASI session."""

    def __init__(self, sc, session_store):
        super(LogoutServer, self).__init__(sc)
        self.session_store = session_store

    @property
    def name(self):
        return 'Logout'

    def process(self, id_, data):
        # Logout produces no response payload; the session is simply closed.
        self.session_store.logout(data['user_id'])
class RegisterServer(AbstractServer):
    """Serves Register requests by creating a new user in ASI and mirroring
    the profile into the SIB."""

    def __init__(self, sc):
        super(RegisterServer, self).__init__(sc)

    @property
    def name(self):
        return 'Register'

    def process(self, id_, data):
        with ASIConnection(
                base_url=settings.ASI_BASE_URL,
                app_name=settings.ASI_APP_NAME,
                app_password=settings.ASI_APP_PASSWORD) as ac:
            user_info = ac.create_user(
                username=data['username'],
                password=data['password'],
                email=data['email'])
            if 'messages' in user_info:
                # ASI rejected the registration; relay its messages.
                response = {'messages': user_info['messages']}
            else:
                # Copy the new user's profile from ASI to the SIB.
                self.sc.insert(user_to_rdf(user_info))
                response = {'user_id': user_info['id']}
        self.respond(id_, response)
class RejectFriendRequestServer(AbstractServer):
    """Serves RejectFriendRequest requests: declines a pending friend
    request in ASI and removes it from the SIB."""

    def __init__(self, sc, session_store):
        super(RejectFriendRequestServer, self).__init__(sc)
        self.session_store = session_store

    @property
    def name(self):
        return 'RejectFriendRequest'

    def process(self, id_, data):
        user_id = str(data['user_id'])
        friend_id = str(data['friend_id'])
        try:
            ac = self.session_store[user_id]
        except KeyError as e:
            # Use the module logger instead of printing to stdout, for
            # consistency with the rest of this module.
            LOG.warning('User {0} has no open ASI session: {1}'.format(
                user_id, e))
            response = {'messages': 'did not login ASi'}
        else:
            result = ac.reject_friend_request(friend_id)
            user_uri = '%sID#%s' % (PEOPLE_BASE_URI, user_id)
            friend_uri = '%sID#%s' % (PEOPLE_BASE_URI, friend_id)
            # Remove the pending request from my view in the SIB.
            remove_triple = Triple(
                user_uri,
                uri('http://cos.alpha.sizl.org/people#PendingFriend'),
                friend_uri)
            self.sc.remove(remove_triple)
            response = {'result': str(result)}
        self.respond(id_, response)
class RemoveFriendsServer(AbstractServer):
def __init__(self, sc, session_store):
super(RemoveFriendsServer, self).__init__(sc)
self.session_store = session_store
@property
def name(self):
return 'RemoveFriends'
def process(self, id_, data):
user_id = str(data['user_id'])
friend_id = str(data['friend_id'])
try:
ac = self.session_store[user_id]
except KeyError, e:
print e
response = {'messages': 'did not login ASi'}
else:
ac.remove_friend(friend_id)
user_uri = '%sID#%s' % (PEOPLE_BASE_URI, user_id)
friend_uri = '%sID#%s' % (PEOPLE_BASE_URI, friend_id)
# Remove from my view
remove_triple1 = Triple(
user_uri,
uri('http://cos.alpha.sizl.org/people#Friend'),
friend_uri)
# Remove from my friend's view
remove_triple2 = Triple(
friend_uri,
uri('http://cos.alpha.sizl.org/people#Friend'),
user_uri)
result = self.sc.remove([remove_triple1, remove_triple2])
response = {'result': str(result)}
self.respond(id_, response)
class AddFriendsServer(AbstractServer):
def __init__(self, sc, session_store):
super(AddFriendsServer, self).__init__(sc)
self.session_store = session_store
@property
def name(self):
return 'AddFriends'
def process(self, id_, data):
user_id = str(data['user_id'])
friend_id = str(data['friend_id'])
try:
ac = self.session_store[user_id]
except KeyError, e:
print e
response = {'messages': 'did not login ASi'}
else:
pending_friends = ac.get_pending_friend_requests()
my_pending_friend_list = []
try:
for pending_friend in pending_friends['entry']:
my_pending_friend_list.append(pending_friend['id'])
except KeyError, e:
print e
result = ac.add_friend(friend_id)
response = {'result': str(result)}
if friend_id in my_pending_friend_list:
user_uri = '%sID#%s' % (PEOPLE_BASE_URI, user_id)
friend_uri = '%sID#%s' % (PEOPLE_BASE_URI, friend_id)
# Remove from my view
remove_triple = Triple(
user_uri,
uri('http://cos.alpha.sizl.org/people#PendingFriend'),
friend_uri)
self.sc.remove(remove_triple)
# Add to friend's view
insert_triple1 = Triple(
friend_uri,
uri('http://cos.alpha.sizl.org/people#Friend'),
user_uri)
# Add to my view
insert_triple2 = Triple(
user_uri,
uri('http://cos.alpha.sizl.org/people#Friend'),
friend_uri)
self.sc.insert([insert_triple1, insert_triple2])
else:
user_uri = '%sID#%s' % (PEOPLE_BASE_URI, user_id)
friend_uri = '%sID#%s' % (PEOPLE_BASE_URI, friend_id)
# Add to friend's view
insert_triple = Triple(
friend_uri,
uri('http://cos.alpha.sizl.org/people#PendingFriend'),
user_uri)
self.sc.insert(insert_triple)
self.respond(id_, response)
def main():
    """Wire every ASI service into a knowledge processor and run it until
    the operator presses enter."""
    session_store = SessionStore()
    with SIBConnection('ASI service server', method='preconfigured') as sc:
        services = (
            LoginServer(sc, session_store),
            LogoutServer(sc, session_store),
            RegisterServer(sc),
            AddFriendsServer(sc, session_store),
            RemoveFriendsServer(sc, session_store),
            RejectFriendRequestServer(sc, session_store),
        )
        processor = ASIServiceKnowledgeProcessor(services)
        processor.start()
        try:
            raw_input('Press enter to stop.\n')
        finally:
            # Always stop the processor, even on KeyboardInterrupt.
            processor.stop()


if __name__ == '__main__':
    main()
| mit |
yaroslavvb/tensorflow | tensorflow/contrib/learn/python/learn/datasets/base_test.py | 136 | 3072 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.datasets import base
from tensorflow.python.platform import test
mock = test.mock
_TIMEOUT = IOError(110, "timeout")
class BaseTest(test.TestCase):
  """Tests for base.urlretrieve_with_retry retry/backoff behavior."""

  def testUrlretrieveRetriesOnIOError(self):
    """Timeout-style IOErrors are retried with exponential backoff."""
    with mock.patch.object(base, "time") as mock_time:
      with mock.patch.object(base, "urllib") as mock_urllib:
        mock_urllib.request.urlretrieve.side_effect = [
            _TIMEOUT, _TIMEOUT, _TIMEOUT, _TIMEOUT, _TIMEOUT, None
        ]
        base.urlretrieve_with_retry("http://dummy.com", "/tmp/dummy")

        # Assert full backoff was tried.
        actual_list = [arg[0][0] for arg in mock_time.sleep.call_args_list]
        expected_list = [1, 2, 4, 8, 16]
        for actual, expected in zip(actual_list, expected_list):
          # Backoff is jittered, so allow 25% tolerance around each step.
          self.assertLessEqual(abs(actual - expected), 0.25 * expected)
        # assertEqual: assertEquals is a deprecated unittest alias.
        self.assertEqual(len(actual_list), len(expected_list))

  def testUrlretrieveRaisesAfterRetriesAreExhausted(self):
    """Once all retries fail, the last IOError propagates to the caller."""
    with mock.patch.object(base, "time") as mock_time:
      with mock.patch.object(base, "urllib") as mock_urllib:
        mock_urllib.request.urlretrieve.side_effect = [
            _TIMEOUT,
            _TIMEOUT,
            _TIMEOUT,
            _TIMEOUT,
            _TIMEOUT,
            _TIMEOUT,
        ]
        with self.assertRaises(IOError):
          base.urlretrieve_with_retry("http://dummy.com", "/tmp/dummy")

        # Assert full backoff was tried before giving up.
        actual_list = [arg[0][0] for arg in mock_time.sleep.call_args_list]
        expected_list = [1, 2, 4, 8, 16]
        for actual, expected in zip(actual_list, expected_list):
          self.assertLessEqual(abs(actual - expected), 0.25 * expected)
        self.assertEqual(len(actual_list), len(expected_list))

  def testUrlretrieveRaisesOnNonRetriableErrorWithoutRetry(self):
    """Non-timeout IOErrors (e.g. ENOENT) fail immediately, no backoff."""
    with mock.patch.object(base, "time") as mock_time:
      with mock.patch.object(base, "urllib") as mock_urllib:
        mock_urllib.request.urlretrieve.side_effect = [
            IOError(2, "No such file or directory"),
        ]
        with self.assertRaises(IOError):
          base.urlretrieve_with_retry("http://dummy.com", "/tmp/dummy")

        # Assert no retries: sleep must never have been called.
        self.assertFalse(mock_time.called)
# Run the test suite when executed directly.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
vijeth-aradhya/coala-bears | bears/python/YapfBear.py | 5 | 7698 | from yapf.yapflib.yapf_api import FormatCode
from coalib.bearlib import deprecate_settings
from coalib.bearlib.spacing.SpacingHelper import SpacingHelper
from coalib.bears.LocalBear import LocalBear
from dependency_management.requirements.PipRequirement import PipRequirement
from coala_utils.ContextManagers import prepare_file
from coalib.results.Result import Result
from coalib.results.Diff import Diff
class YapfBear(LocalBear):
    """
    Check and correct formatting of Python code using ``yapf`` utility.

    See <https://github.com/google/yapf> for more information.
    """
    LANGUAGES = {'Python', 'Python 2', 'Python 3'}
    AUTHORS = {'The coala developers'}
    REQUIREMENTS = {PipRequirement('yapf', '0.14.0')}
    AUTHORS_EMAILS = {'coala-devel@googlegroups.com'}
    LICENSE = 'AGPL-3.0'
    CAN_FIX = {'Formatting'}
    ASCIINEMA_URL = 'https://asciinema.org/a/89021'

    @deprecate_settings(indent_size='tab_width')
    def run(self, filename, file,
            max_line_length: int=79,
            indent_size: int=SpacingHelper.DEFAULT_TAB_WIDTH,
            allow_multiline_lambdas: bool=False,
            blank_line_before_nested_class_or_def: bool=False,
            continuation_tab_width: int=SpacingHelper.DEFAULT_TAB_WIDTH,
            dedent_closing_brackets: bool=False,
            indent_dictionary_value: bool=False,
            coalesce_brackets: bool=False,
            join_multiple_lines: bool=True,
            spaces_around_power_operator: bool=True,
            spaces_before_comment: int=2,
            space_between_ending_comma_and_closing_bracket: bool=True,
            split_arguments_when_comma_terminated: bool=False,
            split_before_bitwise_operator: bool=False,
            split_before_first_argument: bool=False,
            split_before_logical_operator: bool=False,
            split_before_named_assigns: bool=True,
            use_spaces: bool=True,
            based_on_style: str='pep8',
            prefer_line_break_after_opening_bracket: bool=True):
        """
        :param max_line_length:
            Maximum number of characters for a line.
        :param indent_size:
            Number of spaces per indentation level.
        :param allow_multiline_lambdas:
            Allows lambdas to be formatted on more than one line.
        :param blank_line_before_nested_class_or_def:
            Inserts a blank line before a ``def`` or ``class`` immediately
            nested within another ``def`` or ``class``.
        :param continuation_tab_width:
            Indent width used for line continuations.
        :param dedent_closing_brackets:
            Puts closing brackets on a separate line, dedented, if the
            bracketed expression can't fit in a single line. Applies to all
            kinds of brackets, including function definitions and calls.
        :param indent_dictionary_value:
            Indents the dictionary value if it cannot fit on the same line as
            the dictionary key.
        :param coalesce_brackets:
            Prevents splitting consecutive brackets. Only relevant when
            ``dedent_closing_brackets`` is set.
            Example:
            If ``True``,

            ```
            call_func_that_takes_a_dict(
                {
                    'key1': 'value1',
                    'key2': 'value2',
                }
            )
            ```
            would reformat to:

            ```
            call_func_that_takes_a_dict({
                'key1': 'value1',
                'key2': 'value2',
            })
            ```
        :param join_multiple_lines:
            Joins short lines into one line.
        :param spaces_around_power_operator:
            Set to ``True`` to prefer using spaces around ``**``.
        :param spaces_before_comment:
            The number of spaces required before a trailing comment.
        :param space_between_ending_comma_and_closing_bracket:
            Inserts a space between the ending comma and closing bracket of a
            list, etc.
        :param split_arguments_when_comma_terminated:
            Splits before arguments if the argument list is terminated by a
            comma.
        :param split_before_bitwise_operator:
            Set to ``True`` to prefer splitting before ``&``, ``|`` or ``^``
            rather than after.
        :param split_before_first_argument:
            If an argument / parameter list is going to be split, then split
            before the first argument.
        :param split_before_logical_operator:
            Set to ``True`` to prefer splitting before ``and`` or ``or`` rather
            than after.
        :param split_before_named_assigns:
            Splits named assignments into individual lines.
        :param use_spaces:
            Uses spaces for indentation.
        :param based_on_style:
            The formatting style to be used as reference.
        :param prefer_line_break_after_opening_bracket:
            If True, splitting right after a open bracket will not be
            preferred.
        """
        if not file:
            # Yapf cannot handle zero-byte files well, and adds a redundant
            # newline into the file. To avoid this, we don't parse zero-byte
            # files as they cannot have anything to format either.
            return

        # NOTE: the placeholders below are filled from locals(), so the
        # parameter names above must stay in sync with the template keys.
        options = """
[style]
indent_width = {indent_size}
column_limit = {max_line_length}
allow_multiline_lambdas = {allow_multiline_lambdas}
continuation_indent_width = {continuation_tab_width}
dedent_closing_brackets = {dedent_closing_brackets}
indent_dictionary_value = {indent_dictionary_value}
join_multiple_lines = {join_multiple_lines}
spaces_around_power_operator = {spaces_around_power_operator}
spaces_before_comment = {spaces_before_comment}
coalesce_brackets = {coalesce_brackets}
split_before_bitwise_operator = {split_before_bitwise_operator}
split_before_first_argument = {split_before_first_argument}
split_before_logical_operator = {split_before_logical_operator}
split_before_named_assigns = {split_before_named_assigns}
based_on_style = {based_on_style}
blank_line_before_nested_class_or_def = {blank_line_before_nested_class_or_def}
split_arguments_when_comma_terminated = {split_arguments_when_comma_terminated}
space_between_ending_comma_and_closing_bracket= \
{space_between_ending_comma_and_closing_bracket}
"""
        options += 'use_tabs = ' + str(not use_spaces) + '\n'
        options += ('split_penalty_after_opening_bracket = ' +
                    ('30' if prefer_line_break_after_opening_bracket
                     else '0') + '\n')
        options = options.format(**locals())

        try:
            # Write the style config to a temp file so yapf can consume it.
            with prepare_file(options.splitlines(keepends=True),
                              None) as (file_, fname):
                corrected = FormatCode(
                    ''.join(file), style_config=fname)[0].splitlines(True)
        except SyntaxError as err:
            # Yapf raises on unparsable input; surface that as a result
            # instead of crashing the bear.
            if isinstance(err, IndentationError):
                error_type = 'indentation errors (' + err.args[0] + ')'
            else:
                error_type = 'syntax errors'
            yield Result.from_values(
                self,
                'The code cannot be parsed due to {0}.'.format(error_type),
                filename, line=err.lineno, column=err.offset)
            return
        # Emit one result per contiguous diff hunk so each can be applied
        # (or rejected) independently.
        diffs = Diff.from_string_arrays(file, corrected).split_diff()
        for diff in diffs:
            yield Result(self,
                         'The code does not comply with the settings '
                         'provided.',
                         affected_code=(diff.range(filename),),
                         diffs={filename: diff})
| agpl-3.0 |
dana-i2cat/felix | vt_manager/src/python/vt_manager/models/MacRange.py | 3 | 8197 | from django.db import models
from django.contrib import auth
from threading import Lock
import inspect
from vt_manager.utils.EthernetUtils import EthernetUtils
from vt_manager.utils.MutexStore import MutexStore
from vt_manager.models.MacSlot import MacSlot
'''
@author: msune
'''
class MacRange(models.Model):
from vt_manager.models.MacSlot import MacSlot
"""MacRange"""
class Meta:
"""Meta Class for your model."""
app_label = 'vt_manager'
'''
Private attributes
'''
#Range name
name = models.CharField(max_length = 255, default="", verbose_name = "Range name",unique=True)
isGlobal = models.BooleanField(verbose_name="Global range",default=1,help_text="Globa ranges will be used by servers which are not subscribed to any specific range")
#Range parameters
startMac = models.CharField(verbose_name="Start Mac Address", max_length = 17, default="", validators=[EthernetUtils.checkValidMac])
endMac = models.CharField(verbose_name="End Mac Address", max_length = 17, default="", validators=[EthernetUtils.checkValidMac])
#Pool of macs both assigned and excluded (particular case of assignment)
macs = models.ManyToManyField('MacSlot', blank = True, null = True, editable = False)
nextAvailableMac = models.CharField(verbose_name="Next Available Mac Address",max_length = 17, default="",editable=False)
#Statistics
numberOfSlots = models.BigIntegerField(blank = True, null=True, editable = False)
#Defines soft or hard state of the range
doSave = True
'''
Private methods
'''
@staticmethod
def constructor(name,startMac,endMac,isGlobal=True,save=True):
self = MacRange()
try:
#Default constructor
EthernetUtils.checkValidMac(startMac)
EthernetUtils.checkValidMac(endMac)
self.startMac = startMac.upper()
self.endMac = endMac.upper()
self.name = name
self.isGlobal= isGlobal
#Create an iterator
it= EthernetUtils.getMacIterator(self.startMac,self.endMac)
self.nextAvailableMac = it.getNextMac()
#Number of Slots
try:
self.numberOfSlots = EthernetUtils.getNumberOfSlotsInRange(startMac,endMac)
except Exception as e:
print "Exception doing slot calculation"+str(e)
self.numberOfSlots = -1
self.doSave = save
if save:
self.save()
except Exception as e:
print e
raise e
return self
def autoSave(self):
if self.doSave:
self.save()
def __setStartMac(self, value):
EthernetUtils.checkValidMac(value)
MAC4Utils.checkValidMac(value)
self.startMac = value.upper()
self.autoSave()
def __setEndMac(self, value):
EthernetUtils.checkValidMac(value)
self.endMac = value.upper()
self.autoSave()
def __isMacAvailable(self,mac):
return self.macs.filter(mac=mac).count() == 0
'''
Public methods
'''
def getLockIdentifier(self):
#Uniquely identifies object by a key
return inspect.currentframe().f_code.co_filename+str(self)+str(self.id)
def getName(self):
return self.name
def getStartMac(self):
return self.startMac
def getEndMac(self):
return self.endMac
def getIsGlobal(self):
return self.isGlobal
def getExcludedMacs(self):
return self.macs.filter(isExcluded=True).order_by('mac')
def getAllocatedMacs(self):
return self.macs.filter(isExcluded=False).order_by('mac')
def getNumberOfSlots(self):
return int(self.numberOfSlots)
def getPercentageRangeUsage(self):
if not self.numberOfSlots == -1:
return round((float(self.macs.all().count())/float(self.numberOfSlots))*100,2)
return -1
def destroy(self):
with MutexStore.getObjectLock(self.getLockIdentifier()):
if self.macs.filter(isExcluded=False).count() > 0:
raise Exception("Cannot delete MacRange. Range still contains allocated Macs")
for mac in self.macs.all():
#Delete excluded macs
mac.delete()
self.delete()
def allocateMac(self):
'''
Allocates an MAC address of the range
'''
with MutexStore.getObjectLock(self.getLockIdentifier()):
#Implements first fit algorithm
if self.nextAvailableMac == None:
raise Exception("Could not allocate any Mac")
newMac = MacSlot.macFactory(self,self.nextAvailableMac)
self.macs.add(newMac)
#Try to find new slot
try:
it= EthernetUtils.getMacIterator(self.nextAvailableMac,self.endMac)
while True:
mac = it.getNextMac()
if self.__isMacAvailable(mac):
break
self.nextAvailableMac = mac
except Exception as e:
self.nextAvailableMac = None
self.autoSave()
return newMac
def releaseMac(self,macObj):
'''
Releases an MAC address of the range (but it does not destroy the object!!)
'''
with MutexStore.getObjectLock(self.getLockIdentifier()):
macStr = macObj.getMac()
if not self.macs.filter(mac=macStr).count() > 0:
raise Exception("Cannot release Mac %s. Reason may be is unallocated or is an excluded Mac",macStr)
self.macs.remove(macObj)
#Determine new available Mac
if not self.nextAvailableMac == None:
if EthernetUtils.compareMacs(macStr,self.nextAvailableMac) > 0:
#Do nothing
pass
else:
self.nextAvailableMac = macStr
else:
#No more gaps
self.nextAvailableMac = macStr
self.autoSave()
def addExcludedMac(self,macStr,comment=""):
'''
Add an MAC to the exclusion list
'''
with MutexStore.getObjectLock(self.getLockIdentifier()):
#Check is not already allocated
if not self.__isMacAvailable(macStr):
raise Exception("Mac already allocated or marked as excluded")
#then forbidd
if not EthernetUtils.isMacInRange(macStr,self.startMac,self.endMac):
raise Exception("Mac is not in range")
newMac = MacSlot.excludedMacFactory(self,macStr,comment)
self.macs.add(newMac)
#if was nextSlot shift
if self.nextAvailableMac == macStr:
try:
it= EthernetUtils.getMacIterator(self.nextAvailableMac,self.endMac)
while True:
mac = it.getNextMac()
if self.__isMacAvailable(mac):
break
self.nextAvailableMac= mac
except Exception as e:
self.nextAvailableMac = None
self.autoSave()
def removeExcludedMac(self,macObj):
'''
Deletes an Mac from the exclusion list (but it does not destroy the object!!)
'''
with MutexStore.getObjectLock(self.getLockIdentifier()):
macStr = macObj.getMac()
if (not self.macs.get(mac=macStr).isExcludedMac()):
raise Exception("Cannot release Mac. Reason may be is unallocated or is not excluded Mac")
self.macs.remove(macObj)
#Determine new available Mac
if not self.nextAvailableMac == None:
if EthernetUtils.compareMacs(macStr,self.nextAvailableMac) > 0:
#Do nothing
pass
else:
self.nextAvailableMac = macStr
else:
#No more gaps
self.nextAvailableMac = macStr
self.autoSave()
'''
Static methods
'''
@staticmethod
def getAllocatedGlobalNumberOfSlots():
allocated = 0
for range in MacRange.objects.filter(isGlobal=True):
allocated += range.macs.all().count()
return allocated
@staticmethod
def getGlobalNumberOfSlots():
slots = 0
for range in MacRange.objects.filter(isGlobal=True):
slots += range.numberOfSlots
return int(slots)
def rebasePointer(self):
'''Used when pointer has lost track mostly due to bug #'''
with MutexStore.getObjectLock(self.getLockIdentifier()):
print "Rebasing pointer of range: "+str(self.id)
print "Current pointer point to: "+self.nextAvailableMac
try:
it= EthernetUtils.getMacIterator(self.startMac,self.endMac)
while True:
mac = it.getNextMac()
if self.__isMacAvailable(mac):
break
self.nextAvailableMac= mac
except Exception as e:
self.nextAvailableMac = None
print "Pointer will be rebased to: "+self.nextAvailableMac
self.save()
@staticmethod
def rebasePointers():
for range in MacRange.objects.all():
range.rebasePointer()
#slot = RangeSlot("127.0.0.1","127.0.0.255","255.255.255.0")
#slot.allocateMac()
#try:
# slot.releaseMac("d")
#except Exception:
# pass
#slot.allocateMac()
| apache-2.0 |
fengyuanjs/catawampus | tr/vendor/tornado/tornado/test/httputil_test.py | 12 | 7089 | #!/usr/bin/env python
from __future__ import absolute_import, division, with_statement
from tornado.httputil import url_concat, parse_multipart_form_data, HTTPHeaders
from tornado.escape import utf8
from tornado.testing import LogTrapTestCase
from tornado.util import b
import logging
import unittest
class TestUrlConcat(unittest.TestCase):
    """Tests for tornado.httputil.url_concat query-string appending."""

    def test_url_concat_no_query_params(self):
        self.assertEqual(
            url_concat("https://localhost/path", [('y', 'y'), ('z', 'z')]),
            "https://localhost/path?y=y&z=z")

    def test_url_concat_encode_args(self):
        # Values must be percent-encoded.
        self.assertEqual(
            url_concat("https://localhost/path", [('y', '/y'), ('z', 'z')]),
            "https://localhost/path?y=%2Fy&z=z")

    def test_url_concat_trailing_q(self):
        self.assertEqual(
            url_concat("https://localhost/path?", [('y', 'y'), ('z', 'z')]),
            "https://localhost/path?y=y&z=z")

    def test_url_concat_q_with_no_trailing_amp(self):
        self.assertEqual(
            url_concat("https://localhost/path?x", [('y', 'y'), ('z', 'z')]),
            "https://localhost/path?x&y=y&z=z")

    def test_url_concat_trailing_amp(self):
        self.assertEqual(
            url_concat("https://localhost/path?x&", [('y', 'y'), ('z', 'z')]),
            "https://localhost/path?x&y=y&z=z")

    def test_url_concat_mult_params(self):
        self.assertEqual(
            url_concat("https://localhost/path?a=1&b=2",
                       [('y', 'y'), ('z', 'z')]),
            "https://localhost/path?a=1&b=2&y=y&z=z")

    def test_url_concat_no_params(self):
        # An empty arg list leaves the URL untouched.
        self.assertEqual(
            url_concat("https://localhost/path?r=1&t=2", []),
            "https://localhost/path?r=1&t=2")
class MultipartFormDataTest(LogTrapTestCase):
    """Tests for tornado.httputil.parse_multipart_form_data.

    NOTE(review): blank lines separating part headers from part bodies were
    lost in transit; they are restored here from the upstream tornado file —
    without them every fixture is malformed multipart.
    """

    def test_file_upload(self):
        data = b("""\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"

Foo
--1234--""").replace(b("\n"), b("\r\n"))
        args = {}
        files = {}
        parse_multipart_form_data(b("1234"), data, args, files)
        file = files["files"][0]
        self.assertEqual(file["filename"], "ab.txt")
        self.assertEqual(file["body"], b("Foo"))

    def test_unquoted_names(self):
        # quotes are optional unless special characters are present
        data = b("""\
--1234
Content-Disposition: form-data; name=files; filename=ab.txt

Foo
--1234--""").replace(b("\n"), b("\r\n"))
        args = {}
        files = {}
        parse_multipart_form_data(b("1234"), data, args, files)
        file = files["files"][0]
        self.assertEqual(file["filename"], "ab.txt")
        self.assertEqual(file["body"], b("Foo"))

    def test_special_filenames(self):
        # Filenames containing quoting metacharacters must round-trip.
        filenames = ['a;b.txt',
                     'a"b.txt',
                     'a";b.txt',
                     'a;"b.txt',
                     'a";";.txt',
                     'a\\"b.txt',
                     'a\\b.txt',
                     ]
        for filename in filenames:
            logging.info("trying filename %r", filename)
            data = """\
--1234
Content-Disposition: form-data; name="files"; filename="%s"

Foo
--1234--""" % filename.replace('\\', '\\\\').replace('"', '\\"')
            data = utf8(data.replace("\n", "\r\n"))
            args = {}
            files = {}
            parse_multipart_form_data(b("1234"), data, args, files)
            file = files["files"][0]
            self.assertEqual(file["filename"], filename)
            self.assertEqual(file["body"], b("Foo"))

    def test_boundary_starts_and_ends_with_quotes(self):
        # A quoted boundary parameter is accepted.
        data = b('''\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"

Foo
--1234--''').replace(b("\n"), b("\r\n"))
        args = {}
        files = {}
        parse_multipart_form_data(b('"1234"'), data, args, files)
        file = files["files"][0]
        self.assertEqual(file["filename"], "ab.txt")
        self.assertEqual(file["body"], b("Foo"))

    def test_missing_headers(self):
        # A part without headers is silently dropped.
        data = b('''\
--1234

Foo
--1234--''').replace(b("\n"), b("\r\n"))
        args = {}
        files = {}
        parse_multipart_form_data(b("1234"), data, args, files)
        self.assertEqual(files, {})

    def test_invalid_content_disposition(self):
        # Only "form-data" dispositions are accepted.
        data = b('''\
--1234
Content-Disposition: invalid; name="files"; filename="ab.txt"

Foo
--1234--''').replace(b("\n"), b("\r\n"))
        args = {}
        files = {}
        parse_multipart_form_data(b("1234"), data, args, files)
        self.assertEqual(files, {})

    def test_line_does_not_end_with_correct_line_break(self):
        # Missing CRLF before the closing boundary invalidates the part.
        data = b('''\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"

Foo--1234--''').replace(b("\n"), b("\r\n"))
        args = {}
        files = {}
        parse_multipart_form_data(b("1234"), data, args, files)
        self.assertEqual(files, {})

    def test_content_disposition_header_without_name_parameter(self):
        # The name parameter is mandatory; without it the part is dropped.
        data = b("""\
--1234
Content-Disposition: form-data; filename="ab.txt"

Foo
--1234--""").replace(b("\n"), b("\r\n"))
        args = {}
        files = {}
        parse_multipart_form_data(b("1234"), data, args, files)
        self.assertEqual(files, {})

    def test_data_after_final_boundary(self):
        # The spec requires that data after the final boundary be ignored.
        # http://www.w3.org/Protocols/rfc1341/7_2_Multipart.html
        # In practice, some libraries include an extra CRLF after the boundary.
        data = b("""\
--1234
Content-Disposition: form-data; name="files"; filename="ab.txt"

Foo
--1234--
""").replace(b("\n"), b("\r\n"))
        args = {}
        files = {}
        parse_multipart_form_data(b("1234"), data, args, files)
        file = files["files"][0]
        self.assertEqual(file["filename"], "ab.txt")
        self.assertEqual(file["body"], b("Foo"))
class HTTPHeadersTest(unittest.TestCase):
    """Tests for tornado.httputil.HTTPHeaders parsing.

    NOTE(review): the significant leading whitespace inside the header
    fixture (continuation lines) was lost in transit and is restored here
    from the upstream tornado file.
    """

    def test_multi_line(self):
        # Lines beginning with whitespace are appended to the previous line
        # with any leading whitespace replaced by a single space.
        # Note that while multi-line headers are a part of the HTTP spec,
        # their use is strongly discouraged.
        data = """\
Foo: bar
 baz
Asdf: qwer
\tzxcv
Foo: even
     more
     lines
""".replace("\n", "\r\n")
        headers = HTTPHeaders.parse(data)
        self.assertEqual(headers["asdf"], "qwer zxcv")
        self.assertEqual(headers.get_list("asdf"), ["qwer zxcv"])
        # Repeated headers are joined with commas on scalar access...
        self.assertEqual(headers["Foo"], "bar baz,even more lines")
        # ...but preserved individually by get_list/get_all.
        self.assertEqual(headers.get_list("foo"), ["bar baz", "even more lines"])
        self.assertEqual(sorted(list(headers.get_all())),
                         [("Asdf", "qwer zxcv"),
                          ("Foo", "bar baz"),
                          ("Foo", "even more lines")])
| apache-2.0 |
smallyear/linuxLearn | salt/salt/daemons/flo/__init__.py | 3 | 5034 | # -*- coding: utf-8 -*-
'''
Package for ioflo and raet based daemons and associated ioflo behaviors
To use set
opts['transport'] ='raet'
master minion config
transport: raet
See salt.config.py for relevant defaults
opts['raet_port']
opts['master_floscript']
opts['minion_floscript']
opts['ioflo_period']
opts['ioflo_realtime']
opts['ioflo_verbose']
opts['caller_floscript']
'''
# Import Python libs
from __future__ import absolute_import
import os
# Import modules
from . import core
from . import worker
from . import maint
from . import reactor
from . import zero
from . import jobber
from . import dummy
__all__ = ['core', 'worker', 'maint', 'zero', 'dummy', 'jobber', 'reactor']
# Import salt libs
import salt.daemons.masterapi
# Import 3rd-party libs
import ioflo.app.run
import salt.ext.six as six
def explode_opts(opts):
    '''
    Explode the opts into a preloads list
    '''
    # The whole opts dict is shared first, then one share per option.
    # Dots are not legal inside a share path, so they become underscores.
    entries = [('.salt.opts', dict(value=opts))]
    for option, setting in six.iteritems(opts):
        share = '.salt.etc.{0}'.format(option.replace('.', '_'))
        entries.append((share, dict(value=setting)))
    entries.append(('.salt.etc.id', dict(value=opts['id'])))
    return entries
class IofloMaster(object):
    '''
    IofloMaster Class

    Wraps ioflo startup for the salt master floscript.
    '''
    def __init__(self, opts):
        '''
        Assign self.opts and precompute the preload shares, including the
        master access keys.
        '''
        self.opts = opts
        self.preloads = explode_opts(self.opts)
        self.access_keys = salt.daemons.masterapi.access_keys(self.opts)
        self.preloads.append(
            ('.salt.access_keys', dict(value=self.access_keys)))

    def start(self, behaviors=None):
        '''
        Start up ioflo

        port = self.opts['raet_port']
        '''
        if behaviors is None:
            behaviors = []
        # The flo behavior package is always loaded in addition to any
        # caller-supplied behaviors.
        behaviors.extend(['salt.daemons.flo'])

        console_logdir = self.opts.get('ioflo_console_logdir', '')
        if console_logdir:
            consolepath = os.path.join(console_logdir, 'master.log')
        else:  # empty means log to std out
            consolepath = ''

        ioflo.app.run.start(
            name='master',
            period=float(self.opts['ioflo_period']),
            stamp=0.0,
            real=self.opts['ioflo_realtime'],
            filepath=self.opts['master_floscript'],
            behaviors=behaviors,
            username="",
            password="",
            mode=None,
            houses=None,
            metas=None,
            preloads=self.preloads,
            verbose=int(self.opts['ioflo_verbose']),
            consolepath=consolepath,
        )
class IofloMinion(object):
    '''
    IofloMinion Class

    Wraps ioflo startup for the minion and caller floscripts. tune_in and
    call_in were copy-paste duplicates differing only in the floscript opt
    key and the console log name; the shared logic lives in _run_ioflo.
    '''
    def __init__(self, opts):
        '''
        Assign self.opts
        '''
        self.opts = opts

    def _run_ioflo(self, floscript_key, logname, behaviors):
        '''
        Shared ioflo startup.

        floscript_key -- opts key naming the floscript file to run
        logname       -- console log filename under ioflo_console_logdir
        behaviors     -- optional list of extra behavior packages (mutated
                         in place, matching the original implementations)
        '''
        if behaviors is None:
            behaviors = []
        behaviors.extend(['salt.daemons.flo'])

        preloads = explode_opts(self.opts)

        console_logdir = self.opts.get('ioflo_console_logdir', '')
        if console_logdir:
            consolepath = os.path.join(console_logdir, logname)
        else:  # empty means log to std out
            consolepath = ''

        ioflo.app.run.start(
            name=self.opts['id'],
            period=float(self.opts['ioflo_period']),
            stamp=0.0,
            real=self.opts['ioflo_realtime'],
            filepath=self.opts[floscript_key],
            behaviors=behaviors,
            username="",
            password="",
            mode=None,
            houses=None,
            metas=None,
            preloads=preloads,
            verbose=int(self.opts['ioflo_verbose']),
            consolepath=consolepath,
        )

    def tune_in(self, behaviors=None):
        '''
        Start up ioflo with the minion floscript.
        '''
        self._run_ioflo('minion_floscript', 'minion.log', behaviors)

    start = tune_in  # alias

    def call_in(self, behaviors=None):
        '''
        Start up caller minion for salt-call when there is no local minion.
        '''
        self._run_ioflo('caller_floscript', 'caller.log', behaviors)
| apache-2.0 |
diplomacy/research | diplomacy_research/__init__.py | 1 | 1696 | # ==============================================================================
# Copyright 2019 - Philip Paquette
#
# NOTICE: Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# ==============================================================================
""" Diplomacy Research """
# Setting up root logger
import os
import logging
import sys
# Adding path to proto/ dir
sys.path.append(os.path.join(os.path.dirname(__file__), 'proto'))
LOGGING_LEVEL = {'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG}.get(os.environ.get('DIPLOMACY_LOGGING', 'INFO'), logging.INFO)
# Defining root logger
ROOT_LOGGER = logging.getLogger('diplomacy_research')
ROOT_LOGGER.setLevel(LOGGING_LEVEL)
ROOT_LOGGER.propagate = False
# Adding output to stdout by default
STREAM_HANDLER = logging.StreamHandler(sys.stdout)
STREAM_HANDLER.setLevel(logging.DEBUG)
FORMATTER = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
STREAM_HANDLER.setFormatter(FORMATTER)
ROOT_LOGGER.addHandler(STREAM_HANDLER)
| mit |
georgid/SourceFilterContoursMelody | smstools/software/transformations_interface/harmonicTransformations_GUI_frame.py | 5 | 10584 | # GUI frame for the harmonicTransformations_function.py
from Tkinter import *
import tkFileDialog, tkMessageBox
import sys, os
import pygame
from scipy.io.wavfile import read
import numpy as np
import harmonicTransformations_function as hT
class HarmonicTransformations_frame:
    def __init__(self, parent):
        # Tk parent widget that hosts this frame.
        self.parent = parent
        # Build all widgets first, then initialise pygame for audio preview.
        self.initUI()
        pygame.init()
def initUI(self):
choose_label = "inputFile:"
Label(self.parent, text=choose_label).grid(row=0, column=0, sticky=W, padx=5, pady=(10,2))
#TEXTBOX TO PRINT PATH OF THE SOUND FILE
self.filelocation = Entry(self.parent)
self.filelocation.focus_set()
self.filelocation["width"] = 32
self.filelocation.grid(row=0,column=0, sticky=W, padx=(70, 5), pady=(10,2))
self.filelocation.delete(0, END)
self.filelocation.insert(0, '../../sounds/vignesh.wav')
#BUTTON TO BROWSE SOUND FILE
open_file = Button(self.parent, text="...", command=self.browse_file) #see: def browse_file(self)
open_file.grid(row=0, column=0, sticky=W, padx=(340, 6), pady=(10,2)) #put it beside the filelocation textbox
#BUTTON TO PREVIEW SOUND FILE
preview = Button(self.parent, text=">", command=self.preview_sound, bg="gray30", fg="white")
preview.grid(row=0, column=0, sticky=W, padx=(385,6), pady=(10,2))
## HARMONIC TRANSFORMATIONS ANALYSIS
#ANALYSIS WINDOW TYPE
wtype_label = "window:"
Label(self.parent, text=wtype_label).grid(row=1, column=0, sticky=W, padx=5, pady=(10,2))
self.w_type = StringVar()
self.w_type.set("blackman") # initial value
window_option = OptionMenu(self.parent, self.w_type, "rectangular", "hanning", "hamming", "blackman", "blackmanharris")
window_option.grid(row=1, column=0, sticky=W, padx=(65,5), pady=(10,2))
#WINDOW SIZE
M_label = "M:"
Label(self.parent, text=M_label).grid(row=1, column=0, sticky=W, padx=(180, 5), pady=(10,2))
self.M = Entry(self.parent, justify=CENTER)
self.M["width"] = 5
self.M.grid(row=1,column=0, sticky=W, padx=(200,5), pady=(10,2))
self.M.delete(0, END)
self.M.insert(0, "1201")
#FFT SIZE
N_label = "N:"
Label(self.parent, text=N_label).grid(row=1, column=0, sticky=W, padx=(255, 5), pady=(10,2))
self.N = Entry(self.parent, justify=CENTER)
self.N["width"] = 5
self.N.grid(row=1,column=0, sticky=W, padx=(275,5), pady=(10,2))
self.N.delete(0, END)
self.N.insert(0, "2048")
#THRESHOLD MAGNITUDE
t_label = "t:"
Label(self.parent, text=t_label).grid(row=1, column=0, sticky=W, padx=(330,5), pady=(10,2))
self.t = Entry(self.parent, justify=CENTER)
self.t["width"] = 5
self.t.grid(row=1, column=0, sticky=W, padx=(348,5), pady=(10,2))
self.t.delete(0, END)
self.t.insert(0, "-90")
#MIN DURATION SINUSOIDAL TRACKS
minSineDur_label = "minSineDur:"
Label(self.parent, text=minSineDur_label).grid(row=2, column=0, sticky=W, padx=(5, 5), pady=(10,2))
self.minSineDur = Entry(self.parent, justify=CENTER)
self.minSineDur["width"] = 5
self.minSineDur.grid(row=2, column=0, sticky=W, padx=(87,5), pady=(10,2))
self.minSineDur.delete(0, END)
self.minSineDur.insert(0, "0.1")
#MAX NUMBER OF HARMONICS
nH_label = "nH:"
Label(self.parent, text=nH_label).grid(row=2, column=0, sticky=W, padx=(145,5), pady=(10,2))
self.nH = Entry(self.parent, justify=CENTER)
self.nH["width"] = 5
self.nH.grid(row=2, column=0, sticky=W, padx=(172,5), pady=(10,2))
self.nH.delete(0, END)
self.nH.insert(0, "100")
#MIN FUNDAMENTAL FREQUENCY
minf0_label = "minf0:"
Label(self.parent, text=minf0_label).grid(row=2, column=0, sticky=W, padx=(227,5), pady=(10,2))
self.minf0 = Entry(self.parent, justify=CENTER)
self.minf0["width"] = 5
self.minf0.grid(row=2, column=0, sticky=W, padx=(275,5), pady=(10,2))
self.minf0.delete(0, END)
self.minf0.insert(0, "130")
#MAX FUNDAMENTAL FREQUENCY
maxf0_label = "maxf0:"
Label(self.parent, text=maxf0_label).grid(row=2, column=0, sticky=W, padx=(330,5), pady=(10,2))
self.maxf0 = Entry(self.parent, justify=CENTER)
self.maxf0["width"] = 5
self.maxf0.grid(row=2, column=0, sticky=W, padx=(380,5), pady=(10,2))
self.maxf0.delete(0, END)
self.maxf0.insert(0, "300")
#MAX ERROR ACCEPTED
f0et_label = "f0et:"
Label(self.parent, text=f0et_label).grid(row=3, column=0, sticky=W, padx=5, pady=(10,2))
self.f0et = Entry(self.parent, justify=CENTER)
self.f0et["width"] = 3
self.f0et.grid(row=3, column=0, sticky=W, padx=(42,5), pady=(10,2))
self.f0et.delete(0, END)
self.f0et.insert(0, "7")
#ALLOWED DEVIATION OF HARMONIC TRACKS
harmDevSlope_label = "harmDevSlope:"
Label(self.parent, text=harmDevSlope_label).grid(row=3, column=0, sticky=W, padx=(90,5), pady=(10,2))
self.harmDevSlope = Entry(self.parent, justify=CENTER)
self.harmDevSlope["width"] = 5
self.harmDevSlope.grid(row=3, column=0, sticky=W, padx=(190,5), pady=(10,2))
self.harmDevSlope.delete(0, END)
self.harmDevSlope.insert(0, "0.01")
#BUTTON TO DO THE ANALYSIS OF THE SOUND
self.compute = Button(self.parent, text="Analysis/Synthesis", command=self.analysis, bg="dark red", fg="white")
self.compute.grid(row=4, column=0, padx=5, pady=(10,5), sticky=W)
#BUTTON TO PLAY ANALYSIS/SYNTHESIS OUTPUT
self.output = Button(self.parent, text=">", command=lambda:self.play_out_sound('harmonicModel'), bg="gray30", fg="white")
self.output.grid(row=4, column=0, padx=(145,5), pady=(10,5), sticky=W)
###
#SEPARATION LINE
Frame(self.parent,height=1,width=50,bg="black").grid(row=5, pady=5, sticky=W+E)
###
#FREQUENCY SCALING FACTORS
freqScaling_label = "Frequency scaling factors (time, value pairs):"
Label(self.parent, text=freqScaling_label).grid(row=6, column=0, sticky=W, padx=5, pady=(5,2))
self.freqScaling = Entry(self.parent, justify=CENTER)
self.freqScaling["width"] = 35
self.freqScaling.grid(row=7, column=0, sticky=W+E, padx=5, pady=(0,2))
self.freqScaling.delete(0, END)
self.freqScaling.insert(0, "[0, 2.0, 1, 0.3]")
#FREQUENCY STRETCHING FACTORSharmonicModelTransformation
freqStretching_label = "Frequency stretching factors (time, value pairs):"
Label(self.parent, text=freqStretching_label).grid(row=8, column=0, sticky=W, padx=5, pady=(5,2))
self.freqStretching = Entry(self.parent, justify=CENTER)
self.freqStretching["width"] = 35
self.freqStretching.grid(row=9, column=0, sticky=W+E, padx=5, pady=(0,2))
self.freqStretching.delete(0, END)
self.freqStretching.insert(0, "[0, 1, 1, 1.5]")
#TIMBRE PRESERVATION
timbrePreservation_label = "Timbre preservation (1 preserves original timbre, 0 it does not):"
Label(self.parent, text=timbrePreservation_label).grid(row=10, column=0, sticky=W, padx=5, pady=(5,2))
self.timbrePreservation = Entry(self.parent, justify=CENTER)
self.timbrePreservation["width"] = 2
self.timbrePreservation.grid(row=10, column=0, sticky=W+E, padx=(395,5), pady=(5,2))
self.timbrePreservation.delete(0, END)
self.timbrePreservation.insert(0, "1")
#TIME SCALING FACTORS
timeScaling_label = "Time scaling factors (time, value pairs):"
Label(self.parent, text=timeScaling_label).grid(row=11, column=0, sticky=W, padx=5, pady=(5,2))
self.timeScaling = Entry(self.parent, justify=CENTER)
self.timeScaling["width"] = 35
self.timeScaling.grid(row=12, column=0, sticky=W+E, padx=5, pady=(0,2))
self.timeScaling.delete(0, END)
self.timeScaling.insert(0, "[0, 0, 0.671, 0.671, 1.978, 1.978+1.0]")
#BUTTON TO DO THE SYNTHESIS
self.compute = Button(self.parent, text="Apply Transformation", command=self.transformation_synthesis, bg="dark green", fg="white")
self.compute.grid(row=13, column=0, padx=5, pady=(10,15), sticky=W)
#BUTTON TO PLAY TRANSFORMATION SYNTHESIS OUTPUT
self.transf_output = Button(self.parent, text=">", command=lambda:self.play_out_sound('harmonicModelTransformation'), bg="gray30", fg="white")
self.transf_output.grid(row=13, column=0, padx=(165,5), pady=(10,15), sticky=W)
# define options for opening file
self.file_opt = options = {}
options['defaultextension'] = '.wav'
options['filetypes'] = [('All files', '.*'), ('Wav files', '.wav')]
options['initialdir'] = '../../sounds/'
options['title'] = 'Open a mono audio file .wav with sample frequency 44100 Hz'
def preview_sound(self):
filename = self.filelocation.get()
if filename[-4:] == '.wav':
(fs, x) = read(filename)
else:
tkMessageBox.showerror("Wav file", "The audio file must be a .wav")
return
if len(x.shape) > 1 :
tkMessageBox.showerror("Stereo file", "Audio file must be Mono not Stereo")
elif fs != 44100:
tkMessageBox.showerror("Sample Frequency", "Sample frequency must be 44100 Hz")
else:
sound = pygame.mixer.Sound(filename)
sound.play()
def browse_file(self):
self.filename = tkFileDialog.askopenfilename(**self.file_opt)
#set the text of the self.filelocation
self.filelocation.delete(0, END)
self.filelocation.insert(0,self.filename)
def analysis(self):
try:
inputFile = self.filelocation.get()
window = self.w_type.get()
M = int(self.M.get())
N = int(self.N.get())
t = int(self.t.get())
minSineDur = float(self.minSineDur.get())
nH = int(self.nH.get())
minf0 = int(self.minf0.get())
maxf0 = int(self.maxf0.get())
f0et = int(self.f0et.get())
harmDevSlope = float(self.harmDevSlope.get())
self.inputFile, self.fs, self.hfreq, self.hmag = hT.analysis(inputFile, window, M, N, t, minSineDur, nH, minf0, maxf0, f0et, harmDevSlope)
except ValueError:
tkMessageBox.showerror("Input values error", "Some parameters are incorrect")
def transformation_synthesis(self):
try:
inputFile = self.inputFile
fs = self.fs
hfreq = self.hfreq
hmag = self.hmag
freqScaling = np.array(eval(self.freqScaling.get()))
freqStretching = np.array(eval(self.freqStretching.get()))
timbrePreservation = int(self.timbrePreservation.get())
timeScaling = np.array(eval(self.timeScaling.get()))
hT.transformation_synthesis(inputFile, fs, hfreq, hmag, freqScaling, freqStretching, timbrePreservation, timeScaling)
except ValueError as errorMessage:
tkMessageBox.showerror("Input values error", errorMessage)
except AttributeError:
tkMessageBox.showerror("Analysis not computed", "First you must analyse the sound!")
def play_out_sound(self, extension):
filename = 'output_sounds/' + os.path.basename(self.filelocation.get())[:-4] + '_' + extension + '.wav'
if os.path.isfile(filename):
sound = pygame.mixer.Sound(filename)
sound.play()
else:
tkMessageBox.showerror("Output audio file not found", "The output audio file has not been computed yet")
| gpl-3.0 |
eezee-it/reporting-engine | report_custom_filename/__openerp__.py | 6 | 1566 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2014 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    # Odoo/OpenERP 8.0 addon manifest for the OCA reporting-engine module
    # "report_custom_filename".
    "name": "Custom report filenames",
    "summary": "Configure the filename to use when downloading a report",
    "version": "8.0.1.0.1",
    "author": "Therp BV,Odoo Community Association (OCA)",
    "license": "AGPL-3",
    "complexity": "normal",
    "category": "Reporting",
    # web: download hook; email_template: template rendering for filenames.
    "depends": [
        'web',
        'email_template',
    ],
    # Views loaded at install/update time.
    "data": [
        "view/ir_actions_report_xml.xml",
    ],
    "test": [
    ],
    "auto_install": False,
    "installable": True,
    "application": False,
    # jinja2 is used at runtime — presumably to render the filename
    # template; confirm against the module's report code.
    "external_dependencies": {
        'python': ['jinja2'],
    },
}
| agpl-3.0 |
rspavel/spack | var/spack/repos/builtin/packages/libjpeg-turbo/package.py | 4 | 3255 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class LibjpegTurbo(Package):
    """libjpeg-turbo is a fork of the original IJG libjpeg which uses SIMD to
    accelerate baseline JPEG compression and decompression.

    libjpeg is a library that implements JPEG image encoding, decoding and
    transcoding.
    """
    # https://github.com/libjpeg-turbo/libjpeg-turbo/blob/master/BUILDING.md
    homepage = "https://libjpeg-turbo.org/"
    url = "https://github.com/libjpeg-turbo/libjpeg-turbo/archive/2.0.3.tar.gz"

    version('2.0.4', sha256='7777c3c19762940cff42b3ba4d7cd5c52d1671b39a79532050c85efb99079064')
    version('2.0.3', sha256='a69598bf079463b34d45ca7268462a18b6507fdaa62bb1dfd212f02041499b5d')
    version('2.0.2', sha256='b45255bd476c19c7c6b198c07c0487e8b8536373b82f2b38346b32b4fa7bb942')
    version('1.5.90', sha256='cb948ade92561d8626fd7866a4a7ba3b952f9759ea3dd642927bc687470f60b7')
    version('1.5.3', sha256='1a17020f859cb12711175a67eab5c71fc1904e04b587046218e36106e07eabde')
    version('1.5.0', sha256='232280e1c9c3e6a1de95fe99be2f7f9c0362ee08f3e3e48d50ee83b9a2ed955b')
    version('1.3.1', sha256='5008aeeac303ea9159a0ec3ccff295434f4e63b05aed4a684c9964d497304524')

    # This package satisfies the virtual "jpeg" dependency.
    provides('jpeg')

    # Can use either of these. But in the current version of the package
    # only nasm is used. In order to use yasm an environmental variable
    # NASM must be set.
    # TODO: Implement the selection between two supported assemblers.
    # depends_on('yasm', type='build')
    depends_on('nasm', type='build')
    # Older releases (<= 1.5.3) use autotools; 1.5.90+ switched to CMake.
    depends_on('autoconf', type='build', when='@1.3.1:1.5.3')
    depends_on('automake', type='build', when='@1.3.1:1.5.3')
    depends_on('libtool', type='build', when='@1.3.1:1.5.3')
    depends_on('cmake', type='build', when='@1.5.90:')

    @property
    def libs(self):
        # Locate the installed libjpeg* libraries for dependents.
        return find_libraries('libjpeg*', root=self.prefix, recursive=True)

    def flag_handler(self, name, flags):
        """Route compiler flags: CMake builds get them as build-system args,
        autotools builds get them injected via the compiler wrapper."""
        if self.spec.satisfies('@1.5.90:'):
            return (None, None, flags)
        else:
            # compiler flags for earlier version are injected into the
            # spack compiler wrapper
            return (flags, None, None)

    def flags_to_build_system_args(self, flags):
        """Translate injected flags into CMake arguments.

        This only handles cflags, other flags are discarded.
        """
        cmake_flag_args = []
        if 'cflags' in flags and flags['cflags']:
            cmake_flag_args.append('-DCMAKE_C_FLAGS={0}'.format(
                ' '.join(flags['cflags'])))
        self.cmake_flag_args = cmake_flag_args

    @when('@1.3.1:1.5.3')
    def install(self, spec, prefix):
        # Autotools path for legacy releases.
        autoreconf('-ifv')
        configure('--prefix=%s' % prefix)
        make()
        make('install')

    @when('@1.5.90:')
    def install(self, spec, prefix):
        # CMake path for modern releases; built out-of-tree.
        cmake_args = ['-GUnix Makefiles']
        if hasattr(self, 'cmake_flag_args'):
            cmake_args.extend(self.cmake_flag_args)
        cmake_args.extend(std_cmake_args)
        with working_dir('spack-build', create=True):
            cmake('..', *cmake_args)
            make()
            make('install')
| lgpl-2.1 |
leighpauls/k2cro4 | third_party/WebKit/Source/ThirdParty/gyp/test/home_dot_gyp/gyptest-home-includes-regyp.py | 151 | 1287 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies inclusion of $HOME/.gyp/includes.gypi works properly with relocation
and with regeneration.
"""
import os
import TestGyp
# Regenerating build files when a gyp file changes is currently only supported
# by the make generator.
test = TestGyp.TestGyp(formats=['make'])
os.environ['HOME'] = os.path.abspath('home')
test.run_gyp('all.gyp', chdir='src')
# After relocating, we should still be able to build (build file shouldn't
# contain relative reference to ~/.gyp/includes.gypi)
test.relocate('src', 'relocate/src')
test.build('all.gyp', test.ALL, chdir='relocate/src')
test.run_built_executable('printfoo',
chdir='relocate/src',
stdout="FOO is fromhome\n");
# Building should notice any changes to ~/.gyp/includes.gypi and regyp.
test.sleep()
test.write('home/.gyp/include.gypi', test.read('home2/.gyp/include.gypi'))
test.build('all.gyp', test.ALL, chdir='relocate/src')
test.run_built_executable('printfoo',
chdir='relocate/src',
stdout="FOO is fromhome2\n");
test.pass_test()
| bsd-3-clause |
andrejcampa/compass | app/tests/autopilot/compass/__init__.py | 34 | 1068 | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
"""Application autopilot helpers."""
import logging
import ubuntuuitoolkit
logger = logging.getLogger(__name__)
# Raised by the helpers below when interaction with the app proxy fails.
class AppException(ubuntuuitoolkit.ToolkitException):
    """Exception raised when there are problems with the Application."""
class TouchApp(object):
    """Autopilot helper object for the application."""

    def __init__(self, app_proxy, test_type):
        """
        :param app_proxy: autopilot proxy object for the launched application.
        :param test_type: how the app was launched — presumably 'click' or
            'deb'; confirm against the launching test fixture.
        """
        self.app = app_proxy
        self.test_type = test_type
        # Cache the top-level view proxy so helpers can reuse it.
        self.main_view = self.app.select_single(MainView)

    @property
    def pointing_device(self):
        # Expose the proxy's pointing device (mouse or touch backend).
        return self.app.pointing_device
class MainView(ubuntuuitoolkit.MainView):
    """A helper that makes it easy to interact with the mainview"""

    def __init__(self, *args):
        super(MainView, self).__init__(*args)
        # Block (up to 30 s) until the view is rendered so helpers never
        # race against application start-up.
        self.visible.wait_for(True, 30)

    def get_button(self):
        """Return the proxy for the Button with objectName 'button'."""
        return self.select_single('Button', objectName="button")

    def get_label(self):
        """Return the proxy for the Label with objectName 'label'."""
        return self.select_single('Label', objectName="label")
| gpl-3.0 |
anirudhSK/chromium | tools/telemetry/telemetry/core/browser_unittest.py | 3 | 6035 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import unittest
from telemetry import test
from telemetry.core import browser_finder
from telemetry.core import gpu_device
from telemetry.core import gpu_info
from telemetry.core import system_info
from telemetry.core import util
from telemetry.unittest import options_for_unittests
class BrowserTest(unittest.TestCase):
    """Smoke tests for the telemetry Browser API: creation, tab control,
    system info and tracing. Each test lazily creates a browser via
    CreateBrowser() and tearDown() closes it."""

    def setUp(self):
        self._browser = None

    def CreateBrowser(self,
                      extra_browser_args=None,
                      profile_type=None):
        """Find, create and start a browser using the unittest options.

        Returns the started browser, or None when a profile_type is
        requested on a non-desktop platform (profiles unsupported there).
        Raises if no suitable browser is found.
        """
        assert not self._browser

        options = options_for_unittests.GetCopy()

        if profile_type:
            # TODO(jeremy): crbug.com/243912 profiles are only implemented on
            # Desktop.
            is_running_on_desktop = not (
                options.browser_type.startswith('android') or
                options.browser_type.startswith('cros'))
            if not is_running_on_desktop:
                # NOTE(review): logging.warn is deprecated; logging.warning
                # is the modern spelling.
                logging.warn("Desktop-only test, skipping.")
                return None
            options.browser_options.profile_type = profile_type

        if extra_browser_args:
            options.AppendExtraBrowserArgs(extra_browser_args)

        browser_to_create = browser_finder.FindBrowser(options)
        if not browser_to_create:
            raise Exception('No browser found, cannot continue test.')

        self._browser = browser_to_create.Create()
        self._browser.Start()
        # Serve test pages from the unittest data directory.
        self._browser.SetHTTPServerDirectories(util.GetUnittestDataDir())

        return self._browser

    def tearDown(self):
        if self._browser:
            self._browser.Close()

    def testBrowserCreation(self):
        b = self.CreateBrowser()
        self.assertEquals(1, len(b.tabs))

        # Different browsers boot up to different things.
        assert b.tabs[0].url

    def testCommandLineOverriding(self):
        # This test starts the browser with --user-agent=telemetry. This tests
        # whether the user agent is then set.
        flag1 = '--user-agent=telemetry'
        b = self.CreateBrowser(extra_browser_args=[flag1])
        t = b.tabs[0]
        t.Navigate(b.http_server.UrlOf('blank.html'))
        t.WaitForDocumentReadyStateToBeInteractiveOrBetter()
        self.assertEquals(t.EvaluateJavaScript('navigator.userAgent'),
                          'telemetry')

    def testVersionDetection(self):
        b = self.CreateBrowser()
        # Both detected versions must be positive integers.
        v = b._browser_backend._inspector_protocol_version  # pylint: disable=W0212
        self.assertTrue(v > 0)
        v = b._browser_backend.chrome_branch_number  # pylint: disable=W0212
        self.assertTrue(v > 0)

    def testNewCloseTab(self):
        b = self.CreateBrowser()
        if not b.supports_tab_control:
            logging.warning('Browser does not support tab control, skipping test.')
            return
        existing_tab = b.tabs[0]
        self.assertEquals(1, len(b.tabs))
        existing_tab_url = existing_tab.url

        new_tab = b.tabs.New()
        self.assertEquals(2, len(b.tabs))
        self.assertEquals(existing_tab.url, existing_tab_url)
        self.assertEquals(new_tab.url, 'about:blank')

        new_tab.Close()
        self.assertEquals(1, len(b.tabs))
        self.assertEquals(existing_tab.url, existing_tab_url)

    def testMultipleTabCalls(self):
        b = self.CreateBrowser()
        b.tabs[0].Navigate(b.http_server.UrlOf('blank.html'))
        b.tabs[0].WaitForDocumentReadyStateToBeInteractiveOrBetter()

    def testTabCallByReference(self):
        b = self.CreateBrowser()
        tab = b.tabs[0]
        tab.Navigate(b.http_server.UrlOf('blank.html'))
        b.tabs[0].WaitForDocumentReadyStateToBeInteractiveOrBetter()

    # Test flaky on windows: http://crbug.com/321527
    @test.Disabled('win')
    def testCloseReferencedTab(self):
        b = self.CreateBrowser()
        if not b.supports_tab_control:
            logging.warning('Browser does not support tab control, skipping test.')
            return
        b.tabs.New()
        tab = b.tabs[0]
        tab.Navigate(b.http_server.UrlOf('blank.html'))
        tab.Close()
        self.assertEquals(1, len(b.tabs))

    def testForegroundTab(self):
        b = self.CreateBrowser()
        if not b.supports_tab_control:
            logging.warning('Browser does not support tab control, skipping test.')
            return
        # Should be only one tab at this stage, so that must be the foreground tab
        original_tab = b.tabs[0]
        self.assertEqual(b.foreground_tab, original_tab)
        new_tab = b.tabs.New()
        # New tab should be the foreground tab
        self.assertEqual(b.foreground_tab, new_tab)
        # Make sure that activating the background tab makes it the foreground tab
        original_tab.Activate()
        self.assertEqual(b.foreground_tab, original_tab)
        # Closing the current foreground tab should switch the foreground tab to the
        # other tab
        original_tab.Close()
        self.assertEqual(b.foreground_tab, new_tab)

    def testDirtyProfileCreation(self):
        b = self.CreateBrowser(profile_type = 'small_profile')

        # TODO(jeremy): crbug.com/243912 profiles are only implemented on Desktop
        if not b:
            return

        self.assertEquals(1, len(b.tabs))

    def testGetSystemInfo(self):
        b = self.CreateBrowser()
        if not b.supports_system_info:
            logging.warning(
                'Browser does not support getting system info, skipping test.')
            return

        info = b.GetSystemInfo()

        self.assertTrue(isinstance(info, system_info.SystemInfo))
        self.assertTrue(hasattr(info, 'model_name'))
        self.assertTrue(hasattr(info, 'gpu'))
        self.assertTrue(isinstance(info.gpu, gpu_info.GPUInfo))
        self.assertTrue(hasattr(info.gpu, 'devices'))
        self.assertTrue(len(info.gpu.devices) > 0)
        for g in info.gpu.devices:
            self.assertTrue(isinstance(g, gpu_device.GPUDevice))

    def testGetSystemTotalMemory(self):
        b = self.CreateBrowser()
        self.assertTrue(b.memory_stats['SystemTotalPhysicalMemory'] > 0)

    def testIsTracingRunning(self):
        b = self.CreateBrowser()
        if not b.supports_tracing:
            return
        # Tracing state must toggle with Start/StopTracing.
        self.assertFalse(b.is_tracing_running)
        b.StartTracing()
        self.assertTrue(b.is_tracing_running)
        b.StopTracing()
        self.assertFalse(b.is_tracing_running)
| bsd-3-clause |
ptoraskar/django | tests/template_tests/filter_tests/test_make_list.py | 345 | 1611 | from django.template.defaultfilters import make_list
from django.test import SimpleTestCase
from django.test.utils import str_prefix
from django.utils.safestring import mark_safe
from ..utils import setup
class MakeListTests(SimpleTestCase):
    """
    The make_list filter can destroy existing escaping, so the results are
    escaped.
    """

    # Each case feeds mark_safe("&") through make_list and checks the
    # repr-style output; str_prefix handles the py2 u'' prefix difference.
    @setup({'make_list01': '{% autoescape off %}{{ a|make_list }}{% endautoescape %}'})
    def test_make_list01(self):
        output = self.engine.render_to_string('make_list01', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s'&']"))

    @setup({'make_list02': '{{ a|make_list }}'})
    def test_make_list02(self):
        output = self.engine.render_to_string('make_list02', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s'&']"))

    @setup({'make_list03':
        '{% autoescape off %}{{ a|make_list|stringformat:"s"|safe }}{% endautoescape %}'})
    def test_make_list03(self):
        output = self.engine.render_to_string('make_list03', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s'&']"))

    @setup({'make_list04': '{{ a|make_list|stringformat:"s"|safe }}'})
    def test_make_list04(self):
        output = self.engine.render_to_string('make_list04', {"a": mark_safe("&")})
        self.assertEqual(output, str_prefix("[%(_)s'&']"))
class FunctionTests(SimpleTestCase):
    """Direct unit tests for the make_list filter function."""

    def test_string(self):
        # Strings are split into their individual characters.
        self.assertEqual(make_list('abc'), ['a', 'b', 'c'])

    def test_integer(self):
        # Non-strings are stringified first, then split.
        self.assertEqual(make_list(1234), ['1', '2', '3', '4'])
| bsd-3-clause |
entomb/CouchPotatoServer | libs/suds/xsd/sxbase.py | 193 | 19777 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
The I{sxbase} module provides I{base} classes that represent
schema objects.
"""
from logging import getLogger
from suds import *
from suds.xsd import *
from suds.sax.element import Element
from suds.sax import Namespace
log = getLogger(__name__)
class SchemaObject(object):
"""
A schema object is an extension to object object with
with schema awareness.
@ivar root: The XML root element.
@type root: L{Element}
@ivar schema: The schema containing this object.
@type schema: L{schema.Schema}
@ivar form_qualified: A flag that inidcates that @elementFormDefault
has a value of I{qualified}.
@type form_qualified: boolean
@ivar nillable: A flag that inidcates that @nillable
has a value of I{true}.
@type nillable: boolean
@ivar default: The default value.
@type default: object
@ivar rawchildren: A list raw of all children.
@type rawchildren: [L{SchemaObject},...]
"""
    @classmethod
    def prepend(cls, d, s, filter=Filter()):
        """
        Prepend schema object's from B{s}ource list to
        the B{d}estination list while applying the filter.
        @param d: The destination list.
        @type d: list
        @param s: The source list.
        @type s: list
        @param filter: A filter that allows items to be prepended.
        @type filter: L{Filter}
        """
        # NOTE(review): the Filter() default is evaluated once at class
        # definition time and shared across calls — harmless only if Filter
        # is stateless; confirm in suds.xsd.
        # Accepted items are inserted at the front of d while keeping their
        # relative order from s.
        i = 0
        for x in s:
            if x in filter:
                d.insert(i, x)
                i += 1

    @classmethod
    def append(cls, d, s, filter=Filter()):
        """
        Append schema object's from B{s}ource list to
        the B{d}estination list while applying the filter.
        @param d: The destination list.
        @type d: list
        @param s: The source list.
        @type s: list
        @param filter: A filter that allows items to be appended.
        @type filter: L{Filter}
        """
        for item in s:
            if item in filter:
                d.append(item)
    def __init__(self, schema, root):
        """
        @param schema: The containing schema.
        @type schema: L{schema.Schema}
        @param root: The xml root node.
        @type root: L{Element}
        """
        self.schema = schema
        self.root = root
        self.id = objid(self)
        # Attributes mirror the XSD node's XML attributes (None when absent).
        self.name = root.get('name')
        # Qualified name: (local name, schema target-namespace URI).
        self.qname = (self.name, schema.tns[1])
        self.min = root.get('minOccurs')
        self.max = root.get('maxOccurs')
        self.type = root.get('type')
        self.ref = root.get('ref')
        self.form_qualified = schema.form_qualified
        self.nillable = False
        self.default = root.get('default')
        # Unprocessed child schema objects.
        self.rawchildren = []
        # Memoized resolve() results, keyed by the nobuiltin flag.
        self.cache = {}
    def attributes(self, filter=Filter()):
        """
        Get only the attribute content.
        @param filter: A filter to constrain the result.
        @type filter: L{Filter}
        @return: A list of tuples (attr, ancestry)
        @rtype: [(L{SchemaObject}, [L{SchemaObject},..]),..]
        """
        # Iterating self yields (child, ancestry) tuples — the __iter__
        # protocol is defined elsewhere in this class; confirm in suds source.
        result = []
        for child, ancestry in self:
            if child.isattr() and child in filter:
                result.append((child, ancestry))
        return result

    def children(self, filter=Filter()):
        """
        Get only the I{direct} or non-attribute content.
        @param filter: A filter to constrain the result.
        @type filter: L{Filter}
        @return: A list tuples: (child, ancestry)
        @rtype: [(L{SchemaObject}, [L{SchemaObject},..]),..]
        """
        # Complement of attributes(): everything that is not an attribute.
        result = []
        for child, ancestry in self:
            if not child.isattr() and child in filter:
                result.append((child, ancestry))
        return result
    def get_attribute(self, name):
        """
        Get (find) a I{non-attribute} attribute by name.
        @param name: A attribute name.
        @type name: str
        @return: A tuple: the requested (attribute, ancestry).
        @rtype: (L{SchemaObject}, [L{SchemaObject},..])
        """
        # Linear search over attribute content; (None, []) when not found.
        for child, ancestry in self.attributes():
            if child.name == name:
                return (child, ancestry)
        return (None, [])

    def get_child(self, name):
        """
        Get (find) a I{non-attribute} child by name.
        @param name: A child name.
        @type name: str
        @return: A tuple: the requested (child, ancestry).
        @rtype: (L{SchemaObject}, [L{SchemaObject},..])
        """
        # A wildcard child (<xs:any/>) matches any requested name.
        for child, ancestry in self.children():
            if child.any() or child.name == name:
                return (child, ancestry)
        return (None, [])
    def namespace(self, prefix=None):
        """
        Get this properties namespace
        @param prefix: The default prefix.
        @type prefix: str
        @return: The schema's target namespace
        @rtype: (I{prefix},I{URI})
        """
        # Substitute the caller-supplied prefix when the schema's target
        # namespace has none.
        ns = self.schema.tns
        if ns[0] is None:
            ns = (prefix, ns[1])
        return ns

    def default_namespace(self):
        # The default (xmlns=) namespace in effect at this node's XML root.
        return self.root.defaultNamespace()
def unbounded(self):
"""
Get whether this node is unbounded I{(a collection)}
@return: True if unbounded, else False.
@rtype: boolean
"""
max = self.max
if max is None:
max = '1'
if max.isdigit():
return (int(max) > 1)
else:
return ( max == 'unbounded' )
def optional(self):
"""
Get whether this type is optional.
@return: True if optional, else False
@rtype: boolean
"""
min = self.min
if min is None:
min = '1'
return ( min == '0' )
def required(self):
"""
Get whether this type is required.
@return: True if required, else False
@rtype: boolean
"""
return ( not self.optional() )
    def resolve(self, nobuiltin=False):
        """
        Resolve and return the nodes true self.
        @param nobuiltin: Flag indicates that resolution must
            not continue to include xsd builtins.
        @return: The resolved (true) type.
        @rtype: L{SchemaObject}
        """
        # Base implementation: return the cached resolution if one exists,
        # otherwise self. Real resolution is presumably performed by
        # subclasses that populate self.cache — confirm in suds subclasses.
        return self.cache.get(nobuiltin, self)
    # The following predicates all default to False in this base class;
    # presumably each is overridden by the subclass modeling the matching
    # XSD construct — confirm against the suds.xsd.sxbasic module.

    def sequence(self):
        """
        Get whether this is an <xs:sequence/>
        @return: True if <xs:sequence/>, else False
        @rtype: boolean
        """
        return False

    def xslist(self):
        """
        Get whether this is an <xs:list/>
        @return: True if any, else False
        @rtype: boolean
        """
        return False

    def all(self):
        """
        Get whether this is an <xs:all/>
        @return: True if any, else False
        @rtype: boolean
        """
        return False

    def choice(self):
        """
        Get whether this is n <xs:choice/>
        @return: True if any, else False
        @rtype: boolean
        """
        return False

    def any(self):
        """
        Get whether this is an <xs:any/>
        @return: True if any, else False
        @rtype: boolean
        """
        return False

    def builtin(self):
        """
        Get whether this is a schema-instance (xs) type.
        @return: True if any, else False
        @rtype: boolean
        """
        return False

    def enum(self):
        """
        Get whether this is a simple-type containing an enumeration.
        @return: True if any, else False
        @rtype: boolean
        """
        return False

    def isattr(self):
        """
        Get whether the object is a schema I{attribute} definition.
        @return: True if an attribute, else False.
        @rtype: boolean
        """
        return False

    def extension(self):
        """
        Get whether the object is an extension of another type.
        @return: True if an extension, else False.
        @rtype: boolean
        """
        return False

    def restriction(self):
        """
        Get whether the object is an restriction of another type.
        @return: True if an restriction, else False.
        @rtype: boolean
        """
        return False

    def mixed(self):
        """
        Get whether this I{mixed} content.
        """
        return False
    def find(self, qref, classes=()):
        """
        Find a referenced type in self or children.
        @param qref: A qualified reference.
        @type qref: qref
        @param classes: A list of classes used to qualify the match.
        @type classes: [I{class},...]
        @return: The referenced type.
        @rtype: L{SchemaObject}
        @see: L{qualify()}
        """
        # Depth-first search: match self first, then recurse into raw
        # children; an empty classes tuple means "match my own class".
        if not len(classes):
            classes = (self.__class__,)
        if self.qname == qref and self.__class__ in classes:
            return self
        for c in self.rawchildren:
            p = c.find(qref, classes)
            if p is not None:
                return p
        return None
    def translate(self, value, topython=True):
        """
        Translate a value (type) to/from a python type.
        @param value: A value to translate.
        @return: The converted I{language} type.
        """
        # Identity in the base class; typed subclasses presumably convert —
        # confirm in suds.xsd date/number types.
        return value

    def childtags(self):
        """
        Get a list of valid child tag names.
        @return: A list of child tag names.
        @rtype: [str,...]
        """
        # No children allowed by default.
        return ()
    def dependencies(self):
        """
        Get a list of dependancies for dereferencing.
        @return: A merge dependancy index and a list of dependancies.
        @rtype: (int, [L{SchemaObject},...])
        """
        # No dependencies in the base class.
        return (None, [])

    def autoqualified(self):
        """
        The list of I{auto} qualified attribute values.
        Qualification means to convert values into I{qref}.
        @return: A list of attibute names.
        @rtype: list
        """
        # Attributes whose string values qualify() rewrites into qrefs.
        return ['type', 'ref']
def qualify(self):
    """
    Convert attribute values that reference other objects into
    I{qref}.  References are qualified with the document's default
    namespace; when the document defines no default namespace (many
    wsdls are written improperly), the schema target namespace is
    used instead.
    """
    defns = self.root.defaultNamespace()
    if Namespace.none(defns):
        defns = self.schema.tns
    for name in self.autoqualified():
        ref = getattr(self, name)
        # Skip unset attributes and those already qualified.
        if ref is None or isqref(ref):
            continue
        qref = qualify(ref, self.root, defns)
        log.debug('%s, convert %s="%s" to %s', self.id, name, ref, qref)
        setattr(self, name, qref)
def merge(self, other):
    """
    Merge values from I{other} into this object for every merged
    attribute that has not been set here.  I{other} is qualified
    first so copied references are already qrefs.
    @param other: Another schema object of the same kind.
    @type other: L{SchemaObject}
    """
    other.qualify()
    merged_attrs = ('name', 'qname', 'min', 'max', 'default',
                    'type', 'nillable', 'form_qualified')
    for attr in merged_attrs:
        if getattr(self, attr) is not None:
            continue
        value = getattr(other, attr)
        if value is not None:
            setattr(self, attr, value)
def content(self, collection=None, filter=Filter(), history=None):
    """
    Get a I{flattened} list of this node's contents.
    @param collection: A list to fill.
    @type collection: list
    @param filter: A filter used to constrain the result.
    @type filter: L{Filter}
    @param history: The history list used to prevent cyclic dependency.
    @type history: list
    @return: The filled list.
    @rtype: list
    """
    collection = [] if collection is None else collection
    history = [] if history is None else history
    if self in history:
        # Already visited on this path; stop to avoid infinite recursion.
        return collection
    history.append(self)
    if self in filter:
        collection.append(self)
    for child in self.rawchildren:
        # Each child gets its own copy of the path history.
        child.content(collection, filter, history[:])
    return collection
def str(self, indent=0, history=None):
    """
    Get a string representation of this object.
    @param indent: The indent.
    @type indent: int
    @param history: Objects already rendered on this path; used to
        detect and break cycles.
    @type history: list
    @return: A string.
    @rtype: str
    """
    if history is None:
        history = []
    if self in history:
        # Cycle detected: render a placeholder instead of recursing.
        return '%s ...' % Repr(self)
    history.append(self)
    # Three spaces of indentation per level.
    tab = '%*s'%(indent*3, '')
    result = []
    result.append('%s<%s' % (tab, self.id))
    for n in self.description():
        if not hasattr(self, n):
            continue
        v = getattr(self, n)
        if v is None:
            continue
        result.append(' %s="%s"' % (n, v))
    if len(self):
        # Has content children: render an open tag, the children, and
        # a close tag named after the concrete class.
        result.append('>')
        for c in self.rawchildren:
            result.append('\n')
            result.append(c.str(indent+1, history[:]))
            if c.isattr():
                # Attribute children are marked with a trailing '@'.
                result.append('@')
        result.append('\n%s' % tab)
        result.append('</%s>' % self.__class__.__name__)
    else:
        # No content: render as a self-closing tag.
        result.append(' />')
    return ''.join(result)
def description(self):
    """
    Get the attribute names used by str() and repr() descriptions.
    The base implementation reports none.
    @return: A tuple of relevant attribute names.
    @rtype: [str,...]
    """
    return ()
def __str__(self):
    # Python 2 str(): encode the unicode representation as UTF-8 bytes.
    return unicode(self).encode('utf-8')
def __unicode__(self):
    # Delegate to str(), which builds the indented XML-like dump.
    return unicode(self.str())
def __repr__(self):
    """
    One-line representation: the node id plus the attributes named
    by description() that are set, rendered as a self-closing tag.
    """
    parts = ['<%s' % self.id]
    for name in self.description():
        value = getattr(self, name, None)
        if value is None:
            continue
        parts.append(' %s="%s"' % (name, value))
    parts.append(' />')
    return ''.join(parts).encode('utf-8')
def __len__(self):
    """Count the content children exposed by iteration."""
    return sum(1 for _child in self)
def __iter__(self):
    # Iterate real content children via Iter, which skips container
    # elements such as <sequence/> and <choice/>.
    return Iter(self)
def __getitem__(self, index):
    """
    Get the I{index}-th content child.
    @param index: The position of the child to return.
    @type index: int
    @return: The child at I{index}, or None when out of range.
    @rtype: L{SchemaObject}
    """
    # Bug fix: the original counter was never incremented, so every
    # index other than 0 fell through and returned None.
    for i, child in enumerate(self):
        if i == index:
            return child
    return None
class Iter:
    """
    The content iterator - used to iterate the L{Content} children. The
    iterator provides a I{view} of the children that is free of container
    elements such as <sequence/> and <choice/>.
    @ivar stack: A stack used to control nesting.
    @type stack: list
    """
    class Frame:
        """ A content iterator frame: one schema object and a cursor
        into its raw children. """
        def __init__(self, sx):
            """
            @param sx: A schema object.
            @type sx: L{SchemaObject}
            """
            self.sx = sx
            self.items = sx.rawchildren
            self.index = 0
        def next(self):
            """
            Get the I{next} item in the frame's collection.
            @return: The next item, or None when exhausted.
            @rtype: L{SchemaObject}
            """
            if self.index < len(self.items):
                result = self.items[self.index]
                self.index += 1
                return result
    def __init__(self, sx):
        """
        @param sx: A schema object.
        @type sx: L{SchemaObject}
        """
        self.stack = []
        self.push(sx)
    def push(self, sx):
        """
        Create a frame and push the specified object.
        @param sx: A schema object to push.
        @type sx: L{SchemaObject}
        """
        self.stack.append(Iter.Frame(sx))
    def pop(self):
        """
        Pop the I{top} frame.
        @return: The popped frame.
        @rtype: L{Frame}
        @raise StopIteration: when stack is empty.
        """
        if len(self.stack):
            return self.stack.pop()
        else:
            raise StopIteration()
    def top(self):
        """
        Get the I{top} frame.
        @return: The top frame.
        @rtype: L{Frame}
        @raise StopIteration: when stack is empty.
        """
        if len(self.stack):
            return self.stack[-1]
        else:
            raise StopIteration()
    def next(self):
        """
        Get the next item.
        @return: A tuple: the next (child, ancestry).
        @rtype: (L{SchemaObject}, [L{SchemaObject},..])
        @raise StopIteration: At the end.
        """
        frame = self.top()
        while True:
            result = frame.next()
            if result is None:
                # Current frame exhausted: unwind and continue in parent.
                self.pop()
                return self.next()
            if isinstance(result, Content):
                # Real content: return it with the container ancestry.
                ancestry = [f.sx for f in self.stack]
                return (result, ancestry)
            # Container element (e.g. <sequence/>): descend into it.
            self.push(result)
            return self.next()
    def __iter__(self):
        return self
class XBuiltin(SchemaObject):
    """
    Represents a builtin (xsd) schema <xs:*/> node.
    """
    def __init__(self, schema, name):
        """
        @param schema: The containing schema.
        @type schema: L{schema.Schema}
        @param name: The tag name of the builtin node.
        @type name: str
        """
        SchemaObject.__init__(self, schema, Element(name))
        self.name = name
        self.nillable = True
    def namespace(self, prefix=None):
        """Builtins always live in the XSD namespace."""
        return Namespace.xsdns
    def builtin(self):
        """Always True for builtin types."""
        return True
    def resolve(self, nobuiltin=False):
        """A builtin resolves to itself."""
        return self
class Content(SchemaObject):
    """
    Base class for schema objects that represent real XML document
    content, as opposed to structural container elements.
    """
class NodeFinder:
    """
    Find nodes based on flexible criteria.  The I{matcher} may be any
    object that implements a match(n) method.
    @ivar matcher: An object used as criteria for match.
    @type matcher: I{any}.match(n)
    @ivar limit: Limit the number of matches. 0=unlimited.
    @type limit: int
    """
    def __init__(self, matcher, limit=0):
        """
        @param matcher: An object used as criteria for match.
        @type matcher: I{any}.match(n)
        @param limit: Limit the number of matches. 0=unlimited.
        @type limit: int
        """
        self.matcher = matcher
        self.limit = limit
    def find(self, node, list):
        """
        Traverse the tree looking for matches.
        @param node: A node to match on.
        @type node: L{SchemaObject}
        @param list: A list to fill.
        @type list: list
        """
        if self.matcher.match(node):
            list.append(node)
            self.limit -= 1
            if self.limit == 0:
                # Limit reached; stop descending this branch.  With the
                # default limit=0 the counter goes negative on the first
                # match and never equals zero again, i.e. unlimited.
                return
        for c in node.rawchildren:
            self.find(c, list)
        # NOTE(review): returning self from a recursive traversal looks
        # unintentional -- recursive calls ignore it and callers appear
        # to consume the filled list.  Confirm before relying on it.
        return self
MiczFlor/Booktype | lib/booktype/constants.py | 1 | 22740 | # This file is part of Booktype.
# Copyright (c) 2012 Aleksandar Erkalovic <aleksandar.erkalovic@sourcefabric.org>
#
# Booktype is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Booktype is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Booktype. If not, see <http://www.gnu.org/licenses/>.
import os
from django.utils.translation import ugettext_noop
# --- General Booktype defaults (intended to be overridden via settings) ---

# SSL cert verification during request using 'requests' lib
REQUESTS_VERIFY_SSL_CERT = True
# SECURITY CLASS
BOOKTYPE_BASE_SECURITY_CLASS = 'booktype.utils.security.base.BaseSecurity'
# Should track changes be turned on for the book
BOOK_TRACK_CHANGES = False
# CHAPTER STATUS RELATED
# Each status has a translatable display name and a badge color.
CHAPTER_STATUS_LIST = [
    {'name': ugettext_noop('new'), 'color': '#3a87ad'},
    {'name': ugettext_noop('needs content'), 'color': '#ff0000'},
    {'name': ugettext_noop('completed'), 'color': '#5cb85c'},
    {'name': ugettext_noop('to be proofed'), 'color': '#f0ad4e'}
]
# New chapters start with the first status in the list ('new').
CHAPTER_STATUS_DEFAULT = CHAPTER_STATUS_LIST[0]['name']
# IMPORTERS RELATED STUFF
# Maps importer key -> (module path, callable name).
BOOKTYPE_IMPORTERS = {
    'epub': ('booktype.importer.epub', 'import_epub'),
    'docx': ('booktype.importer.docx', 'import_docx')
}
# Default styles matched so far. We'll add more in future
# these constants are used on docimporter.py to correctly
# assign classes to imported elements
DOCX_PARAGRAPH_STYLES_MAP = {
    'AuthorName': 'authorname',
    'Reference': 'reference',
    'Citation': 'bk-cite'
}
# Which elements are considered <h1> style
H1_STYLES = ['heading1']
# Which elements are considered <h2> style
H2_STYLES = ['heading2']
# Which elements are considered <h3> style
H3_STYLES = ['heading3']
# Which elements are considered <h4> style
H4_STYLES = ['heading4']
# Which elements are considered <h5> style
H5_STYLES = ['heading5']
# Which elements are considered <h6> style
H6_STYLES = ['heading6']
# All of our Heading styles
DOCX_HEADING_STYLES = H1_STYLES + H2_STYLES + H3_STYLES + H4_STYLES + H5_STYLES + H6_STYLES
# (html tag, docx style names) pairs, in heading order.
DOCX_HEADING_STYLES_TUPLE = (
    ('h1', H1_STYLES),
    ('h2', H2_STYLES),
    ('h3', H3_STYLES),
    ('h4', H4_STYLES),
    ('h5', H5_STYLES),
    ('h6', H6_STYLES)
)
# This will allow settings custom class on clients
DOCX_IMPORTER_CLASS = 'booktype.importer.WordImporter'
# END IMPORTERS STUFF
# SERVER RELATED
THIS_BOOKI_SERVER = os.environ.get('HTTP_HOST', 'booktype-demo.sourcefabric.org')
# ADMINISTRATIVE RELATED
CREATE_BOOK_VISIBLE = True
CREATE_BOOK_LICENSE = ""
FREE_REGISTRATION = True
ADMIN_CREATE_BOOKS = False
ADMIN_IMPORT_BOOKS = False
BOOKTYPE_MAX_USERS = 0
BOOKTYPE_MAX_BOOKS = 0
BOOKTYPE_BOOKS_PER_USER = -1
GROUP_LIST_PAGE_SIZE = 20
USER_LIST_PAGE_SIZE = 20
BOOK_LIST_PAGE_SIZE = 20
# google analytics
USE_GOOGLE_ANALYTICS = False
GOOGLE_ANALYTICS_ID = ''
# reports
REPORTS_EMAIL_FROM = 'booktype@booktype.pro'
REPORTS_EMAIL_USERS = ['booktype@booktype.pro']
REPORTS_CUSTOM_FONT_PATH = False
MAX_ADDITIONAL_METADATA = 3
# IMPORT RELATED
# Cover image constraints applied when importing EPUB files.
EPUB_COVER_MIN_DPI = 300
EPUB_COVER_MIN_SIZE = 500
EPUB_COVER_MAX_SIZE = 2800
EPUB_COVER_MAX_PIXELS = 3200000
# PUBLISHING RELATED
PUBLISH_OPTIONS = ['mpdf', 'screenpdf', 'epub3', 'epub2', 'icml', 'docx', 'mobi', 'xhtml']
# mobi conversion
# Options are "kindlegen" or "calibre"
MOBI_CONVERT = "calibre"
KINDLEGEN_PATH = "kindlegen"
CALIBRE_PATH = "ebook-convert"
CALIBRE_ARGS = ""
OBJAVI_URL = "http://objavi.booktype.pro/objavi.cgi"
ESPRI_URL = "http://objavi.booktype.pro/espri.cgi"
# theme plugins
# Maps theme name -> conversion module path.
BOOKTYPE_THEME_PLUGINS = {
    'custom': 'booktype.apps.themes.convert.custom',
    'academic': 'booktype.apps.themes.convert.academic'
}
# define path to module where class ExportBook is located
BOOKTYPE_EXPORT_CLASS_MODULE = 'booktype.apps.export.utils'
EXPORT_WAIT_FOR = 90
# convert constants
# Reference widths (in pixels) used when scaling content for each output.
CONVERT_EDITOR_WIDTH = 898
XHTML_DOCUMENT_WIDTH = 2480
MOBI_DOCUMENT_WIDTH = 1500
EPUB_DOCUMENT_WIDTH = 1500
# editor stuff here
EDITOR_AUTOSAVE_ENABLED = False # disabled by default
EDITOR_AUTOSAVE_DELAY = 60 # time in seconds
EDITOR_SETTINGS_ROLES_SHOW_PERMISSIONS = 0
# end editor stuff
EPUB_NOT_ALLOWED_TAGS = (
    # 'strip' - drop tag, leave content
    # 'drop' - drop tag, drop content
    # 'replace' - replace tag with 'replacement'
    # EXAMPLES:
    # {'tag': 'i', 'action': 'strip'},
    # {'tag': 'b', 'action': 'drop'},
    # {
    #     'tag': 'u',
    #     'action': 'replace',
    #     'replacement': {
    #         'tag': 'span',
    #         'attrs': (
    #             ('style', 'text-decoration: underline;'),
    #             ('class', 'happy'),
    #         )
    #     }
    # },
)
# According to epubcheck, after(inside) body tag,
# on the 1st level of deepness, must be only the next list of tags.
# If tag doesn't fit requierements, it will be replaced with "<p>"
EPUB_AVAILABLE_INBODY_ROOT_TAGS = (
    'address', 'blockquote', 'del', 'div', 'dl', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
    'hr', 'ins', 'noscript', 'ns:svg', 'ol', 'p', 'pre', 'script', 'table', 'ul'
)
# mapping tag and allowed attributes in it
# required by epubcheck
EPUB_ALLOWED_TAG_ATTRS = (
    ('ol', ('class', 'dir', 'id', 'lang', 'style', 'title', 'xml:lang')),
)
# Per-format default export settings; each entry is a list of
# {'name': ..., 'value': ...} option dicts.
EXPORT_SETTINGS = {
    'mpdf': [
        {u'name': u'size', u'value': u'A4'}, {u'name': u'custom_width', u'value': u''},
        {u'name': u'custom_height', u'value': u''}, {u'name': u'top_margin', u'value': u'20'},
        {u'name': u'side_margin', u'value': u'20'}, {u'name': u'bottom_margin', u'value': u'20'},
        {u'name': u'gutter', u'value': u'20'}, {u'name': u'show_header', u'value': u'on'},
        {u'name': u'header_margin', u'value': u'10'}, {u'name': u'show_footer', u'value': u'on'},
        {u'name': u'footer_margin', u'value': u'10'}, {u'name': u'bleed_size', u'value': u''},
        {u'name': u'styling', u'value': u''}, {u'name': u'crop_marks', u'value': u'off'}],
    'screenpdf': [
        {u'name': u'size', u'value': u'A4'}, {u'name': u'custom_width', u'value': u''},
        {u'name': u'custom_height', u'value': u''}, {u'name': u'top_margin', u'value': u'20'},
        {u'name': u'side_margin', u'value': u'20'}, {u'name': u'bottom_margin', u'value': u'20'},
        {u'name': u'gutter', u'value': u'20'}, {u'name': u'show_header', u'value': u'on'},
        {u'name': u'header_margin', u'value': u'10'}, {u'name': u'show_footer', u'value': u'on'},
        {u'name': u'footer_margin', u'value': u'10'}, {u'name': u'cover_image', u'value': u' '},
        {u'name': u'styling', u'value': u''}],
    'epub2': [{u'name': u'cover_image', u'value': u' '}, {u'name': u'styling', u'value': u''}],
    'epub3': [{u'name': u'cover_image', u'value': u' '}, {u'name': u'styling', u'value': u''}],
    'icml': [{u'name': u'cover_image', u'value': u' '}, {u'name': u'styling', u'value': u''}],
    'docx': [{u'name': u'cover_image', u'value': u' '}, {u'name': u'styling', u'value': u''}],
    'mobi': [{u'name': u'cover_image', u'value': u' '}, {u'name': u'styling', u'value': u''}],
    'xhtml': [{u'name': u'styling', u'value': u''}]
}
# Millimetres per inch; used to express the inch-based sizes below in mm.
INCH_TO_MM = 25.4
# Page sizes as (width, height) in millimetres.
PAGE_SIZE_DATA = {
    'comicbook': (6.625 * INCH_TO_MM, 10.25 * INCH_TO_MM),
    "pocket": (4.25 * INCH_TO_MM, 6.875 * INCH_TO_MM),
    "usletter": (8.5 * INCH_TO_MM, 11 * INCH_TO_MM),
    "ustrade6x9": (6 * INCH_TO_MM, 9 * INCH_TO_MM),
    "ustrade": (6 * INCH_TO_MM, 9 * INCH_TO_MM),
    "landscape9x7": (9 * INCH_TO_MM, 7 * INCH_TO_MM),
    "square7.5": (7.5 * INCH_TO_MM, 7.5 * INCH_TO_MM),
    "royal": (6.139 * INCH_TO_MM, 9.21 * INCH_TO_MM),
    "crownquarto": (7.444 * INCH_TO_MM, 9.681 * INCH_TO_MM),
    "square8.5": (8.5 * INCH_TO_MM, 8.5 * INCH_TO_MM),
    "us5.5x8.5": (5.5 * INCH_TO_MM, 8.5 * INCH_TO_MM),
    "digest": (5.5 * INCH_TO_MM, 8.5 * INCH_TO_MM),
    "us5x8": (5 * INCH_TO_MM, 8 * INCH_TO_MM),
    "us7x10": (7 * INCH_TO_MM, 10 * INCH_TO_MM),
    "a5": (148, 210),
    "a4": (210, 297),
    "a3 (nz tabloid)": (297, 420),
    "a2 (nz broadsheet)": (420, 594),
    "a1": (594, 841),
    "b5": (176, 250),
    "b4": (250, 353),
    "b3": (353, 500),
    "b2": (500, 707),
    "b1": (707, 1000),
    # Not so sure about next 3
    "uk tabloid": (11 * INCH_TO_MM, 17 * INCH_TO_MM),
    "uk broadsheet": (18 * INCH_TO_MM, 24 * INCH_TO_MM),
    "us broadsheet": (15 * INCH_TO_MM, 22.75 * INCH_TO_MM),
    "berliner" : (315, 470),
    "foolscap (f4)": (210, 330),
    "oamaru broadsheet" :(382, 540),
    "oamaru tabloid": (265, 380),
}
# These are default options for CSS settings
# Default stylesheet for book (print-oriented) PDF output.
BOOKTYPE_CSS_BOOK = ('.objavi-chapter{ color: #000; }'
                     'a { text-decoration:none; color:#000; } '
                     'h1 .initial{ color: #000; } '
                     '.objavi-subsection{ display: block; '
                     'page-break-before: always; '
                     '/* page-break-after: always;*/ '
                     'text-transform: uppercase; font-size: 20pt; }'
                     'body .objavi-subsection:first-child{ '
                     'page-break-before: avoid; } '
                     '.objavi-subsection .initial { '
                     'font-size: 1em; color: #000; }'
                     '.objavi-subsection-heading { font-size: 20pt; '
                     'text-align: center; '
                     'line-height: 300px; font-weight: normal; } '
                     'h1 { page-break-before: always; } '
                     'table { float: none; }'
                     'h1.frontpage{ page-break-after:always; margin-top:70%; '
                     'font-size: 20pt; '
                     'text-align: center; page-break-before: avoid; '
                     'font-weight: normal; }'
                     'div.copyright{ padding: 1em; } '
                     '/* TOC ******************************/ '
                     'table { float: none; } '
                     'table.toc { font-size: 1.1em; width: 95%; } '
                     'table.toc td{ vertical-align:top padding-left: 0.5em; } '
                     'td.chapter { padding: 0 0.5em; text-align: right; } '
                     'table.toc td.pagenumber { text-align: right; '
                     'vertical-align:bottom; } '
                     'td.section { padding-top: 1.1em; font-weight: bold; } '
                     '/* End TOC **************************/ '
                     'pre { overflow: hidden; white-space: pre-wrap; } '
                     'h1 h2 h3 h4 h5 h6{ page-break-after: avoid; '
                     'page-break-inside: avoid; } '
                     '.page-break{ page-break-before: always; height: 7em; '
                     'display: block; } '
                     '#right-footer { text-align: right; } '
                     '#left-footer { text-align: left; } '
                     'a { word-wrap: break-word; } '
                     '.objavi-no-page-break { page-break-inside: avoid; } '
                     '.unseen{ z-index: -66; margin-left: -1000pt; }'
                     'sup {vertical-align:text-top;font-size:0.7em; }'
                     'img { max-width: 95%; }'
                     'p { word-wrap: break-word; }'
                     'li { word-wrap: break-word; }'
                     '#InsertNote_NoteList { word-wrap: break-word; }')
# Default stylesheet for the in-browser (book.js) paginated renderer.
BOOKTYPE_CSS_BOOKJS = ('/* DOCUMENT */ @page { size: auto;}'
                       'body { word-break: break-word; -webkit-hyphens: auto;'
                       'hyphens: auto; font-family: "Liberation Serif";'
                       'background-color: white;}' '/* CONTENT */'
                       'img { max-width: 90%; height: auto;'
                       'image-resolution: from-image;}'
                       'sup { font-size: 80%;}'
                       'p { line-height: 130%; word-break: break-word;'
                       '/* text-align: justify; */'
                       'text-align: left;}'
                       'a { color: #000; text-decoration: none; '
                       'word-wrap: break-word;}'
                       'ol ul { text-align: justify;}'
                       'li { margin-left: 1em; word-wrap: break-word; '
                       'page-break-inside: avoid; windows: 4; orphans: 4;}'
                       '/* HEADINGS */'
                       'h1 {}'
                       'h1 .initial { display: none;}'
                       'h1 .subtitle {}'
                       'h1 .author { display: block; margin-top: 0.2in; '
                       'font-weight: normal;}'
                       'h1 .comma { font-size: 22pt; display: none;}'
                       'h2 { page-break-after: avoid;}'
                       'h3 { page-break-after: avoid;}'
                       'h4 { page-break-after: avoid;}'
                       'h5 { font-weight: normal; text-align: left;'
                       'page-break-after: avoid;}'
                       '/* CODE BLOCKS */'
                       'pre { white-space: pre-wrap; /* css-3 */ '
                       'white-space: -moz-pre-wrap; /* Mozilla since 1999 */'
                       'white-space: -pre-wrap;/* Opera 4-6 */'
                       'white-space: -o-pre-wrap; /* Opera 7 */'
                       'word-wrap: break-word; /* Internet Explorer 5.5+ */'
                       'widows:4; orphans:4;}'
                       'code {}'
                       '/* TOC */'
                       '#pagination-toc-title { font-size: 20pt; '
                       'font-weight: 700; text-align: left; '
                       'padding-bottom: .4in;}'
                       '.pagination-toc-entry {/* width: 6.2in; */ '
                       'width: 90%; display: block; padding-bottom: .3in; '
                       'font-size: 16pt;}'
                       '.pagination-toc-entry .pagination-toc-pagenumber { '
                       'font-weight: 400; display: inline-block; '
                       'vertical-align: text-bottom; font-size: 16pt; '
                       'float:right; '
                       '/* SET AUTOMATICALLY */}'
                       '.pagination-toc-entry.section { font-weight:700; '
                       'font-size: 16pt; text-transform: uppercase; '
                       'padding-bottom: .3in;}'
                       '/* FRONT MATTER */'
                       '#booktitle { margin-top: 1.7in; font-size: 26pt; '
                       'font-weight: normal; text-align: center; '
                       'text-transform: uppercase;}'
                       '#booksubtitle { font-size: 22px; margin-top: 0.2in; '
                       'font-weight: normal; text-align: center;}'
                       '#bookeditors { padding-top: 1.5in; '
                       'font-weight: normal; text-align: center; '
                       'font-size: 24pt;}'
                       '#bookpress { padding-top: 1.8in; font-weight: normal;'
                       'text-align: center; font-size: 24pt;}'
                       '#copyrightpage { font-weight: normal; '
                       'font-size: 18pt; padding-top: 0.2in;}'
                       '/* HEADER */'
                       '.pagination-header {font-size: 12pt;'
                       'font-weight: light;}'
                       '.pagination-pagenumber {font-size: 12pt;}'
                       '.pagination-header '
                       '.pagination-section { display: none; }'
                       '.pagination-toc-text .initial { display: none; }'
                       '.pagination-chapter .initial { display: none; }'
                       '/* MISC */'
                       '.imagecaption { font-size: 9pt; padding-left: 0.2in;'
                       'line-height: 18px; text-align: justify;'
                       'font-weight: normal; display: block;}'
                       '.pagebreak { -webkit-region-break-after: always;}'
                       '.pagebreakbefore{'
                       ' -webkit-region-break-before: always;}'
                       '.objavi-chapter .initial { display: none;}'
                       '.objavi-subsection { display: none;}'
                       '.objavi-subsection-heading { '
                       'line-height: 120px !important; '
                       '/* work-around to make section title pages no longer '
                       'than one page */ font-size: 22px; font-weight: bold;'
                       ' text-align: left; display: none;}'
                       '@media screen { .page { border: solid 1px #000;'
                       ' margin-bottom: .2in; }'
                       'body { background-color: #efefef; }}'
                       '#InsertNote_NoteList { word-wrap: break-word;}')
# Default stylesheet for ebook (EPUB/MOBI) output.
BOOKTYPE_CSS_EBOOK = ('.objavi-chapter{ color: #000; display:none;} '
                      'a { text-decoration:none; color:#000;} '
                      'h1 .initial{ color: #000; display:none;} '
                      '.objavi-subsection{ display: block; '
                      'page-break-before: always;} '
                      'body .objavi-subsection:first-child{ '
                      'page-break-before: avoid;} '
                      '.objavi-subsection .initial { color: #000; '
                      'display:none;} .objavi-subsection-heading {'
                      'font-size: 20pt; text-align: center; '
                      'line-height: 300px; font-weight: normal;}'
                      'table { float: none;} h1.frontpage{'
                      'page-break-after:always; margin-top:70%; '
                      'font-size: 20pt; text-align: center;'
                      'page-break-before: avoid; max-width: 700pt; '
                      'font-weight: normal;} div.copyright{padding: 1em;}'
                      '/* TOC ******************************/'
                      'table { float: none;}'
                      'table.toc { font-size: 1.1em; width: 95%;}'
                      'table.toc td{ vertical-align:top; padding-left: 0.5em;}'
                      'td.chapter { padding: 0 0.5em; text-align: right;} '
                      'table.toc td.pagenumber { text-align: right; '
                      'vertical-align:bottom;} '
                      'td.section { padding-top: 1.1em; font-weight: bold;}'
                      '/* End TOC **************************/ '
                      'img { max-width: 500px; height: auto;}'
                      '.objavi-no-page-break {page-break-inside: avoid;} '
                      '.unseen { z-index: -66; margin-left: -1000pt;} '
                      '.objavi-subsection-heading{ height:860px; '
                      'font-size:0px; display:block;}')
# Default stylesheet for generic PDF output.
BOOKTYPE_CSS_PDF = ('.objavi-subsection{ display: block; '
                    'page-break-before: always; /* page-break-after: always;*/'
                    'text-transform: uppercase; font-size: 20pt; } '
                    'body .objavi-subsection:first-child{ '
                    'page-break-before: avoid; } '
                    '.objavi-subsection .initial { font-size: 1em;'
                    'color: #000; } .objavi-subsection-heading {'
                    'font-size: 20pt; text-align: center; line-height: 300px;'
                    'font-weight: normal;} h1 { page-break-before: always; } '
                    'table { float: none; } '
                    'h1.frontpage{ page-break-after:always; margin-top:70%; '
                    'font-size: 20pt; text-align: center; '
                    'page-break-before: avoid; font-weight: normal; } '
                    'div.copyright{ padding: 1em; } '
                    '/* TOC ******************************/ '
                    'table { float: none; } '
                    'table.toc { font-size: 1.1em; width: 95%; } '
                    'table.toc td{ vertical-align:top; padding-left: 0.5em; } '
                    'td.chapter { padding: 0 0.5em; text-align: right; } '
                    'table.toc td.pagenumber { text-align: right; '
                    'vertical-align:bottom; } td.section { padding-top: 1.1em;'
                    'font-weight: bold; } '
                    '/* End TOC **************************/ '
                    'pre { overflow: hidden; white-space: pre-wrap; } '
                    'h1, h2, h3, h4, h5, h6{ page-break-after: avoid; '
                    'page-break-inside: avoid; } '
                    '.page-break{ page-break-before: always; height: 7em;'
                    'display: block; } a { word-wrap: break-word; } '
                    '.objavi-no-page-break { page-break-inside: avoid; } '
                    '/*To force a blank page it is sometimes necessary to '
                    'add unseen content. Display:none and visibility: hidden'
                    ' do not work -- the renderer realises that they are not '
                    'there and skips the page. So we add a tiny bit of text '
                    'beyond the margin of the page. */ '
                    '.unseen{ z-index: -66; margin-left: -1000pt; }'
                    'img { max-width: 95%; } p { word-wrap: break-word; }'
                    'li { word-wrap: break-word; }'
                    '#InsertNote_NoteList { word-wrap: break-word; } ')
# Default stylesheet for ODT output.
BOOKTYPE_CSS_ODT = ('body {} #book-title { font-size: 64pt; '
                    'page-break-before: avoid; margin-bottom: 12em; '
                    'max-width: 700px;} .unseen { display: none;}'
                    '.chapter { color: #000;} h1 .initial { color: #000; '
                    'font-size: 2em;} body .subsection:first-child {} '
                    'h1 { page-break-before: always;} '
                    '.objavi-subsection{ text-transform: uppercase; '
                    'font-size: 20pt;} .objavi-subsection .initial { '
                    'font-size: 1em; color: #000;}'
                    '.objavi-subsection-heading{ font-size: 36pt; '
                    'font-weight: bold; page-break-before: always;} '
                    'table { float: none;} h1.frontpage{ font-size: 64pt; '
                    'text-align: center; max-width: 700px;} '
                    'div.copyright{ padding: 1em;} pre { max-width:700px; '
                    'overflow: hidden;} '
                    'img { max-width: 700px; height: auto;}')
| agpl-3.0 |
saumishr/django | django/contrib/comments/forms.py | 92 | 8086 | import time
from django import forms
from django.forms.util import ErrorDict
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.comments.models import Comment
from django.utils.crypto import salted_hmac, constant_time_compare
from django.utils.encoding import force_unicode
from django.utils.text import get_text_list
from django.utils import timezone
from django.utils.translation import ungettext, ugettext, ugettext_lazy as _
# Upper bound on comment length; overridable via settings.COMMENT_MAX_LENGTH.
COMMENT_MAX_LENGTH = getattr(settings,'COMMENT_MAX_LENGTH', 3000)
class CommentSecurityForm(forms.Form):
    """
    Handles the security aspects (anti-spoofing) for comment forms.
    """
    content_type = forms.CharField(widget=forms.HiddenInput)
    object_pk = forms.CharField(widget=forms.HiddenInput)
    timestamp = forms.IntegerField(widget=forms.HiddenInput)
    security_hash = forms.CharField(min_length=40, max_length=40, widget=forms.HiddenInput)

    def __init__(self, target_object, data=None, initial=None):
        self.target_object = target_object
        if initial is None:
            initial = {}
        # Seed the hidden fields (content_type/object_pk/timestamp/hash)
        # so the rendered form carries valid anti-spoofing data.
        initial.update(self.generate_security_data())
        super(CommentSecurityForm, self).__init__(data=data, initial=initial)

    def security_errors(self):
        """Return just those errors associated with security"""
        errors = ErrorDict()
        for f in ["honeypot", "timestamp", "security_hash"]:
            if f in self.errors:
                errors[f] = self.errors[f]
        return errors

    def clean_security_hash(self):
        """Check the security hash."""
        # Recompute the hash from the raw submitted values and compare it
        # with the submitted hash.
        security_hash_dict = {
            'content_type' : self.data.get("content_type", ""),
            'object_pk' : self.data.get("object_pk", ""),
            'timestamp' : self.data.get("timestamp", ""),
        }
        expected_hash = self.generate_security_hash(**security_hash_dict)
        actual_hash = self.cleaned_data["security_hash"]
        # constant_time_compare avoids leaking information through
        # comparison timing.
        if not constant_time_compare(expected_hash, actual_hash):
            raise forms.ValidationError("Security hash check failed.")
        return actual_hash

    def clean_timestamp(self):
        """Make sure the timestamp isn't too far (> 2 hours) in the past."""
        ts = self.cleaned_data["timestamp"]
        if time.time() - ts > (2 * 60 * 60):
            raise forms.ValidationError("Timestamp check failed")
        return ts

    def generate_security_data(self):
        """Generate a dict of security data for "initial" data."""
        timestamp = int(time.time())
        security_dict = {
            'content_type' : str(self.target_object._meta),
            'object_pk' : str(self.target_object._get_pk_val()),
            'timestamp' : str(timestamp),
            'security_hash' : self.initial_security_hash(timestamp),
        }
        return security_dict

    def initial_security_hash(self, timestamp):
        """
        Generate the initial security hash from self.content_object
        and a (unix) timestamp.
        """
        initial_security_dict = {
            'content_type' : str(self.target_object._meta),
            'object_pk' : str(self.target_object._get_pk_val()),
            'timestamp' : str(timestamp),
        }
        return self.generate_security_hash(**initial_security_dict)

    def generate_security_hash(self, content_type, object_pk, timestamp):
        """
        Generate a HMAC security hash from the provided info.
        """
        info = (content_type, object_pk, timestamp)
        key_salt = "django.contrib.forms.CommentSecurityForm"
        value = "-".join(info)
        return salted_hmac(key_salt, value).hexdigest()
class CommentDetailsForm(CommentSecurityForm):
    """
    Handles the specific details of the comment (name, comment, etc.).
    """
    name = forms.CharField(label=_("Name"), max_length=50)
    email = forms.EmailField(label=_("Email address"))
    url = forms.URLField(label=_("URL"), required=False)
    comment = forms.CharField(label=_('Comment'), widget=forms.Textarea,
                              max_length=COMMENT_MAX_LENGTH)

    def get_comment_object(self):
        """
        Return a new (unsaved) comment object based on the information in this
        form. Assumes that the form is already validated and will throw a
        ValueError if not.
        Does not set any of the fields that would come from a Request object
        (i.e. ``user`` or ``ip_address``).
        """
        if not self.is_valid():
            raise ValueError("get_comment_object may only be called on valid forms")
        CommentModel = self.get_comment_model()
        new = CommentModel(**self.get_comment_create_data())
        new = self.check_for_duplicate_comment(new)
        return new

    def get_comment_model(self):
        """
        Get the comment model to create with this form. Subclasses in custom
        comment apps should override this, get_comment_create_data, and perhaps
        check_for_duplicate_comment to provide custom comment models.
        """
        return Comment

    def get_comment_create_data(self):
        """
        Returns the dict of data to be used to create a comment. Subclasses in
        custom comment apps that override get_comment_model can override this
        method to add extra fields onto a custom comment model.
        """
        return dict(
            content_type = ContentType.objects.get_for_model(self.target_object),
            object_pk = force_unicode(self.target_object._get_pk_val()),
            user_name = self.cleaned_data["name"],
            user_email = self.cleaned_data["email"],
            user_url = self.cleaned_data["url"],
            comment = self.cleaned_data["comment"],
            submit_date = timezone.now(),
            site_id = settings.SITE_ID,
            is_public = True,
            is_removed = False,
        )

    def check_for_duplicate_comment(self, new):
        """
        Check that a submitted comment isn't a duplicate. This might be caused
        by someone posting a comment twice. If it is a dup, silently return the *previous* comment.
        """
        # Same target, author details, calendar day and text => duplicate.
        possible_duplicates = self.get_comment_model()._default_manager.using(
            self.target_object._state.db
        ).filter(
            content_type = new.content_type,
            object_pk = new.object_pk,
            user_name = new.user_name,
            user_email = new.user_email,
            user_url = new.user_url,
        )
        for old in possible_duplicates:
            if old.submit_date.date() == new.submit_date.date() and old.comment == new.comment:
                return old
        return new

    def clean_comment(self):
        """
        If COMMENTS_ALLOW_PROFANITIES is False, check that the comment doesn't
        contain anything in PROFANITIES_LIST.
        """
        comment = self.cleaned_data["comment"]
        if settings.COMMENTS_ALLOW_PROFANITIES == False:
            bad_words = [w for w in settings.PROFANITIES_LIST if w in comment.lower()]
            if bad_words:
                # Render each bad word as first/last letter with dashes
                # between, e.g. "d--n".
                raise forms.ValidationError(ungettext(
                    "Watch your mouth! The word %s is not allowed here.",
                    "Watch your mouth! The words %s are not allowed here.",
                    len(bad_words)) % get_text_list(
                        ['"%s%s%s"' % (i[0], '-'*(len(i)-2), i[-1])
                         for i in bad_words], ugettext('and')))
        return comment
class CommentForm(CommentDetailsForm):
    """
    The default comment form: comment details plus a honeypot field
    used to trap naive spam bots.
    """
    honeypot = forms.CharField(
        required=False,
        label=_('If you enter anything in this field '
                'your comment will be treated as spam'))

    def clean_honeypot(self):
        """Reject the submission when the honeypot field was filled in."""
        value = self.cleaned_data["honeypot"]
        if value:
            raise forms.ValidationError(self.fields["honeypot"].label)
        return value
| bsd-3-clause |
Senseg/Py4A | python-modules/twisted/twisted/test/test_lockfile.py | 60 | 15409 | # Copyright (c) 2005 Divmod, Inc.
# Copyright (c) 2008 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.lockfile}.
"""
import os, errno
from twisted.trial import unittest
from twisted.python import lockfile
from twisted.python.runtime import platform
# lockfile.kill needs the win32 bindings on Windows; when they are missing
# the kill-related tests below carry this skip message.
skipKill = None
if platform.isWindows():
    try:
        from win32api import OpenProcess
        import pywintypes
    except ImportError:
        skipKill = ("On windows, lockfile.kill is not implemented in the "
                    "absence of win32api and/or pywintypes.")
class UtilTests(unittest.TestCase):
    """
    Tests for the helper functions used to implement L{FilesystemLock}.
    """
    def test_symlinkEEXIST(self):
        """
        L{lockfile.symlink} raises L{OSError} with C{errno} set to L{EEXIST}
        when an attempt is made to create a symlink which already exists.
        """
        name = self.mktemp()
        lockfile.symlink('foo', name)
        exc = self.assertRaises(OSError, lockfile.symlink, 'foo', name)
        self.assertEqual(exc.errno, errno.EEXIST)

    def test_symlinkEIOWindows(self):
        """
        L{lockfile.symlink} raises L{OSError} with C{errno} set to L{EIO} when
        the underlying L{rename} call fails with L{EIO}.
        Renaming a file on Windows may fail if the target of the rename is in
        the process of being deleted (directory deletion appears not to be
        atomic).
        """
        name = self.mktemp()
        # Simulate the EIO failure by patching the rename used internally.
        def fakeRename(src, dst):
            raise IOError(errno.EIO, None)
        self.patch(lockfile, 'rename', fakeRename)
        exc = self.assertRaises(IOError, lockfile.symlink, name, "foo")
        self.assertEqual(exc.errno, errno.EIO)
    if not platform.isWindows():
        test_symlinkEIOWindows.skip = (
            "special rename EIO handling only necessary and correct on "
            "Windows.")

    def test_readlinkENOENT(self):
        """
        L{lockfile.readlink} raises L{OSError} with C{errno} set to L{ENOENT}
        when an attempt is made to read a symlink which does not exist.
        """
        name = self.mktemp()
        exc = self.assertRaises(OSError, lockfile.readlink, name)
        self.assertEqual(exc.errno, errno.ENOENT)

    def test_readlinkEACCESWindows(self):
        """
        L{lockfile.readlink} raises L{OSError} with C{errno} set to L{EACCES}
        on Windows when the underlying file open attempt fails with C{EACCES}.
        Opening a file on Windows may fail if the path is inside a directory
        which is in the process of being deleted (directory deletion appears
        not to be atomic).
        """
        name = self.mktemp()
        # Simulate the EACCES failure by patching the open used internally.
        def fakeOpen(path, mode):
            raise IOError(errno.EACCES, None)
        self.patch(lockfile, '_open', fakeOpen)
        exc = self.assertRaises(IOError, lockfile.readlink, name)
        self.assertEqual(exc.errno, errno.EACCES)
    if not platform.isWindows():
        test_readlinkEACCESWindows.skip = (
            "special readlink EACCES handling only necessary and correct on "
            "Windows.")

    def test_kill(self):
        """
        L{lockfile.kill} returns without error if passed the PID of a
        process which exists and signal C{0}.
        """
        lockfile.kill(os.getpid(), 0)
    test_kill.skip = skipKill

    def test_killESRCH(self):
        """
        L{lockfile.kill} raises L{OSError} with errno of L{ESRCH} if
        passed a PID which does not correspond to any process.
        """
        # Hopefully there is no process with PID 2 ** 31 - 1
        exc = self.assertRaises(OSError, lockfile.kill, 2 ** 31 - 1, 0)
        self.assertEqual(exc.errno, errno.ESRCH)
    test_killESRCH.skip = skipKill

    def test_noKillCall(self):
        """
        Verify that when L{lockfile.kill} does end up as None (e.g. on Windows
        without pywin32), it doesn't end up being called and raising a
        L{TypeError}.
        """
        self.patch(lockfile, "kill", None)
        fl = lockfile.FilesystemLock(self.mktemp())
        fl.lock()
        # A second lock() must fail cleanly rather than calling None.
        self.assertFalse(fl.lock())
class LockingTestCase(unittest.TestCase):
    """
    Tests for L{lockfile.FilesystemLock}, which provides mutual exclusion
    between processes via a symlink recording the holder's PID.
    """

    def _symlinkErrorTest(self, errnoValue):
        """
        Patch L{lockfile.symlink} to raise L{OSError} with the given errno and
        verify that L{FilesystemLock.lock} lets the exception propagate.

        The parameter is named C{errnoValue} (not C{errno}) so it does not
        shadow the stdlib C{errno} module imported at the top of this file.
        """
        def fakeSymlink(source, dest):
            raise OSError(errnoValue, None)
        self.patch(lockfile, 'symlink', fakeSymlink)

        lockf = self.mktemp()
        lock = lockfile.FilesystemLock(lockf)
        exc = self.assertRaises(OSError, lock.lock)
        self.assertEqual(exc.errno, errnoValue)

    def test_symlinkError(self):
        """
        An exception raised by C{symlink} other than C{EEXIST} is passed up to
        the caller of L{FilesystemLock.lock}.
        """
        self._symlinkErrorTest(errno.ENOSYS)

    def test_symlinkErrorPOSIX(self):
        """
        An L{OSError} raised by C{symlink} on a POSIX platform with an errno of
        C{EACCES} or C{EIO} is passed to the caller of L{FilesystemLock.lock}.
        On POSIX, unlike on Windows, these are unexpected errors which cannot
        be handled by L{FilesystemLock}.
        """
        self._symlinkErrorTest(errno.EACCES)
        self._symlinkErrorTest(errno.EIO)
    # trial skips any test whose C{skip} attribute is a message string.
    if platform.isWindows():
        test_symlinkErrorPOSIX.skip = (
            "POSIX-specific error propagation not expected on Windows.")

    def test_cleanlyAcquire(self):
        """
        If the lock has never been held, it can be acquired and the C{clean}
        and C{locked} attributes are set to C{True}.
        """
        lockf = self.mktemp()
        lock = lockfile.FilesystemLock(lockf)
        self.assertTrue(lock.lock())
        self.assertTrue(lock.clean)
        self.assertTrue(lock.locked)

    def test_cleanlyRelease(self):
        """
        If a lock is released cleanly, it can be re-acquired and the C{clean}
        and C{locked} attributes are set to C{True}.
        """
        lockf = self.mktemp()
        lock = lockfile.FilesystemLock(lockf)
        self.assertTrue(lock.lock())
        lock.unlock()
        self.assertFalse(lock.locked)
        lock = lockfile.FilesystemLock(lockf)
        self.assertTrue(lock.lock())
        self.assertTrue(lock.clean)
        self.assertTrue(lock.locked)

    def test_cannotLockLocked(self):
        """
        If a lock is currently locked, it cannot be locked again.
        """
        lockf = self.mktemp()
        firstLock = lockfile.FilesystemLock(lockf)
        self.assertTrue(firstLock.lock())
        secondLock = lockfile.FilesystemLock(lockf)
        self.assertFalse(secondLock.lock())
        self.assertFalse(secondLock.locked)

    def test_uncleanlyAcquire(self):
        """
        If a lock was held by a process which no longer exists, it can be
        acquired, the C{clean} attribute is set to C{False}, and the
        C{locked} attribute is set to C{True}.
        """
        owner = 12345
        def fakeKill(pid, signal):
            if signal != 0:
                raise OSError(errno.EPERM, None)
            if pid == owner:
                # Pretend the previous owner's process is gone.
                raise OSError(errno.ESRCH, None)
        lockf = self.mktemp()
        self.patch(lockfile, 'kill', fakeKill)
        lockfile.symlink(str(owner), lockf)
        lock = lockfile.FilesystemLock(lockf)
        self.assertTrue(lock.lock())
        self.assertFalse(lock.clean)
        self.assertTrue(lock.locked)
        # The lock file now records this process as the holder.
        self.assertEqual(lockfile.readlink(lockf), str(os.getpid()))

    def test_lockReleasedBeforeCheck(self):
        """
        If the lock is initially held but then released before it can be
        examined to determine if the process which held it still exists, it is
        acquired and the C{clean} and C{locked} attributes are set to C{True}.
        """
        def fakeReadlink(name):
            # Pretend to be another process releasing the lock.
            lockfile.rmlink(lockf)
            # Fall back to the real implementation of readlink.
            readlinkPatch.restore()
            return lockfile.readlink(name)
        readlinkPatch = self.patch(lockfile, 'readlink', fakeReadlink)
        def fakeKill(pid, signal):
            if signal != 0:
                raise OSError(errno.EPERM, None)
            if pid == 43125:
                raise OSError(errno.ESRCH, None)
        self.patch(lockfile, 'kill', fakeKill)
        lockf = self.mktemp()
        lock = lockfile.FilesystemLock(lockf)
        lockfile.symlink(str(43125), lockf)
        self.assertTrue(lock.lock())
        self.assertTrue(lock.clean)
        self.assertTrue(lock.locked)

    def test_lockReleasedDuringAcquireSymlink(self):
        """
        If the lock is released while an attempt is made to acquire
        it, the lock attempt fails and C{FilesystemLock.lock} returns
        C{False}.  This can happen on Windows when L{lockfile.symlink}
        fails with L{IOError} of C{EIO} because another process is in
        the middle of a call to L{os.rmdir} (implemented in terms of
        RemoveDirectory) which is not atomic.
        """
        def fakeSymlink(src, dst):
            # While another process id doing os.rmdir which the Windows
            # implementation of rmlink does, a rename call will fail with EIO.
            raise OSError(errno.EIO, None)
        self.patch(lockfile, 'symlink', fakeSymlink)
        lockf = self.mktemp()
        lock = lockfile.FilesystemLock(lockf)
        self.assertFalse(lock.lock())
        self.assertFalse(lock.locked)
    if not platform.isWindows():
        test_lockReleasedDuringAcquireSymlink.skip = (
            "special rename EIO handling only necessary and correct on "
            "Windows.")

    def test_lockReleasedDuringAcquireReadlink(self):
        """
        If the lock is initially held but is released while an attempt
        is made to acquire it, the lock attempt fails and
        L{FilesystemLock.lock} returns C{False}.
        """
        def fakeReadlink(name):
            # While another process is doing os.rmdir which the
            # Windows implementation of rmlink does, a readlink call
            # will fail with EACCES.
            raise IOError(errno.EACCES, None)
        # (The patch object returned by self.patch is unneeded here; trial
        # restores patches automatically at the end of the test.)
        self.patch(lockfile, 'readlink', fakeReadlink)
        lockf = self.mktemp()
        lock = lockfile.FilesystemLock(lockf)
        lockfile.symlink(str(43125), lockf)
        self.assertFalse(lock.lock())
        self.assertFalse(lock.locked)
    if not platform.isWindows():
        test_lockReleasedDuringAcquireReadlink.skip = (
            "special readlink EACCES handling only necessary and correct on "
            "Windows.")

    def _readlinkErrorTest(self, exceptionType, errnoValue):
        """
        Patch L{lockfile.readlink} to raise the given exception type with the
        given errno and verify that L{FilesystemLock.lock} propagates it.

        As with L{_symlinkErrorTest}, the errno parameter is named
        C{errnoValue} to avoid shadowing the stdlib C{errno} module.
        """
        def fakeReadlink(name):
            raise exceptionType(errnoValue, None)
        self.patch(lockfile, 'readlink', fakeReadlink)

        lockf = self.mktemp()
        # Make it appear locked so it has to use readlink
        lockfile.symlink(str(43125), lockf)

        lock = lockfile.FilesystemLock(lockf)
        exc = self.assertRaises(exceptionType, lock.lock)
        self.assertEqual(exc.errno, errnoValue)
        self.assertFalse(lock.locked)

    def test_readlinkError(self):
        """
        An exception raised by C{readlink} other than C{ENOENT} is passed up to
        the caller of L{FilesystemLock.lock}.
        """
        self._readlinkErrorTest(OSError, errno.ENOSYS)
        self._readlinkErrorTest(IOError, errno.ENOSYS)

    def test_readlinkErrorPOSIX(self):
        """
        Any L{IOError} raised by C{readlink} on a POSIX platform passed to the
        caller of L{FilesystemLock.lock}.
        On POSIX, unlike on Windows, these are unexpected errors which cannot
        be handled by L{FilesystemLock}.
        """
        self._readlinkErrorTest(IOError, errno.ENOSYS)
        self._readlinkErrorTest(IOError, errno.EACCES)
    if platform.isWindows():
        test_readlinkErrorPOSIX.skip = (
            "POSIX-specific error propagation not expected on Windows.")

    def test_lockCleanedUpConcurrently(self):
        """
        If a second process cleans up the lock after a first one checks the
        lock and finds that no process is holding it, the first process does
        not fail when it tries to clean up the lock.
        """
        def fakeRmlink(name):
            rmlinkPatch.restore()
            # Pretend to be another process cleaning up the lock.
            lockfile.rmlink(lockf)
            # Fall back to the real implementation of rmlink.
            return lockfile.rmlink(name)
        rmlinkPatch = self.patch(lockfile, 'rmlink', fakeRmlink)
        def fakeKill(pid, signal):
            if signal != 0:
                raise OSError(errno.EPERM, None)
            if pid == 43125:
                raise OSError(errno.ESRCH, None)
        self.patch(lockfile, 'kill', fakeKill)
        lockf = self.mktemp()
        lock = lockfile.FilesystemLock(lockf)
        lockfile.symlink(str(43125), lockf)
        self.assertTrue(lock.lock())
        self.assertTrue(lock.clean)
        self.assertTrue(lock.locked)

    def test_rmlinkError(self):
        """
        An exception raised by L{rmlink} other than C{ENOENT} is passed up
        to the caller of L{FilesystemLock.lock}.
        """
        def fakeRmlink(name):
            raise OSError(errno.ENOSYS, None)
        self.patch(lockfile, 'rmlink', fakeRmlink)
        def fakeKill(pid, signal):
            if signal != 0:
                raise OSError(errno.EPERM, None)
            if pid == 43125:
                raise OSError(errno.ESRCH, None)
        self.patch(lockfile, 'kill', fakeKill)
        lockf = self.mktemp()
        # Make it appear locked so it has to use readlink
        lockfile.symlink(str(43125), lockf)
        lock = lockfile.FilesystemLock(lockf)
        exc = self.assertRaises(OSError, lock.lock)
        self.assertEqual(exc.errno, errno.ENOSYS)
        self.assertFalse(lock.locked)

    def test_killError(self):
        """
        If L{kill} raises an exception other than L{OSError} with errno set to
        C{ESRCH}, the exception is passed up to the caller of
        L{FilesystemLock.lock}.
        """
        def fakeKill(pid, signal):
            raise OSError(errno.EPERM, None)
        self.patch(lockfile, 'kill', fakeKill)
        lockf = self.mktemp()
        # Make it appear locked so it has to use readlink
        lockfile.symlink(str(43125), lockf)
        lock = lockfile.FilesystemLock(lockf)
        exc = self.assertRaises(OSError, lock.lock)
        self.assertEqual(exc.errno, errno.EPERM)
        self.assertFalse(lock.locked)

    def test_unlockOther(self):
        """
        L{FilesystemLock.unlock} raises L{ValueError} if called for a lock
        which is held by a different process.
        """
        lockf = self.mktemp()
        # Record a PID that is definitely not ours as the lock holder.
        lockfile.symlink(str(os.getpid() + 1), lockf)
        lock = lockfile.FilesystemLock(lockf)
        self.assertRaises(ValueError, lock.unlock)

    def test_isLocked(self):
        """
        L{isLocked} returns C{True} if the named lock is currently locked,
        C{False} otherwise.
        """
        lockf = self.mktemp()
        self.assertFalse(lockfile.isLocked(lockf))
        lock = lockfile.FilesystemLock(lockf)
        self.assertTrue(lock.lock())
        self.assertTrue(lockfile.isLocked(lockf))
        lock.unlock()
        self.assertFalse(lockfile.isLocked(lockf))
| apache-2.0 |
onethirtyfive/skadi | skadi/protoc/dota_usermessages_pb2.py | 2 | 178517 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: dota_usermessages.proto
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
import google.protobuf.descriptor_pb2
import networkbasetypes_pb2
import ai_activity_pb2
import dota_commonmessages_pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='dota_usermessages.proto',
package='',
serialized_pb='\n\x17\x64ota_usermessages.proto\x1a google/protobuf/descriptor.proto\x1a\x16networkbasetypes.proto\x1a\x11\x61i_activity.proto\x1a\x19\x64ota_commonmessages.proto\"+\n\x18\x43\x44OTAUserMsg_AIDebugLine\x12\x0f\n\x07message\x18\x01 \x01(\t\"$\n\x11\x43\x44OTAUserMsg_Ping\x12\x0f\n\x07message\x18\x01 \x01(\t\",\n\x17\x43\x44OTAUserMsg_SwapVerify\x12\x11\n\tplayer_id\x18\x01 \x01(\r\"\xef\x01\n\x16\x43\x44OTAUserMsg_ChatEvent\x12\x36\n\x04type\x18\x01 \x02(\x0e\x32\x12.DOTA_CHAT_MESSAGE:\x14\x43HAT_MESSAGE_INVALID\x12\r\n\x05value\x18\x02 \x01(\r\x12\x16\n\nplayerid_1\x18\x03 \x01(\x11:\x02-1\x12\x16\n\nplayerid_2\x18\x04 \x01(\x11:\x02-1\x12\x16\n\nplayerid_3\x18\x05 \x01(\x11:\x02-1\x12\x16\n\nplayerid_4\x18\x06 \x01(\x11:\x02-1\x12\x16\n\nplayerid_5\x18\x07 \x01(\x11:\x02-1\x12\x16\n\nplayerid_6\x18\x08 \x01(\x11:\x02-1\"\xfd\x01\n\x1a\x43\x44OTAUserMsg_CombatLogData\x12:\n\x04type\x18\x01 \x01(\x0e\x32\x15.DOTA_COMBATLOG_TYPES:\x15\x44OTA_COMBATLOG_DAMAGE\x12\x13\n\x0btarget_name\x18\x02 \x01(\r\x12\x15\n\rattacker_name\x18\x03 \x01(\r\x12\x19\n\x11\x61ttacker_illusion\x18\x04 \x01(\x08\x12\x17\n\x0ftarget_illusion\x18\x05 \x01(\x08\x12\x16\n\x0einflictor_name\x18\x06 \x01(\r\x12\r\n\x05value\x18\x07 \x01(\x05\x12\x0e\n\x06health\x18\x08 \x01(\x05\x12\x0c\n\x04time\x18\t \x01(\x02\"!\n\x1f\x43\x44OTAUserMsg_CombatLogShowDeath\"Z\n\x14\x43\x44OTAUserMsg_BotChat\x12\x11\n\tplayer_id\x18\x01 \x01(\r\x12\x0e\n\x06\x66ormat\x18\x02 \x01(\t\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x0e\n\x06target\x18\x04 \x01(\t\"q\n CDOTAUserMsg_CombatHeroPositions\x12\r\n\x05index\x18\x01 \x01(\r\x12\x0c\n\x04time\x18\x02 \x01(\x05\x12 \n\tworld_pos\x18\x03 \x01(\x0b\x32\r.CMsgVector2D\x12\x0e\n\x06health\x18\x04 \x01(\x05\"\xfd\x01\n\x1c\x43\x44OTAUserMsg_MiniKillCamInfo\x12\x39\n\tattackers\x18\x01 \x03(\x0b\x32&.CDOTAUserMsg_MiniKillCamInfo.Attacker\x1a\xa1\x01\n\x08\x41ttacker\x12\x10\n\x08\x61ttacker\x18\x01 \x01(\r\x12\x14\n\x0ctotal_damage\x18\x02 
\x01(\x05\x12\x41\n\tabilities\x18\x03 \x03(\x0b\x32..CDOTAUserMsg_MiniKillCamInfo.Attacker.Ability\x1a*\n\x07\x41\x62ility\x12\x0f\n\x07\x61\x62ility\x18\x01 \x01(\r\x12\x0e\n\x06\x64\x61mage\x18\x02 \x01(\x05\"@\n\x1d\x43\x44OTAUserMsg_GlobalLightColor\x12\r\n\x05\x63olor\x18\x01 \x01(\r\x12\x10\n\x08\x64uration\x18\x02 \x01(\x02\"U\n!CDOTAUserMsg_GlobalLightDirection\x12\x1e\n\tdirection\x18\x01 \x01(\x0b\x32\x0b.CMsgVector\x12\x10\n\x08\x64uration\x18\x02 \x01(\x02\"]\n\x19\x43\x44OTAUserMsg_LocationPing\x12\x11\n\tplayer_id\x18\x01 \x01(\r\x12-\n\rlocation_ping\x18\x02 \x01(\x0b\x32\x16.CDOTAMsg_LocationPing\"T\n\x16\x43\x44OTAUserMsg_ItemAlert\x12\x11\n\tplayer_id\x18\x01 \x01(\r\x12\'\n\nitem_alert\x18\x02 \x01(\x0b\x32\x13.CDOTAMsg_ItemAlert\"n\n\x19\x43\x44OTAUserMsg_MinimapEvent\x12\x12\n\nevent_type\x18\x01 \x01(\x05\x12\x15\n\rentity_handle\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x05\x12\t\n\x01y\x18\x04 \x01(\x05\x12\x10\n\x08\x64uration\x18\x05 \x01(\x05\"M\n\x14\x43\x44OTAUserMsg_MapLine\x12\x11\n\tplayer_id\x18\x01 \x01(\x05\x12\"\n\x07mapline\x18\x02 \x01(\x0b\x32\x11.CDOTAMsg_MapLine\"n\n\x1e\x43\x44OTAUserMsg_MinimapDebugPoint\x12\x1d\n\x08location\x18\x01 \x01(\x0b\x32\x0b.CMsgVector\x12\r\n\x05\x63olor\x18\x02 \x01(\r\x12\x0c\n\x04size\x18\x03 \x01(\x05\x12\x10\n\x08\x64uration\x18\x04 \x01(\x02\"\xae\x01\n#CDOTAUserMsg_CreateLinearProjectile\x12\x1b\n\x06origin\x18\x01 \x01(\x0b\x32\x0b.CMsgVector\x12\x1f\n\x08velocity\x18\x02 \x01(\x0b\x32\r.CMsgVector2D\x12\x0f\n\x07latency\x18\x03 \x01(\x05\x12\x10\n\x08\x65ntindex\x18\x04 \x01(\x05\x12\x16\n\x0eparticle_index\x18\x05 \x01(\x05\x12\x0e\n\x06handle\x18\x06 \x01(\x05\"6\n$CDOTAUserMsg_DestroyLinearProjectile\x12\x0e\n\x06handle\x18\x01 \x01(\x05\"9\n%CDOTAUserMsg_DodgeTrackingProjectiles\x12\x10\n\x08\x65ntindex\x18\x01 \x02(\x05\"_\n!CDOTAUserMsg_SpectatorPlayerClick\x12\x10\n\x08\x65ntindex\x18\x01 \x02(\x05\x12\x12\n\norder_type\x18\x02 \x01(\x05\x12\x14\n\x0ctarget_index\x18\x03 
\x01(\x05\"b\n\x1d\x43\x44OTAUserMsg_NevermoreRequiem\x12\x15\n\rentity_handle\x18\x01 \x01(\x05\x12\r\n\x05lines\x18\x02 \x01(\x05\x12\x1b\n\x06origin\x18\x03 \x01(\x0b\x32\x0b.CMsgVector\".\n\x1b\x43\x44OTAUserMsg_InvalidCommand\x12\x0f\n\x07message\x18\x01 \x01(\t\")\n\x15\x43\x44OTAUserMsg_HudError\x12\x10\n\x08order_id\x18\x01 \x01(\x05\"c\n\x1b\x43\x44OTAUserMsg_SharedCooldown\x12\x10\n\x08\x65ntindex\x18\x01 \x01(\x05\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x63ooldown\x18\x03 \x01(\x02\x12\x12\n\nname_index\x18\x04 \x01(\x05\"/\n\x1f\x43\x44OTAUserMsg_SetNextAutobuyItem\x12\x0c\n\x04name\x18\x01 \x01(\t\"X\n\x1b\x43\x44OTAUserMsg_HalloweenDrops\x12\x11\n\titem_defs\x18\x01 \x03(\r\x12\x12\n\nplayer_ids\x18\x02 \x03(\r\x12\x12\n\nprize_list\x18\x03 \x01(\r\"\xfe\x01\n\x1c\x43\x44OTAResponseQuerySerialized\x12\x31\n\x05\x66\x61\x63ts\x18\x01 \x03(\x0b\x32\".CDOTAResponseQuerySerialized.Fact\x1a\xaa\x01\n\x04\x46\x61\x63t\x12\x0b\n\x03key\x18\x01 \x02(\x05\x12\x46\n\x07valtype\x18\x02 \x02(\x0e\x32,.CDOTAResponseQuerySerialized.Fact.ValueType:\x07NUMERIC\x12\x13\n\x0bval_numeric\x18\x03 \x01(\x02\x12\x12\n\nval_string\x18\x04 \x01(\t\"$\n\tValueType\x12\x0b\n\x07NUMERIC\x10\x01\x12\n\n\x06STRING\x10\x02\"\x90\x01\n\x18\x43\x44OTASpeechMatchOnClient\x12\x0f\n\x07\x63oncept\x18\x01 \x01(\x05\x12\x16\n\x0erecipient_type\x18\x02 \x01(\x05\x12\x34\n\rresponsequery\x18\x03 \x01(\x0b\x32\x1d.CDOTAResponseQuerySerialized\x12\x15\n\nrandomseed\x18\x04 \x01(\x0f:\x01\x30\"\xb0\x07\n\x16\x43\x44OTAUserMsg_UnitEvent\x12\x38\n\x08msg_type\x18\x01 \x02(\x0e\x32\x14.EDotaEntityMessages:\x10\x44OTA_UNIT_SPEECH\x12\x14\n\x0c\x65ntity_index\x18\x02 \x02(\x05\x12.\n\x06speech\x18\x03 \x01(\x0b\x32\x1e.CDOTAUserMsg_UnitEvent.Speech\x12\x37\n\x0bspeech_mute\x18\x04 \x01(\x0b\x32\".CDOTAUserMsg_UnitEvent.SpeechMute\x12\x37\n\x0b\x61\x64\x64_gesture\x18\x05 \x01(\x0b\x32\".CDOTAUserMsg_UnitEvent.AddGesture\x12=\n\x0eremove_gesture\x18\x06 
\x01(\x0b\x32%.CDOTAUserMsg_UnitEvent.RemoveGesture\x12\x39\n\x0c\x62lood_impact\x18\x07 \x01(\x0b\x32#.CDOTAUserMsg_UnitEvent.BloodImpact\x12\x39\n\x0c\x66\x61\x64\x65_gesture\x18\x08 \x01(\x0b\x32#.CDOTAUserMsg_UnitEvent.FadeGesture\x12\x39\n\x16speech_match_on_client\x18\t \x01(\x0b\x32\x19.CDOTASpeechMatchOnClient\x1ak\n\x06Speech\x12\x0f\n\x07\x63oncept\x18\x01 \x01(\x05\x12\x10\n\x08response\x18\x02 \x01(\t\x12\x16\n\x0erecipient_type\x18\x03 \x01(\x05\x12\r\n\x05level\x18\x04 \x01(\x05\x12\x17\n\x08muteable\x18\x05 \x01(\x08:\x05\x66\x61lse\x1a \n\nSpeechMute\x12\x12\n\x05\x64\x65lay\x18\x01 \x01(\x02:\x03\x30.5\x1ao\n\nAddGesture\x12(\n\x08\x61\x63tivity\x18\x01 \x01(\x0e\x32\t.Activity:\x0b\x41\x43T_INVALID\x12\x0c\n\x04slot\x18\x02 \x01(\x05\x12\x12\n\x07\x66\x61\x64\x65_in\x18\x03 \x01(\x02:\x01\x30\x12\x15\n\x08\x66\x61\x64\x65_out\x18\x04 \x01(\x02:\x03\x30.1\x1a\x39\n\rRemoveGesture\x12(\n\x08\x61\x63tivity\x18\x01 \x01(\x0e\x32\t.Activity:\x0b\x41\x43T_INVALID\x1a@\n\x0b\x42loodImpact\x12\r\n\x05scale\x18\x01 \x01(\x05\x12\x10\n\x08x_normal\x18\x02 \x01(\x05\x12\x10\n\x08y_normal\x18\x03 \x01(\x05\x1a\x37\n\x0b\x46\x61\x64\x65Gesture\x12(\n\x08\x61\x63tivity\x18\x01 \x01(\x0e\x32\t.Activity:\x0b\x41\x43T_INVALID\"0\n\x1a\x43\x44OTAUserMsg_ItemPurchased\x12\x12\n\nitem_index\x18\x01 \x01(\x05\"j\n\x16\x43\x44OTAUserMsg_ItemFound\x12\x0e\n\x06player\x18\x01 \x01(\x05\x12\x0f\n\x07quality\x18\x02 \x01(\x05\x12\x0e\n\x06rarity\x18\x03 \x01(\x05\x12\x0e\n\x06method\x18\x04 \x01(\x05\x12\x0f\n\x07itemdef\x18\x05 \x01(\x05\"\xf2\x0f\n\x1c\x43\x44OTAUserMsg_ParticleManager\x12H\n\x04type\x18\x01 \x02(\x0e\x32\x16.DOTA_PARTICLE_MESSAGE:\"DOTA_PARTICLE_MANAGER_EVENT_CREATE\x12\r\n\x05index\x18\x02 \x02(\r\x12R\n\x16release_particle_index\x18\x03 \x01(\x0b\x32\x32.CDOTAUserMsg_ParticleManager.ReleaseParticleIndex\x12\x45\n\x0f\x63reate_particle\x18\x04 \x01(\x0b\x32,.CDOTAUserMsg_ParticleManager.CreateParticle\x12G\n\x10\x64\x65stroy_particle\x18\x05 
\x01(\x0b\x32-.CDOTAUserMsg_ParticleManager.DestroyParticle\x12Z\n\x1a\x64\x65stroy_particle_involving\x18\x06 \x01(\x0b\x32\x36.CDOTAUserMsg_ParticleManager.DestroyParticleInvolving\x12\x45\n\x0fupdate_particle\x18\x07 \x01(\x0b\x32,.CDOTAUserMsg_ParticleManager.UpdateParticle\x12L\n\x13update_particle_fwd\x18\x08 \x01(\x0b\x32/.CDOTAUserMsg_ParticleManager.UpdateParticleFwd\x12R\n\x16update_particle_orient\x18\t \x01(\x0b\x32\x32.CDOTAUserMsg_ParticleManager.UpdateParticleOrient\x12V\n\x18update_particle_fallback\x18\n \x01(\x0b\x32\x34.CDOTAUserMsg_ParticleManager.UpdateParticleFallback\x12R\n\x16update_particle_offset\x18\x0b \x01(\x0b\x32\x32.CDOTAUserMsg_ParticleManager.UpdateParticleOffset\x12L\n\x13update_particle_ent\x18\x0c \x01(\x0b\x32/.CDOTAUserMsg_ParticleManager.UpdateParticleEnt\x12[\n\x1bupdate_particle_should_draw\x18\x0e \x01(\x0b\x32\x36.CDOTAUserMsg_ParticleManager.UpdateParticleShouldDraw\x12Y\n\x1aupdate_particle_set_frozen\x18\x0f \x01(\x0b\x32\x35.CDOTAUserMsg_ParticleManager.UpdateParticleSetFrozen\x1a\x16\n\x14ReleaseParticleIndex\x1aY\n\x0e\x43reateParticle\x12\x1b\n\x13particle_name_index\x18\x01 \x01(\x05\x12\x13\n\x0b\x61ttach_type\x18\x02 \x01(\x05\x12\x15\n\rentity_handle\x18\x03 \x01(\x05\x1a.\n\x0f\x44\x65stroyParticle\x12\x1b\n\x13\x64\x65stroy_immediately\x18\x01 \x01(\x08\x1aN\n\x18\x44\x65stroyParticleInvolving\x12\x1b\n\x13\x64\x65stroy_immediately\x18\x01 \x01(\x08\x12\x15\n\rentity_handle\x18\x03 \x01(\x05\x1a\x46\n\x0eUpdateParticle\x12\x15\n\rcontrol_point\x18\x01 \x01(\x05\x12\x1d\n\x08position\x18\x02 \x01(\x0b\x32\x0b.CMsgVector\x1aH\n\x11UpdateParticleFwd\x12\x15\n\rcontrol_point\x18\x01 \x01(\x05\x12\x1c\n\x07\x66orward\x18\x02 \x01(\x0b\x32\x0b.CMsgVector\x1a\x80\x01\n\x14UpdateParticleOrient\x12\x15\n\rcontrol_point\x18\x01 \x01(\x05\x12\x1c\n\x07\x66orward\x18\x02 \x01(\x0b\x32\x0b.CMsgVector\x12\x1a\n\x05right\x18\x03 \x01(\x0b\x32\x0b.CMsgVector\x12\x17\n\x02up\x18\x04 
\x01(\x0b\x32\x0b.CMsgVector\x1aN\n\x16UpdateParticleFallback\x12\x15\n\rcontrol_point\x18\x01 \x01(\x05\x12\x1d\n\x08position\x18\x02 \x01(\x0b\x32\x0b.CMsgVector\x1aQ\n\x14UpdateParticleOffset\x12\x15\n\rcontrol_point\x18\x01 \x01(\x05\x12\"\n\rorigin_offset\x18\x02 \x01(\x0b\x32\x0b.CMsgVector\x1a\x92\x01\n\x11UpdateParticleEnt\x12\x15\n\rcontrol_point\x18\x01 \x01(\x05\x12\x15\n\rentity_handle\x18\x02 \x01(\x05\x12\x13\n\x0b\x61ttach_type\x18\x03 \x01(\x05\x12\x12\n\nattachment\x18\x04 \x01(\x05\x12&\n\x11\x66\x61llback_position\x18\x05 \x01(\x0b\x32\x0b.CMsgVector\x1a-\n\x17UpdateParticleSetFrozen\x12\x12\n\nset_frozen\x18\x01 \x01(\x08\x1a/\n\x18UpdateParticleShouldDraw\x12\x13\n\x0bshould_draw\x18\x01 \x01(\x08\"\xc5\x01\n\x1a\x43\x44OTAUserMsg_OverheadEvent\x12?\n\x0cmessage_type\x18\x01 \x02(\x0e\x32\x14.DOTA_OVERHEAD_ALERT:\x13OVERHEAD_ALERT_GOLD\x12\r\n\x05value\x18\x02 \x01(\x05\x12\x1e\n\x16target_player_entindex\x18\x03 \x01(\x05\x12\x17\n\x0ftarget_entindex\x18\x04 \x01(\x05\x12\x1e\n\x16source_player_entindex\x18\x05 \x01(\x05\">\n\x1c\x43\x44OTAUserMsg_TutorialTipInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x10\n\x08progress\x18\x02 \x01(\x05\"]\n\x1b\x43\x44OTAUserMsg_TutorialFinish\x12\x0f\n\x07heading\x18\x01 \x01(\t\x12\x0e\n\x06\x65mblem\x18\x02 \x01(\t\x12\x0c\n\x04\x62ody\x18\x03 \x01(\t\x12\x0f\n\x07success\x18\x04 \x01(\x08\"_\n\x1f\x43\x44OTAUserMsg_SendGenericToolTip\x12\r\n\x05title\x18\x01 \x01(\t\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x10\n\x08\x65ntindex\x18\x03 \x01(\x05\x12\r\n\x05\x63lose\x18\x04 \x01(\x08\"S\n\x16\x43\x44OTAUserMsg_WorldLine\x12\x11\n\tplayer_id\x18\x01 \x01(\x05\x12&\n\tworldline\x18\x02 \x01(\x0b\x32\x13.CDOTAMsg_WorldLine\"F\n\x1b\x43\x44OTAUserMsg_TournamentDrop\x12\x13\n\x0bwinner_name\x18\x01 \x01(\t\x12\x12\n\nevent_type\x18\x02 \x01(\x05\"|\n\x16\x43\x44OTAUserMsg_ChatWheel\x12;\n\x0c\x63hat_message\x18\x01 \x01(\x0e\x32\x16.EDOTAChatWheelMessage:\rk_EDOTA_CW_Ok\x12\x11\n\tplayer_id\x18\x02 
\x01(\r\x12\x12\n\naccount_id\x18\x03 \x01(\r\"]\n\x1d\x43\x44OTAUserMsg_ReceivedXmasGift\x12\x11\n\tplayer_id\x18\x01 \x01(\x05\x12\x11\n\titem_name\x18\x02 \x01(\t\x12\x16\n\x0einventory_slot\x18\x03 \x01(\x05\",\n\x17\x43\x44OTAUserMsg_ShowSurvey\x12\x11\n\tsurvey_id\x18\x01 \x01(\x05\"5\n CDOTAUserMsg_UpdateSharedContent\x12\x11\n\tslot_type\x18\x01 \x01(\x05\"!\n\x1f\x43\x44OTAUserMsg_TutorialRequestExp\".\n\x19\x43\x44OTAUserMsg_TutorialFade\x12\x11\n\ttgt_alpha\x18\x01 \x01(\x05\"x\n CDOTAUserMsg_TutorialPingMinimap\x12\x11\n\tplayer_id\x18\x01 \x01(\r\x12\r\n\x05pos_x\x18\x02 \x01(\x02\x12\r\n\x05pos_y\x18\x03 \x01(\x02\x12\r\n\x05pos_z\x18\x04 \x01(\x02\x12\x14\n\x0c\x65ntity_index\x18\x05 \x01(\x05\"/\n\x1e\x43\x44OTA_UM_GamerulesStateChanged\x12\r\n\x05state\x18\x01 \x01(\r\"h\n\x1d\x43\x44OTAUserMsg_AddQuestLogEntry\x12\x10\n\x08npc_name\x18\x01 \x01(\t\x12\x12\n\nnpc_dialog\x18\x02 \x01(\t\x12\r\n\x05quest\x18\x03 \x01(\x08\x12\x12\n\nquest_type\x18\x04 \x01(\x05\"[\n\x1a\x43\x44OTAUserMsg_SendStatPopup\x12\x11\n\tplayer_id\x18\x01 \x01(\x05\x12*\n\tstatpopup\x18\x02 \x01(\x0b\x32\x17.CDOTAMsg_SendStatPopup\"C\n\x1c\x43\x44OTAUserMsg_SendRoshanPopup\x12\x11\n\treclaimed\x18\x01 \x01(\x08\x12\x10\n\x08gametime\x18\x02 \x01(\x05\"L\n\x1a\x43\x44OTAUserMsg_SendFinalGold\x12\x15\n\rreliable_gold\x18\x01 \x03(\r\x12\x17\n\x0funreliable_gold\x18\x02 \x03(\r*\xa0\x0b\n\x11\x45\x44otaUserMessages\x12\x1e\n\x1a\x44OTA_UM_AddUnitToSelection\x10@\x12\x17\n\x13\x44OTA_UM_AIDebugLine\x10\x41\x12\x15\n\x11\x44OTA_UM_ChatEvent\x10\x42\x12\x1f\n\x1b\x44OTA_UM_CombatHeroPositions\x10\x43\x12\x19\n\x15\x44OTA_UM_CombatLogData\x10\x44\x12\x1e\n\x1a\x44OTA_UM_CombatLogShowDeath\x10\x46\x12\"\n\x1e\x44OTA_UM_CreateLinearProjectile\x10G\x12#\n\x1f\x44OTA_UM_DestroyLinearProjectile\x10H\x12$\n DOTA_UM_DodgeTrackingProjectiles\x10I\x12\x1c\n\x18\x44OTA_UM_GlobalLightColor\x10J\x12 
\n\x1c\x44OTA_UM_GlobalLightDirection\x10K\x12\x1a\n\x16\x44OTA_UM_InvalidCommand\x10L\x12\x18\n\x14\x44OTA_UM_LocationPing\x10M\x12\x13\n\x0f\x44OTA_UM_MapLine\x10N\x12\x1b\n\x17\x44OTA_UM_MiniKillCamInfo\x10O\x12\x1d\n\x19\x44OTA_UM_MinimapDebugPoint\x10P\x12\x18\n\x14\x44OTA_UM_MinimapEvent\x10Q\x12\x1c\n\x18\x44OTA_UM_NevermoreRequiem\x10R\x12\x19\n\x15\x44OTA_UM_OverheadEvent\x10S\x12\x1e\n\x1a\x44OTA_UM_SetNextAutobuyItem\x10T\x12\x1a\n\x16\x44OTA_UM_SharedCooldown\x10U\x12 \n\x1c\x44OTA_UM_SpectatorPlayerClick\x10V\x12\x1b\n\x17\x44OTA_UM_TutorialTipInfo\x10W\x12\x15\n\x11\x44OTA_UM_UnitEvent\x10X\x12\x1b\n\x17\x44OTA_UM_ParticleManager\x10Y\x12\x13\n\x0f\x44OTA_UM_BotChat\x10Z\x12\x14\n\x10\x44OTA_UM_HudError\x10[\x12\x19\n\x15\x44OTA_UM_ItemPurchased\x10\\\x12\x10\n\x0c\x44OTA_UM_Ping\x10]\x12\x15\n\x11\x44OTA_UM_ItemFound\x10^\x12!\n\x1d\x44OTA_UM_CharacterSpeakConcept\x10_\x12\x16\n\x12\x44OTA_UM_SwapVerify\x10`\x12\x15\n\x11\x44OTA_UM_WorldLine\x10\x61\x12\x1a\n\x16\x44OTA_UM_TournamentDrop\x10\x62\x12\x15\n\x11\x44OTA_UM_ItemAlert\x10\x63\x12\x1a\n\x16\x44OTA_UM_HalloweenDrops\x10\x64\x12\x15\n\x11\x44OTA_UM_ChatWheel\x10\x65\x12\x1c\n\x18\x44OTA_UM_ReceivedXmasGift\x10\x66\x12\x1f\n\x1b\x44OTA_UM_UpdateSharedContent\x10g\x12\x1e\n\x1a\x44OTA_UM_TutorialRequestExp\x10h\x12\x1f\n\x1b\x44OTA_UM_TutorialPingMinimap\x10i\x12!\n\x1d\x44OTA_UM_GamerulesStateChanged\x10j\x12\x16\n\x12\x44OTA_UM_ShowSurvey\x10k\x12\x18\n\x14\x44OTA_UM_TutorialFade\x10l\x12\x1c\n\x18\x44OTA_UM_AddQuestLogEntry\x10m\x12\x19\n\x15\x44OTA_UM_SendStatPopup\x10n\x12\x1a\n\x16\x44OTA_UM_TutorialFinish\x10o\x12\x1b\n\x17\x44OTA_UM_SendRoshanPopup\x10p\x12\x1e\n\x1a\x44OTA_UM_SendGenericToolTip\x10q\x12\x19\n\x15\x44OTA_UM_SendFinalGold\x10r*\xe3\x0e\n\x11\x44OTA_CHAT_MESSAGE\x12!\n\x14\x43HAT_MESSAGE_INVALID\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x12\x1a\n\x16\x43HAT_MESSAGE_HERO_KILL\x10\x00\x12\x1a\n\x16\x43HAT_MESSAGE_HERO_DENY\x10\x01\x12\x1e\n\x1a\x43HAT_MESSAGE_BARRACKS_KILL
\x10\x02\x12\x1b\n\x17\x43HAT_MESSAGE_TOWER_KILL\x10\x03\x12\x1b\n\x17\x43HAT_MESSAGE_TOWER_DENY\x10\x04\x12\x1b\n\x17\x43HAT_MESSAGE_FIRSTBLOOD\x10\x05\x12\x1c\n\x18\x43HAT_MESSAGE_STREAK_KILL\x10\x06\x12\x18\n\x14\x43HAT_MESSAGE_BUYBACK\x10\x07\x12\x16\n\x12\x43HAT_MESSAGE_AEGIS\x10\x08\x12\x1c\n\x18\x43HAT_MESSAGE_ROSHAN_KILL\x10\t\x12\x1d\n\x19\x43HAT_MESSAGE_COURIER_LOST\x10\n\x12\"\n\x1e\x43HAT_MESSAGE_COURIER_RESPAWNED\x10\x0b\x12\x1b\n\x17\x43HAT_MESSAGE_GLYPH_USED\x10\x0c\x12\x1e\n\x1a\x43HAT_MESSAGE_ITEM_PURCHASE\x10\r\x12\x18\n\x14\x43HAT_MESSAGE_CONNECT\x10\x0e\x12\x1b\n\x17\x43HAT_MESSAGE_DISCONNECT\x10\x0f\x12.\n*CHAT_MESSAGE_DISCONNECT_WAIT_FOR_RECONNECT\x10\x10\x12*\n&CHAT_MESSAGE_DISCONNECT_TIME_REMAINING\x10\x11\x12\x31\n-CHAT_MESSAGE_DISCONNECT_TIME_REMAINING_PLURAL\x10\x12\x12\x1a\n\x16\x43HAT_MESSAGE_RECONNECT\x10\x13\x12\x18\n\x14\x43HAT_MESSAGE_ABANDON\x10\x14\x12\x1e\n\x1a\x43HAT_MESSAGE_SAFE_TO_LEAVE\x10\x15\x12\x1c\n\x18\x43HAT_MESSAGE_RUNE_PICKUP\x10\x16\x12\x1c\n\x18\x43HAT_MESSAGE_RUNE_BOTTLE\x10\x17\x12\x19\n\x15\x43HAT_MESSAGE_INTHEBAG\x10\x18\x12\x1b\n\x17\x43HAT_MESSAGE_SECRETSHOP\x10\x19\x12#\n\x1f\x43HAT_MESSAGE_ITEM_AUTOPURCHASED\x10\x1a\x12\x1f\n\x1b\x43HAT_MESSAGE_ITEMS_COMBINED\x10\x1b\x12\x1d\n\x19\x43HAT_MESSAGE_SUPER_CREEPS\x10\x1c\x12%\n!CHAT_MESSAGE_CANT_USE_ACTION_ITEM\x10\x1d\x12\"\n\x1e\x43HAT_MESSAGE_CHARGES_EXHAUSTED\x10\x1e\x12\x1a\n\x16\x43HAT_MESSAGE_CANTPAUSE\x10\x1f\x12\x1d\n\x19\x43HAT_MESSAGE_NOPAUSESLEFT\x10 \x12\x1d\n\x19\x43HAT_MESSAGE_CANTPAUSEYET\x10!\x12\x17\n\x13\x43HAT_MESSAGE_PAUSED\x10\"\x12\"\n\x1e\x43HAT_MESSAGE_UNPAUSE_COUNTDOWN\x10#\x12\x19\n\x15\x43HAT_MESSAGE_UNPAUSED\x10$\x12\x1e\n\x1a\x43HAT_MESSAGE_AUTO_UNPAUSED\x10%\x12\x1a\n\x16\x43HAT_MESSAGE_YOUPAUSED\x10&\x12 \n\x1c\x43HAT_MESSAGE_CANTUNPAUSETEAM\x10\'\x12(\n$CHAT_MESSAGE_SAFE_TO_LEAVE_ABANDONER\x10(\x12\"\n\x1e\x43HAT_MESSAGE_VOICE_TEXT_BANNED\x10)\x12.\n*CHAT_MESSAGE_SPECTATORS_WATCHING_THIS_GAME\x10*\x12 
\n\x1c\x43HAT_MESSAGE_REPORT_REMINDER\x10+\x12\x1a\n\x16\x43HAT_MESSAGE_ECON_ITEM\x10,\x12\x16\n\x12\x43HAT_MESSAGE_TAUNT\x10-\x12\x17\n\x13\x43HAT_MESSAGE_RANDOM\x10.\x12\x18\n\x14\x43HAT_MESSAGE_RD_TURN\x10/\x12.\n*CHAT_MESSAGE_SAFE_TO_LEAVE_ABANDONER_EARLY\x10\x30\x12 \n\x1c\x43HAT_MESSAGE_DROP_RATE_BONUS\x10\x31\x12!\n\x1d\x43HAT_MESSAGE_NO_BATTLE_POINTS\x10\x32\x12\x1d\n\x19\x43HAT_MESSAGE_DENIED_AEGIS\x10\x33\x12\x1e\n\x1a\x43HAT_MESSAGE_INFORMATIONAL\x10\x34\x12\x1d\n\x19\x43HAT_MESSAGE_AEGIS_STOLEN\x10\x35\x12\x1d\n\x19\x43HAT_MESSAGE_ROSHAN_CANDY\x10\x36\x12\x1c\n\x18\x43HAT_MESSAGE_ITEM_GIFTED\x10\x37\x12\'\n#CHAT_MESSAGE_HERO_KILL_WITH_GREEVIL\x10\x38*\xb2\x01\n\x1d\x44OTA_NO_BATTLE_POINTS_REASONS\x12%\n!NO_BATTLE_POINTS_WRONG_LOBBY_TYPE\x10\x01\x12\"\n\x1eNO_BATTLE_POINTS_PRACTICE_BOTS\x10\x02\x12#\n\x1fNO_BATTLE_POINTS_CHEATS_ENABLED\x10\x03\x12!\n\x1dNO_BATTLE_POINTS_LOW_PRIORITY\x10\x04*7\n\x17\x44OTA_CHAT_INFORMATIONAL\x12\x1c\n\x18\x43OOP_BATTLE_POINTS_RULES\x10\x01*\xa9\x01\n\x14\x44OTA_COMBATLOG_TYPES\x12\x19\n\x15\x44OTA_COMBATLOG_DAMAGE\x10\x00\x12\x17\n\x13\x44OTA_COMBATLOG_HEAL\x10\x01\x12\x1f\n\x1b\x44OTA_COMBATLOG_MODIFIER_ADD\x10\x02\x12\"\n\x1e\x44OTA_COMBATLOG_MODIFIER_REMOVE\x10\x03\x12\x18\n\x14\x44OTA_COMBATLOG_DEATH\x10\x04*\xe5\x01\n\x13\x45\x44otaEntityMessages\x12\x14\n\x10\x44OTA_UNIT_SPEECH\x10\x00\x12\x19\n\x15\x44OTA_UNIT_SPEECH_MUTE\x10\x01\x12\x19\n\x15\x44OTA_UNIT_ADD_GESTURE\x10\x02\x12\x1c\n\x18\x44OTA_UNIT_REMOVE_GESTURE\x10\x03\x12!\n\x1d\x44OTA_UNIT_REMOVE_ALL_GESTURES\x10\x04\x12\x1a\n\x16\x44OTA_UNIT_FADE_GESTURE\x10\x06\x12%\n!DOTA_UNIT_SPEECH_CLIENTSIDE_RULES\x10\x07*\xda\x04\n\x15\x44OTA_PARTICLE_MESSAGE\x12&\n\"DOTA_PARTICLE_MANAGER_EVENT_CREATE\x10\x00\x12&\n\"DOTA_PARTICLE_MANAGER_EVENT_UPDATE\x10\x01\x12.\n*DOTA_PARTICLE_MANAGER_EVENT_UPDATE_FORWARD\x10\x02\x12\x32\n.DOTA_PARTICLE_MANAGER_EVENT_UPDATE_ORIENTATION\x10\x03\x12/\n+DOTA_PARTICLE_MANAGER_EVENT_UPDATE_FALLBACK\x10\x04\x12*\n&DOTA_PARTICLE_MANAGER_EVE
NT_UPDATE_ENT\x10\x05\x12-\n)DOTA_PARTICLE_MANAGER_EVENT_UPDATE_OFFSET\x10\x06\x12\'\n#DOTA_PARTICLE_MANAGER_EVENT_DESTROY\x10\x07\x12\x31\n-DOTA_PARTICLE_MANAGER_EVENT_DESTROY_INVOLVING\x10\x08\x12\'\n#DOTA_PARTICLE_MANAGER_EVENT_RELEASE\x10\t\x12\'\n#DOTA_PARTICLE_MANAGER_EVENT_LATENCY\x10\n\x12+\n\'DOTA_PARTICLE_MANAGER_EVENT_SHOULD_DRAW\x10\x0b\x12&\n\"DOTA_PARTICLE_MANAGER_EVENT_FROZEN\x10\x0c*\xee\x03\n\x13\x44OTA_OVERHEAD_ALERT\x12\x17\n\x13OVERHEAD_ALERT_GOLD\x10\x00\x12\x17\n\x13OVERHEAD_ALERT_DENY\x10\x01\x12\x1b\n\x17OVERHEAD_ALERT_CRITICAL\x10\x02\x12\x15\n\x11OVERHEAD_ALERT_XP\x10\x03\x12%\n!OVERHEAD_ALERT_BONUS_SPELL_DAMAGE\x10\x04\x12\x17\n\x13OVERHEAD_ALERT_MISS\x10\x05\x12\x19\n\x15OVERHEAD_ALERT_DAMAGE\x10\x06\x12\x18\n\x14OVERHEAD_ALERT_EVADE\x10\x07\x12\x18\n\x14OVERHEAD_ALERT_BLOCK\x10\x08\x12&\n\"OVERHEAD_ALERT_BONUS_POISON_DAMAGE\x10\t\x12\x17\n\x13OVERHEAD_ALERT_HEAL\x10\n\x12\x1b\n\x17OVERHEAD_ALERT_MANA_ADD\x10\x0b\x12\x1c\n\x18OVERHEAD_ALERT_MANA_LOSS\x10\x0c\x12!\n\x1dOVERHEAD_ALERT_LAST_HIT_EARLY\x10\r\x12!\n\x1dOVERHEAD_ALERT_LAST_HIT_CLOSE\x10\x0e\x12 \n\x1cOVERHEAD_ALERT_LAST_HIT_MISS\x10\x0f')
_EDOTAUSERMESSAGES = _descriptor.EnumDescriptor(
name='EDotaUserMessages',
full_name='EDotaUserMessages',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DOTA_UM_AddUnitToSelection', index=0, number=64,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_AIDebugLine', index=1, number=65,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_ChatEvent', index=2, number=66,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_CombatHeroPositions', index=3, number=67,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_CombatLogData', index=4, number=68,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_CombatLogShowDeath', index=5, number=70,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_CreateLinearProjectile', index=6, number=71,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_DestroyLinearProjectile', index=7, number=72,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_DodgeTrackingProjectiles', index=8, number=73,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_GlobalLightColor', index=9, number=74,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_GlobalLightDirection', index=10, number=75,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_InvalidCommand', index=11, number=76,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_LocationPing', index=12, number=77,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_MapLine', index=13, number=78,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_MiniKillCamInfo', index=14, number=79,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_MinimapDebugPoint', index=15, number=80,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_MinimapEvent', index=16, number=81,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_NevermoreRequiem', index=17, number=82,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_OverheadEvent', index=18, number=83,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_SetNextAutobuyItem', index=19, number=84,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_SharedCooldown', index=20, number=85,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_SpectatorPlayerClick', index=21, number=86,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_TutorialTipInfo', index=22, number=87,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_UnitEvent', index=23, number=88,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_ParticleManager', index=24, number=89,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_BotChat', index=25, number=90,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_HudError', index=26, number=91,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_ItemPurchased', index=27, number=92,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_Ping', index=28, number=93,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_ItemFound', index=29, number=94,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_CharacterSpeakConcept', index=30, number=95,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_SwapVerify', index=31, number=96,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_WorldLine', index=32, number=97,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_TournamentDrop', index=33, number=98,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_ItemAlert', index=34, number=99,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_HalloweenDrops', index=35, number=100,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_ChatWheel', index=36, number=101,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_ReceivedXmasGift', index=37, number=102,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_UpdateSharedContent', index=38, number=103,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_TutorialRequestExp', index=39, number=104,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_TutorialPingMinimap', index=40, number=105,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_GamerulesStateChanged', index=41, number=106,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_ShowSurvey', index=42, number=107,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_TutorialFade', index=43, number=108,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_AddQuestLogEntry', index=44, number=109,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_SendStatPopup', index=45, number=110,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_TutorialFinish', index=46, number=111,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_SendRoshanPopup', index=47, number=112,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_SendGenericToolTip', index=48, number=113,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DOTA_UM_SendFinalGold', index=49, number=114,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=7795,
serialized_end=9235,
)
EDotaUserMessages = enum_type_wrapper.EnumTypeWrapper(_EDOTAUSERMESSAGES)
_DOTA_CHAT_MESSAGE = _descriptor.EnumDescriptor(
name='DOTA_CHAT_MESSAGE',
full_name='DOTA_CHAT_MESSAGE',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_INVALID', index=0, number=-1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_HERO_KILL', index=1, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_HERO_DENY', index=2, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_BARRACKS_KILL', index=3, number=2,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_TOWER_KILL', index=4, number=3,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_TOWER_DENY', index=5, number=4,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_FIRSTBLOOD', index=6, number=5,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_STREAK_KILL', index=7, number=6,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_BUYBACK', index=8, number=7,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_AEGIS', index=9, number=8,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_ROSHAN_KILL', index=10, number=9,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_COURIER_LOST', index=11, number=10,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_COURIER_RESPAWNED', index=12, number=11,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_GLYPH_USED', index=13, number=12,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_ITEM_PURCHASE', index=14, number=13,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_CONNECT', index=15, number=14,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_DISCONNECT', index=16, number=15,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_DISCONNECT_WAIT_FOR_RECONNECT', index=17, number=16,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_DISCONNECT_TIME_REMAINING', index=18, number=17,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_DISCONNECT_TIME_REMAINING_PLURAL', index=19, number=18,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_RECONNECT', index=20, number=19,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_ABANDON', index=21, number=20,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_SAFE_TO_LEAVE', index=22, number=21,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_RUNE_PICKUP', index=23, number=22,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_RUNE_BOTTLE', index=24, number=23,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_INTHEBAG', index=25, number=24,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_SECRETSHOP', index=26, number=25,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_ITEM_AUTOPURCHASED', index=27, number=26,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_ITEMS_COMBINED', index=28, number=27,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_SUPER_CREEPS', index=29, number=28,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_CANT_USE_ACTION_ITEM', index=30, number=29,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_CHARGES_EXHAUSTED', index=31, number=30,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_CANTPAUSE', index=32, number=31,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_NOPAUSESLEFT', index=33, number=32,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_CANTPAUSEYET', index=34, number=33,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_PAUSED', index=35, number=34,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_UNPAUSE_COUNTDOWN', index=36, number=35,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_UNPAUSED', index=37, number=36,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_AUTO_UNPAUSED', index=38, number=37,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_YOUPAUSED', index=39, number=38,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_CANTUNPAUSETEAM', index=40, number=39,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_SAFE_TO_LEAVE_ABANDONER', index=41, number=40,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_VOICE_TEXT_BANNED', index=42, number=41,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_SPECTATORS_WATCHING_THIS_GAME', index=43, number=42,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_REPORT_REMINDER', index=44, number=43,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_ECON_ITEM', index=45, number=44,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_TAUNT', index=46, number=45,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_RANDOM', index=47, number=46,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_RD_TURN', index=48, number=47,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_SAFE_TO_LEAVE_ABANDONER_EARLY', index=49, number=48,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_DROP_RATE_BONUS', index=50, number=49,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_NO_BATTLE_POINTS', index=51, number=50,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_DENIED_AEGIS', index=52, number=51,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_INFORMATIONAL', index=53, number=52,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_AEGIS_STOLEN', index=54, number=53,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_ROSHAN_CANDY', index=55, number=54,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_ITEM_GIFTED', index=56, number=55,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CHAT_MESSAGE_HERO_KILL_WITH_GREEVIL', index=57, number=56,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=9238,
serialized_end=11129,
)
# NOTE(review): protoc-generated protobuf descriptor code (Python 2 era --
# note the unicode(..., "utf-8") field defaults later in this file). Do not
# hand-edit; regenerate from the .proto source instead. All comments in this
# block are review annotations only; every code token is unchanged.
# Public wrapper exposing the chat-message enum values as module attributes.
DOTA_CHAT_MESSAGE = enum_type_wrapper.EnumTypeWrapper(_DOTA_CHAT_MESSAGE)
# Reasons a match awards no battle points; numbering starts at 1, not 0.
# serialized_start/end are byte offsets into DESCRIPTOR's serialized proto.
_DOTA_NO_BATTLE_POINTS_REASONS = _descriptor.EnumDescriptor(
  name='DOTA_NO_BATTLE_POINTS_REASONS',
  full_name='DOTA_NO_BATTLE_POINTS_REASONS',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='NO_BATTLE_POINTS_WRONG_LOBBY_TYPE', index=0, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='NO_BATTLE_POINTS_PRACTICE_BOTS', index=1, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='NO_BATTLE_POINTS_CHEATS_ENABLED', index=2, number=3,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='NO_BATTLE_POINTS_LOW_PRIORITY', index=3, number=4,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=11132,
  serialized_end=11310,
)
DOTA_NO_BATTLE_POINTS_REASONS = enum_type_wrapper.EnumTypeWrapper(_DOTA_NO_BATTLE_POINTS_REASONS)
# Informational chat sub-messages; single value, numbering starts at 1.
_DOTA_CHAT_INFORMATIONAL = _descriptor.EnumDescriptor(
  name='DOTA_CHAT_INFORMATIONAL',
  full_name='DOTA_CHAT_INFORMATIONAL',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='COOP_BATTLE_POINTS_RULES', index=0, number=1,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=11312,
  serialized_end=11367,
)
DOTA_CHAT_INFORMATIONAL = enum_type_wrapper.EnumTypeWrapper(_DOTA_CHAT_INFORMATIONAL)
# Combat-log entry kinds (damage/heal/modifier add-remove/death).
_DOTA_COMBATLOG_TYPES = _descriptor.EnumDescriptor(
  name='DOTA_COMBATLOG_TYPES',
  full_name='DOTA_COMBATLOG_TYPES',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='DOTA_COMBATLOG_DAMAGE', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_COMBATLOG_HEAL', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_COMBATLOG_MODIFIER_ADD', index=2, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_COMBATLOG_MODIFIER_REMOVE', index=3, number=3,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_COMBATLOG_DEATH', index=4, number=4,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=11370,
  serialized_end=11539,
)
DOTA_COMBATLOG_TYPES = enum_type_wrapper.EnumTypeWrapper(_DOTA_COMBATLOG_TYPES)
# Per-entity message kinds; note the gap: FADE_GESTURE is wire number 6
# (number 5 is unused in the .proto), so index and number diverge from here on.
_EDOTAENTITYMESSAGES = _descriptor.EnumDescriptor(
  name='EDotaEntityMessages',
  full_name='EDotaEntityMessages',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='DOTA_UNIT_SPEECH', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_UNIT_SPEECH_MUTE', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_UNIT_ADD_GESTURE', index=2, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_UNIT_REMOVE_GESTURE', index=3, number=3,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_UNIT_REMOVE_ALL_GESTURES', index=4, number=4,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_UNIT_FADE_GESTURE', index=5, number=6,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_UNIT_SPEECH_CLIENTSIDE_RULES', index=6, number=7,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=11542,
  serialized_end=11771,
)
EDotaEntityMessages = enum_type_wrapper.EnumTypeWrapper(_EDOTAENTITYMESSAGES)
# Particle-manager event kinds carried by CDOTAUserMsg_ParticleManager.
_DOTA_PARTICLE_MESSAGE = _descriptor.EnumDescriptor(
  name='DOTA_PARTICLE_MESSAGE',
  full_name='DOTA_PARTICLE_MESSAGE',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_CREATE', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_UPDATE', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_UPDATE_FORWARD', index=2, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_UPDATE_ORIENTATION', index=3, number=3,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_UPDATE_FALLBACK', index=4, number=4,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_UPDATE_ENT', index=5, number=5,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_UPDATE_OFFSET', index=6, number=6,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_DESTROY', index=7, number=7,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_DESTROY_INVOLVING', index=8, number=8,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_RELEASE', index=9, number=9,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_LATENCY', index=10, number=10,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_SHOULD_DRAW', index=11, number=11,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='DOTA_PARTICLE_MANAGER_EVENT_FROZEN', index=12, number=12,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=11774,
  serialized_end=12376,
)
DOTA_PARTICLE_MESSAGE = enum_type_wrapper.EnumTypeWrapper(_DOTA_PARTICLE_MESSAGE)
# Overhead floating-alert kinds (gold, deny, crit, xp, ...).
_DOTA_OVERHEAD_ALERT = _descriptor.EnumDescriptor(
  name='DOTA_OVERHEAD_ALERT',
  full_name='DOTA_OVERHEAD_ALERT',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_GOLD', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_DENY', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_CRITICAL', index=2, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_XP', index=3, number=3,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_BONUS_SPELL_DAMAGE', index=4, number=4,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_MISS', index=5, number=5,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_DAMAGE', index=6, number=6,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_EVADE', index=7, number=7,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_BLOCK', index=8, number=8,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_BONUS_POISON_DAMAGE', index=9, number=9,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_HEAL', index=10, number=10,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_MANA_ADD', index=11, number=11,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_MANA_LOSS', index=12, number=12,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_LAST_HIT_EARLY', index=13, number=13,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_LAST_HIT_CLOSE', index=14, number=14,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='OVERHEAD_ALERT_LAST_HIT_MISS', index=15, number=15,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=12379,
  serialized_end=12873,
)
DOTA_OVERHEAD_ALERT = enum_type_wrapper.EnumTypeWrapper(_DOTA_OVERHEAD_ALERT)
# NOTE(review): protoc-generated module-level aliases for every enum value
# above, so callers can reference e.g. DOTA_UM_ChatEvent without going
# through the EnumTypeWrapper. Do not hand-edit; regenerate from the .proto.
# User-message IDs (EDotaUserMessages); 69 is skipped in the .proto.
DOTA_UM_AddUnitToSelection = 64
DOTA_UM_AIDebugLine = 65
DOTA_UM_ChatEvent = 66
DOTA_UM_CombatHeroPositions = 67
DOTA_UM_CombatLogData = 68
DOTA_UM_CombatLogShowDeath = 70
DOTA_UM_CreateLinearProjectile = 71
DOTA_UM_DestroyLinearProjectile = 72
DOTA_UM_DodgeTrackingProjectiles = 73
DOTA_UM_GlobalLightColor = 74
DOTA_UM_GlobalLightDirection = 75
DOTA_UM_InvalidCommand = 76
DOTA_UM_LocationPing = 77
DOTA_UM_MapLine = 78
DOTA_UM_MiniKillCamInfo = 79
DOTA_UM_MinimapDebugPoint = 80
DOTA_UM_MinimapEvent = 81
DOTA_UM_NevermoreRequiem = 82
DOTA_UM_OverheadEvent = 83
DOTA_UM_SetNextAutobuyItem = 84
DOTA_UM_SharedCooldown = 85
DOTA_UM_SpectatorPlayerClick = 86
DOTA_UM_TutorialTipInfo = 87
DOTA_UM_UnitEvent = 88
DOTA_UM_ParticleManager = 89
DOTA_UM_BotChat = 90
DOTA_UM_HudError = 91
DOTA_UM_ItemPurchased = 92
DOTA_UM_Ping = 93
DOTA_UM_ItemFound = 94
DOTA_UM_CharacterSpeakConcept = 95
DOTA_UM_SwapVerify = 96
DOTA_UM_WorldLine = 97
DOTA_UM_TournamentDrop = 98
DOTA_UM_ItemAlert = 99
DOTA_UM_HalloweenDrops = 100
DOTA_UM_ChatWheel = 101
DOTA_UM_ReceivedXmasGift = 102
DOTA_UM_UpdateSharedContent = 103
DOTA_UM_TutorialRequestExp = 104
DOTA_UM_TutorialPingMinimap = 105
DOTA_UM_GamerulesStateChanged = 106
DOTA_UM_ShowSurvey = 107
DOTA_UM_TutorialFade = 108
DOTA_UM_AddQuestLogEntry = 109
DOTA_UM_SendStatPopup = 110
DOTA_UM_TutorialFinish = 111
DOTA_UM_SendRoshanPopup = 112
DOTA_UM_SendGenericToolTip = 113
DOTA_UM_SendFinalGold = 114
# DOTA_CHAT_MESSAGE values; INVALID is the -1 sentinel default.
CHAT_MESSAGE_INVALID = -1
CHAT_MESSAGE_HERO_KILL = 0
CHAT_MESSAGE_HERO_DENY = 1
CHAT_MESSAGE_BARRACKS_KILL = 2
CHAT_MESSAGE_TOWER_KILL = 3
CHAT_MESSAGE_TOWER_DENY = 4
CHAT_MESSAGE_FIRSTBLOOD = 5
CHAT_MESSAGE_STREAK_KILL = 6
CHAT_MESSAGE_BUYBACK = 7
CHAT_MESSAGE_AEGIS = 8
CHAT_MESSAGE_ROSHAN_KILL = 9
CHAT_MESSAGE_COURIER_LOST = 10
CHAT_MESSAGE_COURIER_RESPAWNED = 11
CHAT_MESSAGE_GLYPH_USED = 12
CHAT_MESSAGE_ITEM_PURCHASE = 13
CHAT_MESSAGE_CONNECT = 14
CHAT_MESSAGE_DISCONNECT = 15
CHAT_MESSAGE_DISCONNECT_WAIT_FOR_RECONNECT = 16
CHAT_MESSAGE_DISCONNECT_TIME_REMAINING = 17
CHAT_MESSAGE_DISCONNECT_TIME_REMAINING_PLURAL = 18
CHAT_MESSAGE_RECONNECT = 19
CHAT_MESSAGE_ABANDON = 20
CHAT_MESSAGE_SAFE_TO_LEAVE = 21
CHAT_MESSAGE_RUNE_PICKUP = 22
CHAT_MESSAGE_RUNE_BOTTLE = 23
CHAT_MESSAGE_INTHEBAG = 24
CHAT_MESSAGE_SECRETSHOP = 25
CHAT_MESSAGE_ITEM_AUTOPURCHASED = 26
CHAT_MESSAGE_ITEMS_COMBINED = 27
CHAT_MESSAGE_SUPER_CREEPS = 28
CHAT_MESSAGE_CANT_USE_ACTION_ITEM = 29
CHAT_MESSAGE_CHARGES_EXHAUSTED = 30
CHAT_MESSAGE_CANTPAUSE = 31
CHAT_MESSAGE_NOPAUSESLEFT = 32
CHAT_MESSAGE_CANTPAUSEYET = 33
CHAT_MESSAGE_PAUSED = 34
CHAT_MESSAGE_UNPAUSE_COUNTDOWN = 35
CHAT_MESSAGE_UNPAUSED = 36
CHAT_MESSAGE_AUTO_UNPAUSED = 37
CHAT_MESSAGE_YOUPAUSED = 38
CHAT_MESSAGE_CANTUNPAUSETEAM = 39
CHAT_MESSAGE_SAFE_TO_LEAVE_ABANDONER = 40
CHAT_MESSAGE_VOICE_TEXT_BANNED = 41
CHAT_MESSAGE_SPECTATORS_WATCHING_THIS_GAME = 42
CHAT_MESSAGE_REPORT_REMINDER = 43
CHAT_MESSAGE_ECON_ITEM = 44
CHAT_MESSAGE_TAUNT = 45
CHAT_MESSAGE_RANDOM = 46
CHAT_MESSAGE_RD_TURN = 47
CHAT_MESSAGE_SAFE_TO_LEAVE_ABANDONER_EARLY = 48
CHAT_MESSAGE_DROP_RATE_BONUS = 49
CHAT_MESSAGE_NO_BATTLE_POINTS = 50
CHAT_MESSAGE_DENIED_AEGIS = 51
CHAT_MESSAGE_INFORMATIONAL = 52
CHAT_MESSAGE_AEGIS_STOLEN = 53
CHAT_MESSAGE_ROSHAN_CANDY = 54
CHAT_MESSAGE_ITEM_GIFTED = 55
CHAT_MESSAGE_HERO_KILL_WITH_GREEVIL = 56
# DOTA_NO_BATTLE_POINTS_REASONS values (1-based).
NO_BATTLE_POINTS_WRONG_LOBBY_TYPE = 1
NO_BATTLE_POINTS_PRACTICE_BOTS = 2
NO_BATTLE_POINTS_CHEATS_ENABLED = 3
NO_BATTLE_POINTS_LOW_PRIORITY = 4
# DOTA_CHAT_INFORMATIONAL value.
COOP_BATTLE_POINTS_RULES = 1
# DOTA_COMBATLOG_TYPES values.
DOTA_COMBATLOG_DAMAGE = 0
DOTA_COMBATLOG_HEAL = 1
DOTA_COMBATLOG_MODIFIER_ADD = 2
DOTA_COMBATLOG_MODIFIER_REMOVE = 3
DOTA_COMBATLOG_DEATH = 4
# EDotaEntityMessages values (5 is unused in the .proto).
DOTA_UNIT_SPEECH = 0
DOTA_UNIT_SPEECH_MUTE = 1
DOTA_UNIT_ADD_GESTURE = 2
DOTA_UNIT_REMOVE_GESTURE = 3
DOTA_UNIT_REMOVE_ALL_GESTURES = 4
DOTA_UNIT_FADE_GESTURE = 6
DOTA_UNIT_SPEECH_CLIENTSIDE_RULES = 7
# DOTA_PARTICLE_MESSAGE values.
DOTA_PARTICLE_MANAGER_EVENT_CREATE = 0
DOTA_PARTICLE_MANAGER_EVENT_UPDATE = 1
DOTA_PARTICLE_MANAGER_EVENT_UPDATE_FORWARD = 2
DOTA_PARTICLE_MANAGER_EVENT_UPDATE_ORIENTATION = 3
DOTA_PARTICLE_MANAGER_EVENT_UPDATE_FALLBACK = 4
DOTA_PARTICLE_MANAGER_EVENT_UPDATE_ENT = 5
DOTA_PARTICLE_MANAGER_EVENT_UPDATE_OFFSET = 6
DOTA_PARTICLE_MANAGER_EVENT_DESTROY = 7
DOTA_PARTICLE_MANAGER_EVENT_DESTROY_INVOLVING = 8
DOTA_PARTICLE_MANAGER_EVENT_RELEASE = 9
DOTA_PARTICLE_MANAGER_EVENT_LATENCY = 10
DOTA_PARTICLE_MANAGER_EVENT_SHOULD_DRAW = 11
DOTA_PARTICLE_MANAGER_EVENT_FROZEN = 12
# DOTA_OVERHEAD_ALERT values.
OVERHEAD_ALERT_GOLD = 0
OVERHEAD_ALERT_DENY = 1
OVERHEAD_ALERT_CRITICAL = 2
OVERHEAD_ALERT_XP = 3
OVERHEAD_ALERT_BONUS_SPELL_DAMAGE = 4
OVERHEAD_ALERT_MISS = 5
OVERHEAD_ALERT_DAMAGE = 6
OVERHEAD_ALERT_EVADE = 7
OVERHEAD_ALERT_BLOCK = 8
OVERHEAD_ALERT_BONUS_POISON_DAMAGE = 9
OVERHEAD_ALERT_HEAL = 10
OVERHEAD_ALERT_MANA_ADD = 11
OVERHEAD_ALERT_MANA_LOSS = 12
OVERHEAD_ALERT_LAST_HIT_EARLY = 13
OVERHEAD_ALERT_LAST_HIT_CLOSE = 14
OVERHEAD_ALERT_LAST_HIT_MISS = 15
# NOTE(review): protoc-generated message Descriptor tables. Field `type`
# codes are protobuf wire types (2=float, 5=int32, 8=bool, 9=string,
# 11=message, 13=uint32, 14=enum, 17=sint32); `label` 1=optional,
# 2=required, 3=repeated. Do not hand-edit; regenerate from the .proto.
# Nested enum CDOTAResponseQuerySerialized.Fact.ValueType (numbering from 1).
_CDOTARESPONSEQUERYSERIALIZED_FACT_VALUETYPE = _descriptor.EnumDescriptor(
  name='ValueType',
  full_name='CDOTAResponseQuerySerialized.Fact.ValueType',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='NUMERIC', index=0, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='STRING', index=1, number=2,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=2932,
  serialized_end=2968,
)
# CDOTAUserMsg_AIDebugLine: single optional string `message`.
_CDOTAUSERMSG_AIDEBUGLINE = _descriptor.Descriptor(
  name='CDOTAUserMsg_AIDebugLine',
  full_name='CDOTAUserMsg_AIDebugLine',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='message', full_name='CDOTAUserMsg_AIDebugLine.message', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=131,
  serialized_end=174,
)
# CDOTAUserMsg_Ping: single optional string `message`.
_CDOTAUSERMSG_PING = _descriptor.Descriptor(
  name='CDOTAUserMsg_Ping',
  full_name='CDOTAUserMsg_Ping',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='message', full_name='CDOTAUserMsg_Ping.message', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=176,
  serialized_end=212,
)
# CDOTAUserMsg_SwapVerify: single optional uint32 `player_id`.
_CDOTAUSERMSG_SWAPVERIFY = _descriptor.Descriptor(
  name='CDOTAUserMsg_SwapVerify',
  full_name='CDOTAUserMsg_SwapVerify',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='player_id', full_name='CDOTAUserMsg_SwapVerify.player_id', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=214,
  serialized_end=258,
)
# CDOTAUserMsg_ChatEvent: required DOTA_CHAT_MESSAGE `type` (default -1,
# CHAT_MESSAGE_INVALID) plus up to six sint32 player-id slots defaulting
# to -1 ("no player").
_CDOTAUSERMSG_CHATEVENT = _descriptor.Descriptor(
  name='CDOTAUserMsg_ChatEvent',
  full_name='CDOTAUserMsg_ChatEvent',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='type', full_name='CDOTAUserMsg_ChatEvent.type', index=0,
      number=1, type=14, cpp_type=8, label=2,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='CDOTAUserMsg_ChatEvent.value', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='playerid_1', full_name='CDOTAUserMsg_ChatEvent.playerid_1', index=2,
      number=3, type=17, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='playerid_2', full_name='CDOTAUserMsg_ChatEvent.playerid_2', index=3,
      number=4, type=17, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='playerid_3', full_name='CDOTAUserMsg_ChatEvent.playerid_3', index=4,
      number=5, type=17, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='playerid_4', full_name='CDOTAUserMsg_ChatEvent.playerid_4', index=5,
      number=6, type=17, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='playerid_5', full_name='CDOTAUserMsg_ChatEvent.playerid_5', index=6,
      number=7, type=17, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='playerid_6', full_name='CDOTAUserMsg_ChatEvent.playerid_6', index=7,
      number=8, type=17, cpp_type=1, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=261,
  serialized_end=500,
)
# CDOTAUserMsg_CombatLogData: one combat-log record; target/attacker/
# inflictor names are uint32 indices (presumably into a string table --
# TODO confirm against the .proto comments).
_CDOTAUSERMSG_COMBATLOGDATA = _descriptor.Descriptor(
  name='CDOTAUserMsg_CombatLogData',
  full_name='CDOTAUserMsg_CombatLogData',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='type', full_name='CDOTAUserMsg_CombatLogData.type', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='target_name', full_name='CDOTAUserMsg_CombatLogData.target_name', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attacker_name', full_name='CDOTAUserMsg_CombatLogData.attacker_name', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attacker_illusion', full_name='CDOTAUserMsg_CombatLogData.attacker_illusion', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='target_illusion', full_name='CDOTAUserMsg_CombatLogData.target_illusion', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='inflictor_name', full_name='CDOTAUserMsg_CombatLogData.inflictor_name', index=5,
      number=6, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='CDOTAUserMsg_CombatLogData.value', index=6,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='health', full_name='CDOTAUserMsg_CombatLogData.health', index=7,
      number=8, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='time', full_name='CDOTAUserMsg_CombatLogData.time', index=8,
      number=9, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=503,
  serialized_end=756,
)
# CDOTAUserMsg_CombatLogShowDeath: empty marker message (no fields).
_CDOTAUSERMSG_COMBATLOGSHOWDEATH = _descriptor.Descriptor(
  name='CDOTAUserMsg_CombatLogShowDeath',
  full_name='CDOTAUserMsg_CombatLogShowDeath',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=758,
  serialized_end=791,
)
# CDOTAUserMsg_BotChat: player_id plus format/message/target strings.
_CDOTAUSERMSG_BOTCHAT = _descriptor.Descriptor(
  name='CDOTAUserMsg_BotChat',
  full_name='CDOTAUserMsg_BotChat',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='player_id', full_name='CDOTAUserMsg_BotChat.player_id', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='format', full_name='CDOTAUserMsg_BotChat.format', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='message', full_name='CDOTAUserMsg_BotChat.message', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='target', full_name='CDOTAUserMsg_BotChat.target', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=793,
  serialized_end=883,
)
# CDOTAUserMsg_CombatHeroPositions: entity index, time, world position
# (sub-message) and health snapshot.
_CDOTAUSERMSG_COMBATHEROPOSITIONS = _descriptor.Descriptor(
  name='CDOTAUserMsg_CombatHeroPositions',
  full_name='CDOTAUserMsg_CombatHeroPositions',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='index', full_name='CDOTAUserMsg_CombatHeroPositions.index', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='time', full_name='CDOTAUserMsg_CombatHeroPositions.time', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='world_pos', full_name='CDOTAUserMsg_CombatHeroPositions.world_pos', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='health', full_name='CDOTAUserMsg_CombatHeroPositions.health', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=885,
  serialized_end=998,
)
# Nested message CDOTAUserMsg_MiniKillCamInfo.Attacker.Ability:
# (ability id, damage dealt) pair.
_CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER_ABILITY = _descriptor.Descriptor(
  name='Ability',
  full_name='CDOTAUserMsg_MiniKillCamInfo.Attacker.Ability',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='ability', full_name='CDOTAUserMsg_MiniKillCamInfo.Attacker.Ability.ability', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='damage', full_name='CDOTAUserMsg_MiniKillCamInfo.Attacker.Ability.damage', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=1212,
  serialized_end=1254,
)
# Nested message CDOTAUserMsg_MiniKillCamInfo.Attacker: attacker id,
# total damage, and the repeated Ability breakdown above.
_CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER = _descriptor.Descriptor(
  name='Attacker',
  full_name='CDOTAUserMsg_MiniKillCamInfo.Attacker',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='attacker', full_name='CDOTAUserMsg_MiniKillCamInfo.Attacker.attacker', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='total_damage', full_name='CDOTAUserMsg_MiniKillCamInfo.Attacker.total_damage', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='abilities', full_name='CDOTAUserMsg_MiniKillCamInfo.Attacker.abilities', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER_ABILITY, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=1093,
  serialized_end=1254,
)
# CDOTAUserMsg_MiniKillCamInfo: repeated Attacker entries.
_CDOTAUSERMSG_MINIKILLCAMINFO = _descriptor.Descriptor(
  name='CDOTAUserMsg_MiniKillCamInfo',
  full_name='CDOTAUserMsg_MiniKillCamInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='attackers', full_name='CDOTAUserMsg_MiniKillCamInfo.attackers', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=1001,
  serialized_end=1254,
)
# CDOTAUserMsg_GlobalLightColor: packed color value plus fade duration.
_CDOTAUSERMSG_GLOBALLIGHTCOLOR = _descriptor.Descriptor(
  name='CDOTAUserMsg_GlobalLightColor',
  full_name='CDOTAUserMsg_GlobalLightColor',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='color', full_name='CDOTAUserMsg_GlobalLightColor.color', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='duration', full_name='CDOTAUserMsg_GlobalLightColor.duration', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=1256,
  serialized_end=1320,
)
# CDOTAUserMsg_GlobalLightDirection: direction vector (sub-message) plus
# fade duration.
_CDOTAUSERMSG_GLOBALLIGHTDIRECTION = _descriptor.Descriptor(
  name='CDOTAUserMsg_GlobalLightDirection',
  full_name='CDOTAUserMsg_GlobalLightDirection',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='direction', full_name='CDOTAUserMsg_GlobalLightDirection.direction', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='duration', full_name='CDOTAUserMsg_GlobalLightDirection.duration', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=1322,
  serialized_end=1407,
)
# CDOTAUserMsg_LocationPing: pinging player and ping payload (sub-message).
_CDOTAUSERMSG_LOCATIONPING = _descriptor.Descriptor(
  name='CDOTAUserMsg_LocationPing',
  full_name='CDOTAUserMsg_LocationPing',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='player_id', full_name='CDOTAUserMsg_LocationPing.player_id', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='location_ping', full_name='CDOTAUserMsg_LocationPing.location_ping', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=1409,
  serialized_end=1502,
)
# CDOTAUserMsg_ItemAlert: alerting player and alert payload (sub-message).
_CDOTAUSERMSG_ITEMALERT = _descriptor.Descriptor(
  name='CDOTAUserMsg_ItemAlert',
  full_name='CDOTAUserMsg_ItemAlert',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='player_id', full_name='CDOTAUserMsg_ItemAlert.player_id', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='item_alert', full_name='CDOTAUserMsg_ItemAlert.item_alert', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=1504,
  serialized_end=1588,
)
_CDOTAUSERMSG_MINIMAPEVENT = _descriptor.Descriptor(
name='CDOTAUserMsg_MinimapEvent',
full_name='CDOTAUserMsg_MinimapEvent',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='event_type', full_name='CDOTAUserMsg_MinimapEvent.event_type', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='entity_handle', full_name='CDOTAUserMsg_MinimapEvent.entity_handle', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='x', full_name='CDOTAUserMsg_MinimapEvent.x', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='y', full_name='CDOTAUserMsg_MinimapEvent.y', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='duration', full_name='CDOTAUserMsg_MinimapEvent.duration', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=1590,
serialized_end=1700,
)
_CDOTAUSERMSG_MAPLINE = _descriptor.Descriptor(
name='CDOTAUserMsg_MapLine',
full_name='CDOTAUserMsg_MapLine',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='player_id', full_name='CDOTAUserMsg_MapLine.player_id', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='mapline', full_name='CDOTAUserMsg_MapLine.mapline', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=1702,
serialized_end=1779,
)
# Descriptor for CDOTAUserMsg_MinimapDebugPoint
# (fields: location [message], color, size, duration [float]).
_CDOTAUSERMSG_MINIMAPDEBUGPOINT = _descriptor.Descriptor(
  name='CDOTAUserMsg_MinimapDebugPoint',
  full_name='CDOTAUserMsg_MinimapDebugPoint',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='location', full_name='CDOTAUserMsg_MinimapDebugPoint.location', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='color', full_name='CDOTAUserMsg_MinimapDebugPoint.color', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='size', full_name='CDOTAUserMsg_MinimapDebugPoint.size', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='duration', full_name='CDOTAUserMsg_MinimapDebugPoint.duration', index=3,
      number=4, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=1781,
  serialized_end=1891,
)

# Descriptor for CDOTAUserMsg_CreateLinearProjectile
# (fields: origin, velocity [messages], latency, entindex, particle_index, handle).
_CDOTAUSERMSG_CREATELINEARPROJECTILE = _descriptor.Descriptor(
  name='CDOTAUserMsg_CreateLinearProjectile',
  full_name='CDOTAUserMsg_CreateLinearProjectile',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='origin', full_name='CDOTAUserMsg_CreateLinearProjectile.origin', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='velocity', full_name='CDOTAUserMsg_CreateLinearProjectile.velocity', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='latency', full_name='CDOTAUserMsg_CreateLinearProjectile.latency', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='entindex', full_name='CDOTAUserMsg_CreateLinearProjectile.entindex', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='particle_index', full_name='CDOTAUserMsg_CreateLinearProjectile.particle_index', index=4,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='handle', full_name='CDOTAUserMsg_CreateLinearProjectile.handle', index=5,
      number=6, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=1894,
  serialized_end=2068,
)

# Descriptor for CDOTAUserMsg_DestroyLinearProjectile (single field: handle).
_CDOTAUSERMSG_DESTROYLINEARPROJECTILE = _descriptor.Descriptor(
  name='CDOTAUserMsg_DestroyLinearProjectile',
  full_name='CDOTAUserMsg_DestroyLinearProjectile',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='handle', full_name='CDOTAUserMsg_DestroyLinearProjectile.handle', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2070,
  serialized_end=2124,
)

# Descriptor for CDOTAUserMsg_DodgeTrackingProjectiles
# (single required field: entindex — note label=2, i.e. required).
_CDOTAUSERMSG_DODGETRACKINGPROJECTILES = _descriptor.Descriptor(
  name='CDOTAUserMsg_DodgeTrackingProjectiles',
  full_name='CDOTAUserMsg_DodgeTrackingProjectiles',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='entindex', full_name='CDOTAUserMsg_DodgeTrackingProjectiles.entindex', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2126,
  serialized_end=2183,
)
# Descriptor for CDOTAUserMsg_SpectatorPlayerClick
# (fields: entindex [required], order_type, target_index).
_CDOTAUSERMSG_SPECTATORPLAYERCLICK = _descriptor.Descriptor(
  name='CDOTAUserMsg_SpectatorPlayerClick',
  full_name='CDOTAUserMsg_SpectatorPlayerClick',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='entindex', full_name='CDOTAUserMsg_SpectatorPlayerClick.entindex', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='order_type', full_name='CDOTAUserMsg_SpectatorPlayerClick.order_type', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='target_index', full_name='CDOTAUserMsg_SpectatorPlayerClick.target_index', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2185,
  serialized_end=2280,
)

# Descriptor for CDOTAUserMsg_NevermoreRequiem
# (fields: entity_handle, lines, origin [message]).
_CDOTAUSERMSG_NEVERMOREREQUIEM = _descriptor.Descriptor(
  name='CDOTAUserMsg_NevermoreRequiem',
  full_name='CDOTAUserMsg_NevermoreRequiem',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='entity_handle', full_name='CDOTAUserMsg_NevermoreRequiem.entity_handle', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='lines', full_name='CDOTAUserMsg_NevermoreRequiem.lines', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='origin', full_name='CDOTAUserMsg_NevermoreRequiem.origin', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2282,
  serialized_end=2380,
)

# Descriptor for CDOTAUserMsg_InvalidCommand (single string field: message).
# NOTE(review): the unicode() builtin is Python 2 only — this generated module
# targets the py2 protobuf runtime; regenerate with a modern protoc for py3.
_CDOTAUSERMSG_INVALIDCOMMAND = _descriptor.Descriptor(
  name='CDOTAUserMsg_InvalidCommand',
  full_name='CDOTAUserMsg_InvalidCommand',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='message', full_name='CDOTAUserMsg_InvalidCommand.message', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2382,
  serialized_end=2428,
)

# Descriptor for CDOTAUserMsg_HudError (single field: order_id).
_CDOTAUSERMSG_HUDERROR = _descriptor.Descriptor(
  name='CDOTAUserMsg_HudError',
  full_name='CDOTAUserMsg_HudError',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='order_id', full_name='CDOTAUserMsg_HudError.order_id', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2430,
  serialized_end=2471,
)

# Descriptor for CDOTAUserMsg_SharedCooldown
# (fields: entindex, name [string], cooldown [float], name_index).
_CDOTAUSERMSG_SHAREDCOOLDOWN = _descriptor.Descriptor(
  name='CDOTAUserMsg_SharedCooldown',
  full_name='CDOTAUserMsg_SharedCooldown',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='entindex', full_name='CDOTAUserMsg_SharedCooldown.entindex', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='name', full_name='CDOTAUserMsg_SharedCooldown.name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='cooldown', full_name='CDOTAUserMsg_SharedCooldown.cooldown', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='name_index', full_name='CDOTAUserMsg_SharedCooldown.name_index', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2473,
  serialized_end=2572,
)

# Descriptor for CDOTAUserMsg_SetNextAutobuyItem (single string field: name).
_CDOTAUSERMSG_SETNEXTAUTOBUYITEM = _descriptor.Descriptor(
  name='CDOTAUserMsg_SetNextAutobuyItem',
  full_name='CDOTAUserMsg_SetNextAutobuyItem',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='CDOTAUserMsg_SetNextAutobuyItem.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2574,
  serialized_end=2621,
)
# Descriptor for CDOTAUserMsg_HalloweenDrops
# (fields: item_defs [repeated], player_ids [repeated], prize_list).
_CDOTAUSERMSG_HALLOWEENDROPS = _descriptor.Descriptor(
  name='CDOTAUserMsg_HalloweenDrops',
  full_name='CDOTAUserMsg_HalloweenDrops',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='item_defs', full_name='CDOTAUserMsg_HalloweenDrops.item_defs', index=0,
      number=1, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='player_ids', full_name='CDOTAUserMsg_HalloweenDrops.player_ids', index=1,
      number=2, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='prize_list', full_name='CDOTAUserMsg_HalloweenDrops.prize_list', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2623,
  serialized_end=2711,
)

# Descriptor for the nested message CDOTAResponseQuerySerialized.Fact
# (fields: key [required], valtype [required enum, default 1], val_numeric,
# val_string); its ValueType enum descriptor is referenced via
# _CDOTARESPONSEQUERYSERIALIZED_FACT_VALUETYPE, defined elsewhere in this file.
_CDOTARESPONSEQUERYSERIALIZED_FACT = _descriptor.Descriptor(
  name='Fact',
  full_name='CDOTAResponseQuerySerialized.Fact',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='key', full_name='CDOTAResponseQuerySerialized.Fact.key', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='valtype', full_name='CDOTAResponseQuerySerialized.Fact.valtype', index=1,
      number=2, type=14, cpp_type=8, label=2,
      has_default_value=True, default_value=1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='val_numeric', full_name='CDOTAResponseQuerySerialized.Fact.val_numeric', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='val_string', full_name='CDOTAResponseQuerySerialized.Fact.val_string', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _CDOTARESPONSEQUERYSERIALIZED_FACT_VALUETYPE,
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2798,
  serialized_end=2968,
)

# Descriptor for CDOTAResponseQuerySerialized
# (single repeated message field: facts; nests the Fact descriptor above).
_CDOTARESPONSEQUERYSERIALIZED = _descriptor.Descriptor(
  name='CDOTAResponseQuerySerialized',
  full_name='CDOTAResponseQuerySerialized',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='facts', full_name='CDOTAResponseQuerySerialized.facts', index=0,
      number=1, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_CDOTARESPONSEQUERYSERIALIZED_FACT, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2714,
  serialized_end=2968,
)

# Descriptor for CDOTASpeechMatchOnClient
# (fields: concept, recipient_type, responsequery [message],
# randomseed [sfixed32, default 0]).
_CDOTASPEECHMATCHONCLIENT = _descriptor.Descriptor(
  name='CDOTASpeechMatchOnClient',
  full_name='CDOTASpeechMatchOnClient',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='concept', full_name='CDOTASpeechMatchOnClient.concept', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='recipient_type', full_name='CDOTASpeechMatchOnClient.recipient_type', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='responsequery', full_name='CDOTASpeechMatchOnClient.responsequery', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='randomseed', full_name='CDOTASpeechMatchOnClient.randomseed', index=3,
      number=4, type=15, cpp_type=1, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=2971,
  serialized_end=3115,
)
# Descriptors for the six nested messages of CDOTAUserMsg_UnitEvent, followed
# by the CDOTAUserMsg_UnitEvent descriptor itself, which lists them all in
# nested_types. Each nested payload corresponds to one optional sub-message
# field of the outer event.

# CDOTAUserMsg_UnitEvent.Speech
# (fields: concept, response [string], recipient_type, level,
# muteable [bool, default False]).
_CDOTAUSERMSG_UNITEVENT_SPEECH = _descriptor.Descriptor(
  name='Speech',
  full_name='CDOTAUserMsg_UnitEvent.Speech',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='concept', full_name='CDOTAUserMsg_UnitEvent.Speech.concept', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='response', full_name='CDOTAUserMsg_UnitEvent.Speech.response', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='recipient_type', full_name='CDOTAUserMsg_UnitEvent.Speech.recipient_type', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='level', full_name='CDOTAUserMsg_UnitEvent.Speech.level', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='muteable', full_name='CDOTAUserMsg_UnitEvent.Speech.muteable', index=4,
      number=5, type=8, cpp_type=7, label=1,
      has_default_value=True, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=3626,
  serialized_end=3733,
)

# CDOTAUserMsg_UnitEvent.SpeechMute (single float field: delay, default 0.5).
_CDOTAUSERMSG_UNITEVENT_SPEECHMUTE = _descriptor.Descriptor(
  name='SpeechMute',
  full_name='CDOTAUserMsg_UnitEvent.SpeechMute',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='delay', full_name='CDOTAUserMsg_UnitEvent.SpeechMute.delay', index=0,
      number=1, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0.5,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=3735,
  serialized_end=3767,
)

# CDOTAUserMsg_UnitEvent.AddGesture
# (fields: activity [enum, default -1], slot, fade_in [default 0],
# fade_out [default 0.1]).
_CDOTAUSERMSG_UNITEVENT_ADDGESTURE = _descriptor.Descriptor(
  name='AddGesture',
  full_name='CDOTAUserMsg_UnitEvent.AddGesture',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='activity', full_name='CDOTAUserMsg_UnitEvent.AddGesture.activity', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='slot', full_name='CDOTAUserMsg_UnitEvent.AddGesture.slot', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='fade_in', full_name='CDOTAUserMsg_UnitEvent.AddGesture.fade_in', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='fade_out', full_name='CDOTAUserMsg_UnitEvent.AddGesture.fade_out', index=3,
      number=4, type=2, cpp_type=6, label=1,
      has_default_value=True, default_value=0.1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=3769,
  serialized_end=3880,
)

# CDOTAUserMsg_UnitEvent.RemoveGesture (single enum field: activity, default -1).
_CDOTAUSERMSG_UNITEVENT_REMOVEGESTURE = _descriptor.Descriptor(
  name='RemoveGesture',
  full_name='CDOTAUserMsg_UnitEvent.RemoveGesture',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='activity', full_name='CDOTAUserMsg_UnitEvent.RemoveGesture.activity', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=3882,
  serialized_end=3939,
)

# CDOTAUserMsg_UnitEvent.BloodImpact (fields: scale, x_normal, y_normal).
_CDOTAUSERMSG_UNITEVENT_BLOODIMPACT = _descriptor.Descriptor(
  name='BloodImpact',
  full_name='CDOTAUserMsg_UnitEvent.BloodImpact',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='scale', full_name='CDOTAUserMsg_UnitEvent.BloodImpact.scale', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='x_normal', full_name='CDOTAUserMsg_UnitEvent.BloodImpact.x_normal', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='y_normal', full_name='CDOTAUserMsg_UnitEvent.BloodImpact.y_normal', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=3941,
  serialized_end=4005,
)

# CDOTAUserMsg_UnitEvent.FadeGesture (single enum field: activity, default -1).
_CDOTAUSERMSG_UNITEVENT_FADEGESTURE = _descriptor.Descriptor(
  name='FadeGesture',
  full_name='CDOTAUserMsg_UnitEvent.FadeGesture',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='activity', full_name='CDOTAUserMsg_UnitEvent.FadeGesture.activity', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=-1,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=4007,
  serialized_end=4062,
)

# Descriptor for CDOTAUserMsg_UnitEvent: a required msg_type enum plus a
# required entity_index, and one optional sub-message field per event payload
# (speech, speech_mute, add_gesture, remove_gesture, blood_impact,
# fade_gesture, speech_match_on_client). All six nested descriptors above are
# registered in nested_types.
_CDOTAUSERMSG_UNITEVENT = _descriptor.Descriptor(
  name='CDOTAUserMsg_UnitEvent',
  full_name='CDOTAUserMsg_UnitEvent',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='msg_type', full_name='CDOTAUserMsg_UnitEvent.msg_type', index=0,
      number=1, type=14, cpp_type=8, label=2,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='entity_index', full_name='CDOTAUserMsg_UnitEvent.entity_index', index=1,
      number=2, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='speech', full_name='CDOTAUserMsg_UnitEvent.speech', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='speech_mute', full_name='CDOTAUserMsg_UnitEvent.speech_mute', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='add_gesture', full_name='CDOTAUserMsg_UnitEvent.add_gesture', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='remove_gesture', full_name='CDOTAUserMsg_UnitEvent.remove_gesture', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='blood_impact', full_name='CDOTAUserMsg_UnitEvent.blood_impact', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='fade_gesture', full_name='CDOTAUserMsg_UnitEvent.fade_gesture', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='speech_match_on_client', full_name='CDOTAUserMsg_UnitEvent.speech_match_on_client', index=8,
      number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_CDOTAUSERMSG_UNITEVENT_SPEECH, _CDOTAUSERMSG_UNITEVENT_SPEECHMUTE, _CDOTAUSERMSG_UNITEVENT_ADDGESTURE, _CDOTAUSERMSG_UNITEVENT_REMOVEGESTURE, _CDOTAUSERMSG_UNITEVENT_BLOODIMPACT, _CDOTAUSERMSG_UNITEVENT_FADEGESTURE, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=3118,
  serialized_end=4062,
)
_CDOTAUSERMSG_ITEMPURCHASED = _descriptor.Descriptor(
name='CDOTAUserMsg_ItemPurchased',
full_name='CDOTAUserMsg_ItemPurchased',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='item_index', full_name='CDOTAUserMsg_ItemPurchased.item_index', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=4064,
serialized_end=4112,
)
_CDOTAUSERMSG_ITEMFOUND = _descriptor.Descriptor(
name='CDOTAUserMsg_ItemFound',
full_name='CDOTAUserMsg_ItemFound',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='player', full_name='CDOTAUserMsg_ItemFound.player', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='quality', full_name='CDOTAUserMsg_ItemFound.quality', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='rarity', full_name='CDOTAUserMsg_ItemFound.rarity', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='method', full_name='CDOTAUserMsg_ItemFound.method', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='itemdef', full_name='CDOTAUserMsg_ItemFound.itemdef', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=4114,
serialized_end=4220,
)
# Generated descriptor for the empty nested message
# CDOTAUserMsg_ParticleManager.ReleaseParticleIndex (protoc output -- do not
# edit by hand). It carries no fields; its presence alone signals the action.
_CDOTAUSERMSG_PARTICLEMANAGER_RELEASEPARTICLEINDEX = _descriptor.Descriptor(
  name='ReleaseParticleIndex',
  full_name='CDOTAUserMsg_ParticleManager.ReleaseParticleIndex',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=5331,
  serialized_end=5353,
)
# Generated descriptor for CDOTAUserMsg_ParticleManager.CreateParticle
# (protoc output -- do not edit by hand). Three optional int32 fields:
# particle_name_index, attach_type, entity_handle.
_CDOTAUSERMSG_PARTICLEMANAGER_CREATEPARTICLE = _descriptor.Descriptor(
  name='CreateParticle',
  full_name='CDOTAUserMsg_ParticleManager.CreateParticle',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='particle_name_index', full_name='CDOTAUserMsg_ParticleManager.CreateParticle.particle_name_index', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attach_type', full_name='CDOTAUserMsg_ParticleManager.CreateParticle.attach_type', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='entity_handle', full_name='CDOTAUserMsg_ParticleManager.CreateParticle.entity_handle', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=5355,
  serialized_end=5444,
)
# Generated descriptor for CDOTAUserMsg_ParticleManager.DestroyParticle
# (protoc output -- do not edit by hand). Single optional bool field
# destroy_immediately (type=8).
_CDOTAUSERMSG_PARTICLEMANAGER_DESTROYPARTICLE = _descriptor.Descriptor(
  name='DestroyParticle',
  full_name='CDOTAUserMsg_ParticleManager.DestroyParticle',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='destroy_immediately', full_name='CDOTAUserMsg_ParticleManager.DestroyParticle.destroy_immediately', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=5446,
  serialized_end=5492,
)
# Generated descriptor for
# CDOTAUserMsg_ParticleManager.DestroyParticleInvolving (protoc output -- do
# not edit by hand). Note field numbers are 1 and 3; number 2 is unused in
# this message per the source .proto.
_CDOTAUSERMSG_PARTICLEMANAGER_DESTROYPARTICLEINVOLVING = _descriptor.Descriptor(
  name='DestroyParticleInvolving',
  full_name='CDOTAUserMsg_ParticleManager.DestroyParticleInvolving',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='destroy_immediately', full_name='CDOTAUserMsg_ParticleManager.DestroyParticleInvolving.destroy_immediately', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='entity_handle', full_name='CDOTAUserMsg_ParticleManager.DestroyParticleInvolving.entity_handle', index=1,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=5494,
  serialized_end=5572,
)
# Generated descriptor for CDOTAUserMsg_ParticleManager.UpdateParticle
# (protoc output -- do not edit by hand). control_point is int32; position is
# a sub-message (type=11) whose concrete message_type is linked up later by
# the generated module.
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLE = _descriptor.Descriptor(
  name='UpdateParticle',
  full_name='CDOTAUserMsg_ParticleManager.UpdateParticle',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='control_point', full_name='CDOTAUserMsg_ParticleManager.UpdateParticle.control_point', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='position', full_name='CDOTAUserMsg_ParticleManager.UpdateParticle.position', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=5574,
  serialized_end=5644,
)
# Generated descriptor for CDOTAUserMsg_ParticleManager.UpdateParticleFwd
# (protoc output -- do not edit by hand). control_point is int32; forward is
# a sub-message (type=11).
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFWD = _descriptor.Descriptor(
  name='UpdateParticleFwd',
  full_name='CDOTAUserMsg_ParticleManager.UpdateParticleFwd',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='control_point', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleFwd.control_point', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='forward', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleFwd.forward', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=5646,
  serialized_end=5718,
)
# Generated descriptor for CDOTAUserMsg_ParticleManager.UpdateParticleOrient
# (protoc output -- do not edit by hand). control_point is int32; forward,
# right and up are sub-messages (type=11) forming an orientation basis.
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEORIENT = _descriptor.Descriptor(
  name='UpdateParticleOrient',
  full_name='CDOTAUserMsg_ParticleManager.UpdateParticleOrient',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='control_point', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleOrient.control_point', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='forward', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleOrient.forward', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='right', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleOrient.right', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='up', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleOrient.up', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=5721,
  serialized_end=5849,
)
# Generated descriptor for
# CDOTAUserMsg_ParticleManager.UpdateParticleFallback (protoc output -- do
# not edit by hand). control_point is int32; position is a sub-message.
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFALLBACK = _descriptor.Descriptor(
  name='UpdateParticleFallback',
  full_name='CDOTAUserMsg_ParticleManager.UpdateParticleFallback',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='control_point', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleFallback.control_point', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='position', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleFallback.position', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=5851,
  serialized_end=5929,
)
# Generated descriptor for CDOTAUserMsg_ParticleManager.UpdateParticleOffset
# (protoc output -- do not edit by hand). control_point is int32;
# origin_offset is a sub-message.
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEOFFSET = _descriptor.Descriptor(
  name='UpdateParticleOffset',
  full_name='CDOTAUserMsg_ParticleManager.UpdateParticleOffset',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='control_point', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleOffset.control_point', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='origin_offset', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleOffset.origin_offset', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=5931,
  serialized_end=6012,
)
# Generated descriptor for CDOTAUserMsg_ParticleManager.UpdateParticleEnt
# (protoc output -- do not edit by hand). Four int32 fields plus a
# fallback_position sub-message (type=11).
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEENT = _descriptor.Descriptor(
  name='UpdateParticleEnt',
  full_name='CDOTAUserMsg_ParticleManager.UpdateParticleEnt',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='control_point', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleEnt.control_point', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='entity_handle', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleEnt.entity_handle', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attach_type', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleEnt.attach_type', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='attachment', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleEnt.attachment', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='fallback_position', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleEnt.fallback_position', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=6015,
  serialized_end=6161,
)
# Generated descriptor for
# CDOTAUserMsg_ParticleManager.UpdateParticleSetFrozen (protoc output -- do
# not edit by hand). Single optional bool field set_frozen.
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLESETFROZEN = _descriptor.Descriptor(
  name='UpdateParticleSetFrozen',
  full_name='CDOTAUserMsg_ParticleManager.UpdateParticleSetFrozen',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='set_frozen', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleSetFrozen.set_frozen', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=6163,
  serialized_end=6208,
)
# Generated descriptor for
# CDOTAUserMsg_ParticleManager.UpdateParticleShouldDraw (protoc output -- do
# not edit by hand). Single optional bool field should_draw.
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLESHOULDDRAW = _descriptor.Descriptor(
  name='UpdateParticleShouldDraw',
  full_name='CDOTAUserMsg_ParticleManager.UpdateParticleShouldDraw',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='should_draw', full_name='CDOTAUserMsg_ParticleManager.UpdateParticleShouldDraw.should_draw', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=6210,
  serialized_end=6257,
)
# Generated descriptor for the CDOTAUserMsg_ParticleManager envelope message
# (protoc output -- do not edit by hand). 'type' (required enum, label=2) and
# 'index' (required uint32) are followed by one optional sub-message per
# particle operation; the nested_types list registers all the sub-message
# descriptors defined above. Field number 13 is absent here, and
# update_particle_should_draw (number=14) is declared before
# update_particle_set_frozen (number=15), matching the source .proto order.
_CDOTAUSERMSG_PARTICLEMANAGER = _descriptor.Descriptor(
  name='CDOTAUserMsg_ParticleManager',
  full_name='CDOTAUserMsg_ParticleManager',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='type', full_name='CDOTAUserMsg_ParticleManager.type', index=0,
      number=1, type=14, cpp_type=8, label=2,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='index', full_name='CDOTAUserMsg_ParticleManager.index', index=1,
      number=2, type=13, cpp_type=3, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='release_particle_index', full_name='CDOTAUserMsg_ParticleManager.release_particle_index', index=2,
      number=3, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='create_particle', full_name='CDOTAUserMsg_ParticleManager.create_particle', index=3,
      number=4, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='destroy_particle', full_name='CDOTAUserMsg_ParticleManager.destroy_particle', index=4,
      number=5, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='destroy_particle_involving', full_name='CDOTAUserMsg_ParticleManager.destroy_particle_involving', index=5,
      number=6, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='update_particle', full_name='CDOTAUserMsg_ParticleManager.update_particle', index=6,
      number=7, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='update_particle_fwd', full_name='CDOTAUserMsg_ParticleManager.update_particle_fwd', index=7,
      number=8, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='update_particle_orient', full_name='CDOTAUserMsg_ParticleManager.update_particle_orient', index=8,
      number=9, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='update_particle_fallback', full_name='CDOTAUserMsg_ParticleManager.update_particle_fallback', index=9,
      number=10, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='update_particle_offset', full_name='CDOTAUserMsg_ParticleManager.update_particle_offset', index=10,
      number=11, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='update_particle_ent', full_name='CDOTAUserMsg_ParticleManager.update_particle_ent', index=11,
      number=12, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='update_particle_should_draw', full_name='CDOTAUserMsg_ParticleManager.update_particle_should_draw', index=12,
      number=14, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='update_particle_set_frozen', full_name='CDOTAUserMsg_ParticleManager.update_particle_set_frozen', index=13,
      number=15, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[_CDOTAUSERMSG_PARTICLEMANAGER_RELEASEPARTICLEINDEX, _CDOTAUSERMSG_PARTICLEMANAGER_CREATEPARTICLE, _CDOTAUSERMSG_PARTICLEMANAGER_DESTROYPARTICLE, _CDOTAUSERMSG_PARTICLEMANAGER_DESTROYPARTICLEINVOLVING, _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLE, _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFWD, _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEORIENT, _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFALLBACK, _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEOFFSET, _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEENT, _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLESETFROZEN, _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLESHOULDDRAW, ],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=4223,
  serialized_end=6257,
)
# Generated descriptor for the CDOTAUserMsg_OverheadEvent message (protoc
# output -- do not edit by hand). message_type is a required enum (label=2,
# has_default_value=True); the remaining four fields are optional int32s.
_CDOTAUSERMSG_OVERHEADEVENT = _descriptor.Descriptor(
  name='CDOTAUserMsg_OverheadEvent',
  full_name='CDOTAUserMsg_OverheadEvent',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='message_type', full_name='CDOTAUserMsg_OverheadEvent.message_type', index=0,
      number=1, type=14, cpp_type=8, label=2,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='value', full_name='CDOTAUserMsg_OverheadEvent.value', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='target_player_entindex', full_name='CDOTAUserMsg_OverheadEvent.target_player_entindex', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='target_entindex', full_name='CDOTAUserMsg_OverheadEvent.target_entindex', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='source_player_entindex', full_name='CDOTAUserMsg_OverheadEvent.source_player_entindex', index=4,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=6260,
  serialized_end=6457,
)
# Generated descriptor for the CDOTAUserMsg_TutorialTipInfo message (protoc
# output -- do not edit by hand). Python 2 era output: string defaults use
# unicode("", "utf-8").
_CDOTAUSERMSG_TUTORIALTIPINFO = _descriptor.Descriptor(
  name='CDOTAUserMsg_TutorialTipInfo',
  full_name='CDOTAUserMsg_TutorialTipInfo',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='name', full_name='CDOTAUserMsg_TutorialTipInfo.name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='progress', full_name='CDOTAUserMsg_TutorialTipInfo.progress', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=6459,
  serialized_end=6521,
)
# Generated descriptor for the CDOTAUserMsg_TutorialFinish message (protoc
# output -- do not edit by hand). Three optional strings (heading, emblem,
# body) and one optional bool (success).
_CDOTAUSERMSG_TUTORIALFINISH = _descriptor.Descriptor(
  name='CDOTAUserMsg_TutorialFinish',
  full_name='CDOTAUserMsg_TutorialFinish',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='heading', full_name='CDOTAUserMsg_TutorialFinish.heading', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='emblem', full_name='CDOTAUserMsg_TutorialFinish.emblem', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='body', full_name='CDOTAUserMsg_TutorialFinish.body', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='success', full_name='CDOTAUserMsg_TutorialFinish.success', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=6523,
  serialized_end=6616,
)
# Generated descriptor for the CDOTAUserMsg_SendGenericToolTip message
# (protoc output -- do not edit by hand). Optional strings title/text, an
# int32 entindex, and a bool close flag.
_CDOTAUSERMSG_SENDGENERICTOOLTIP = _descriptor.Descriptor(
  name='CDOTAUserMsg_SendGenericToolTip',
  full_name='CDOTAUserMsg_SendGenericToolTip',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='title', full_name='CDOTAUserMsg_SendGenericToolTip.title', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='text', full_name='CDOTAUserMsg_SendGenericToolTip.text', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='entindex', full_name='CDOTAUserMsg_SendGenericToolTip.entindex', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='close', full_name='CDOTAUserMsg_SendGenericToolTip.close', index=3,
      number=4, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=6618,
  serialized_end=6713,
)
# Generated descriptor for the CDOTAUserMsg_WorldLine message (protoc output
# -- do not edit by hand). player_id is int32; worldline is a sub-message
# (type=11).
_CDOTAUSERMSG_WORLDLINE = _descriptor.Descriptor(
  name='CDOTAUserMsg_WorldLine',
  full_name='CDOTAUserMsg_WorldLine',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='player_id', full_name='CDOTAUserMsg_WorldLine.player_id', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='worldline', full_name='CDOTAUserMsg_WorldLine.worldline', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=6715,
  serialized_end=6798,
)
# Generated descriptor for the CDOTAUserMsg_TournamentDrop message (protoc
# output -- do not edit by hand). Optional string winner_name and int32
# event_type.
_CDOTAUSERMSG_TOURNAMENTDROP = _descriptor.Descriptor(
  name='CDOTAUserMsg_TournamentDrop',
  full_name='CDOTAUserMsg_TournamentDrop',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='winner_name', full_name='CDOTAUserMsg_TournamentDrop.winner_name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='event_type', full_name='CDOTAUserMsg_TournamentDrop.event_type', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=6800,
  serialized_end=6870,
)
# Generated descriptor for the CDOTAUserMsg_ChatWheel message (protoc output
# -- do not edit by hand). chat_message is an optional enum with a default;
# player_id and account_id are optional uint32s (type=13).
_CDOTAUSERMSG_CHATWHEEL = _descriptor.Descriptor(
  name='CDOTAUserMsg_ChatWheel',
  full_name='CDOTAUserMsg_ChatWheel',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='chat_message', full_name='CDOTAUserMsg_ChatWheel.chat_message', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=True, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='player_id', full_name='CDOTAUserMsg_ChatWheel.player_id', index=1,
      number=2, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='account_id', full_name='CDOTAUserMsg_ChatWheel.account_id', index=2,
      number=3, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=6872,
  serialized_end=6996,
)
# Generated descriptor for the CDOTAUserMsg_ReceivedXmasGift message (protoc
# output -- do not edit by hand). int32 player_id, string item_name, int32
# inventory_slot.
_CDOTAUSERMSG_RECEIVEDXMASGIFT = _descriptor.Descriptor(
  name='CDOTAUserMsg_ReceivedXmasGift',
  full_name='CDOTAUserMsg_ReceivedXmasGift',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='player_id', full_name='CDOTAUserMsg_ReceivedXmasGift.player_id', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='item_name', full_name='CDOTAUserMsg_ReceivedXmasGift.item_name', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='inventory_slot', full_name='CDOTAUserMsg_ReceivedXmasGift.inventory_slot', index=2,
      number=3, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=6998,
  serialized_end=7091,
)
# Generated descriptor for the CDOTAUserMsg_ShowSurvey message (protoc output
# -- do not edit by hand). Single optional int32 field survey_id.
_CDOTAUSERMSG_SHOWSURVEY = _descriptor.Descriptor(
  name='CDOTAUserMsg_ShowSurvey',
  full_name='CDOTAUserMsg_ShowSurvey',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='survey_id', full_name='CDOTAUserMsg_ShowSurvey.survey_id', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=7093,
  serialized_end=7137,
)
# Generated descriptor for the CDOTAUserMsg_UpdateSharedContent message
# (protoc output -- do not edit by hand). Single optional int32 field
# slot_type.
_CDOTAUSERMSG_UPDATESHAREDCONTENT = _descriptor.Descriptor(
  name='CDOTAUserMsg_UpdateSharedContent',
  full_name='CDOTAUserMsg_UpdateSharedContent',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='slot_type', full_name='CDOTAUserMsg_UpdateSharedContent.slot_type', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=7139,
  serialized_end=7192,
)
# Generated descriptor for the empty CDOTAUserMsg_TutorialRequestExp message
# (protoc output -- do not edit by hand). No fields; the message itself is
# the signal.
_CDOTAUSERMSG_TUTORIALREQUESTEXP = _descriptor.Descriptor(
  name='CDOTAUserMsg_TutorialRequestExp',
  full_name='CDOTAUserMsg_TutorialRequestExp',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=7194,
  serialized_end=7227,
)
# Generated descriptor for the CDOTAUserMsg_TutorialFade message (protoc
# output -- do not edit by hand). Single optional int32 field tgt_alpha.
_CDOTAUSERMSG_TUTORIALFADE = _descriptor.Descriptor(
  name='CDOTAUserMsg_TutorialFade',
  full_name='CDOTAUserMsg_TutorialFade',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='tgt_alpha', full_name='CDOTAUserMsg_TutorialFade.tgt_alpha', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=7229,
  serialized_end=7275,
)
# Generated descriptor for the CDOTAUserMsg_TutorialPingMinimap message
# (protoc output -- do not edit by hand). uint32 player_id, float pos_x/y/z
# (type=2), int32 entity_index.
_CDOTAUSERMSG_TUTORIALPINGMINIMAP = _descriptor.Descriptor(
  name='CDOTAUserMsg_TutorialPingMinimap',
  full_name='CDOTAUserMsg_TutorialPingMinimap',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='player_id', full_name='CDOTAUserMsg_TutorialPingMinimap.player_id', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pos_x', full_name='CDOTAUserMsg_TutorialPingMinimap.pos_x', index=1,
      number=2, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pos_y', full_name='CDOTAUserMsg_TutorialPingMinimap.pos_y', index=2,
      number=3, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='pos_z', full_name='CDOTAUserMsg_TutorialPingMinimap.pos_z', index=3,
      number=4, type=2, cpp_type=6, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='entity_index', full_name='CDOTAUserMsg_TutorialPingMinimap.entity_index', index=4,
      number=5, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=7277,
  serialized_end=7397,
)
# Descriptor for CDOTA_UM_GamerulesStateChanged: a single optional uint32
# `state` field (field number 1). Protoc-generated; do not edit by hand.
_CDOTA_UM_GAMERULESSTATECHANGED = _descriptor.Descriptor(
  name='CDOTA_UM_GamerulesStateChanged',
  full_name='CDOTA_UM_GamerulesStateChanged',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='state', full_name='CDOTA_UM_GamerulesStateChanged.state', index=0,
      number=1, type=13, cpp_type=3, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=7399,
  serialized_end=7446,
)
# Descriptor for CDOTAUserMsg_AddQuestLogEntry: string npc_name/npc_dialog,
# bool quest, int32 quest_type (field numbers 1-4).
# NOTE: the `unicode(...)` string defaults mean this generated module
# targets Python 2. Protoc-generated; do not edit by hand.
_CDOTAUSERMSG_ADDQUESTLOGENTRY = _descriptor.Descriptor(
  name='CDOTAUserMsg_AddQuestLogEntry',
  full_name='CDOTAUserMsg_AddQuestLogEntry',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='npc_name', full_name='CDOTAUserMsg_AddQuestLogEntry.npc_name', index=0,
      number=1, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='npc_dialog', full_name='CDOTAUserMsg_AddQuestLogEntry.npc_dialog', index=1,
      number=2, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=unicode("", "utf-8"),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='quest', full_name='CDOTAUserMsg_AddQuestLogEntry.quest', index=2,
      number=3, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='quest_type', full_name='CDOTAUserMsg_AddQuestLogEntry.quest_type', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=7448,
  serialized_end=7552,
)
# Descriptor for CDOTAUserMsg_SendStatPopup: int32 player_id plus a
# message-typed `statpopup` field whose message_type is patched in later
# by the cross-linking section. Protoc-generated; do not edit by hand.
_CDOTAUSERMSG_SENDSTATPOPUP = _descriptor.Descriptor(
  name='CDOTAUserMsg_SendStatPopup',
  full_name='CDOTAUserMsg_SendStatPopup',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='player_id', full_name='CDOTAUserMsg_SendStatPopup.player_id', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='statpopup', full_name='CDOTAUserMsg_SendStatPopup.statpopup', index=1,
      number=2, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=7554,
  serialized_end=7645,
)
# Descriptor for CDOTAUserMsg_SendRoshanPopup: bool reclaimed and
# int32 gametime (field numbers 1-2). Protoc-generated; do not edit by hand.
_CDOTAUSERMSG_SENDROSHANPOPUP = _descriptor.Descriptor(
  name='CDOTAUserMsg_SendRoshanPopup',
  full_name='CDOTAUserMsg_SendRoshanPopup',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='reclaimed', full_name='CDOTAUserMsg_SendRoshanPopup.reclaimed', index=0,
      number=1, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='gametime', full_name='CDOTAUserMsg_SendRoshanPopup.gametime', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=7647,
  serialized_end=7714,
)
# Descriptor for CDOTAUserMsg_SendFinalGold: two repeated uint32 fields
# (label=3 means repeated), reliable_gold and unreliable_gold.
# Protoc-generated; do not edit by hand.
_CDOTAUSERMSG_SENDFINALGOLD = _descriptor.Descriptor(
  name='CDOTAUserMsg_SendFinalGold',
  full_name='CDOTAUserMsg_SendFinalGold',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='reliable_gold', full_name='CDOTAUserMsg_SendFinalGold.reliable_gold', index=0,
      number=1, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='unreliable_gold', full_name='CDOTAUserMsg_SendFinalGold.unreliable_gold', index=1,
      number=2, type=13, cpp_type=3, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  serialized_start=7716,
  serialized_end=7792,
)
# --- Descriptor cross-linking (emitted by protoc after all Descriptors) ---
# Patches in references that could not be resolved while the Descriptor
# objects were being constructed: message-/enum-typed fields get their
# target descriptor (including types from the imported *_pb2 modules),
# and nested types get their containing_type set. Do not edit by hand.
_CDOTAUSERMSG_CHATEVENT.fields_by_name['type'].enum_type = _DOTA_CHAT_MESSAGE
_CDOTAUSERMSG_COMBATLOGDATA.fields_by_name['type'].enum_type = _DOTA_COMBATLOG_TYPES
_CDOTAUSERMSG_COMBATHEROPOSITIONS.fields_by_name['world_pos'].message_type = networkbasetypes_pb2._CMSGVECTOR2D
_CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER_ABILITY.containing_type = _CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER;
_CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER.fields_by_name['abilities'].message_type = _CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER_ABILITY
_CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER.containing_type = _CDOTAUSERMSG_MINIKILLCAMINFO;
_CDOTAUSERMSG_MINIKILLCAMINFO.fields_by_name['attackers'].message_type = _CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER
_CDOTAUSERMSG_GLOBALLIGHTDIRECTION.fields_by_name['direction'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTAUSERMSG_LOCATIONPING.fields_by_name['location_ping'].message_type = dota_commonmessages_pb2._CDOTAMSG_LOCATIONPING
_CDOTAUSERMSG_ITEMALERT.fields_by_name['item_alert'].message_type = dota_commonmessages_pb2._CDOTAMSG_ITEMALERT
_CDOTAUSERMSG_MAPLINE.fields_by_name['mapline'].message_type = dota_commonmessages_pb2._CDOTAMSG_MAPLINE
_CDOTAUSERMSG_MINIMAPDEBUGPOINT.fields_by_name['location'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTAUSERMSG_CREATELINEARPROJECTILE.fields_by_name['origin'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTAUSERMSG_CREATELINEARPROJECTILE.fields_by_name['velocity'].message_type = networkbasetypes_pb2._CMSGVECTOR2D
_CDOTAUSERMSG_NEVERMOREREQUIEM.fields_by_name['origin'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTARESPONSEQUERYSERIALIZED_FACT.fields_by_name['valtype'].enum_type = _CDOTARESPONSEQUERYSERIALIZED_FACT_VALUETYPE
_CDOTARESPONSEQUERYSERIALIZED_FACT.containing_type = _CDOTARESPONSEQUERYSERIALIZED;
_CDOTARESPONSEQUERYSERIALIZED_FACT_VALUETYPE.containing_type = _CDOTARESPONSEQUERYSERIALIZED_FACT;
_CDOTARESPONSEQUERYSERIALIZED.fields_by_name['facts'].message_type = _CDOTARESPONSEQUERYSERIALIZED_FACT
_CDOTASPEECHMATCHONCLIENT.fields_by_name['responsequery'].message_type = _CDOTARESPONSEQUERYSERIALIZED
_CDOTAUSERMSG_UNITEVENT_SPEECH.containing_type = _CDOTAUSERMSG_UNITEVENT;
_CDOTAUSERMSG_UNITEVENT_SPEECHMUTE.containing_type = _CDOTAUSERMSG_UNITEVENT;
_CDOTAUSERMSG_UNITEVENT_ADDGESTURE.fields_by_name['activity'].enum_type = ai_activity_pb2._ACTIVITY
_CDOTAUSERMSG_UNITEVENT_ADDGESTURE.containing_type = _CDOTAUSERMSG_UNITEVENT;
_CDOTAUSERMSG_UNITEVENT_REMOVEGESTURE.fields_by_name['activity'].enum_type = ai_activity_pb2._ACTIVITY
_CDOTAUSERMSG_UNITEVENT_REMOVEGESTURE.containing_type = _CDOTAUSERMSG_UNITEVENT;
_CDOTAUSERMSG_UNITEVENT_BLOODIMPACT.containing_type = _CDOTAUSERMSG_UNITEVENT;
_CDOTAUSERMSG_UNITEVENT_FADEGESTURE.fields_by_name['activity'].enum_type = ai_activity_pb2._ACTIVITY
_CDOTAUSERMSG_UNITEVENT_FADEGESTURE.containing_type = _CDOTAUSERMSG_UNITEVENT;
_CDOTAUSERMSG_UNITEVENT.fields_by_name['msg_type'].enum_type = _EDOTAENTITYMESSAGES
_CDOTAUSERMSG_UNITEVENT.fields_by_name['speech'].message_type = _CDOTAUSERMSG_UNITEVENT_SPEECH
_CDOTAUSERMSG_UNITEVENT.fields_by_name['speech_mute'].message_type = _CDOTAUSERMSG_UNITEVENT_SPEECHMUTE
_CDOTAUSERMSG_UNITEVENT.fields_by_name['add_gesture'].message_type = _CDOTAUSERMSG_UNITEVENT_ADDGESTURE
_CDOTAUSERMSG_UNITEVENT.fields_by_name['remove_gesture'].message_type = _CDOTAUSERMSG_UNITEVENT_REMOVEGESTURE
_CDOTAUSERMSG_UNITEVENT.fields_by_name['blood_impact'].message_type = _CDOTAUSERMSG_UNITEVENT_BLOODIMPACT
_CDOTAUSERMSG_UNITEVENT.fields_by_name['fade_gesture'].message_type = _CDOTAUSERMSG_UNITEVENT_FADEGESTURE
_CDOTAUSERMSG_UNITEVENT.fields_by_name['speech_match_on_client'].message_type = _CDOTASPEECHMATCHONCLIENT
_CDOTAUSERMSG_PARTICLEMANAGER_RELEASEPARTICLEINDEX.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER_CREATEPARTICLE.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER_DESTROYPARTICLE.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER_DESTROYPARTICLEINVOLVING.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLE.fields_by_name['position'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLE.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFWD.fields_by_name['forward'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFWD.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEORIENT.fields_by_name['forward'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEORIENT.fields_by_name['right'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEORIENT.fields_by_name['up'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEORIENT.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFALLBACK.fields_by_name['position'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFALLBACK.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEOFFSET.fields_by_name['origin_offset'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEOFFSET.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEENT.fields_by_name['fallback_position'].message_type = networkbasetypes_pb2._CMSGVECTOR
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEENT.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLESETFROZEN.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLESHOULDDRAW.containing_type = _CDOTAUSERMSG_PARTICLEMANAGER;
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['type'].enum_type = _DOTA_PARTICLE_MESSAGE
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['release_particle_index'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_RELEASEPARTICLEINDEX
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['create_particle'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_CREATEPARTICLE
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['destroy_particle'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_DESTROYPARTICLE
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['destroy_particle_involving'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_DESTROYPARTICLEINVOLVING
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['update_particle'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLE
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['update_particle_fwd'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFWD
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['update_particle_orient'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEORIENT
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['update_particle_fallback'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFALLBACK
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['update_particle_offset'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEOFFSET
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['update_particle_ent'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEENT
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['update_particle_should_draw'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLESHOULDDRAW
_CDOTAUSERMSG_PARTICLEMANAGER.fields_by_name['update_particle_set_frozen'].message_type = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLESETFROZEN
_CDOTAUSERMSG_OVERHEADEVENT.fields_by_name['message_type'].enum_type = _DOTA_OVERHEAD_ALERT
_CDOTAUSERMSG_WORLDLINE.fields_by_name['worldline'].message_type = dota_commonmessages_pb2._CDOTAMSG_WORLDLINE
_CDOTAUSERMSG_CHATWHEEL.fields_by_name['chat_message'].enum_type = dota_commonmessages_pb2._EDOTACHATWHEELMESSAGE
_CDOTAUSERMSG_SENDSTATPOPUP.fields_by_name['statpopup'].message_type = dota_commonmessages_pb2._CDOTAMSG_SENDSTATPOPUP
# Register every top-level message descriptor on the file DESCRIPTOR so it
# can be looked up by message name. Protoc-generated; do not edit by hand.
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_AIDebugLine'] = _CDOTAUSERMSG_AIDEBUGLINE
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_Ping'] = _CDOTAUSERMSG_PING
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_SwapVerify'] = _CDOTAUSERMSG_SWAPVERIFY
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_ChatEvent'] = _CDOTAUSERMSG_CHATEVENT
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_CombatLogData'] = _CDOTAUSERMSG_COMBATLOGDATA
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_CombatLogShowDeath'] = _CDOTAUSERMSG_COMBATLOGSHOWDEATH
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_BotChat'] = _CDOTAUSERMSG_BOTCHAT
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_CombatHeroPositions'] = _CDOTAUSERMSG_COMBATHEROPOSITIONS
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_MiniKillCamInfo'] = _CDOTAUSERMSG_MINIKILLCAMINFO
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_GlobalLightColor'] = _CDOTAUSERMSG_GLOBALLIGHTCOLOR
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_GlobalLightDirection'] = _CDOTAUSERMSG_GLOBALLIGHTDIRECTION
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_LocationPing'] = _CDOTAUSERMSG_LOCATIONPING
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_ItemAlert'] = _CDOTAUSERMSG_ITEMALERT
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_MinimapEvent'] = _CDOTAUSERMSG_MINIMAPEVENT
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_MapLine'] = _CDOTAUSERMSG_MAPLINE
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_MinimapDebugPoint'] = _CDOTAUSERMSG_MINIMAPDEBUGPOINT
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_CreateLinearProjectile'] = _CDOTAUSERMSG_CREATELINEARPROJECTILE
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_DestroyLinearProjectile'] = _CDOTAUSERMSG_DESTROYLINEARPROJECTILE
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_DodgeTrackingProjectiles'] = _CDOTAUSERMSG_DODGETRACKINGPROJECTILES
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_SpectatorPlayerClick'] = _CDOTAUSERMSG_SPECTATORPLAYERCLICK
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_NevermoreRequiem'] = _CDOTAUSERMSG_NEVERMOREREQUIEM
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_InvalidCommand'] = _CDOTAUSERMSG_INVALIDCOMMAND
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_HudError'] = _CDOTAUSERMSG_HUDERROR
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_SharedCooldown'] = _CDOTAUSERMSG_SHAREDCOOLDOWN
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_SetNextAutobuyItem'] = _CDOTAUSERMSG_SETNEXTAUTOBUYITEM
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_HalloweenDrops'] = _CDOTAUSERMSG_HALLOWEENDROPS
DESCRIPTOR.message_types_by_name['CDOTAResponseQuerySerialized'] = _CDOTARESPONSEQUERYSERIALIZED
DESCRIPTOR.message_types_by_name['CDOTASpeechMatchOnClient'] = _CDOTASPEECHMATCHONCLIENT
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_UnitEvent'] = _CDOTAUSERMSG_UNITEVENT
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_ItemPurchased'] = _CDOTAUSERMSG_ITEMPURCHASED
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_ItemFound'] = _CDOTAUSERMSG_ITEMFOUND
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_ParticleManager'] = _CDOTAUSERMSG_PARTICLEMANAGER
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_OverheadEvent'] = _CDOTAUSERMSG_OVERHEADEVENT
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_TutorialTipInfo'] = _CDOTAUSERMSG_TUTORIALTIPINFO
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_TutorialFinish'] = _CDOTAUSERMSG_TUTORIALFINISH
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_SendGenericToolTip'] = _CDOTAUSERMSG_SENDGENERICTOOLTIP
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_WorldLine'] = _CDOTAUSERMSG_WORLDLINE
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_TournamentDrop'] = _CDOTAUSERMSG_TOURNAMENTDROP
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_ChatWheel'] = _CDOTAUSERMSG_CHATWHEEL
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_ReceivedXmasGift'] = _CDOTAUSERMSG_RECEIVEDXMASGIFT
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_ShowSurvey'] = _CDOTAUSERMSG_SHOWSURVEY
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_UpdateSharedContent'] = _CDOTAUSERMSG_UPDATESHAREDCONTENT
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_TutorialRequestExp'] = _CDOTAUSERMSG_TUTORIALREQUESTEXP
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_TutorialFade'] = _CDOTAUSERMSG_TUTORIALFADE
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_TutorialPingMinimap'] = _CDOTAUSERMSG_TUTORIALPINGMINIMAP
DESCRIPTOR.message_types_by_name['CDOTA_UM_GamerulesStateChanged'] = _CDOTA_UM_GAMERULESSTATECHANGED
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_AddQuestLogEntry'] = _CDOTAUSERMSG_ADDQUESTLOGENTRY
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_SendStatPopup'] = _CDOTAUSERMSG_SENDSTATPOPUP
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_SendRoshanPopup'] = _CDOTAUSERMSG_SENDROSHANPOPUP
DESCRIPTOR.message_types_by_name['CDOTAUserMsg_SendFinalGold'] = _CDOTAUSERMSG_SENDFINALGOLD
# Generated message classes. The Python 2 __metaclass__ hook hands class
# creation to GeneratedProtocolMessageType, which builds the concrete
# message implementation from each class's DESCRIPTOR; the bodies are
# intentionally empty. Protoc-generated; do not edit by hand.
class CDOTAUserMsg_AIDebugLine(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_AIDEBUGLINE
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_AIDebugLine)
class CDOTAUserMsg_Ping(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_PING
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_Ping)
class CDOTAUserMsg_SwapVerify(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_SWAPVERIFY
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_SwapVerify)
class CDOTAUserMsg_ChatEvent(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_CHATEVENT
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ChatEvent)
class CDOTAUserMsg_CombatLogData(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_COMBATLOGDATA
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_CombatLogData)
class CDOTAUserMsg_CombatLogShowDeath(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_COMBATLOGSHOWDEATH
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_CombatLogShowDeath)
class CDOTAUserMsg_BotChat(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_BOTCHAT
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_BotChat)
class CDOTAUserMsg_CombatHeroPositions(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_COMBATHEROPOSITIONS
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_CombatHeroPositions)
# Generated message class with nested message types Attacker and
# Attacker.Ability, mirroring the nesting in the .proto definition.
# Built by the GeneratedProtocolMessageType metaclass from DESCRIPTOR.
class CDOTAUserMsg_MiniKillCamInfo(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  class Attacker(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    class Ability(_message.Message):
      __metaclass__ = _reflection.GeneratedProtocolMessageType
      DESCRIPTOR = _CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER_ABILITY
      # @@protoc_insertion_point(class_scope:CDOTAUserMsg_MiniKillCamInfo.Attacker.Ability)
    DESCRIPTOR = _CDOTAUSERMSG_MINIKILLCAMINFO_ATTACKER
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_MiniKillCamInfo.Attacker)
  DESCRIPTOR = _CDOTAUSERMSG_MINIKILLCAMINFO
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_MiniKillCamInfo)
# Generated protobuf message classes (implementation is synthesized by the
# GeneratedProtocolMessageType metaclass from each DESCRIPTOR).
class CDOTAUserMsg_GlobalLightColor(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_GLOBALLIGHTCOLOR
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_GlobalLightColor)
class CDOTAUserMsg_GlobalLightDirection(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_GLOBALLIGHTDIRECTION
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_GlobalLightDirection)
class CDOTAUserMsg_LocationPing(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_LOCATIONPING
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_LocationPing)
class CDOTAUserMsg_ItemAlert(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_ITEMALERT
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ItemAlert)
class CDOTAUserMsg_MinimapEvent(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_MINIMAPEVENT
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_MinimapEvent)
class CDOTAUserMsg_MapLine(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_MAPLINE
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_MapLine)
class CDOTAUserMsg_MinimapDebugPoint(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_MINIMAPDEBUGPOINT
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_MinimapDebugPoint)
class CDOTAUserMsg_CreateLinearProjectile(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_CREATELINEARPROJECTILE
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_CreateLinearProjectile)
class CDOTAUserMsg_DestroyLinearProjectile(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_DESTROYLINEARPROJECTILE
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_DestroyLinearProjectile)
class CDOTAUserMsg_DodgeTrackingProjectiles(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_DODGETRACKINGPROJECTILES
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_DodgeTrackingProjectiles)
class CDOTAUserMsg_SpectatorPlayerClick(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_SPECTATORPLAYERCLICK
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_SpectatorPlayerClick)
# Generated protobuf message classes (implementation is synthesized by the
# GeneratedProtocolMessageType metaclass from each DESCRIPTOR).
class CDOTAUserMsg_NevermoreRequiem(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_NEVERMOREREQUIEM
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_NevermoreRequiem)
class CDOTAUserMsg_InvalidCommand(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_INVALIDCOMMAND
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_InvalidCommand)
class CDOTAUserMsg_HudError(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_HUDERROR
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_HudError)
class CDOTAUserMsg_SharedCooldown(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_SHAREDCOOLDOWN
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_SharedCooldown)
class CDOTAUserMsg_SetNextAutobuyItem(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_SETNEXTAUTOBUYITEM
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_SetNextAutobuyItem)
class CDOTAUserMsg_HalloweenDrops(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_HALLOWEENDROPS
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_HalloweenDrops)
# Generated message class with a nested Fact message type, mirroring the
# nesting in the .proto definition. Built by the metaclass from DESCRIPTOR.
class CDOTAResponseQuerySerialized(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  class Fact(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTARESPONSEQUERYSERIALIZED_FACT
    # @@protoc_insertion_point(class_scope:CDOTAResponseQuerySerialized.Fact)
  DESCRIPTOR = _CDOTARESPONSEQUERYSERIALIZED
  # @@protoc_insertion_point(class_scope:CDOTAResponseQuerySerialized)
# Generated protobuf message class (built by the metaclass from DESCRIPTOR).
class CDOTASpeechMatchOnClient(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTASPEECHMATCHONCLIENT
  # @@protoc_insertion_point(class_scope:CDOTASpeechMatchOnClient)
# Generated message class with one nested message type per unit-event
# payload variant (Speech, SpeechMute, AddGesture, RemoveGesture,
# BloodImpact, FadeGesture). Built by the metaclass from DESCRIPTOR.
class CDOTAUserMsg_UnitEvent(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  class Speech(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_UNITEVENT_SPEECH
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_UnitEvent.Speech)
  class SpeechMute(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_UNITEVENT_SPEECHMUTE
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_UnitEvent.SpeechMute)
  class AddGesture(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_UNITEVENT_ADDGESTURE
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_UnitEvent.AddGesture)
  class RemoveGesture(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_UNITEVENT_REMOVEGESTURE
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_UnitEvent.RemoveGesture)
  class BloodImpact(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_UNITEVENT_BLOODIMPACT
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_UnitEvent.BloodImpact)
  class FadeGesture(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_UNITEVENT_FADEGESTURE
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_UnitEvent.FadeGesture)
  DESCRIPTOR = _CDOTAUSERMSG_UNITEVENT
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_UnitEvent)
# Generated protobuf message classes (built by the metaclass from DESCRIPTOR).
class CDOTAUserMsg_ItemPurchased(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_ITEMPURCHASED
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ItemPurchased)
class CDOTAUserMsg_ItemFound(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_ITEMFOUND
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ItemFound)
# Generated message class with one nested message type per particle-manager
# sub-command, mirroring the nesting in the .proto definition. Built by
# the GeneratedProtocolMessageType metaclass from DESCRIPTOR.
class CDOTAUserMsg_ParticleManager(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  class ReleaseParticleIndex(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_RELEASEPARTICLEINDEX
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.ReleaseParticleIndex)
  class CreateParticle(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_CREATEPARTICLE
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.CreateParticle)
  class DestroyParticle(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_DESTROYPARTICLE
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.DestroyParticle)
  class DestroyParticleInvolving(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_DESTROYPARTICLEINVOLVING
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.DestroyParticleInvolving)
  class UpdateParticle(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLE
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.UpdateParticle)
  class UpdateParticleFwd(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFWD
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.UpdateParticleFwd)
  class UpdateParticleOrient(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEORIENT
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.UpdateParticleOrient)
  class UpdateParticleFallback(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEFALLBACK
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.UpdateParticleFallback)
  class UpdateParticleOffset(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEOFFSET
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.UpdateParticleOffset)
  class UpdateParticleEnt(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLEENT
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.UpdateParticleEnt)
  class UpdateParticleSetFrozen(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLESETFROZEN
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.UpdateParticleSetFrozen)
  class UpdateParticleShouldDraw(_message.Message):
    __metaclass__ = _reflection.GeneratedProtocolMessageType
    DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER_UPDATEPARTICLESHOULDDRAW
    # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager.UpdateParticleShouldDraw)
  DESCRIPTOR = _CDOTAUSERMSG_PARTICLEMANAGER
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ParticleManager)
# Generated protobuf message classes (implementation is synthesized by the
# GeneratedProtocolMessageType metaclass from each DESCRIPTOR).
class CDOTAUserMsg_OverheadEvent(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_OVERHEADEVENT
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_OverheadEvent)
class CDOTAUserMsg_TutorialTipInfo(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_TUTORIALTIPINFO
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_TutorialTipInfo)
class CDOTAUserMsg_TutorialFinish(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_TUTORIALFINISH
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_TutorialFinish)
class CDOTAUserMsg_SendGenericToolTip(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_SENDGENERICTOOLTIP
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_SendGenericToolTip)
class CDOTAUserMsg_WorldLine(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_WORLDLINE
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_WorldLine)
class CDOTAUserMsg_TournamentDrop(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_TOURNAMENTDROP
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_TournamentDrop)
class CDOTAUserMsg_ChatWheel(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_CHATWHEEL
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ChatWheel)
class CDOTAUserMsg_ReceivedXmasGift(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_RECEIVEDXMASGIFT
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ReceivedXmasGift)
class CDOTAUserMsg_ShowSurvey(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_SHOWSURVEY
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_ShowSurvey)
class CDOTAUserMsg_UpdateSharedContent(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_UPDATESHAREDCONTENT
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_UpdateSharedContent)
class CDOTAUserMsg_TutorialRequestExp(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_TUTORIALREQUESTEXP
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_TutorialRequestExp)
class CDOTAUserMsg_TutorialFade(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_TUTORIALFADE
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_TutorialFade)
class CDOTAUserMsg_TutorialPingMinimap(_message.Message):
  __metaclass__ = _reflection.GeneratedProtocolMessageType
  DESCRIPTOR = _CDOTAUSERMSG_TUTORIALPINGMINIMAP
  # @@protoc_insertion_point(class_scope:CDOTAUserMsg_TutorialPingMinimap)
class CDOTA_UM_GamerulesStateChanged(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _CDOTA_UM_GAMERULESSTATECHANGED
# @@protoc_insertion_point(class_scope:CDOTA_UM_GamerulesStateChanged)
class CDOTAUserMsg_AddQuestLogEntry(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _CDOTAUSERMSG_ADDQUESTLOGENTRY
# @@protoc_insertion_point(class_scope:CDOTAUserMsg_AddQuestLogEntry)
class CDOTAUserMsg_SendStatPopup(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _CDOTAUSERMSG_SENDSTATPOPUP
# @@protoc_insertion_point(class_scope:CDOTAUserMsg_SendStatPopup)
class CDOTAUserMsg_SendRoshanPopup(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _CDOTAUSERMSG_SENDROSHANPOPUP
# @@protoc_insertion_point(class_scope:CDOTAUserMsg_SendRoshanPopup)
class CDOTAUserMsg_SendFinalGold(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _CDOTAUSERMSG_SENDFINALGOLD
# @@protoc_insertion_point(class_scope:CDOTAUserMsg_SendFinalGold)
# @@protoc_insertion_point(module_scope)
| mit |
hickford/youtube-dl | youtube_dl/extractor/rtlnl.py | 102 | 5637 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
int_or_none,
parse_duration,
)
class RtlNlIE(InfoExtractor):
    """Extractor for rtl.nl and rtlxl.nl videos.

    Fetches stream metadata from RTL's s4m JSON endpoint and exposes both the
    adaptive (encrypted m3u8) formats and two progressive MP4 renditions.
    """
    IE_NAME = 'rtl.nl'
    IE_DESC = 'rtl.nl and rtlxl.nl'
    _VALID_URL = r'''(?x)
        https?://(?:www\.)?
        (?:
            rtlxl\.nl/\#!/[^/]+/|
            rtl\.nl/system/videoplayer/(?:[^/]+/)+(?:video_)?embed\.html\b.+?\buuid=
        )
        (?P<id>[0-9a-f-]+)'''

    _TESTS = [{
        'url': 'http://www.rtlxl.nl/#!/rtl-nieuws-132237/6e4203a6-0a5e-3596-8424-c599a59e0677',
        'md5': 'cc16baa36a6c169391f0764fa6b16654',
        'info_dict': {
            'id': '6e4203a6-0a5e-3596-8424-c599a59e0677',
            'ext': 'mp4',
            'title': 'RTL Nieuws - Laat',
            'description': 'md5:6b61f66510c8889923b11f2778c72dc5',
            'timestamp': 1408051800,
            'upload_date': '20140814',
            'duration': 576.880,
        },
    }, {
        'url': 'http://www.rtl.nl/system/videoplayer/derden/rtlnieuws/video_embed.html#uuid=84ae5571-ac25-4225-ae0c-ef8d9efb2aed/autoplay=false',
        'md5': 'dea7474214af1271d91ef332fb8be7ea',
        'info_dict': {
            'id': '84ae5571-ac25-4225-ae0c-ef8d9efb2aed',
            'ext': 'mp4',
            'timestamp': 1424039400,
            'title': 'RTL Nieuws - Nieuwe beelden Kopenhagen: chaos direct na aanslag',
            'thumbnail': 're:^https?://screenshots\.rtl\.nl/system/thumb/sz=[0-9]+x[0-9]+/uuid=84ae5571-ac25-4225-ae0c-ef8d9efb2aed$',
            'upload_date': '20150215',
            'description': 'Er zijn nieuwe beelden vrijgegeven die vlak na de aanslag in Kopenhagen zijn gemaakt. Op de video is goed te zien hoe omstanders zich bekommeren om één van de slachtoffers, terwijl de eerste agenten ter plaatse komen.',
        }
    }, {
        # empty synopsis and missing episodes (see https://github.com/rg3/youtube-dl/issues/6275)
        'url': 'http://www.rtl.nl/system/videoplayer/derden/rtlnieuws/video_embed.html#uuid=f536aac0-1dc3-4314-920e-3bd1c5b3811a/autoplay=false',
        'info_dict': {
            'id': 'f536aac0-1dc3-4314-920e-3bd1c5b3811a',
            'ext': 'mp4',
            'title': 'RTL Nieuws - Meer beelden van overval juwelier',
            'thumbnail': 're:^https?://screenshots\.rtl\.nl/system/thumb/sz=[0-9]+x[0-9]+/uuid=f536aac0-1dc3-4314-920e-3bd1c5b3811a$',
            'timestamp': 1437233400,
            'upload_date': '20150718',
            'duration': 30.474,
        },
        'params': {
            'skip_download': True,
        },
    }, {
        # encrypted m3u8 streams, georestricted
        'url': 'http://www.rtlxl.nl/#!/afl-2-257632/52a74543-c504-4cde-8aa8-ec66fe8d68a7',
        'only_matching': True,
    }, {
        'url': 'http://www.rtl.nl/system/videoplayer/derden/embed.html#!/uuid=bb0353b0-d6a4-1dad-90e9-18fe75b8d1f0',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        uuid = self._match_id(url)

        info = self._download_json(
            'http://www.rtl.nl/system/s4m/vfd/version=2/uuid=%s/fmt=adaptive/' % uuid,
            uuid)

        material = info['material'][0]
        title = info['abstracts'][0]['name']
        subtitle = material.get('title')
        if subtitle:
            title += ' - %s' % subtitle
        description = material.get('synopsis')

        meta = info.get('meta', {})

        # m3u8 streams are encrypted and may not be handled properly by older ffmpeg/avconv.
        # To workaround this previously adaptive -> flash trick was used to obtain
        # unencrypted m3u8 streams (see https://github.com/rg3/youtube-dl/issues/4118)
        # and bypass georestrictions as well.
        # Currently, unencrypted m3u8 playlists are (intentionally?) invalid and therefore
        # unusable albeit can be fixed by simple string replacement (see
        # https://github.com/rg3/youtube-dl/pull/6337)
        # Since recent ffmpeg and avconv handle encrypted streams just fine encrypted
        # streams are used now.
        videopath = material['videopath']
        m3u8_url = meta.get('videohost', 'http://manifest.us.rtl.nl') + videopath

        formats = self._extract_m3u8_formats(m3u8_url, uuid, ext='mp4')

        video_urlpart = videopath.split('/adaptive/')[1][:-5]
        PG_URL_TEMPLATE = 'http://pg.us.rtl.nl/rtlxl/network/%s/progressive/%s.mp4'

        formats.extend([
            {
                'url': PG_URL_TEMPLATE % ('a2m', video_urlpart),
                'format_id': 'pg-sd',
            },
            {
                'url': PG_URL_TEMPLATE % ('a3m', video_urlpart),
                'format_id': 'pg-hd',
                'quality': 0,
            }
        ])

        self._sort_formats(formats)

        thumbnails = []

        # BUG FIX: the second key used to be written as '"thumb_base_url"'
        # (with literal double quotes inside the string), which can never match
        # a key in the JSON meta dict, so thumb-based thumbnails were silently
        # dropped.  The correct key name is 'thumb_base_url'.
        for p in ('poster_base_url', 'thumb_base_url'):
            if not meta.get(p):
                continue

            thumbnails.append({
                'url': self._proto_relative_url(meta[p] + uuid),
                'width': int_or_none(self._search_regex(
                    r'/sz=([0-9]+)', meta[p], 'thumbnail width', fatal=False)),
                'height': int_or_none(self._search_regex(
                    r'/sz=[0-9]+x([0-9]+)',
                    meta[p], 'thumbnail height', fatal=False))
            })

        return {
            'id': uuid,
            'title': title,
            'formats': formats,
            'timestamp': material['original_date'],
            'description': description,
            'duration': parse_duration(material.get('duration')),
            'thumbnails': thumbnails,
        }
| unlicense |
slagle/ansible-modules-extras | windows/win_dotnet_ngen.py | 138 | 1769 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Peter Mounce <public@neverrunwithscissors.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
DOCUMENTATION = '''
---
module: win_dotnet_ngen
version_added: "2.0"
short_description: Runs ngen to recompile DLLs after .NET updates
description:
- After .NET framework is installed/updated, Windows will probably want to recompile things to optimise for the host.
- This happens via scheduled task, usually at some inopportune time.
- This module allows you to run this task on your own schedule, so you incur the CPU hit at some more convenient and controlled time.
- "http://blogs.msdn.com/b/dotnet/archive/2013/08/06/wondering-why-mscorsvw-exe-has-high-cpu-usage-you-can-speed-it-up.aspx"
notes:
- there are in fact two scheduled tasks for ngen but they have no triggers so aren't a problem
- there's no way to test if they've been completed (?)
- the stdout is quite likely to be several megabytes
author: Peter Mounce
'''
EXAMPLES = '''
# Run ngen tasks
win_dotnet_ngen:
'''
| gpl-3.0 |
arnaud-morvan/QGIS | python/plugins/processing/algs/qgis/RandomSelection.py | 8 | 4391 | # -*- coding: utf-8 -*-
"""
***************************************************************************
RandomSelection.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
import random
from qgis.PyQt.QtGui import QIcon
from qgis.core import (QgsApplication,
QgsFeatureSink,
QgsProcessingException,
QgsProcessingUtils,
QgsProcessingAlgorithm,
QgsProcessingParameterVectorLayer,
QgsProcessingParameterEnum,
QgsProcessingParameterNumber,
QgsProcessingParameterFeatureSink,
QgsProcessingOutputVectorLayer)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class RandomSelection(QgisAlgorithm):
    """Processing algorithm that randomly selects features of a vector layer.

    The selection size is given either as an absolute feature count or as a
    percentage of the layer's feature count.  The selection is applied
    directly to the input layer (no new layer is produced).
    """

    # Parameter / output identifiers used by the Processing framework.
    INPUT = 'INPUT'
    OUTPUT = 'OUTPUT'
    METHOD = 'METHOD'
    NUMBER = 'NUMBER'

    def icon(self):
        return QgsApplication.getThemeIcon("/algorithms/mAlgorithmSelectRandom.svg")

    def svgIconPath(self):
        return QgsApplication.iconPath("/algorithms/mAlgorithmSelectRandom.svg")

    def group(self):
        return self.tr('Vector selection')

    def groupId(self):
        return 'vectorselection'

    def flags(self):
        # FlagNoThreading: the algorithm mutates the live layer's selection,
        # presumably requiring the main thread -- TODO confirm.
        return super().flags() | QgsProcessingAlgorithm.FlagNoThreading

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        # Index 0 = absolute count, index 1 = percentage (see processAlgorithm).
        self.methods = [self.tr('Number of selected features'),
                        self.tr('Percentage of selected features')]

        self.addParameter(QgsProcessingParameterVectorLayer(self.INPUT,
                                                            self.tr('Input layer')))
        self.addParameter(QgsProcessingParameterEnum(self.METHOD,
                                                     self.tr('Method'), self.methods, False, 0))
        self.addParameter(QgsProcessingParameterNumber(self.NUMBER,
                                                       self.tr('Number/percentage of selected features'), QgsProcessingParameterNumber.Integer,
                                                       10, False, 0.0))
        self.addOutput(QgsProcessingOutputVectorLayer(self.OUTPUT, self.tr('Selected (random)')))

    def name(self):
        return 'randomselection'

    def displayName(self):
        return self.tr('Random selection')

    def processAlgorithm(self, parameters, context, feedback):
        layer = self.parameterAsVectorLayer(parameters, self.INPUT, context)
        method = self.parameterAsEnum(parameters, self.METHOD, context)

        ids = layer.allFeatureIds()
        value = self.parameterAsInt(parameters, self.NUMBER, context)

        if method == 0:
            # Absolute count: cannot select more features than exist.
            if value > len(ids):
                raise QgsProcessingException(
                    self.tr('Selected number is greater than feature count. '
                            'Choose a lower value and try again.'))
        else:
            # Percentage: convert to an absolute count of features.
            if value > 100:
                raise QgsProcessingException(
                    self.tr("Percentage can't be greater than 100. Set a "
                            "different value and try again."))
            value = int(round(value / 100.0, 4) * len(ids))
        # Sample without replacement and apply the selection to the layer.
        selran = random.sample(ids, value)

        layer.selectByIds(selran)
        return {self.OUTPUT: parameters[self.INPUT]}
| gpl-2.0 |
tootedom/moto | moto/ec2/responses/network_acls.py | 7 | 6341 | from __future__ import unicode_literals
from moto.core.responses import BaseResponse
from moto.ec2.utils import filters_from_querystring, \
network_acl_ids_from_querystring
class NetworkACLs(BaseResponse):
    """EC2 API response handlers for VPC network ACL operations."""

    def create_network_acl(self):
        """Create an empty network ACL in the requested VPC."""
        vpc_id = self.querystring.get('VpcId')[0]
        network_acl = self.ec2_backend.create_network_acl(vpc_id)
        template = self.response_template(CREATE_NETWORK_ACL_RESPONSE)
        return template.render(network_acl=network_acl)

    def create_network_acl_entry(self):
        """Create a rule inside an existing network ACL.

        Icmp.Code/Icmp.Type and PortRange.From/To are optional request
        parameters (a port range only applies to TCP/UDP rules); the
        response template already renders the port range conditionally.
        """
        network_acl_id = self.querystring.get('NetworkAclId')[0]
        rule_number = self.querystring.get('RuleNumber')[0]
        protocol = self.querystring.get('Protocol')[0]
        rule_action = self.querystring.get('RuleAction')[0]
        egress = self.querystring.get('Egress')[0]
        cidr_block = self.querystring.get('CidrBlock')[0]
        icmp_code = self.querystring.get('Icmp.Code', [None])[0]
        icmp_type = self.querystring.get('Icmp.Type', [None])[0]
        # Robustness fix: these were read with a bare .get(...)[0], which
        # blew up (TypeError on None) whenever the request omitted a port
        # range, e.g. for ICMP rules.  Treat them as optional like Icmp.*.
        port_range_from = self.querystring.get('PortRange.From', [None])[0]
        port_range_to = self.querystring.get('PortRange.To', [None])[0]

        network_acl_entry = self.ec2_backend.create_network_acl_entry(
            network_acl_id, rule_number, protocol, rule_action,
            egress, cidr_block, icmp_code, icmp_type,
            port_range_from, port_range_to)

        template = self.response_template(CREATE_NETWORK_ACL_ENTRY_RESPONSE)
        return template.render(network_acl_entry=network_acl_entry)

    def delete_network_acl(self):
        """Delete the network ACL named in the request."""
        network_acl_id = self.querystring.get('NetworkAclId')[0]
        self.ec2_backend.delete_network_acl(network_acl_id)
        template = self.response_template(DELETE_NETWORK_ACL_ASSOCIATION)
        return template.render()

    def delete_network_acl_entry(self):
        # Deliberately unimplemented in this version of moto.
        raise NotImplementedError(
            'NetworkACLs(AmazonVPC).delete_network_acl_entry is not yet implemented')

    def describe_network_acls(self):
        """List network ACLs, optionally restricted by ids and filters."""
        network_acl_ids = network_acl_ids_from_querystring(self.querystring)
        filters = filters_from_querystring(self.querystring)
        network_acls = self.ec2_backend.get_all_network_acls(network_acl_ids, filters)
        template = self.response_template(DESCRIBE_NETWORK_ACL_RESPONSE)
        return template.render(network_acls=network_acls)

    def replace_network_acl_association(self):
        """Point an existing subnet association at a different network ACL."""
        association_id = self.querystring.get('AssociationId')[0]
        network_acl_id = self.querystring.get('NetworkAclId')[0]

        association = self.ec2_backend.replace_network_acl_association(
            association_id,
            network_acl_id
        )
        template = self.response_template(REPLACE_NETWORK_ACL_ASSOCIATION)
        return template.render(association=association)

    def replace_network_acl_entry(self):
        # Deliberately unimplemented in this version of moto.
        raise NotImplementedError(
            'NetworkACLs(AmazonVPC).replace_network_acl_entry is not yet implemented')
CREATE_NETWORK_ACL_RESPONSE = """
<CreateNetworkAclResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<networkAcl>
<networkAclId>{{ network_acl.id }}</networkAclId>
<vpcId>{{ network_acl.vpc_id }}</vpcId>
<default>false</default>
<entrySet/>
<associationSet/>
<tagSet>
{% for tag in network_acl.get_tags() %}
<item>
<resourceId>{{ tag.resource_id }}</resourceId>
<resourceType>{{ tag.resource_type }}</resourceType>
<key>{{ tag.key }}</key>
<value>{{ tag.value }}</value>
</item>
{% endfor %}
</tagSet>
</networkAcl>
</CreateNetworkAclResponse>
"""
DESCRIBE_NETWORK_ACL_RESPONSE = """
<DescribeNetworkAclsResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<networkAclSet>
{% for network_acl in network_acls %}
<item>
<networkAclId>{{ network_acl.id }}</networkAclId>
<vpcId>{{ network_acl.vpc_id }}</vpcId>
<default>{{ network_acl.default }}</default>
<entrySet>
{% for entry in network_acl.network_acl_entries %}
<item>
<ruleNumber>{{ entry.rule_number }}</ruleNumber>
<protocol>{{ entry.protocol }}</protocol>
<ruleAction>{{ entry.rule_action }}</ruleAction>
<egress>{{ entry.egress.lower() }}</egress>
<cidrBlock>{{ entry.cidr_block }}</cidrBlock>
{% if entry.port_range_from or entry.port_range_to %}
<portRange>
<from>{{ entry.port_range_from }}</from>
<to>{{ entry.port_range_to }}</to>
</portRange>
{% endif %}
</item>
{% endfor %}
</entrySet>
<associationSet>
{% for association in network_acl.associations.values() %}
<item>
<networkAclAssociationId>{{ association.id }}</networkAclAssociationId>
<networkAclId>{{ association.network_acl_id }}</networkAclId>
<subnetId>{{ association.subnet_id }}</subnetId>
</item>
{% endfor %}
</associationSet>
<tagSet>
{% for tag in network_acl.get_tags() %}
<item>
<resourceId>{{ tag.resource_id }}</resourceId>
<resourceType>{{ tag.resource_type }}</resourceType>
<key>{{ tag.key }}</key>
<value>{{ tag.value }}</value>
</item>
{% endfor %}
</tagSet>
</item>
{% endfor %}
</networkAclSet>
</DescribeNetworkAclsResponse>
"""
CREATE_NETWORK_ACL_ENTRY_RESPONSE = """
<CreateNetworkAclEntryResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</CreateNetworkAclEntryResponse>
"""
REPLACE_NETWORK_ACL_ASSOCIATION = """
<ReplaceNetworkAclAssociationResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<newAssociationId>{{ association.new_association_id }}</newAssociationId>
</ReplaceNetworkAclAssociationResponse>
"""
DELETE_NETWORK_ACL_ASSOCIATION = """
<DeleteNetworkAclResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-15/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DeleteNetworkAclResponse>
"""
| apache-2.0 |
mengli/MachineLearning | self_driving/segnet/train.py | 2 | 4996 | """Train SegNet with camvid dataset.
nohup python -u -m self_driving.segnet.train > self_driving/segnet/output.txt 2>&1 &
"""
import os
import tensorflow as tf
from utils import camvid
import segnet_vgg
LOG_DIR = 'save'
EPOCH = 6000
BATCH_SIZE = 4
IMAGE_HEIGHT = 720
IMAGE_WIDTH = 960
IMAGE_CHANNEL = 3
NUM_CLASSES = 32
INITIAL_LEARNING_RATE = 0.0001
image_dir = "/usr/local/google/home/limeng/Downloads/camvid/train.txt"
val_dir = "/usr/local/google/home/limeng/Downloads/camvid/val.txt"
def loss(logits, labels):
    """Mean per-pixel sparse softmax cross-entropy, summed with any other
    losses already registered in the 'losses' collection."""
    flat_logits = tf.reshape(logits, [-1, NUM_CLASSES])
    flat_labels = tf.reshape(labels, [-1])
    per_pixel_xent = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=flat_logits, labels=flat_labels, name='cross_entropy')
    mean_xent = tf.reduce_mean(per_pixel_xent, name='cross_entropy_mean')
    tf.add_to_collection('losses', mean_xent)
    # Total loss = cross-entropy plus everything else in the collection.
    return tf.add_n(tf.get_collection('losses'), name='total_loss')
def train(total_loss):
    """Build the Adam training op with a staircase-decayed learning rate."""
    # Make sure collected update ops (e.g. from normalisation layers) run
    # before each optimisation step.
    pending_updates = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(pending_updates):
        step = tf.Variable(0, name='global_step', trainable=False)
        lr = tf.train.exponential_decay(
            INITIAL_LEARNING_RATE, step, EPOCH * 0.2, 0.9, staircase=True)
        tf.summary.scalar('total_loss', total_loss)
        return tf.train.AdamOptimizer(learning_rate=lr).minimize(
            total_loss, global_step=step)
def main(_):
    """Train SegNet on CamVid: builds the graph, runs EPOCH steps, and
    validates / checkpoints every 10 steps."""
    # Image and label file lists for the training and validation splits.
    image_filenames, label_filenames = camvid.get_filename_list(image_dir)
    val_image_filenames, val_label_filenames = camvid.get_filename_list(val_dir)

    with tf.Graph().as_default():
        with tf.device('/cpu:0'):
            # config = tf.ConfigProto(device_count = {'GPU': 0})
            config = tf.ConfigProto()
            config.gpu_options.allocator_type = 'BFC'
            sess = tf.InteractiveSession(config=config)
            # Placeholders fed with one batch of images / dense label maps.
            train_data = tf.placeholder(tf.float32,
                                        shape=[BATCH_SIZE,
                                               IMAGE_HEIGHT,
                                               IMAGE_WIDTH,
                                               IMAGE_CHANNEL],
                                        name='train_data')
            train_labels = tf.placeholder(tf.int64,
                                          shape=[BATCH_SIZE, IMAGE_HEIGHT, IMAGE_WIDTH, 1],
                                          name='train_labels')
            is_training = tf.placeholder(tf.bool, name='is_training')

            # Queue-based input pipelines for both splits.
            images, labels = camvid.CamVidInputs(image_filenames,
                                                 label_filenames,
                                                 BATCH_SIZE)
            val_images, val_labels = camvid.CamVidInputs(val_image_filenames,
                                                         val_label_filenames,
                                                         BATCH_SIZE)

            logits = segnet_vgg.inference(train_data, is_training, NUM_CLASSES)
            total_loss = loss(logits, train_labels)
            train_op = train(total_loss)
            # Fails fast on NaN/Inf anywhere in the graph.
            check_op = tf.add_check_numerics_ops()

            merged_summary_op = tf.summary.merge_all()
            summary_writer = tf.summary.FileWriter('train', sess.graph)
            saver = tf.train.Saver(write_version=tf.train.SaverDef.V2)
            if not os.path.exists(LOG_DIR):
                os.makedirs(LOG_DIR)
            checkpoint_path = os.path.join(LOG_DIR, "segnet.ckpt")

            sess.run(tf.global_variables_initializer())

            # Start the queue runners.
            coord = tf.train.Coordinator()
            threads = tf.train.start_queue_runners(sess=sess, coord=coord)

            for i in range(EPOCH):
                image_batch, label_batch = sess.run([images, labels])
                feed_dict = {
                    train_data: image_batch,
                    train_labels: label_batch,
                    is_training: True
                }
                _, _, _, summary = sess.run([train_op, total_loss, check_op, merged_summary_op],
                                            feed_dict=feed_dict)
                if i % 10 == 0:
                    print("Start validating...")
                    val_images_batch, val_labels_batch = sess.run([val_images, val_labels])
                    # NOTE(review): validation feeds is_training=True, which keeps
                    # training-mode behaviour (e.g. batch norm) during evaluation --
                    # looks unintentional, confirm against segnet_vgg.inference.
                    loss_value = total_loss.eval(feed_dict={train_data: val_images_batch,
                                                            train_labels: val_labels_batch,
                                                            is_training: True})
                    print("Epoch: %d, Loss: %g" % (i, loss_value))
                    saver.save(sess, checkpoint_path)
                # write logs at every iteration
                summary_writer.add_summary(summary, i)

            coord.request_stop()
            coord.join(threads)
if __name__ == '__main__':
tf.app.run(main=main)
| apache-2.0 |
trondhindenes/ansible | test/integration/targets/inventory_plugin_config/test_inventory.py | 92 | 1318 | from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
name: test_inventory
plugin_type: inventory
authors:
- Pierre-Louis Bonicoli (@pilou-)
short_description: test inventory
description:
- test inventory (fetch parameters using config API)
options:
departments:
description: test parameter
type: list
default:
- seine-et-marne
- haute-garonne
required: False
'''
EXAMPLES = '''
# Example command line: ansible-inventory --list -i test_inventory.yml
plugin: test_inventory
departments:
- paris
'''
from ansible.plugins.inventory import BaseInventoryPlugin
class InventoryModule(BaseInventoryPlugin):
    """Minimal inventory plugin used to exercise the plugin config API.

    Always parses, and produces a single fixed group/host pair carrying the
    'departments' option as a host variable.
    """

    NAME = 'test_inventory'

    def verify_file(self, path):
        # Accept any path: this test plugin is always willing to parse.
        return True

    def parse(self, inventory, loader, path, cache=True):
        super(InventoryModule, self).parse(inventory, loader, path)
        # Load the YAML config so get_option() can resolve 'departments'.
        self._read_config_data(path=path)

        departments = self.get_option('departments')

        group = 'test_group'
        host = 'test_host'

        self.inventory.add_group(group)
        self.inventory.add_host(group=group, host=host)
        self.inventory.set_variable(host, 'departments', departments)
| gpl-3.0 |
pepitogithub/PythonScripts | musica/drumExFachade.py | 1 | 2964 | # import pygame
import threading
import drumExMachina
class Fasade:
    """Facade API design sketch (no behaviour implemented yet).

    NOTE(review): the class name keeps the original 'Fasade' spelling;
    renaming it could break external references, so it is left as-is.

    Planned operations (translated from the original Spanish notes):

    Matrix
        -> play() / pause() / quit()
        -> master volume
        -> tempo
        -> figure (note value)
        -> add / remove track
        -> activate / deactivate track

    Track ("Pista")
        -> name
        -> set instrument
        -> MIDI channel
        -> volume
        -> duration
        -> activate / deactivate
        -> edit score
        -> add / remove / modify section

    Score ("Partitura")
        -> add / remove / modify section

    Section ("Seccion")
        -> duration
        -> pulses
        -> rhythm
        -> rotation
        -> notes
    """

    def __init__(self):
        # Placeholder: nothing to initialise yet.
        pass
class _DXFConsola:
salir_codes = [0, "0", "salir", "exit"]
def __init__(self):
pass
def loop(self):
salir = False
while not salir:
user_input = input("> ")
salir = user_input in self.salir_codes
class _DXFGrafico:
    """Pygame front-end: opens a window and runs the event loop until quit.

    NOTE(review): ``import pygame`` is commented out at the top of this
    module, so instantiating this class currently raises NameError --
    re-enable the import before using the graphical mode.
    """

    def __init__(self, ancho=1200, alto=800):
        self.alto = alto
        self.ancho = ancho
        self.screen = pygame.display.set_mode([self.ancho, self.alto])
        pygame.display.set_caption("Drum Ex Machina")

    def loop(self):
        # BUG FIX: this method previously started with self.engine.loop(),
        # but no 'engine' attribute is ever set on this class (the line was
        # apparently copied from DrumExFacade.loop) and it raised
        # AttributeError immediately.  The unused Clock instance was also
        # removed.
        pygame.init()
        salir = False
        while not salir:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    salir = True

            pygame.draw.rect(self.screen, [255, 0, 0], [75, 10, 50, 20], 1)
            pygame.display.flip()
            pygame.time.delay(50)
class DrumExFacade:
    """DrumExMachina user interface facade.

    Two modes of use: 'consola' (text console) and graphical (pygame).
    """

    def __init__(self, modo='consola', ancho=1200, alto=800):
        self.modo = modo
        # Modo: Consola | grafico
        # BUG FIX: the graphical engine used to be constructed as
        # _DXFGrafico(alto, ancho), passing height into the 'ancho' (width)
        # parameter and vice versa; keyword arguments make the order explicit.
        self.engine = (_DXFConsola() if modo == 'consola'
                       else _DXFGrafico(ancho=ancho, alto=alto))

    def loop(self):
        """Run the sequencer in a background thread and the UI in this one."""
        # BUG FIX: after the UI loop returned, this method called
        # DXF_thread.exit() and DXM_thread.exit(); threading.Thread has no
        # exit() method, so both calls raised AttributeError.  Python threads
        # cannot be forcibly terminated, so the sequencer thread is marked as
        # a daemon instead, letting the process exit once the UI loop ends.
        DXM_thread = threading.Thread(target=drumExMachina.testeos)
        DXM_thread.daemon = True
        DXM_thread.start()

        self.engine.loop()
DXF = DrumExFacade("consola")
DXF_thread = threading.Thread(target=DXF.loop)
DXF_thread.start() | gpl-2.0 |
jeffery9/mixprint_addons | ineco_jasper_report/__openerp__.py | 1 | 1946 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 INECO PARTNERSHIP LIMITED (http://openerp.tititab.com)
# All Right Reserved
#
# Author : Tititab Srisookco (thitithup@gmail.com)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
{
"name" : "Ineco Jasper Report Plug-in",
"description" : """
1. Please install M2CRYPTO by sudo apt-get install python-m2crypto
""",
"version" : "0.1",
"depends" : ["base"],
"author" : "INECO",
"category": "Report Tools",
"url": "http://openerp.tititab.com",
"data": [ ],
'update_xml': [
'ineco_jasper_report_view.xml',
'security.xml',
],
'init_xml': [ ],
'demo_xml': [ ],
'test':[ ],
"installable" : True,
"active" : False,
}
| agpl-3.0 |
souravbadami/zulip | zilencer/management/commands/compare_messages.py | 37 | 1381 | from __future__ import absolute_import
from __future__ import print_function
from typing import Any
from six.moves import zip
import ijson
from django.core.management.base import BaseCommand, CommandParser
class Command(BaseCommand):
    # BUG FIX: the help text was copy-pasted from the render_messages command
    # ("Render messages to a file. Usage: ./manage.py render_messages ...")
    # and did not describe what this command does or its arguments.
    help = """
Compare rendered message content between two message dumps.

Usage: ./manage.py compare_messages <dump1> <dump2>
"""

    def add_arguments(self, parser):
        # type: (CommandParser) -> None
        parser.add_argument('dump1', help='First file to compare')
        parser.add_argument('dump2', help='Second file to compare')

    def handle(self, *args, **options):
        # type: (*Any, **Any) -> None
        """Stream both dumps in parallel and report ids whose content differs.

        The dumps must list the same message ids in the same order; on the
        first id mismatch the comparison is aborted.
        """
        total_count = 0
        changed_count = 0
        with open(options['dump1'], 'r') as dump1, open(options['dump2'], 'r') as dump2:
            # ijson streams the JSON arrays, so arbitrarily large dumps are
            # compared without loading them fully into memory.
            for m1, m2 in zip(ijson.items(dump1, 'item'), ijson.items(dump2, 'item')):
                total_count += 1
                if m1['id'] != m2['id']:
                    self.stderr.write('Inconsistent messages dump')
                    break
                if m1['content'] != m2['content']:
                    changed_count += 1
                    self.stdout.write('Changed message id: {id}'.format(id=m1['id']))
        self.stdout.write('Total messages: {count}'.format(count=total_count))
        self.stdout.write('Changed messages: {count}'.format(count=changed_count))
| apache-2.0 |
yjmade/odoo | addons/l10n_ae/__init__.py | 669 | 1059 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 Tech Receptives (<http://techreceptives.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
adelina-t/nova | nova/db/sqlalchemy/migrate_repo/versions/233_add_stats_in_compute_nodes.py | 81 | 1460 | # Copyright (c) 2014 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy import Text
def upgrade(engine):
    """Replace the compute_node_stats tables with a 'stats' text column.

    The stats data is transient, so nothing is migrated: the old tables
    (and their shadow copies) are simply dropped, and a Text column with a
    '{}' default is added to compute_nodes / shadow_compute_nodes.
    """
    meta = MetaData()
    meta.bind = engine

    for name in ('compute_node_stats', 'shadow_compute_node_stats'):
        Table(name, meta, autoload=True).drop()

    for name in ('compute_nodes', 'shadow_compute_nodes'):
        Table(name, meta, autoload=True).create_column(
            Column('stats', Text, default='{}'))
| apache-2.0 |
mderomph-coolblue/dd-agent | tests/checks/mock/test_hdfs_namenode.py | 16 | 3402 | # Project
from tests.checks.common import AgentCheckTest, Fixtures
# 3rd Party
import mock
import json
# Namenode URI
NAMENODE_JMX_URI = 'http://localhost:50070/jmx'
# Namesystem state URL
NAME_SYSTEM_STATE_URL = NAMENODE_JMX_URI + '?qry=Hadoop:service=NameNode,name=FSNamesystemState'
# Namesystem url
NAME_SYSTEM_URL = NAMENODE_JMX_URI + '?qry=Hadoop:service=NameNode,name=FSNamesystem'
def requests_get_mock(*args, **kwargs):
    """Stand-in for requests.get that serves canned JMX fixture responses.

    Returns the 'hdfs_namesystem_state' fixture for NAME_SYSTEM_STATE_URL and
    the 'hdfs_namesystem' fixture for NAME_SYSTEM_URL.  Any other URL falls
    through and returns None, making the calling test fail visibly.

    BUG FIX: leftover debug ``print`` statements (the requested URL, 'here',
    and the full fixture body on every .json() call) were removed -- they
    flooded the test output and served no purpose.
    """
    class MockResponse:
        def __init__(self, json_data, status_code):
            self.json_data = json_data
            self.status_code = status_code

        def json(self):
            return json.loads(self.json_data)

        def raise_for_status(self):
            # The mocked responses are always "successful".
            return True

    if args[0] == NAME_SYSTEM_STATE_URL:
        with open(Fixtures.file('hdfs_namesystem_state'), 'r') as f:
            body = f.read()
        return MockResponse(body, 200)
    elif args[0] == NAME_SYSTEM_URL:
        with open(Fixtures.file('hdfs_namesystem'), 'r') as f:
            body = f.read()
        return MockResponse(body, 200)
class HDFSNameNode(AgentCheckTest):
    """Unit test for the hdfs_namenode agent check against mocked JMX data."""

    CHECK_NAME = 'hdfs_namenode'

    # Minimal instance configuration pointing at the (mocked) namenode.
    HDFS_NAMENODE_CONFIG = {
        'hdfs_namenode_jmx_uri': 'http://localhost:50070'
    }

    # Expected values from the FSNamesystemState fixture.  A value of
    # None asserts that the metric was reported without pinning the
    # exact number.
    HDFS_NAMESYSTEM_STATE_METRICS_VALUES = {
        'hdfs.namenode.capacity_total': 41167421440,
        'hdfs.namenode.capacity_used': 501932032,
        'hdfs.namenode.capacity_remaining': 27878948864,
        'hdfs.namenode.capacity_in_use': None,  # Don't test the value as it's a float
        'hdfs.namenode.total_load': 2,
        'hdfs.namenode.fs_lock_queue_length': 0,
        'hdfs.namenode.blocks_total': 27661,
        'hdfs.namenode.max_objects': 0,
        'hdfs.namenode.files_total': 82950,
        'hdfs.namenode.pending_replication_blocks': 0,
        'hdfs.namenode.under_replicated_blocks': 27661,
        'hdfs.namenode.scheduled_replication_blocks': 0,
        'hdfs.namenode.pending_deletion_blocks': 0,
        'hdfs.namenode.num_live_data_nodes': 1,
        'hdfs.namenode.num_dead_data_nodes': 0,
        'hdfs.namenode.num_decom_live_data_nodes': 0,
        'hdfs.namenode.num_decom_dead_data_nodes': 0,
        'hdfs.namenode.volume_failures_total': 0,
        'hdfs.namenode.estimated_capacity_lost_total': 0,
        'hdfs.namenode.num_decommissioning_data_nodes': 0,
        'hdfs.namenode.num_stale_data_nodes': 0,
        'hdfs.namenode.num_stale_storages': 0,
    }

    # Expected values from the FSNamesystem fixture.
    HDFS_NAMESYSTEM_METRICS_VALUES = {
        'hdfs.namenode.missing_blocks': 0,
        'hdfs.namenode.corrupt_blocks': 1,
    }

    # Every metric is tagged with the namenode it was collected from.
    HDFS_NAMESYSTEM_METRIC_TAGS = [
        'namenode_url:' + HDFS_NAMENODE_CONFIG['hdfs_namenode_jmx_uri']
    ]

    @mock.patch('requests.get', side_effect=requests_get_mock)
    def test_check(self, mock_requests):
        """Run the check once and verify every expected metric/value/tag."""
        self.run_check({'instances': [self.HDFS_NAMENODE_CONFIG]})

        # The two fixture dictionaries have disjoint keys, so a single
        # merged pass asserts exactly the same set of metrics.
        expected = dict(self.HDFS_NAMESYSTEM_STATE_METRICS_VALUES)
        expected.update(self.HDFS_NAMESYSTEM_METRICS_VALUES)
        for metric, value in expected.iteritems():
            self.assertMetric(metric, value=value, tags=self.HDFS_NAMESYSTEM_METRIC_TAGS)
| bsd-3-clause |
gnowxilef/youtube-dl | youtube_dl/extractor/nobelprize.py | 68 | 2123 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
js_to_json,
mimetype2ext,
determine_ext,
update_url_query,
get_element_by_attribute,
int_or_none,
)
class NobelPrizeIE(InfoExtractor):
    """Extractor for videos hosted on nobelprize.org's media player."""

    _VALID_URL = r'https?://(?:www\.)?nobelprize\.org/mediaplayer.*?\bid=(?P<id>\d+)'
    _TEST = {
        'url': 'http://www.nobelprize.org/mediaplayer/?id=2636',
        'md5': '04c81e5714bb36cc4e2232fee1d8157f',
        'info_dict': {
            'id': '2636',
            'ext': 'mp4',
            'title': 'Announcement of the 2016 Nobel Prize in Physics',
            'description': 'md5:05beba57f4f5a4bbd4cf2ef28fcff739',
        }
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)

        # The page embeds the player configuration as a JS object
        # literal; convert it to JSON and pick out the media section.
        config_js = self._search_regex(
            r'(?s)var\s*config\s*=\s*({.+?});', webpage,
            'config')
        media = self._parse_json(config_js, video_id, js_to_json)['media']
        title = media['title']

        formats = []
        for source in media.get('source', []):
            src = source.get('src')
            if not src:
                continue
            ext = mimetype2ext(source.get('type')) or determine_ext(src)
            if ext == 'm3u8':
                # HLS manifest: expand into per-bitrate formats.
                formats.extend(self._extract_m3u8_formats(
                    src, video_id, 'mp4', 'm3u8_native',
                    m3u8_id='hls', fatal=False))
            elif ext == 'f4m':
                # HDS manifest; the hdcore query argument is required.
                formats.extend(self._extract_f4m_formats(
                    update_url_query(src, {'hdcore': '3.7.0'}),
                    video_id, f4m_id='hds', fatal=False))
            else:
                # Plain progressive download.
                formats.append({'url': src})
        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': title,
            'description': get_element_by_attribute('itemprop', 'description', webpage),
            'duration': int_or_none(media.get('duration')),
            'formats': formats,
        }
| unlicense |
rlr/kitsune | kitsune/users/migrations/0001_initial.py | 18 | 37466 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
import kitsune.search.models
import kitsune.sumo.models
import timezones.fields
from django.conf import settings
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Deactivation',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date', models.DateTimeField(default=datetime.datetime.now)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='EmailChange',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('activation_key', models.CharField(max_length=40, verbose_name='activation key')),
('email', models.EmailField(max_length=75, null=True, db_index=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Profile',
fields=[
('user', models.OneToOneField(primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL, verbose_name='User')),
('name', models.CharField(max_length=255, null=True, verbose_name='Display name', blank=True)),
('public_email', models.BooleanField(default=False, verbose_name='Make my email public')),
('avatar', models.ImageField(max_length=250, upload_to=b'uploads/avatars/', null=True, verbose_name='Avatar', blank=True)),
('bio', models.TextField(null=True, verbose_name='Biography', blank=True)),
('website', models.URLField(max_length=255, null=True, verbose_name='Website', blank=True)),
('twitter', models.CharField(blank=True, max_length=15, null=True, verbose_name='Twitter Username', validators=[django.core.validators.RegexValidator(b'^[\\w]+$', message=b'Please enter correct Twitter Handle', code=b'Invalid name')])),
('facebook', models.URLField(max_length=255, null=True, verbose_name='Facebook URL', blank=True)),
('mozillians', models.CharField(max_length=255, null=True, verbose_name='Mozillians Username', blank=True)),
('irc_handle', models.CharField(max_length=255, null=True, verbose_name='IRC nickname', blank=True)),
('timezone', timezones.fields.TimeZoneField(default=b'US/Pacific', choices=[(b'Africa/Abidjan', b'(GMT+0000) Africa/Abidjan'), (b'Africa/Accra', b'(GMT+0000) Africa/Accra'), (b'Africa/Addis_Ababa', b'(GMT+0300) Africa/Addis_Ababa'), (b'Africa/Algiers', b'(GMT+0100) Africa/Algiers'), (b'Africa/Asmara', b'(GMT+0300) Africa/Asmara'), (b'Africa/Bamako', b'(GMT+0000) Africa/Bamako'), (b'Africa/Bangui', b'(GMT+0100) Africa/Bangui'), (b'Africa/Banjul', b'(GMT+0000) Africa/Banjul'), (b'Africa/Bissau', b'(GMT+0000) Africa/Bissau'), (b'Africa/Blantyre', b'(GMT+0200) Africa/Blantyre'), (b'Africa/Brazzaville', b'(GMT+0100) Africa/Brazzaville'), (b'Africa/Bujumbura', b'(GMT+0200) Africa/Bujumbura'), (b'Africa/Cairo', b'(GMT+0200) Africa/Cairo'), (b'Africa/Casablanca', b'(GMT+0000) Africa/Casablanca'), (b'Africa/Ceuta', b'(GMT+0200) Africa/Ceuta'), (b'Africa/Conakry', b'(GMT+0000) Africa/Conakry'), (b'Africa/Dakar', b'(GMT+0000) Africa/Dakar'), (b'Africa/Dar_es_Salaam', b'(GMT+0300) Africa/Dar_es_Salaam'), (b'Africa/Djibouti', b'(GMT+0300) Africa/Djibouti'), (b'Africa/Douala', b'(GMT+0100) Africa/Douala'), (b'Africa/El_Aaiun', b'(GMT+0000) Africa/El_Aaiun'), (b'Africa/Freetown', b'(GMT+0000) Africa/Freetown'), (b'Africa/Gaborone', b'(GMT+0200) Africa/Gaborone'), (b'Africa/Harare', b'(GMT+0200) Africa/Harare'), (b'Africa/Johannesburg', b'(GMT+0200) Africa/Johannesburg'), (b'Africa/Juba', b'(GMT+0300) Africa/Juba'), (b'Africa/Kampala', b'(GMT+0300) Africa/Kampala'), (b'Africa/Khartoum', b'(GMT+0300) Africa/Khartoum'), (b'Africa/Kigali', b'(GMT+0200) Africa/Kigali'), (b'Africa/Kinshasa', b'(GMT+0100) Africa/Kinshasa'), (b'Africa/Lagos', b'(GMT+0100) Africa/Lagos'), (b'Africa/Libreville', b'(GMT+0100) Africa/Libreville'), (b'Africa/Lome', b'(GMT+0000) Africa/Lome'), (b'Africa/Luanda', b'(GMT+0100) Africa/Luanda'), (b'Africa/Lubumbashi', b'(GMT+0200) Africa/Lubumbashi'), (b'Africa/Lusaka', b'(GMT+0200) Africa/Lusaka'), (b'Africa/Malabo', b'(GMT+0100) Africa/Malabo'), 
(b'Africa/Maputo', b'(GMT+0200) Africa/Maputo'), (b'Africa/Maseru', b'(GMT+0200) Africa/Maseru'), (b'Africa/Mbabane', b'(GMT+0200) Africa/Mbabane'), (b'Africa/Mogadishu', b'(GMT+0300) Africa/Mogadishu'), (b'Africa/Monrovia', b'(GMT+0000) Africa/Monrovia'), (b'Africa/Nairobi', b'(GMT+0300) Africa/Nairobi'), (b'Africa/Ndjamena', b'(GMT+0100) Africa/Ndjamena'), (b'Africa/Niamey', b'(GMT+0100) Africa/Niamey'), (b'Africa/Nouakchott', b'(GMT+0000) Africa/Nouakchott'), (b'Africa/Ouagadougou', b'(GMT+0000) Africa/Ouagadougou'), (b'Africa/Porto-Novo', b'(GMT+0100) Africa/Porto-Novo'), (b'Africa/Sao_Tome', b'(GMT+0000) Africa/Sao_Tome'), (b'Africa/Tripoli', b'(GMT+0200) Africa/Tripoli'), (b'Africa/Tunis', b'(GMT+0100) Africa/Tunis'), (b'Africa/Windhoek', b'(GMT+0200) Africa/Windhoek'), (b'America/Adak', b'(GMT-0900) America/Adak'), (b'America/Anchorage', b'(GMT-0800) America/Anchorage'), (b'America/Anguilla', b'(GMT-0400) America/Anguilla'), (b'America/Antigua', b'(GMT-0400) America/Antigua'), (b'America/Araguaina', b'(GMT-0300) America/Araguaina'), (b'America/Argentina/Buenos_Aires', b'(GMT-0300) America/Argentina/Buenos_Aires'), (b'America/Argentina/Catamarca', b'(GMT-0300) America/Argentina/Catamarca'), (b'America/Argentina/Cordoba', b'(GMT-0300) America/Argentina/Cordoba'), (b'America/Argentina/Jujuy', b'(GMT-0300) America/Argentina/Jujuy'), (b'America/Argentina/La_Rioja', b'(GMT-0300) America/Argentina/La_Rioja'), (b'America/Argentina/Mendoza', b'(GMT-0300) America/Argentina/Mendoza'), (b'America/Argentina/Rio_Gallegos', b'(GMT-0300) America/Argentina/Rio_Gallegos'), (b'America/Argentina/Salta', b'(GMT-0300) America/Argentina/Salta'), (b'America/Argentina/San_Juan', b'(GMT-0300) America/Argentina/San_Juan'), (b'America/Argentina/San_Luis', b'(GMT-0300) America/Argentina/San_Luis'), (b'America/Argentina/Tucuman', b'(GMT-0300) America/Argentina/Tucuman'), (b'America/Argentina/Ushuaia', b'(GMT-0300) America/Argentina/Ushuaia'), (b'America/Aruba', b'(GMT-0400) 
America/Aruba'), (b'America/Asuncion', b'(GMT-0300) America/Asuncion'), (b'America/Atikokan', b'(GMT-0500) America/Atikokan'), (b'America/Bahia', b'(GMT-0300) America/Bahia'), (b'America/Bahia_Banderas', b'(GMT-0600) America/Bahia_Banderas'), (b'America/Barbados', b'(GMT-0400) America/Barbados'), (b'America/Belem', b'(GMT-0300) America/Belem'), (b'America/Belize', b'(GMT-0600) America/Belize'), (b'America/Blanc-Sablon', b'(GMT-0400) America/Blanc-Sablon'), (b'America/Boa_Vista', b'(GMT-0400) America/Boa_Vista'), (b'America/Bogota', b'(GMT-0500) America/Bogota'), (b'America/Boise', b'(GMT-0600) America/Boise'), (b'America/Cambridge_Bay', b'(GMT-0600) America/Cambridge_Bay'), (b'America/Campo_Grande', b'(GMT-0400) America/Campo_Grande'), (b'America/Cancun', b'(GMT-0600) America/Cancun'), (b'America/Caracas', b'(GMT-0430) America/Caracas'), (b'America/Cayenne', b'(GMT-0300) America/Cayenne'), (b'America/Cayman', b'(GMT-0500) America/Cayman'), (b'America/Chicago', b'(GMT-0500) America/Chicago'), (b'America/Chihuahua', b'(GMT-0700) America/Chihuahua'), (b'America/Costa_Rica', b'(GMT-0600) America/Costa_Rica'), (b'America/Creston', b'(GMT-0700) America/Creston'), (b'America/Cuiaba', b'(GMT-0400) America/Cuiaba'), (b'America/Curacao', b'(GMT-0400) America/Curacao'), (b'America/Danmarkshavn', b'(GMT+0000) America/Danmarkshavn'), (b'America/Dawson', b'(GMT-0700) America/Dawson'), (b'America/Dawson_Creek', b'(GMT-0700) America/Dawson_Creek'), (b'America/Denver', b'(GMT-0600) America/Denver'), (b'America/Detroit', b'(GMT-0400) America/Detroit'), (b'America/Dominica', b'(GMT-0400) America/Dominica'), (b'America/Edmonton', b'(GMT-0600) America/Edmonton'), (b'America/Eirunepe', b'(GMT-0400) America/Eirunepe'), (b'America/El_Salvador', b'(GMT-0600) America/El_Salvador'), (b'America/Fortaleza', b'(GMT-0300) America/Fortaleza'), (b'America/Glace_Bay', b'(GMT-0300) America/Glace_Bay'), (b'America/Godthab', b'(GMT-0200) America/Godthab'), (b'America/Goose_Bay', b'(GMT-0300) 
America/Goose_Bay'), (b'America/Grand_Turk', b'(GMT-0400) America/Grand_Turk'), (b'America/Grenada', b'(GMT-0400) America/Grenada'), (b'America/Guadeloupe', b'(GMT-0400) America/Guadeloupe'), (b'America/Guatemala', b'(GMT-0600) America/Guatemala'), (b'America/Guayaquil', b'(GMT-0500) America/Guayaquil'), (b'America/Guyana', b'(GMT-0400) America/Guyana'), (b'America/Halifax', b'(GMT-0300) America/Halifax'), (b'America/Havana', b'(GMT-0400) America/Havana'), (b'America/Hermosillo', b'(GMT-0700) America/Hermosillo'), (b'America/Indiana/Indianapolis', b'(GMT-0400) America/Indiana/Indianapolis'), (b'America/Indiana/Knox', b'(GMT-0500) America/Indiana/Knox'), (b'America/Indiana/Marengo', b'(GMT-0400) America/Indiana/Marengo'), (b'America/Indiana/Petersburg', b'(GMT-0400) America/Indiana/Petersburg'), (b'America/Indiana/Tell_City', b'(GMT-0500) America/Indiana/Tell_City'), (b'America/Indiana/Vevay', b'(GMT-0400) America/Indiana/Vevay'), (b'America/Indiana/Vincennes', b'(GMT-0400) America/Indiana/Vincennes'), (b'America/Indiana/Winamac', b'(GMT-0400) America/Indiana/Winamac'), (b'America/Inuvik', b'(GMT-0600) America/Inuvik'), (b'America/Iqaluit', b'(GMT-0400) America/Iqaluit'), (b'America/Jamaica', b'(GMT-0500) America/Jamaica'), (b'America/Juneau', b'(GMT-0800) America/Juneau'), (b'America/Kentucky/Louisville', b'(GMT-0400) America/Kentucky/Louisville'), (b'America/Kentucky/Monticello', b'(GMT-0400) America/Kentucky/Monticello'), (b'America/Kralendijk', b'(GMT-0400) America/Kralendijk'), (b'America/La_Paz', b'(GMT-0400) America/La_Paz'), (b'America/Lima', b'(GMT-0500) America/Lima'), (b'America/Los_Angeles', b'(GMT-0700) America/Los_Angeles'), (b'America/Lower_Princes', b'(GMT-0400) America/Lower_Princes'), (b'America/Maceio', b'(GMT-0300) America/Maceio'), (b'America/Managua', b'(GMT-0600) America/Managua'), (b'America/Manaus', b'(GMT-0400) America/Manaus'), (b'America/Marigot', b'(GMT-0400) America/Marigot'), (b'America/Martinique', b'(GMT-0400) America/Martinique'), 
(b'America/Matamoros', b'(GMT-0500) America/Matamoros'), (b'America/Mazatlan', b'(GMT-0700) America/Mazatlan'), (b'America/Menominee', b'(GMT-0500) America/Menominee'), (b'America/Merida', b'(GMT-0600) America/Merida'), (b'America/Metlakatla', b'(GMT-0800) America/Metlakatla'), (b'America/Mexico_City', b'(GMT-0600) America/Mexico_City'), (b'America/Miquelon', b'(GMT-0200) America/Miquelon'), (b'America/Moncton', b'(GMT-0300) America/Moncton'), (b'America/Monterrey', b'(GMT-0600) America/Monterrey'), (b'America/Montevideo', b'(GMT-0300) America/Montevideo'), (b'America/Montreal', b'(GMT-0400) America/Montreal'), (b'America/Montserrat', b'(GMT-0400) America/Montserrat'), (b'America/Nassau', b'(GMT-0400) America/Nassau'), (b'America/New_York', b'(GMT-0400) America/New_York'), (b'America/Nipigon', b'(GMT-0400) America/Nipigon'), (b'America/Nome', b'(GMT-0800) America/Nome'), (b'America/Noronha', b'(GMT-0200) America/Noronha'), (b'America/North_Dakota/Beulah', b'(GMT-0500) America/North_Dakota/Beulah'), (b'America/North_Dakota/Center', b'(GMT-0500) America/North_Dakota/Center'), (b'America/North_Dakota/New_Salem', b'(GMT-0500) America/North_Dakota/New_Salem'), (b'America/Ojinaga', b'(GMT-0600) America/Ojinaga'), (b'America/Panama', b'(GMT-0500) America/Panama'), (b'America/Pangnirtung', b'(GMT-0400) America/Pangnirtung'), (b'America/Paramaribo', b'(GMT-0300) America/Paramaribo'), (b'America/Phoenix', b'(GMT-0700) America/Phoenix'), (b'America/Port-au-Prince', b'(GMT-0400) America/Port-au-Prince'), (b'America/Port_of_Spain', b'(GMT-0400) America/Port_of_Spain'), (b'America/Porto_Velho', b'(GMT-0400) America/Porto_Velho'), (b'America/Puerto_Rico', b'(GMT-0400) America/Puerto_Rico'), (b'America/Rainy_River', b'(GMT-0500) America/Rainy_River'), (b'America/Rankin_Inlet', b'(GMT-0500) America/Rankin_Inlet'), (b'America/Recife', b'(GMT-0300) America/Recife'), (b'America/Regina', b'(GMT-0600) America/Regina'), (b'America/Resolute', b'(GMT-0500) America/Resolute'), 
(b'America/Rio_Branco', b'(GMT-0400) America/Rio_Branco'), (b'America/Santa_Isabel', b'(GMT-0800) America/Santa_Isabel'), (b'America/Santarem', b'(GMT-0300) America/Santarem'), (b'America/Santiago', b'(GMT-0300) America/Santiago'), (b'America/Santo_Domingo', b'(GMT-0400) America/Santo_Domingo'), (b'America/Sao_Paulo', b'(GMT-0300) America/Sao_Paulo'), (b'America/Scoresbysund', b'(GMT+0000) America/Scoresbysund'), (b'America/Shiprock', b'(GMT-0600) America/Shiprock'), (b'America/Sitka', b'(GMT-0800) America/Sitka'), (b'America/St_Barthelemy', b'(GMT-0400) America/St_Barthelemy'), (b'America/St_Johns', b'(GMT-0230) America/St_Johns'), (b'America/St_Kitts', b'(GMT-0400) America/St_Kitts'), (b'America/St_Lucia', b'(GMT-0400) America/St_Lucia'), (b'America/St_Thomas', b'(GMT-0400) America/St_Thomas'), (b'America/St_Vincent', b'(GMT-0400) America/St_Vincent'), (b'America/Swift_Current', b'(GMT-0600) America/Swift_Current'), (b'America/Tegucigalpa', b'(GMT-0600) America/Tegucigalpa'), (b'America/Thule', b'(GMT-0300) America/Thule'), (b'America/Thunder_Bay', b'(GMT-0400) America/Thunder_Bay'), (b'America/Tijuana', b'(GMT-0700) America/Tijuana'), (b'America/Toronto', b'(GMT-0400) America/Toronto'), (b'America/Tortola', b'(GMT-0400) America/Tortola'), (b'America/Vancouver', b'(GMT-0700) America/Vancouver'), (b'America/Whitehorse', b'(GMT-0700) America/Whitehorse'), (b'America/Winnipeg', b'(GMT-0500) America/Winnipeg'), (b'America/Yakutat', b'(GMT-0800) America/Yakutat'), (b'America/Yellowknife', b'(GMT-0600) America/Yellowknife'), (b'Antarctica/Casey', b'(GMT+0800) Antarctica/Casey'), (b'Antarctica/Davis', b'(GMT+0700) Antarctica/Davis'), (b'Antarctica/DumontDUrville', b'(GMT+1000) Antarctica/DumontDUrville'), (b'Antarctica/Macquarie', b'(GMT+1100) Antarctica/Macquarie'), (b'Antarctica/Mawson', b'(GMT+0500) Antarctica/Mawson'), (b'Antarctica/McMurdo', b'(GMT+1300) Antarctica/McMurdo'), (b'Antarctica/Palmer', b'(GMT-0300) Antarctica/Palmer'), (b'Antarctica/Rothera', 
b'(GMT-0300) Antarctica/Rothera'), (b'Antarctica/South_Pole', b'(GMT+1300) Antarctica/South_Pole'), (b'Antarctica/Syowa', b'(GMT+0300) Antarctica/Syowa'), (b'Antarctica/Vostok', b'(GMT+0600) Antarctica/Vostok'), (b'Arctic/Longyearbyen', b'(GMT+0200) Arctic/Longyearbyen'), (b'Asia/Aden', b'(GMT+0300) Asia/Aden'), (b'Asia/Almaty', b'(GMT+0600) Asia/Almaty'), (b'Asia/Amman', b'(GMT+0300) Asia/Amman'), (b'Asia/Anadyr', b'(GMT+1200) Asia/Anadyr'), (b'Asia/Aqtau', b'(GMT+0500) Asia/Aqtau'), (b'Asia/Aqtobe', b'(GMT+0500) Asia/Aqtobe'), (b'Asia/Ashgabat', b'(GMT+0500) Asia/Ashgabat'), (b'Asia/Baghdad', b'(GMT+0300) Asia/Baghdad'), (b'Asia/Bahrain', b'(GMT+0300) Asia/Bahrain'), (b'Asia/Baku', b'(GMT+0500) Asia/Baku'), (b'Asia/Bangkok', b'(GMT+0700) Asia/Bangkok'), (b'Asia/Beirut', b'(GMT+0300) Asia/Beirut'), (b'Asia/Bishkek', b'(GMT+0600) Asia/Bishkek'), (b'Asia/Brunei', b'(GMT+0800) Asia/Brunei'), (b'Asia/Choibalsan', b'(GMT+0800) Asia/Choibalsan'), (b'Asia/Chongqing', b'(GMT+0800) Asia/Chongqing'), (b'Asia/Colombo', b'(GMT+0530) Asia/Colombo'), (b'Asia/Damascus', b'(GMT+0300) Asia/Damascus'), (b'Asia/Dhaka', b'(GMT+0600) Asia/Dhaka'), (b'Asia/Dili', b'(GMT+0900) Asia/Dili'), (b'Asia/Dubai', b'(GMT+0400) Asia/Dubai'), (b'Asia/Dushanbe', b'(GMT+0500) Asia/Dushanbe'), (b'Asia/Gaza', b'(GMT+0200) Asia/Gaza'), (b'Asia/Harbin', b'(GMT+0800) Asia/Harbin'), (b'Asia/Hebron', b'(GMT+0200) Asia/Hebron'), (b'Asia/Ho_Chi_Minh', b'(GMT+0700) Asia/Ho_Chi_Minh'), (b'Asia/Hong_Kong', b'(GMT+0800) Asia/Hong_Kong'), (b'Asia/Hovd', b'(GMT+0700) Asia/Hovd'), (b'Asia/Irkutsk', b'(GMT+0900) Asia/Irkutsk'), (b'Asia/Jakarta', b'(GMT+0700) Asia/Jakarta'), (b'Asia/Jayapura', b'(GMT+0900) Asia/Jayapura'), (b'Asia/Jerusalem', b'(GMT+0300) Asia/Jerusalem'), (b'Asia/Kabul', b'(GMT+0430) Asia/Kabul'), (b'Asia/Kamchatka', b'(GMT+1200) Asia/Kamchatka'), (b'Asia/Karachi', b'(GMT+0500) Asia/Karachi'), (b'Asia/Kashgar', b'(GMT+0800) Asia/Kashgar'), (b'Asia/Kathmandu', b'(GMT+0545) Asia/Kathmandu'), 
(b'Asia/Khandyga', b'(GMT+1000) Asia/Khandyga'), (b'Asia/Kolkata', b'(GMT+0530) Asia/Kolkata'), (b'Asia/Krasnoyarsk', b'(GMT+0800) Asia/Krasnoyarsk'), (b'Asia/Kuala_Lumpur', b'(GMT+0800) Asia/Kuala_Lumpur'), (b'Asia/Kuching', b'(GMT+0800) Asia/Kuching'), (b'Asia/Kuwait', b'(GMT+0300) Asia/Kuwait'), (b'Asia/Macau', b'(GMT+0800) Asia/Macau'), (b'Asia/Magadan', b'(GMT+1200) Asia/Magadan'), (b'Asia/Makassar', b'(GMT+0800) Asia/Makassar'), (b'Asia/Manila', b'(GMT+0800) Asia/Manila'), (b'Asia/Muscat', b'(GMT+0400) Asia/Muscat'), (b'Asia/Nicosia', b'(GMT+0300) Asia/Nicosia'), (b'Asia/Novokuznetsk', b'(GMT+0700) Asia/Novokuznetsk'), (b'Asia/Novosibirsk', b'(GMT+0700) Asia/Novosibirsk'), (b'Asia/Omsk', b'(GMT+0700) Asia/Omsk'), (b'Asia/Oral', b'(GMT+0500) Asia/Oral'), (b'Asia/Phnom_Penh', b'(GMT+0700) Asia/Phnom_Penh'), (b'Asia/Pontianak', b'(GMT+0700) Asia/Pontianak'), (b'Asia/Pyongyang', b'(GMT+0900) Asia/Pyongyang'), (b'Asia/Qatar', b'(GMT+0300) Asia/Qatar'), (b'Asia/Qyzylorda', b'(GMT+0600) Asia/Qyzylorda'), (b'Asia/Rangoon', b'(GMT+0630) Asia/Rangoon'), (b'Asia/Riyadh', b'(GMT+0300) Asia/Riyadh'), (b'Asia/Sakhalin', b'(GMT+1100) Asia/Sakhalin'), (b'Asia/Samarkand', b'(GMT+0500) Asia/Samarkand'), (b'Asia/Seoul', b'(GMT+0900) Asia/Seoul'), (b'Asia/Shanghai', b'(GMT+0800) Asia/Shanghai'), (b'Asia/Singapore', b'(GMT+0800) Asia/Singapore'), (b'Asia/Taipei', b'(GMT+0800) Asia/Taipei'), (b'Asia/Tashkent', b'(GMT+0500) Asia/Tashkent'), (b'Asia/Tbilisi', b'(GMT+0400) Asia/Tbilisi'), (b'Asia/Tehran', b'(GMT+0430) Asia/Tehran'), (b'Asia/Thimphu', b'(GMT+0600) Asia/Thimphu'), (b'Asia/Tokyo', b'(GMT+0900) Asia/Tokyo'), (b'Asia/Ulaanbaatar', b'(GMT+0800) Asia/Ulaanbaatar'), (b'Asia/Urumqi', b'(GMT+0800) Asia/Urumqi'), (b'Asia/Ust-Nera', b'(GMT+1100) Asia/Ust-Nera'), (b'Asia/Vientiane', b'(GMT+0700) Asia/Vientiane'), (b'Asia/Vladivostok', b'(GMT+1100) Asia/Vladivostok'), (b'Asia/Yakutsk', b'(GMT+1000) Asia/Yakutsk'), (b'Asia/Yekaterinburg', b'(GMT+0600) Asia/Yekaterinburg'), 
(b'Asia/Yerevan', b'(GMT+0400) Asia/Yerevan'), (b'Atlantic/Azores', b'(GMT+0000) Atlantic/Azores'), (b'Atlantic/Bermuda', b'(GMT-0300) Atlantic/Bermuda'), (b'Atlantic/Canary', b'(GMT+0100) Atlantic/Canary'), (b'Atlantic/Cape_Verde', b'(GMT-0100) Atlantic/Cape_Verde'), (b'Atlantic/Faroe', b'(GMT+0100) Atlantic/Faroe'), (b'Atlantic/Madeira', b'(GMT+0100) Atlantic/Madeira'), (b'Atlantic/Reykjavik', b'(GMT+0000) Atlantic/Reykjavik'), (b'Atlantic/South_Georgia', b'(GMT-0200) Atlantic/South_Georgia'), (b'Atlantic/St_Helena', b'(GMT+0000) Atlantic/St_Helena'), (b'Atlantic/Stanley', b'(GMT-0300) Atlantic/Stanley'), (b'Australia/Adelaide', b'(GMT+1030) Australia/Adelaide'), (b'Australia/Brisbane', b'(GMT+1000) Australia/Brisbane'), (b'Australia/Broken_Hill', b'(GMT+1030) Australia/Broken_Hill'), (b'Australia/Currie', b'(GMT+1100) Australia/Currie'), (b'Australia/Darwin', b'(GMT+0930) Australia/Darwin'), (b'Australia/Eucla', b'(GMT+0845) Australia/Eucla'), (b'Australia/Hobart', b'(GMT+1100) Australia/Hobart'), (b'Australia/Lindeman', b'(GMT+1000) Australia/Lindeman'), (b'Australia/Lord_Howe', b'(GMT+1100) Australia/Lord_Howe'), (b'Australia/Melbourne', b'(GMT+1100) Australia/Melbourne'), (b'Australia/Perth', b'(GMT+0800) Australia/Perth'), (b'Australia/Sydney', b'(GMT+1100) Australia/Sydney'), (b'Canada/Atlantic', b'(GMT-0300) Canada/Atlantic'), (b'Canada/Central', b'(GMT-0500) Canada/Central'), (b'Canada/Eastern', b'(GMT-0400) Canada/Eastern'), (b'Canada/Mountain', b'(GMT-0600) Canada/Mountain'), (b'Canada/Newfoundland', b'(GMT-0230) Canada/Newfoundland'), (b'Canada/Pacific', b'(GMT-0700) Canada/Pacific'), (b'Europe/Amsterdam', b'(GMT+0200) Europe/Amsterdam'), (b'Europe/Andorra', b'(GMT+0200) Europe/Andorra'), (b'Europe/Athens', b'(GMT+0300) Europe/Athens'), (b'Europe/Belgrade', b'(GMT+0200) Europe/Belgrade'), (b'Europe/Berlin', b'(GMT+0200) Europe/Berlin'), (b'Europe/Bratislava', b'(GMT+0200) Europe/Bratislava'), (b'Europe/Brussels', b'(GMT+0200) Europe/Brussels'), 
(b'Europe/Bucharest', b'(GMT+0300) Europe/Bucharest'), (b'Europe/Budapest', b'(GMT+0200) Europe/Budapest'), (b'Europe/Busingen', b'(GMT+0200) Europe/Busingen'), (b'Europe/Chisinau', b'(GMT+0300) Europe/Chisinau'), (b'Europe/Copenhagen', b'(GMT+0200) Europe/Copenhagen'), (b'Europe/Dublin', b'(GMT+0100) Europe/Dublin'), (b'Europe/Gibraltar', b'(GMT+0200) Europe/Gibraltar'), (b'Europe/Guernsey', b'(GMT+0100) Europe/Guernsey'), (b'Europe/Helsinki', b'(GMT+0300) Europe/Helsinki'), (b'Europe/Isle_of_Man', b'(GMT+0100) Europe/Isle_of_Man'), (b'Europe/Istanbul', b'(GMT+0300) Europe/Istanbul'), (b'Europe/Jersey', b'(GMT+0100) Europe/Jersey'), (b'Europe/Kaliningrad', b'(GMT+0300) Europe/Kaliningrad'), (b'Europe/Kiev', b'(GMT+0300) Europe/Kiev'), (b'Europe/Lisbon', b'(GMT+0100) Europe/Lisbon'), (b'Europe/Ljubljana', b'(GMT+0200) Europe/Ljubljana'), (b'Europe/London', b'(GMT+0100) Europe/London'), (b'Europe/Luxembourg', b'(GMT+0200) Europe/Luxembourg'), (b'Europe/Madrid', b'(GMT+0200) Europe/Madrid'), (b'Europe/Malta', b'(GMT+0200) Europe/Malta'), (b'Europe/Mariehamn', b'(GMT+0300) Europe/Mariehamn'), (b'Europe/Minsk', b'(GMT+0300) Europe/Minsk'), (b'Europe/Monaco', b'(GMT+0200) Europe/Monaco'), (b'Europe/Moscow', b'(GMT+0400) Europe/Moscow'), (b'Europe/Oslo', b'(GMT+0200) Europe/Oslo'), (b'Europe/Paris', b'(GMT+0200) Europe/Paris'), (b'Europe/Podgorica', b'(GMT+0200) Europe/Podgorica'), (b'Europe/Prague', b'(GMT+0200) Europe/Prague'), (b'Europe/Riga', b'(GMT+0300) Europe/Riga'), (b'Europe/Rome', b'(GMT+0200) Europe/Rome'), (b'Europe/Samara', b'(GMT+0400) Europe/Samara'), (b'Europe/San_Marino', b'(GMT+0200) Europe/San_Marino'), (b'Europe/Sarajevo', b'(GMT+0200) Europe/Sarajevo'), (b'Europe/Simferopol', b'(GMT+0300) Europe/Simferopol'), (b'Europe/Skopje', b'(GMT+0200) Europe/Skopje'), (b'Europe/Sofia', b'(GMT+0300) Europe/Sofia'), (b'Europe/Stockholm', b'(GMT+0200) Europe/Stockholm'), (b'Europe/Tallinn', b'(GMT+0300) Europe/Tallinn'), (b'Europe/Tirane', b'(GMT+0200) 
Europe/Tirane'), (b'Europe/Uzhgorod', b'(GMT+0300) Europe/Uzhgorod'), (b'Europe/Vaduz', b'(GMT+0200) Europe/Vaduz'), (b'Europe/Vatican', b'(GMT+0200) Europe/Vatican'), (b'Europe/Vienna', b'(GMT+0200) Europe/Vienna'), (b'Europe/Vilnius', b'(GMT+0300) Europe/Vilnius'), (b'Europe/Volgograd', b'(GMT+0400) Europe/Volgograd'), (b'Europe/Warsaw', b'(GMT+0200) Europe/Warsaw'), (b'Europe/Zagreb', b'(GMT+0200) Europe/Zagreb'), (b'Europe/Zaporozhye', b'(GMT+0300) Europe/Zaporozhye'), (b'Europe/Zurich', b'(GMT+0200) Europe/Zurich'), (b'GMT', b'(GMT+0000) GMT'), (b'Indian/Antananarivo', b'(GMT+0300) Indian/Antananarivo'), (b'Indian/Chagos', b'(GMT+0600) Indian/Chagos'), (b'Indian/Christmas', b'(GMT+0700) Indian/Christmas'), (b'Indian/Cocos', b'(GMT+0630) Indian/Cocos'), (b'Indian/Comoro', b'(GMT+0300) Indian/Comoro'), (b'Indian/Kerguelen', b'(GMT+0500) Indian/Kerguelen'), (b'Indian/Mahe', b'(GMT+0400) Indian/Mahe'), (b'Indian/Maldives', b'(GMT+0500) Indian/Maldives'), (b'Indian/Mauritius', b'(GMT+0400) Indian/Mauritius'), (b'Indian/Mayotte', b'(GMT+0300) Indian/Mayotte'), (b'Indian/Reunion', b'(GMT+0400) Indian/Reunion'), (b'Pacific/Apia', b'(GMT+1400) Pacific/Apia'), (b'Pacific/Auckland', b'(GMT+1300) Pacific/Auckland'), (b'Pacific/Chatham', b'(GMT+1345) Pacific/Chatham'), (b'Pacific/Chuuk', b'(GMT+1000) Pacific/Chuuk'), (b'Pacific/Easter', b'(GMT-0500) Pacific/Easter'), (b'Pacific/Efate', b'(GMT+1100) Pacific/Efate'), (b'Pacific/Enderbury', b'(GMT+1300) Pacific/Enderbury'), (b'Pacific/Fakaofo', b'(GMT+1300) Pacific/Fakaofo'), (b'Pacific/Fiji', b'(GMT+1200) Pacific/Fiji'), (b'Pacific/Funafuti', b'(GMT+1200) Pacific/Funafuti'), (b'Pacific/Galapagos', b'(GMT-0600) Pacific/Galapagos'), (b'Pacific/Gambier', b'(GMT-0900) Pacific/Gambier'), (b'Pacific/Guadalcanal', b'(GMT+1100) Pacific/Guadalcanal'), (b'Pacific/Guam', b'(GMT+1000) Pacific/Guam'), (b'Pacific/Honolulu', b'(GMT-1000) Pacific/Honolulu'), (b'Pacific/Johnston', b'(GMT-1000) Pacific/Johnston'), (b'Pacific/Kiritimati', 
b'(GMT+1400) Pacific/Kiritimati'), (b'Pacific/Kosrae', b'(GMT+1100) Pacific/Kosrae'), (b'Pacific/Kwajalein', b'(GMT+1200) Pacific/Kwajalein'), (b'Pacific/Majuro', b'(GMT+1200) Pacific/Majuro'), (b'Pacific/Marquesas', b'(GMT-0930) Pacific/Marquesas'), (b'Pacific/Midway', b'(GMT-1100) Pacific/Midway'), (b'Pacific/Nauru', b'(GMT+1200) Pacific/Nauru'), (b'Pacific/Niue', b'(GMT-1100) Pacific/Niue'), (b'Pacific/Norfolk', b'(GMT+1130) Pacific/Norfolk'), (b'Pacific/Noumea', b'(GMT+1100) Pacific/Noumea'), (b'Pacific/Pago_Pago', b'(GMT-1100) Pacific/Pago_Pago'), (b'Pacific/Palau', b'(GMT+0900) Pacific/Palau'), (b'Pacific/Pitcairn', b'(GMT-0800) Pacific/Pitcairn'), (b'Pacific/Pohnpei', b'(GMT+1100) Pacific/Pohnpei'), (b'Pacific/Port_Moresby', b'(GMT+1000) Pacific/Port_Moresby'), (b'Pacific/Rarotonga', b'(GMT-1000) Pacific/Rarotonga'), (b'Pacific/Saipan', b'(GMT+1000) Pacific/Saipan'), (b'Pacific/Tahiti', b'(GMT-1000) Pacific/Tahiti'), (b'Pacific/Tarawa', b'(GMT+1200) Pacific/Tarawa'), (b'Pacific/Tongatapu', b'(GMT+1300) Pacific/Tongatapu'), (b'Pacific/Wake', b'(GMT+1200) Pacific/Wake'), (b'Pacific/Wallis', b'(GMT+1200) Pacific/Wallis'), (b'US/Alaska', b'(GMT-0800) US/Alaska'), (b'US/Arizona', b'(GMT-0700) US/Arizona'), (b'US/Central', b'(GMT-0500) US/Central'), (b'US/Eastern', b'(GMT-0400) US/Eastern'), (b'US/Hawaii', b'(GMT-1000) US/Hawaii'), (b'US/Mountain', b'(GMT-0600) US/Mountain'), (b'US/Pacific', b'(GMT-0700) US/Pacific'), (b'UTC', b'(GMT+0000) UTC')], max_length=100, blank=True, null=True, verbose_name='Timezone')),
('country', models.CharField(blank=True, max_length=2, null=True, verbose_name='Country', choices=[(b'AF', b'Afghanistan'), (b'AX', '\xc5land Islands'), (b'AL', b'Albania'), (b'DZ', b'Algeria'), (b'AS', b'American Samoa'), (b'AD', b'Andorra'), (b'AO', b'Angola'), (b'AI', b'Anguilla'), (b'AQ', b'Antarctica'), (b'AG', b'Antigua and Barbuda'), (b'AR', b'Argentina'), (b'AM', b'Armenia'), (b'AW', b'Aruba'), (b'AU', b'Australia'), (b'AT', b'Austria'), (b'AZ', b'Azerbaijan'), (b'BS', b'Bahamas'), (b'BH', b'Bahrain'), (b'BD', b'Bangladesh'), (b'BB', b'Barbados'), (b'BY', b'Belarus'), (b'BE', b'Belgium'), (b'BZ', b'Belize'), (b'BJ', b'Benin'), (b'BM', b'Bermuda'), (b'BT', b'Bhutan'), (b'BO', b'Bolivia, Plurinational State of'), (b'BA', b'Bosnia and Herzegovina'), (b'BW', b'Botswana'), (b'BV', b'Bouvet Island'), (b'BR', b'Brazil'), (b'IO', b'British Indian Ocean Territory'), (b'BN', b'Brunei Darussalam'), (b'BG', b'Bulgaria'), (b'BF', b'Burkina Faso'), (b'BI', b'Burundi'), (b'KH', b'Cambodia'), (b'CM', b'Cameroon'), (b'CA', b'Canada'), (b'CV', b'Cape Verde'), (b'KY', b'Cayman Islands'), (b'CF', b'Central African Republic'), (b'TD', b'Chad'), (b'CL', b'Chile'), (b'CN', b'China'), (b'CX', b'Christmas Island'), (b'CC', b'Cocos (Keeling) Islands'), (b'CO', b'Colombia'), (b'KM', b'Comoros'), (b'CG', b'Congo'), (b'CD', b'Congo, The Democratic Republic of the'), (b'CK', b'Cook Islands'), (b'CR', b'Costa Rica'), (b'CI', "C\xf4te d'Ivoire"), (b'HR', b'Croatia'), (b'CU', b'Cuba'), (b'CY', b'Cyprus'), (b'CZ', b'Czech Republic'), (b'DK', b'Denmark'), (b'DJ', b'Djibouti'), (b'DM', b'Dominica'), (b'DO', b'Dominican Republic'), (b'EC', b'Ecuador'), (b'EG', b'Egypt'), (b'SV', b'El Salvador'), (b'GQ', b'Equatorial Guinea'), (b'ER', b'Eritrea'), (b'EE', b'Estonia'), (b'ET', b'Ethiopia'), (b'FK', b'Falkland Islands (Malvinas)'), (b'FO', b'Faroe Islands'), (b'FJ', b'Fiji'), (b'FI', b'Finland'), (b'FR', b'France'), (b'GF', b'French Guiana'), (b'PF', b'French Polynesia'), (b'TF', b'French 
Southern Territories'), (b'GA', b'Gabon'), (b'GM', b'Gambia'), (b'GE', b'Georgia'), (b'DE', b'Germany'), (b'GH', b'Ghana'), (b'GI', b'Gibraltar'), (b'GR', b'Greece'), (b'GL', b'Greenland'), (b'GD', b'Grenada'), (b'GP', b'Guadeloupe'), (b'GU', b'Guam'), (b'GT', b'Guatemala'), (b'GG', b'Guernsey'), (b'GN', b'Guinea'), (b'GW', b'Guinea-Bissau'), (b'GY', b'Guyana'), (b'HT', b'Haiti'), (b'HM', b'Heard Island and McDonald Islands'), (b'VA', b'Holy See (Vatican City State)'), (b'HN', b'Honduras'), (b'HK', b'Hong Kong'), (b'HU', b'Hungary'), (b'IS', b'Iceland'), (b'IN', b'India'), (b'ID', b'Indonesia'), (b'IR', b'Iran, Islamic Republic of'), (b'IQ', b'Iraq'), (b'IE', b'Ireland'), (b'IM', b'Isle of Man'), (b'IL', b'Israel'), (b'IT', b'Italy'), (b'JM', b'Jamaica'), (b'JP', b'Japan'), (b'JE', b'Jersey'), (b'JO', b'Jordan'), (b'KZ', b'Kazakhstan'), (b'KE', b'Kenya'), (b'KI', b'Kiribati'), (b'KP', b"Korea, Democratic People's Republic of"), (b'KR', b'Korea, Republic of'), (b'KW', b'Kuwait'), (b'KG', b'Kyrgyzstan'), (b'LA', b"Lao People's Democratic Republic"), (b'LV', b'Latvia'), (b'LB', b'Lebanon'), (b'LS', b'Lesotho'), (b'LR', b'Liberia'), (b'LY', b'Libyan Arab Jamahiriya'), (b'LI', b'Liechtenstein'), (b'LT', b'Lithuania'), (b'LU', b'Luxembourg'), (b'MO', b'Macao'), (b'MK', b'Macedonia, The Former Yugoslav Republic of'), (b'MG', b'Madagascar'), (b'MW', b'Malawi'), (b'MY', b'Malaysia'), (b'MV', b'Maldives'), (b'ML', b'Mali'), (b'MT', b'Malta'), (b'MH', b'Marshall Islands'), (b'MQ', b'Martinique'), (b'MR', b'Mauritania'), (b'MU', b'Mauritius'), (b'YT', b'Mayotte'), (b'MX', b'Mexico'), (b'FM', b'Micronesia, Federated States of'), (b'MD', b'Moldova, Republic of'), (b'MC', b'Monaco'), (b'MN', b'Mongolia'), (b'ME', b'Montenegro'), (b'MS', b'Montserrat'), (b'MA', b'Morocco'), (b'MZ', b'Mozambique'), (b'MM', b'Myanmar'), (b'NA', b'Namibia'), (b'NR', b'Nauru'), (b'NP', b'Nepal'), (b'NL', b'Netherlands'), (b'AN', b'Netherlands Antilles'), (b'NC', b'New Caledonia'), (b'NZ', b'New 
Zealand'), (b'NI', b'Nicaragua'), (b'NE', b'Niger'), (b'NG', b'Nigeria'), (b'NU', b'Niue'), (b'NF', b'Norfolk Island'), (b'MP', b'Northern Mariana Islands'), (b'NO', b'Norway'), (b'OM', b'Oman'), (b'PK', b'Pakistan'), (b'PW', b'Palau'), (b'PS', b'Palestinian Territory, Occupied'), (b'PA', b'Panama'), (b'PG', b'Papua New Guinea'), (b'PY', b'Paraguay'), (b'PE', b'Peru'), (b'PH', b'Philippines'), (b'PN', b'Pitcairn'), (b'PL', b'Poland'), (b'PT', b'Portugal'), (b'PR', b'Puerto Rico'), (b'QA', b'Qatar'), (b'RE', 'R\xe9union'), (b'RO', b'Romania'), (b'RU', b'Russian Federation'), (b'RW', b'Rwanda'), (b'BL', 'Saint Barth\xe9lemy'), (b'SH', b'Saint Helena, Ascension and Tristan Da Cunha'), (b'KN', b'Saint Kitts and Nevis'), (b'LC', b'Saint Lucia'), (b'MF', b'Saint Martin'), (b'PM', b'Saint Pierre and Miquelon'), (b'VC', b'Saint Vincent and the Grenadines'), (b'WS', b'Samoa'), (b'SM', b'San Marino'), (b'ST', b'Sao Tome and Principe'), (b'SA', b'Saudi Arabia'), (b'SN', b'Senegal'), (b'RS', b'Serbia'), (b'SC', b'Seychelles'), (b'SL', b'Sierra Leone'), (b'SG', b'Singapore'), (b'SK', b'Slovakia'), (b'SI', b'Slovenia'), (b'SB', b'Solomon Islands'), (b'SO', b'Somalia'), (b'ZA', b'South Africa'), (b'GS', b'South Georgia and the South Sandwich Islands'), (b'ES', b'Spain'), (b'LK', b'Sri Lanka'), (b'SD', b'Sudan'), (b'SR', b'Suriname'), (b'SJ', b'Svalbard and Jan Mayen'), (b'SZ', b'Swaziland'), (b'SE', b'Sweden'), (b'CH', b'Switzerland'), (b'SY', b'Syrian Arab Republic'), (b'TW', b'Taiwan'), (b'TJ', b'Tajikistan'), (b'TZ', b'Tanzania, United Republic of'), (b'TH', b'Thailand'), (b'TL', b'Timor-Leste'), (b'TG', b'Togo'), (b'TK', b'Tokelau'), (b'TO', b'Tonga'), (b'TT', b'Trinidad and Tobago'), (b'TN', b'Tunisia'), (b'TR', b'Turkey'), (b'TM', b'Turkmenistan'), (b'TC', b'Turks and Caicos Islands'), (b'TV', b'Tuvalu'), (b'UG', b'Uganda'), (b'UA', b'Ukraine'), (b'AE', b'United Arab Emirates'), (b'GB', b'United Kingdom'), (b'US', b'United States'), (b'UM', b'United States Minor Outlying 
Islands'), (b'UY', b'Uruguay'), (b'UZ', b'Uzbekistan'), (b'VU', b'Vanuatu'), (b'VE', b'Venezuela, Bolivarian Republic of'), (b'VN', b'Viet Nam'), (b'VG', b'Virgin Islands, British'), (b'VI', b'Virgin Islands, U.S.'), (b'WF', b'Wallis and Futuna'), (b'EH', b'Western Sahara'), (b'YE', b'Yemen'), (b'ZM', b'Zambia'), (b'ZW', b'Zimbabwe')])),
('city', models.CharField(max_length=255, null=True, verbose_name='City', blank=True)),
('locale', kitsune.sumo.models.LocaleField(default=b'en-US', max_length=7, verbose_name='Preferred language', choices=[(b'af', 'Afrikaans'), (b'ar', '\u0639\u0631\u0628\u064a'), (b'az', 'Az\u0259rbaycanca'), (b'bg', '\u0411\u044a\u043b\u0433\u0430\u0440\u0441\u043a\u0438'), (b'bn-BD', '\u09ac\u09be\u0982\u09b2\u09be (\u09ac\u09be\u0982\u09b2\u09be\u09a6\u09c7\u09b6)'), (b'bn-IN', '\u09ac\u09be\u0982\u09b2\u09be (\u09ad\u09be\u09b0\u09a4)'), (b'bs', 'Bosanski'), (b'ca', 'catal\xe0'), (b'cs', '\u010ce\u0161tina'), (b'da', 'Dansk'), (b'de', 'Deutsch'), (b'ee', '\xc8\u028begbe'), (b'el', '\u0395\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac'), (b'en-US', 'English'), (b'es', 'Espa\xf1ol'), (b'et', 'eesti keel'), (b'eu', 'Euskara'), (b'fa', '\u0641\u0627\u0631\u0633\u06cc'), (b'fi', 'suomi'), (b'fr', 'Fran\xe7ais'), (b'fy-NL', 'Frysk'), (b'ga-IE', 'Gaeilge (\xc9ire)'), (b'gl', 'Galego'), (b'gu-IN', '\u0a97\u0ac1\u0a9c\u0ab0\u0abe\u0aa4\u0ac0'), (b'ha', '\u0647\u064e\u0631\u0652\u0634\u064e\u0646 \u0647\u064e\u0648\u0652\u0633\u064e'), (b'he', '\u05e2\u05d1\u05e8\u05d9\u05ea'), (b'hi-IN', '\u0939\u093f\u0928\u094d\u0926\u0940 (\u092d\u093e\u0930\u0924)'), (b'hr', 'Hrvatski'), (b'hu', 'Magyar'), (b'id', 'Bahasa Indonesia'), (b'ig', 'As\u1ee5s\u1ee5 Igbo'), (b'it', 'Italiano'), (b'ja', '\u65e5\u672c\u8a9e'), (b'km', '\u1781\u17d2\u1798\u17c2\u179a'), (b'ko', '\ud55c\uad6d\uc5b4'), (b'ln', 'Ling\xe1la'), (b'lt', 'lietuvi\u0173 kalba'), (b'ne-NP', '\u0928\u0947\u092a\u093e\u0932\u0940'), (b'nl', 'Nederlands'), (b'no', 'Norsk'), (b'pl', 'Polski'), (b'pt-BR', 'Portugu\xeas (do Brasil)'), (b'pt-PT', 'Portugu\xeas (Europeu)'), (b'ro', 'rom\xe2n\u0103'), (b'ru', '\u0420\u0443\u0441\u0441\u043a\u0438\u0439'), (b'si', '\u0dc3\u0dd2\u0d82\u0dc4\u0dbd'), (b'sk', 'sloven\u010dina'), (b'sl', 'sloven\u0161\u010dina'), (b'sq', 'Shqip'), (b'sr-Cyrl', '\u0421\u0440\u043f\u0441\u043a\u0438'), (b'sw', 'Kiswahili'), (b'sv', 'Svenska'), (b'ta', '\u0ba4\u0bae\u0bbf\u0bb4\u0bcd'), (b'ta-LK', 
'\u0ba4\u0bae\u0bbf\u0bb4\u0bcd (\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8)'), (b'te', '\u0c24\u0c46\u0c32\u0c41\u0c17\u0c41'), (b'th', '\u0e44\u0e17\u0e22'), (b'tr', 'T\xfcrk\xe7e'), (b'uk', '\u0423\u043a\u0440\u0430\u0457\u043d\u0441\u044c\u043a\u0430'), (b'ur', '\u0627\u064f\u0631\u062f\u0648'), (b'vi', 'Ti\u1ebfng Vi\u1ec7t'), (b'wo', 'Wolof'), (b'xh', 'isiXhosa'), (b'yo', '\xe8d\xe8 Yor\xf9b\xe1'), (b'zh-CN', '\u4e2d\u6587 (\u7b80\u4f53)'), (b'zh-TW', '\u6b63\u9ad4\u4e2d\u6587 (\u7e41\u9ad4)'), (b'zu', 'isiZulu')])),
],
options={
'permissions': (('view_karma_points', 'Can view karma points'), ('deactivate_users', 'Can deactivate users')),
},
bases=(models.Model, kitsune.search.models.SearchMixin),
),
migrations.CreateModel(
name='RegistrationProfile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('activation_key', models.CharField(max_length=40, verbose_name='activation key')),
('user', models.ForeignKey(verbose_name='user', to=settings.AUTH_USER_MODEL, unique=True)),
],
options={
'verbose_name': 'registration profile',
'verbose_name_plural': 'registration profiles',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Setting',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100)),
('value', models.CharField(max_length=60, verbose_name='Value', blank=True)),
('user', models.ForeignKey(related_name='settings', verbose_name='User', to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='setting',
unique_together=set([('user', 'name')]),
),
migrations.AddField(
model_name='emailchange',
name='user',
field=models.ForeignKey(verbose_name='user', to=settings.AUTH_USER_MODEL, unique=True),
preserve_default=True,
),
migrations.AddField(
model_name='deactivation',
name='moderator',
field=models.ForeignKey(related_name='deactivations', verbose_name='moderator', to=settings.AUTH_USER_MODEL),
preserve_default=True,
),
migrations.AddField(
model_name='deactivation',
name='user',
field=models.ForeignKey(related_name='+', verbose_name='user', to=settings.AUTH_USER_MODEL),
preserve_default=True,
),
]
| bsd-3-clause |
stvstnfrd/edx-platform | lms/djangoapps/courseware/views/views.py | 1 | 89915 | """
Courseware views functions
"""
import json
import logging
from collections import OrderedDict, namedtuple
from datetime import datetime
import bleach
import requests
import six
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import AnonymousUser, User # lint-amnesty, pylint: disable=imported-auth-user
from django.core.exceptions import PermissionDenied
from django.db import transaction
from django.db.models import Q, prefetch_related_objects
from django.http import Http404, HttpResponse, HttpResponseBadRequest, HttpResponseForbidden
from django.shortcuts import redirect
from django.template.context_processors import csrf
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.utils.http import urlquote_plus
from django.utils.text import slugify
from django.utils.translation import ugettext
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext_noop
from django.views.decorators.cache import cache_control
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.decorators.http import require_GET, require_http_methods, require_POST
from django.views.generic import View
from edx_django_utils import monitoring as monitoring_utils
from edx_django_utils.monitoring import set_custom_attribute, set_custom_attributes_for_course_key
from ipware.ip import get_client_ip
from markupsafe import escape
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from pytz import UTC
from requests.exceptions import ConnectionError, Timeout # pylint: disable=redefined-builtin
from rest_framework import status
from rest_framework.decorators import api_view, throttle_classes
from rest_framework.response import Response
from rest_framework.throttling import UserRateThrottle
from six import text_type
from web_fragments.fragment import Fragment
from lms.djangoapps.survey import views as survey_views
from common.djangoapps.course_modes.models import CourseMode, get_course_prices
from common.djangoapps.edxmako.shortcuts import marketing_link, render_to_response, render_to_string
from lms.djangoapps.edxnotes.helpers import is_feature_enabled
from lms.djangoapps.ccx.custom_exception import CCXLocatorValidationException
from lms.djangoapps.certificates import api as certs_api
from lms.djangoapps.certificates.models import CertificateStatuses
from lms.djangoapps.commerce.utils import EcommerceService
from lms.djangoapps.course_home_api.toggles import course_home_mfe_dates_tab_is_active
from openedx.features.course_experience.url_helpers import get_learning_mfe_home_url, is_request_from_learning_mfe
from lms.djangoapps.courseware.access import has_access, has_ccx_coach_role
from lms.djangoapps.courseware.access_utils import check_course_open_for_learner, check_public_access
from lms.djangoapps.courseware.courses import (
can_self_enroll_in_course,
course_open_for_self_enrollment,
get_course,
get_course_date_blocks,
get_course_overview_with_access,
get_course_with_access,
get_courses,
get_current_child,
get_permission_for_course_about,
get_studio_url,
sort_by_announcement,
sort_by_start_date
)
from lms.djangoapps.courseware.date_summary import verified_upgrade_deadline_link
from lms.djangoapps.courseware.exceptions import CourseAccessRedirect, Redirect
from lms.djangoapps.courseware.masquerade import setup_masquerade
from lms.djangoapps.courseware.model_data import FieldDataCache
from lms.djangoapps.courseware.models import BaseStudentModuleHistory, StudentModule
from lms.djangoapps.courseware.permissions import ( # lint-amnesty, pylint: disable=unused-import
MASQUERADE_AS_STUDENT,
VIEW_COURSE_HOME,
VIEW_COURSEWARE,
VIEW_XQA_INTERFACE
)
from lms.djangoapps.courseware.user_state_client import DjangoXBlockUserStateClient
from lms.djangoapps.experiments.utils import get_experiment_user_metadata_context
from lms.djangoapps.grades.api import CourseGradeFactory
from lms.djangoapps.instructor.enrollment import uses_shib
from lms.djangoapps.instructor.views.api import require_global_staff
from lms.djangoapps.verify_student.services import IDVerificationService
from openedx.core.djangoapps.catalog.utils import get_programs, get_programs_with_type
from openedx.core.djangoapps.certificates import api as auto_certs_api
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.credit.api import (
get_credit_requirement_status,
is_credit_course,
is_user_eligible_for_credit
)
from openedx.core.djangoapps.enrollments.api import add_enrollment, get_enrollment # lint-amnesty, pylint: disable=unused-import
from openedx.core.djangoapps.enrollments.permissions import ENROLL_IN_COURSE
from openedx.core.djangoapps.models.course_details import CourseDetails
from openedx.core.djangoapps.plugin_api.views import EdxFragmentView
from openedx.core.djangoapps.programs.utils import ProgramMarketingDataExtender
from openedx.core.djangoapps.self_paced.models import SelfPacedConfiguration
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.util.user_messages import PageLevelMessages
from openedx.core.djangoapps.zendesk_proxy.utils import create_zendesk_ticket
from openedx.core.djangolib.markup import HTML, Text
from openedx.core.lib.mobile_utils import is_request_from_mobile_app
from openedx.features.content_type_gating.models import ContentTypeGatingConfig
from openedx.features.course_duration_limits.access import generate_course_expired_fragment
from openedx.features.course_experience import DISABLE_UNIFIED_COURSE_TAB_FLAG, course_home_url_name
from openedx.features.course_experience.course_tools import CourseToolsPluginManager
from openedx.features.course_experience.url_helpers import get_legacy_courseware_url
from openedx.features.course_experience.utils import dates_banner_should_display
from openedx.features.course_experience.views.course_dates import CourseDatesFragmentView
from openedx.features.course_experience.waffle import ENABLE_COURSE_ABOUT_SIDEBAR_HTML
from openedx.features.course_experience.waffle import waffle as course_experience_waffle
from openedx.features.enterprise_support.api import data_sharing_consent_required
from common.djangoapps.student.models import CourseEnrollment, UserTestGroup
from common.djangoapps.track import segment
from common.djangoapps.util.cache import cache, cache_if_anonymous
from common.djangoapps.util.db import outer_atomic
from common.djangoapps.util.milestones_helpers import get_prerequisite_courses_display
from common.djangoapps.util.views import ensure_valid_course_key, ensure_valid_usage_key
from xmodule.course_module import COURSE_VISIBILITY_PUBLIC, COURSE_VISIBILITY_PUBLIC_OUTLINE
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError, NoPathToItem
from xmodule.tabs import CourseTabList
from xmodule.x_module import STUDENT_VIEW
from ..context_processor import user_timezone_locale_prefs
from ..entrance_exams import user_can_skip_entrance_exam
from ..module_render import get_module, get_module_by_usage_id, get_module_for_descriptor
from ..tabs import _get_dynamic_tabs
from ..toggles import COURSEWARE_OPTIMIZED_RENDER_XBLOCK
# Module-level logger for all courseware views in this file.
log = logging.getLogger("edx.courseware")
# Only display the requirements on learner dashboard for
# credit and verified modes.
REQUIREMENTS_DISPLAY_MODES = CourseMode.CREDIT_MODES + [CourseMode.VERIFIED]
# Lightweight container describing a learner's certificate state as rendered
# in the UI: a status slug, a display title and message, and optional
# download / web-view URLs (None when no certificate file exists).
CertData = namedtuple(
    "CertData", ["cert_status", "title", "msg", "download_url", "cert_web_view_url"]
)
# Pseudo-status (not a CertificateStatuses member) used when a certificate has
# been earned but is not yet available; see EARNED_BUT_NOT_AVAILABLE_CERT_DATA.
EARNED_BUT_NOT_AVAILABLE_CERT_STATUS = 'earned_but_not_available'
# Canned CertData instances for each certificate state that has no
# downloadable certificate attached.
AUDIT_PASSING_CERT_DATA = CertData(
    CertificateStatuses.audit_passing,
    _('Your enrollment: Audit track'),
    _('You are enrolled in the audit track for this course. The audit track does not include a certificate.'),
    download_url=None,
    cert_web_view_url=None
)
HONOR_PASSING_CERT_DATA = CertData(
    CertificateStatuses.honor_passing,
    _('Your enrollment: Honor track'),
    _('You are enrolled in the honor track for this course. The honor track does not include a certificate.'),
    download_url=None,
    cert_web_view_url=None
)
# Maps certificate-ineligible enrollment modes to the CertData shown when the
# learner is passing anyway.
INELIGIBLE_PASSING_CERT_DATA = {
    CourseMode.AUDIT: AUDIT_PASSING_CERT_DATA,
    CourseMode.HONOR: HONOR_PASSING_CERT_DATA
}
GENERATING_CERT_DATA = CertData(
    CertificateStatuses.generating,
    _("We're working on it..."),
    _(
        "We're creating your certificate. You can keep working in your courses and a link "
        "to it will appear here and on your Dashboard when it is ready."
    ),
    download_url=None,
    cert_web_view_url=None
)
INVALID_CERT_DATA = CertData(
    CertificateStatuses.invalidated,
    _('Your certificate has been invalidated'),
    _('Please contact your course team if you have any questions.'),
    download_url=None,
    cert_web_view_url=None
)
REQUESTING_CERT_DATA = CertData(
    CertificateStatuses.requesting,
    _('Congratulations, you qualified for a certificate!'),
    _("You've earned a certificate for this course."),
    download_url=None,
    cert_web_view_url=None
)
# Message text interpolates the deployment's platform name at import time.
UNVERIFIED_CERT_DATA = CertData(
    CertificateStatuses.unverified,
    _('Certificate unavailable'),
    _(
        u'You have not received a certificate because you do not have a current {platform_name} '
        'verified identity.'
    ).format(platform_name=configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)),
    download_url=None,
    cert_web_view_url=None
)
EARNED_BUT_NOT_AVAILABLE_CERT_DATA = CertData(
    EARNED_BUT_NOT_AVAILABLE_CERT_STATUS,
    _('Your certificate will be available soon!'),
    _('After this course officially ends, you will receive an email notification with your certificate.'),
    download_url=None,
    cert_web_view_url=None
)
def _downloadable_cert_data(download_url=None, cert_web_view_url=None):
    """
    Build the CertData shown when a learner's certificate is downloadable.

    Arguments:
        download_url: URL of the certificate file, if one exists.
        cert_web_view_url: URL of the web-rendered certificate, if enabled.

    Returns:
        CertData with the 'downloadable' status and the given URLs.
    """
    title = _('Your certificate is available')
    message = _("You've earned a certificate for this course.")
    return CertData(
        CertificateStatuses.downloadable,
        title,
        message,
        download_url=download_url,
        cert_web_view_url=cert_web_view_url,
    )
def user_groups(user):
    """
    Return the names of the UserTestGroups that ``user`` belongs to.

    Anonymous users get an empty list. Results are cached per user for one
    hour, except on DEBUG machines where the cache is always bypassed.

    TODO (vshnayder): This is not used. When we have a new plan for groups, adjust appropriately.
    """
    if not user.is_authenticated:
        return []
    # TODO: Rewrite in Django
    cache_key = 'user_group_names_{user.id}'.format(user=user)
    one_hour = 60 * 60
    names = cache.get(cache_key)
    # Kill caching on dev machines -- we switch groups a lot
    if settings.DEBUG:
        names = None
    if names is None:
        names = [group.name for group in UserTestGroup.objects.filter(users=user)]
        cache.set(cache_key, names, one_hour)
    return names
@ensure_csrf_cookie
@cache_if_anonymous()
def courses(request):
    """
    Render "find courses" page. The course selection work is done in courseware.courses.
    """
    course_discovery_meanings = getattr(settings, 'COURSE_DISCOVERY_MEANINGS', {})
    courses_list = []
    # When course discovery is enabled the catalog is fetched client-side,
    # so the course list is only built here when the feature is off.
    if not settings.FEATURES.get('ENABLE_COURSE_DISCOVERY'):
        courses_list = get_courses(request.user)
        sort_by_start = configuration_helpers.get_value(
            "ENABLE_COURSE_SORTING_BY_START_DATE",
            settings.FEATURES["ENABLE_COURSE_SORTING_BY_START_DATE"],
        )
        if sort_by_start:
            courses_list = sort_by_start_date(courses_list)
        else:
            courses_list = sort_by_announcement(courses_list)
    # Add marketable programs to the context.
    programs_list = get_programs_with_type(request.site, include_hidden=False)
    context = {
        'courses': courses_list,
        'course_discovery_meanings': course_discovery_meanings,
        'programs_list': programs_list,
    }
    return render_to_response("courseware/courses.html", context)
class PerUserVideoMetadataThrottle(UserRateThrottle):
    """
    Per-user rate limit applied to the yt_video_metadata API.
    """
    # DRF throttle rate string (e.g. "10/minute"), sourced from Django
    # settings so it can be tuned per deployment.
    rate = settings.RATE_LIMIT_FOR_VIDEO_METADATA_API
@ensure_csrf_cookie
@login_required
@api_view(['GET'])
@throttle_classes([PerUserVideoMetadataThrottle])
def yt_video_metadata(request):
    """
    Return YouTube metadata for the video id in the ``id`` query parameter.

    Hits the YouTube API only when a key is configured in settings; otherwise
    relays an error status.

    :return: youtube video metadata
    """
    requested_id = request.GET.get('id', None)
    body, http_status = load_metadata_from_youtube(requested_id, request)
    return Response(body, status=http_status, content_type='application/json')
def load_metadata_from_youtube(video_id, request):
    """
    Get metadata about a YouTube video.

    This method is used via the standalone /courses/yt_video_metadata REST API
    endpoint, or via the video XBlock as a its 'yt_video_metadata' handler.

    Arguments:
        video_id: the YouTube video id to look up; a falsy value is logged
            and skipped.
        request: either a Django request or a webob request (from the XBlock
            handler); only used to forward the HTTP referer to YouTube.

    Returns:
        (metadata, status_code) tuple: the YouTube API response dict (empty
        when the lookup failed or was skipped) and the HTTP status to relay
        (500 when no request was attempted).
    """
    metadata = {}
    status_code = 500
    if video_id and settings.YOUTUBE_API_KEY and settings.YOUTUBE_API_KEY != 'PUT_YOUR_API_KEY_HERE':
        yt_api_key = settings.YOUTUBE_API_KEY
        yt_metadata_url = settings.YOUTUBE['METADATA_URL']
        yt_timeout = settings.YOUTUBE.get('TEST_TIMEOUT', 1500) / 1000  # converting milli seconds to seconds
        headers = {}
        http_referer = None
        try:
            # This raises an attribute error if called from the xblock yt_video_metadata handler, which passes
            # a webob request instead of a django request.
            http_referer = request.META.get('HTTP_REFERER')
        except AttributeError:
            # So here, let's assume it's a webob request and access the referer the webob way.
            http_referer = request.referer
        if http_referer:
            headers['Referer'] = http_referer
        payload = {'id': video_id, 'part': 'contentDetails', 'key': yt_api_key}
        try:
            res = requests.get(yt_metadata_url, params=payload, timeout=yt_timeout, headers=headers)
            status_code = res.status_code
            if res.status_code == 200:
                try:
                    res_json = res.json()
                    if res_json.get('items', []):
                        metadata = res_json
                    else:
                        # Use the module-level logger (not the root logger via
                        # logging.warning) so these warnings respect the
                        # "edx.courseware" logger configuration.
                        log.warning(u'Unable to find the items in response. Following response '
                                    u'was received: {res}'.format(res=res.text))
                except ValueError:
                    log.warning(u'Unable to decode response to json. Following response '
                                u'was received: {res}'.format(res=res.text))
            else:
                log.warning(u'YouTube API request failed with status code={status} - '
                            u'Error message is={message}'.format(status=status_code, message=res.text))
        except (Timeout, ConnectionError):
            log.warning(u'YouTube API request failed because of connection time out or connection error')
    else:
        log.warning(u'YouTube API key or video id is None. Please make sure API key and video id is not None')
    return metadata, status_code
@ensure_csrf_cookie
@ensure_valid_course_key
def jump_to_id(request, course_id, module_id):
    """
    Shorter form of jump_to that takes only a block id.

    The id is assumed to be unique within the course_id namespace; it is
    resolved to a full usage key here and then delegated to jump_to. 404s
    when no matching block exists; logs and uses the first match when the
    id is ambiguous.
    """
    course_key = CourseKey.from_string(course_id)
    matches = modulestore().get_items(course_key, qualifiers={'name': module_id})
    if not matches:
        raise Http404(
            u"Could not find id: {0} in course_id: {1}. Referer: {2}".format(
                module_id, course_id, request.META.get("HTTP_REFERER", "")
            ))
    if len(matches) > 1:
        log.warning(
            u"Multiple items found with id: %s in course_id: %s. Referer: %s. Using first: %s",
            module_id,
            course_id,
            request.META.get("HTTP_REFERER", ""),
            text_type(matches[0].location)
        )
    return jump_to(request, course_id, text_type(matches[0].location))
@ensure_csrf_cookie
def jump_to(_request, course_id, location):
    """
    Show the page that contains a specific location.

    If the location is invalid or not in any class, return a 404.
    Otherwise, delegates to the index view to figure out whether this user
    has access, and what they should see.
    """
    # Parse the keys first; a malformed key is a plain 404.
    try:
        course_key = CourseKey.from_string(course_id)
        usage_key = UsageKey.from_string(location).replace(course_key=course_key)
    except InvalidKeyError:
        raise Http404(u"Invalid course_key or usage_key")  # lint-amnesty, pylint: disable=raise-missing-from
    # Then resolve the location to a legacy courseware URL and redirect there.
    try:
        destination = get_legacy_courseware_url(course_key, usage_key, _request)
    except ItemNotFoundError:
        raise Http404(u"No data at this location: {0}".format(usage_key))  # lint-amnesty, pylint: disable=raise-missing-from
    except NoPathToItem:
        raise Http404(u"This location is not in any class: {0}".format(usage_key))  # lint-amnesty, pylint: disable=raise-missing-from
    return redirect(destination)
@ensure_csrf_cookie
@ensure_valid_course_key
@data_sharing_consent_required
def course_info(request, course_id):
    """
    Display the course's info.html, or 404 if there is no such course.
    Assumes the course_id is in a valid format.

    This is the legacy Course Info page: when the unified course experience
    flag is enabled for the course, the request is redirected to the course
    home tab instead of being rendered here.
    """
    # TODO: LEARNER-611: This can be deleted with Course Info removal. The new
    # Course Home is using its own processing of last accessed.
    def get_last_accessed_courseware(course, request, user):
        """
        Returns the courseware module URL that the user last accessed, or None if it cannot be found.
        """
        field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
            course.id, request.user, course, depth=2
        )
        course_module = get_module_for_descriptor(
            user,
            request,
            course,
            field_data_cache,
            course.id,
            course=course,
            will_recheck_access=True,
        )
        # Walk down chapter -> section to reconstruct the last position.
        chapter_module = get_current_child(course_module)
        if chapter_module is not None:
            section_module = get_current_child(chapter_module)
            if section_module is not None:
                url = reverse('courseware_section', kwargs={
                    'course_id': text_type(course.id),
                    'chapter': chapter_module.url_name,
                    'section': section_module.url_name
                })
                return url
        return None
    course_key = CourseKey.from_string(course_id)
    # If the unified course experience is enabled, redirect to the "Course" tab
    if not DISABLE_UNIFIED_COURSE_TAB_FLAG.is_enabled(course_key):
        return redirect(reverse(course_home_url_name(course_key), args=[course_id]))
    with modulestore().bulk_operations(course_key):
        course = get_course_with_access(request.user, 'load', course_key)
        can_masquerade = request.user.has_perm(MASQUERADE_AS_STUDENT, course)
        # From here on, `user` is the effective user returned by
        # setup_masquerade (used for enrollment/entrance-exam checks below).
        masquerade, user = setup_masquerade(request, course_key, can_masquerade, reset_masquerade_data=True)
        # LEARNER-612: CCX redirect handled by new Course Home (DONE)
        # LEARNER-1697: Transition banner messages to new Course Home (DONE)
        # if user is not enrolled in a course then app will show enroll/get register link inside course info page.
        user_is_enrolled = CourseEnrollment.is_enrolled(user, course.id)
        show_enroll_banner = request.user.is_authenticated and not user_is_enrolled
        # If the user is not enrolled but this is a course that does not support
        # direct enrollment then redirect them to the dashboard.
        if not user_is_enrolled and not can_self_enroll_in_course(course_key):
            return redirect(reverse('dashboard'))
        # LEARNER-170: Entrance exam is handled by new Course Outline. (DONE)
        # If the user needs to take an entrance exam to access this course, then we'll need
        # to send them to that specific course module before allowing them into other areas
        if not user_can_skip_entrance_exam(user, course):
            return redirect(reverse('courseware', args=[text_type(course.id)]))
        # Construct the dates fragment
        dates_fragment = None
        if request.user.is_authenticated:
            # TODO: LEARNER-611: Remove enable_course_home_improvements
            if SelfPacedConfiguration.current().enable_course_home_improvements:
                # Shared code with the new Course Home (DONE)
                dates_fragment = CourseDatesFragmentView().render_to_fragment(request, course_id=course_id)
        # Shared code with the new Course Home (DONE)
        # Get the course tools enabled for this user and course
        course_tools = CourseToolsPluginManager.get_enabled_course_tools(request, course_key)
        # Site-configurable display toggles for the course homepage header.
        course_homepage_invert_title =\
            configuration_helpers.get_value(
                'COURSE_HOMEPAGE_INVERT_TITLE',
                False
            )
        course_homepage_show_subtitle =\
            configuration_helpers.get_value(
                'COURSE_HOMEPAGE_SHOW_SUBTITLE',
                True
            )
        course_homepage_show_org =\
            configuration_helpers.get_value('COURSE_HOMEPAGE_SHOW_ORG', True)
        course_title = course.display_number_with_default
        course_subtitle = course.display_name_with_default
        if course_homepage_invert_title:
            course_title = course.display_name_with_default
            course_subtitle = course.display_number_with_default
        context = {
            'request': request,
            'masquerade_user': user,
            'course_id': text_type(course_key),
            'url_to_enroll': CourseTabView.url_to_enroll(course_key),
            'cache': None,
            'course': course,
            'course_title': course_title,
            'course_subtitle': course_subtitle,
            'show_subtitle': course_homepage_show_subtitle,
            'show_org': course_homepage_show_org,
            'can_masquerade': can_masquerade,
            'masquerade': masquerade,
            'supports_preview_menu': True,
            'studio_url': get_studio_url(course, 'course_info'),
            'show_enroll_banner': show_enroll_banner,
            'user_is_enrolled': user_is_enrolled,
            'dates_fragment': dates_fragment,
            'course_tools': course_tools,
        }
        context.update(
            get_experiment_user_metadata_context(
                course,
                user,
            )
        )
        # Get the URL of the user's last position in order to display the 'where you were last' message
        context['resume_course_url'] = None
        # TODO: LEARNER-611: Remove enable_course_home_improvements
        if SelfPacedConfiguration.current().enable_course_home_improvements:
            context['resume_course_url'] = get_last_accessed_courseware(course, request, user)
        if not check_course_open_for_learner(user, course):
            # Disable student view button if user is staff and
            # course is not yet visible to students.
            context['disable_student_access'] = True
            context['supports_preview_menu'] = False
        return render_to_response('courseware/info.html', context)
class StaticCourseTabView(EdxFragmentView):
    """
    Fragment view that renders one of a course's static (custom) tabs.
    """
    @method_decorator(ensure_csrf_cookie)
    @method_decorator(ensure_valid_course_key)
    def get(self, request, course_id, tab_slug, **kwargs):  # lint-amnesty, pylint: disable=arguments-differ
        """
        Display the static course tab identified by ``tab_slug``, or 404
        when the course has no such tab.
        """
        course_key = CourseKey.from_string(course_id)
        course = get_course_with_access(request.user, 'load', course_key)
        tab = CourseTabList.get_tab_by_slug(course.tabs, tab_slug)
        if tab is None:
            raise Http404
        # Show warnings if the user has limited access
        CourseTabView.register_user_access_warning_messages(request, course)
        return super(StaticCourseTabView, self).get(request, course=course, tab=tab, **kwargs)  # lint-amnesty, pylint: disable=super-with-arguments

    def render_to_fragment(self, request, course=None, tab=None, **kwargs):  # lint-amnesty, pylint: disable=arguments-differ
        """
        Render the static tab's content as a web fragment.
        """
        return get_static_tab_fragment(request, course, tab)

    def render_standalone_response(self, request, fragment, course=None, tab=None, **kwargs):  # lint-amnesty, pylint: disable=arguments-differ
        """
        Wrap the fragment in the full standalone static-tab page.
        """
        context = {
            'course': course,
            'active_page': 'static_tab_{0}'.format(tab['url_slug']),
            'tab': tab,
            'fragment': fragment,
            'disable_courseware_js': True,
        }
        return render_to_response('courseware/static_tab.html', context)
class CourseTabView(EdxFragmentView):
    """
    View that displays a course tab page.
    """
    @method_decorator(ensure_csrf_cookie)
    @method_decorator(ensure_valid_course_key)
    @method_decorator(data_sharing_consent_required)
    def get(self, request, course_id, tab_type, **kwargs):  # lint-amnesty, pylint: disable=arguments-differ
        """
        Displays a course tab page that contains a web fragment.
        """
        course_key = CourseKey.from_string(course_id)
        with modulestore().bulk_operations(course_key):
            course = get_course_with_access(request.user, 'load', course_key)
            try:
                # Render the page
                course_tabs = course.tabs + _get_dynamic_tabs(course, request.user)
                tab = CourseTabList.get_tab_by_type(course_tabs, tab_type)
                page_context = self.create_page_context(request, course=course, tab=tab, **kwargs)
                # Show warnings if the user has limited access
                # Must come after masquerading on creation of page context
                self.register_user_access_warning_messages(request, course)
                set_custom_attributes_for_course_key(course_key)
                return super(CourseTabView, self).get(request, course=course, page_context=page_context, **kwargs)  # lint-amnesty, pylint: disable=super-with-arguments
            except Exception as exception:  # pylint: disable=broad-except
                return CourseTabView.handle_exceptions(request, course_key, course, exception)

    @staticmethod
    def url_to_enroll(course_key):
        """
        Returns the URL to use to enroll in the specified course.
        """
        url_to_enroll = reverse('about_course', args=[text_type(course_key)])
        if settings.FEATURES.get('ENABLE_MKTG_SITE'):
            url_to_enroll = marketing_link('COURSES')
        return url_to_enroll

    @staticmethod
    def register_user_access_warning_messages(request, course):
        """
        Register messages to be shown to the user if they have limited access.

        Anonymous users on non-public courses are prompted to sign in or
        register; authenticated-but-unenrolled users are prompted to enroll
        (with an enroll button only when enrollment is actually open).
        """
        allow_anonymous = check_public_access(course, [COURSE_VISIBILITY_PUBLIC])
        if request.user.is_anonymous and not allow_anonymous:
            if CourseTabView.course_open_for_learner_enrollment(course):
                PageLevelMessages.register_warning_message(
                    request,
                    Text(_(u"To see course content, {sign_in_link} or {register_link}.")).format(
                        sign_in_link=HTML(u'<a href="/login?next={current_url}">{sign_in_label}</a>').format(
                            sign_in_label=_("sign in"),
                            current_url=urlquote_plus(request.path),
                        ),
                        register_link=HTML(u'<a href="/register?next={current_url}">{register_label}</a>').format(
                            register_label=_("register"),
                            current_url=urlquote_plus(request.path),
                        ),
                    ),
                    once_only=True
                )
            else:
                PageLevelMessages.register_warning_message(
                    request,
                    Text(_(u"{sign_in_link} or {register_link}.")).format(
                        sign_in_link=HTML(u'<a href="/login?next={current_url}">{sign_in_label}</a>').format(
                            sign_in_label=_("Sign in"),
                            current_url=urlquote_plus(request.path),
                        ),
                        register_link=HTML(u'<a href="/register?next={current_url}">{register_label}</a>').format(
                            register_label=_("register"),
                            current_url=urlquote_plus(request.path),
                        ),
                    )
                )
        else:
            if not CourseEnrollment.is_enrolled(request.user, course.id) and not allow_anonymous:
                # Only show enroll button if course is open for enrollment.
                if CourseTabView.course_open_for_learner_enrollment(course):
                    enroll_message = _(u'You must be enrolled in the course to see course content. \
                            {enroll_link_start}Enroll now{enroll_link_end}.')
                    PageLevelMessages.register_warning_message(
                        request,
                        Text(enroll_message).format(
                            enroll_link_start=HTML('<button class="enroll-btn btn-link">'),
                            enroll_link_end=HTML('</button>')
                        )
                    )
                else:
                    PageLevelMessages.register_warning_message(
                        request,
                        Text(_('You must be enrolled in the course to see course content.'))
                    )

    @staticmethod
    def course_open_for_learner_enrollment(course):
        """
        Return True if a learner could currently enroll themselves in this
        course (self-enrollment open, not invitation-only, not masters-only).
        """
        return (course_open_for_self_enrollment(course.id)
                and not course.invitation_only
                and not CourseMode.is_masters_only(course.id))

    @staticmethod
    def handle_exceptions(request, course_key, course, exception):
        """
        Handle exceptions raised when rendering a view.

        Must be called from within an ``except`` block: the bare ``raise``
        statements below re-raise the exception currently being handled.
        """
        # Merged isinstance check (was two separate isinstance calls).
        if isinstance(exception, (Redirect, Http404)):
            raise  # lint-amnesty, pylint: disable=misplaced-bare-raise
        if settings.DEBUG:
            raise  # lint-amnesty, pylint: disable=misplaced-bare-raise
        user = request.user
        log.exception(
            u"Error in %s: user=%s, effective_user=%s, course=%s",
            request.path,
            getattr(user, 'real_user', user),
            user,
            text_type(course_key),
        )
        try:
            return render_to_response(
                'courseware/courseware-error.html',
                {
                    'staff_access': has_access(user, 'staff', course),
                    'course': course,
                },
                status=500,
            )
        except:
            # Let the exception propagate, relying on global config to
            # at least return a nice error message
            log.exception("Error while rendering courseware-error page")
            raise

    def create_page_context(self, request, course=None, tab=None, **kwargs):
        """
        Creates the context for the fragment's template.
        """
        can_masquerade = request.user.has_perm(MASQUERADE_AS_STUDENT, course)
        supports_preview_menu = tab.get('supports_preview_menu', False)
        if supports_preview_menu:
            masquerade, masquerade_user = setup_masquerade(
                request,
                course.id,
                can_masquerade,
                reset_masquerade_data=True,
            )
            # Replace request.user with the effective (possibly masqueraded) user.
            request.user = masquerade_user
        else:
            masquerade = None
        context = {
            'course': course,
            'tab': tab,
            'active_page': tab.get('type', None),
            'can_masquerade': can_masquerade,
            'masquerade': masquerade,
            'supports_preview_menu': supports_preview_menu,
            'uses_bootstrap': True,
            'disable_courseware_js': True,
        }
        # Avoid Multiple Mathjax loading on the 'user_profile'
        if 'profile_page_context' in kwargs:
            context['load_mathjax'] = kwargs['profile_page_context'].get('load_mathjax', True)
        context.update(
            get_experiment_user_metadata_context(
                course,
                request.user,
            )
        )
        return context

    def render_to_fragment(self, request, course=None, page_context=None, **kwargs):  # lint-amnesty, pylint: disable=arguments-differ
        """
        Renders the course tab to a fragment.
        """
        tab = page_context['tab']
        return tab.render_to_fragment(request, course, **kwargs)

    def render_standalone_response(self, request, fragment, course=None, tab=None, page_context=None, **kwargs):  # lint-amnesty, pylint: disable=arguments-differ
        """
        Renders this course tab's fragment to HTML for a standalone page.
        """
        if not page_context:
            page_context = self.create_page_context(request, course=course, tab=tab, **kwargs)
        tab = page_context['tab']
        page_context['fragment'] = fragment
        return render_to_response('courseware/tab-view.html', page_context)
@ensure_csrf_cookie
@ensure_valid_course_key
def syllabus(request, course_id):
    """
    Display the course's syllabus.html, or 404 if there is no such course.

    Assumes the course_id is in a valid format.
    """
    course = get_course_with_access(
        request.user, 'load', CourseKey.from_string(course_id)
    )
    context = {
        'course': course,
        'staff_access': bool(has_access(request.user, 'staff', course)),
    }
    return render_to_response('courseware/syllabus.html', context)
def registered_for_course(course, user):
    """
    Return True if user is registered for course, else False.

    Anonymous or missing users are never considered registered.
    """
    if user is None or not user.is_authenticated:
        return False
    return CourseEnrollment.is_enrolled(user, course.id)
class EnrollStaffView(View):
    """
    Displays view for registering in the course to a global staff user.

    User can either choose to 'Enroll' or 'Don't Enroll' in the course.
      Enroll: Enrolls user in course and redirects to the courseware.
      Don't Enroll: Redirects user to course about page.

    Arguments:
     - request : HTTP request
     - course_id : course id

    Returns:
     - RedirectResponse
    """
    # Template rendered by get() when the staff user is not yet enrolled.
    template_name = 'enroll_staff.html'

    @method_decorator(require_global_staff)
    @method_decorator(ensure_valid_course_key)
    def get(self, request, course_id):
        """
        Display enroll staff view to global staff user with `Enroll` and `Don't Enroll` options.
        """
        user = request.user
        course_key = CourseKey.from_string(course_id)
        with modulestore().bulk_operations(course_key):
            course = get_course_with_access(user, 'load', course_key)
            if not registered_for_course(course, user):
                context = {
                    'course': course,
                    'csrftoken': csrf(request)["csrf_token"]
                }
                return render_to_response(self.template_name, context)
            # NOTE(review): when the staff user is already registered, this
            # method falls through and implicitly returns None, which Django
            # rejects (views must return an HttpResponse) -- confirm whether
            # a redirect to the courseware is intended here.

    @method_decorator(require_global_staff)
    @method_decorator(ensure_valid_course_key)
    def post(self, request, course_id):
        """
        Either enrolls the user in course or redirects user to course about page
        depending upon the option (Enroll, Don't Enroll) chosen by the user.
        """
        # 'next' defaults to 'info'; safe chars preserved so the target URL survives quoting.
        _next = six.moves.urllib.parse.quote_plus(request.GET.get('next', 'info'), safe='/:?=')
        course_key = CourseKey.from_string(course_id)
        enroll = 'enroll' in request.POST
        if enroll:
            add_enrollment(request.user.username, course_id)
            log.info(
                u"User %s enrolled in %s via `enroll_staff` view",
                request.user.username,
                course_id
            )
            return redirect(_next)
        # In any other case redirect to the course about page.
        return redirect(reverse('about_course', args=[text_type(course_key)]))
@ensure_csrf_cookie
@ensure_valid_course_key
@cache_if_anonymous()
def course_about(request, course_id):
    """
    Display the course's about page.

    Builds the marketing/about context for a course: enrollment state and
    eligibility, ecommerce checkout links (when enabled), prerequisite
    display names, and course overview imagery. Redirects to the dashboard
    when self-enrollment is not possible, or to the course home when the
    marketing-site redirect flags are on.
    """
    course_key = CourseKey.from_string(course_id)
    # If a user is not able to enroll in a course then redirect
    # them away from the about page to the dashboard.
    if not can_self_enroll_in_course(course_key):
        return redirect(reverse('dashboard'))
    # If user needs to be redirected to course home then redirect
    if _course_home_redirect_enabled():
        return redirect(reverse(course_home_url_name(course_key), args=[text_type(course_key)]))
    with modulestore().bulk_operations(course_key):
        permission = get_permission_for_course_about()
        course = get_course_with_access(request.user, permission, course_key)
        course_details = CourseDetails.populate(course)
        modes = CourseMode.modes_for_course_dict(course_key)
        registered = registered_for_course(course, request.user)
        staff_access = bool(has_access(request.user, 'staff', course))
        studio_url = get_studio_url(course, 'settings/details')
        # Link target for the enroll button: course home for users who may
        # view it, otherwise back to this about page.
        if request.user.has_perm(VIEW_COURSE_HOME, course):
            course_target = reverse(course_home_url_name(course.id), args=[text_type(course.id)])
        else:
            course_target = reverse('about_course', args=[text_type(course.id)])
        show_courseware_link = bool(
            (
                request.user.has_perm(VIEW_COURSEWARE, course)
            ) or settings.FEATURES.get('ENABLE_LMS_MIGRATION')
        )
        # If the ecommerce checkout flow is enabled and the mode of the course is
        # professional or no id professional, we construct links for the enrollment
        # button to add the course to the ecommerce basket.
        ecomm_service = EcommerceService()
        ecommerce_checkout = ecomm_service.is_enabled(request.user)
        ecommerce_checkout_link = ''
        ecommerce_bulk_checkout_link = ''
        single_paid_mode = None
        if ecommerce_checkout:
            if len(modes) == 1 and list(modes.values())[0].min_price:
                single_paid_mode = list(modes.values())[0]
            else:
                # have professional ignore other modes for historical reasons
                single_paid_mode = modes.get(CourseMode.PROFESSIONAL)
            if single_paid_mode and single_paid_mode.sku:
                ecommerce_checkout_link = ecomm_service.get_checkout_page_url(single_paid_mode.sku)
            if single_paid_mode and single_paid_mode.bulk_sku:
                ecommerce_bulk_checkout_link = ecomm_service.get_checkout_page_url(single_paid_mode.bulk_sku)
        registration_price, course_price = get_course_prices(course)  # lint-amnesty, pylint: disable=unused-variable
        # Used to provide context to message to student if enrollment not allowed
        can_enroll = bool(request.user.has_perm(ENROLL_IN_COURSE, course))
        invitation_only = course.invitation_only
        is_course_full = CourseEnrollment.objects.is_course_full(course)
        # Register button should be disabled if one of the following is true:
        # - Student is already registered for course
        # - Course is already full
        # - Student cannot enroll in course
        active_reg_button = not (registered or is_course_full or not can_enroll)
        is_shib_course = uses_shib(course)
        # get prerequisite courses display names
        pre_requisite_courses = get_prerequisite_courses_display(course)
        # Overview
        overview = CourseOverview.get_from_id(course.id)
        sidebar_html_enabled = course_experience_waffle().is_enabled(ENABLE_COURSE_ABOUT_SIDEBAR_HTML)
        allow_anonymous = check_public_access(course, [COURSE_VISIBILITY_PUBLIC, COURSE_VISIBILITY_PUBLIC_OUTLINE])
        context = {
            'course': course,
            'course_details': course_details,
            'staff_access': staff_access,
            'studio_url': studio_url,
            'registered': registered,
            'course_target': course_target,
            'is_cosmetic_price_enabled': settings.FEATURES.get('ENABLE_COSMETIC_DISPLAY_PRICE'),
            'course_price': course_price,
            'ecommerce_checkout': ecommerce_checkout,
            'ecommerce_checkout_link': ecommerce_checkout_link,
            'ecommerce_bulk_checkout_link': ecommerce_bulk_checkout_link,
            'single_paid_mode': single_paid_mode,
            'show_courseware_link': show_courseware_link,
            'is_course_full': is_course_full,
            'can_enroll': can_enroll,
            'invitation_only': invitation_only,
            'active_reg_button': active_reg_button,
            'is_shib_course': is_shib_course,
            # We do not want to display the internal courseware header, which is used when the course is found in the
            # context. This value is therefore explicitly set to render the appropriate header.
            'disable_courseware_header': True,
            'pre_requisite_courses': pre_requisite_courses,
            'course_image_urls': overview.image_urls,
            'sidebar_html_enabled': sidebar_html_enabled,
            'allow_anonymous': allow_anonymous,
        }
        return render_to_response('courseware/course_about.html', context)
@ensure_csrf_cookie
@cache_if_anonymous()
def program_marketing(request, program_uuid):
    """
    Render the marketing page for the program identified by ``program_uuid``.

    404s when the program is unknown. When the learner is eligible for a
    one-click purchase and the program has SKUs, a checkout URL is added to
    the template context.
    """
    raw_program = get_programs(uuid=program_uuid)
    if not raw_program:
        raise Http404
    program = ProgramMarketingDataExtender(raw_program, request.user).extend()
    program['type_slug'] = slugify(program['type'])
    context = {'program': program, 'uses_bootstrap': True}
    skus = program.get('skus')
    if skus and program.get('is_learner_eligible_for_one_click_purchase'):
        context['buy_button_href'] = EcommerceService().get_checkout_page_url(*skus, program_uuid=program_uuid)
    return render_to_response('courseware/program_marketing.html', context)
@login_required
@ensure_csrf_cookie
@ensure_valid_course_key
def dates(request, course_id):
    """
    Display the course's dates.html, or 404 if there is no such course.

    Assumes the course_id is in a valid format. Non-staff users are redirected
    to the learning MFE dates tab when it is active. Unenrolled users who are
    not course or global staff are redirected to the course home.
    """
    from lms.urls import COURSE_DATES_NAME, RESET_COURSE_DEADLINES_NAME
    course_key = CourseKey.from_string(course_id)
    if course_home_mfe_dates_tab_is_active(course_key) and not request.user.is_staff:
        microfrontend_url = get_learning_mfe_home_url(course_key=course_key, view_name=COURSE_DATES_NAME)
        raise Redirect(microfrontend_url)
    # Enable NR tracing for this view based on course
    monitoring_utils.set_custom_attribute('course_id', text_type(course_key))
    monitoring_utils.set_custom_attribute('user_id', request.user.id)
    monitoring_utils.set_custom_attribute('is_staff', request.user.is_staff)
    course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=False)
    masquerade = None
    can_masquerade = request.user.has_perm(MASQUERADE_AS_STUDENT, course)
    if can_masquerade:
        masquerade, masquerade_user = setup_masquerade(
            request,
            course.id,
            can_masquerade,
            reset_masquerade_data=True,
        )
        # All subsequent enrollment/staff checks run as the masqueraded user.
        request.user = masquerade_user
    user_is_enrolled = CourseEnrollment.is_enrolled(request.user, course_key)
    user_is_staff = bool(has_access(request.user, 'staff', course_key))
    # Render the full content to enrolled users, as well as to course and global staff.
    # Unenrolled users who are not course or global staff are redirected to the Outline Tab.
    if not user_is_enrolled and not user_is_staff:
        raise CourseAccessRedirect(reverse('openedx.course_experience.course_home', args=[course_id]))
    course_date_blocks = get_course_date_blocks(course, request.user, request,
                                                include_access=True, include_past_dates=True)
    learner_is_full_access = not ContentTypeGatingConfig.enabled_for_enrollment(request.user, course_key)
    # User locale settings
    user_timezone_locale = user_timezone_locale_prefs(request)
    user_timezone = user_timezone_locale['user_timezone']
    user_language = user_timezone_locale['user_language']
    missed_deadlines, missed_gated_content = dates_banner_should_display(course_key, request.user)
    context = {
        'course': course,
        'course_date_blocks': course_date_blocks,
        'verified_upgrade_link': verified_upgrade_deadline_link(request.user, course=course),
        'learner_is_full_access': learner_is_full_access,
        'user_timezone': user_timezone,
        'user_language': user_language,
        'supports_preview_menu': True,
        'can_masquerade': can_masquerade,
        'masquerade': masquerade,
        'on_dates_tab': True,
        'content_type_gating_enabled': ContentTypeGatingConfig.enabled_for_enrollment(
            user=request.user,
            course_key=course_key,
        ),
        'missed_deadlines': missed_deadlines,
        'missed_gated_content': missed_gated_content,
        'reset_deadlines_url': reverse(RESET_COURSE_DEADLINES_NAME),
        'has_ended': course.has_ended(),
    }
    return render_to_response('courseware/dates.html', context)
@transaction.non_atomic_requests
@login_required
@cache_control(no_cache=True, no_store=True, must_revalidate=True)
@ensure_valid_course_key
@data_sharing_consent_required
def progress(request, course_id, student_id=None):
    """
    Display the progress page.

    Thin wrapper: parses the course key and delegates to `_progress` inside a
    bulk-operations block so modulestore reads are batched.
    """
    key = CourseKey.from_string(course_id)
    store = modulestore()
    with store.bulk_operations(key):
        return _progress(request, key, student_id)
def _progress(request, course_key, student_id):
    """
    Unwrapped version of "progress".

    User progress. We show the grade bar and every problem score.

    Course staff are allowed to see the progress of students in their class;
    CCX coaches may also view their students' progress.
    """
    if student_id is not None:
        try:
            student_id = int(student_id)
        # Check for ValueError if 'student_id' cannot be converted to integer.
        except ValueError:
            raise Http404  # lint-amnesty, pylint: disable=raise-missing-from
    course = get_course_with_access(request.user, 'load', course_key)
    staff_access = bool(has_access(request.user, 'staff', course))
    can_masquerade = request.user.has_perm(MASQUERADE_AS_STUDENT, course)
    masquerade = None
    if student_id is None or student_id == request.user.id:
        # This will be a no-op for non-staff users, returning request.user
        masquerade, student = setup_masquerade(request, course_key, can_masquerade, reset_masquerade_data=True)
    else:
        try:
            coach_access = has_ccx_coach_role(request.user, course_key)
        except CCXLocatorValidationException:
            coach_access = False
        has_access_on_students_profiles = staff_access or coach_access
        # Requesting access to a different student's profile
        if not has_access_on_students_profiles:
            raise Http404
        try:
            student = User.objects.get(id=student_id)
        except User.DoesNotExist:
            raise Http404  # lint-amnesty, pylint: disable=raise-missing-from
    # NOTE: To make sure impersonation by instructor works, use
    # student instead of request.user in the rest of the function.
    # The pre-fetching of groups is done to make auth checks not require an
    # additional DB lookup (this kills the Progress page in particular).
    prefetch_related_objects([student], 'groups')
    if request.user.id != student.id:
        # refetch the course as the assumed student
        course = get_course_with_access(student, 'load', course_key, check_if_enrolled=True)
    # NOTE: To make sure impersonation by instructor works, use
    # student instead of request.user in the rest of the function.
    course_grade = CourseGradeFactory().read(student, course)
    courseware_summary = list(course_grade.chapter_grades.values())
    studio_url = get_studio_url(course, 'settings/grading')
    # checking certificate generation configuration
    enrollment_mode, _ = CourseEnrollment.enrollment_mode_for_user(student, course_key)
    course_expiration_fragment = generate_course_expired_fragment(student, course)
    context = {
        'course': course,
        'courseware_summary': courseware_summary,
        'studio_url': studio_url,
        'grade_summary': course_grade.summary,
        'can_masquerade': can_masquerade,
        'staff_access': staff_access,
        'masquerade': masquerade,
        'supports_preview_menu': True,
        'student': student,
        'credit_course_requirements': credit_course_requirements(course_key, student),
        'course_expiration_fragment': course_expiration_fragment,
        'certificate_data': get_cert_data(student, course, enrollment_mode, course_grade)
    }
    context.update(
        get_experiment_user_metadata_context(
            course,
            student,
        )
    )
    with outer_atomic():
        response = render_to_response('courseware/progress.html', context)
    return response
def _downloadable_certificate_message(course, cert_downloadable_status):
    """
    Build the cert data for a certificate that is downloadable.

    Prefers an HTML (web-view) certificate when one is active for the course;
    otherwise falls back to the PDF download URL, unless the web certificate
    is still being generated.
    """
    if certs_api.has_html_certificates_enabled(course):
        if certs_api.get_active_web_certificate(course) is not None:
            web_view_url = certs_api.get_certificate_url(
                course_id=course.id, uuid=cert_downloadable_status['uuid']
            )
            return _downloadable_cert_data(download_url=None, cert_web_view_url=web_view_url)
        if not cert_downloadable_status['is_pdf_certificate']:
            # HTML certs are enabled but no web certificate is active yet.
            return GENERATING_CERT_DATA
    return _downloadable_cert_data(download_url=cert_downloadable_status['download_url'])
def _missing_required_verification(student, enrollment_mode):
    """
    Return True when the enrollment mode requires ID verification but the
    student has not completed it.
    """
    if enrollment_mode not in CourseMode.VERIFIED_MODES:
        return False
    return not IDVerificationService.user_is_verified(student)
def _certificate_message(student, course, enrollment_mode):
    """
    Return the certificate-status data to show the student.

    The checks are order-dependent: invalidation wins over everything, then
    earned-but-not-available, generating, unverified, downloadable, missing
    required ID verification, and finally the default "requesting" state.
    """
    if certs_api.is_certificate_invalid(student, course.id):
        return INVALID_CERT_DATA
    cert_downloadable_status = certs_api.certificate_downloadable_status(student, course.id)
    if cert_downloadable_status.get('earned_but_not_available'):
        return EARNED_BUT_NOT_AVAILABLE_CERT_DATA
    if cert_downloadable_status['is_generating']:
        return GENERATING_CERT_DATA
    if cert_downloadable_status['is_unverified']:
        return UNVERIFIED_CERT_DATA
    if cert_downloadable_status['is_downloadable']:
        return _downloadable_certificate_message(course, cert_downloadable_status)
    if _missing_required_verification(student, enrollment_mode):
        return UNVERIFIED_CERT_DATA
    return REQUESTING_CERT_DATA
def get_cert_data(student, course, enrollment_mode, course_grade=None):
    """Returns students course certificate related data.

    Arguments:
        student (User): Student for whom certificate to retrieve.
        course (Course): Course object for which certificate data to retrieve.
        enrollment_mode (String): Course mode in which student is enrolled.
        course_grade (CourseGrade): Student's course grade record; read from
            CourseGradeFactory when not supplied.

    Returns:
        returns dict if course certificate is available else None.
    """
    cert_data = _certificate_message(student, course, enrollment_mode)
    if not CourseMode.is_eligible_for_certificate(enrollment_mode, status=cert_data.cert_status):
        return INELIGIBLE_PASSING_CERT_DATA.get(enrollment_mode)
    if cert_data.cert_status == EARNED_BUT_NOT_AVAILABLE_CERT_STATUS:
        return cert_data
    certificates_enabled_for_course = certs_api.cert_generation_enabled(course.id)
    if course_grade is None:
        course_grade = CourseGradeFactory().read(student, course)
    # Suppress the certificate message entirely when it cannot be shown
    # (implicitly returns None in both cases below).
    if not auto_certs_api.can_show_certificate_message(course, student, course_grade, certificates_enabled_for_course):
        return
    if not certs_api.get_active_web_certificate(course) and not auto_certs_api.is_valid_pdf_certificate(cert_data):
        return
    return cert_data
def credit_course_requirements(course_key, student):
    """Return information about which credit requirements a user has satisfied.

    Arguments:
        course_key (CourseKey): Identifier for the course.
        student (User): Currently logged in user.

    Returns: dict if the credit eligibility enabled and it is a credit course
    and the user is enrolled in either verified or credit mode, and None otherwise.
    """
    # If credit eligibility is not enabled or this is not a credit course,
    # short-circuit and return `None`. This indicates that credit requirements
    # should NOT be displayed on the progress page.
    if not (settings.FEATURES.get("ENABLE_CREDIT_ELIGIBILITY", False) and is_credit_course(course_key)):
        return None
    # This indicates that credit requirements should NOT be displayed on the progress page.
    enrollment = CourseEnrollment.get_enrollment(student, course_key)
    if enrollment and enrollment.mode not in REQUIREMENTS_DISPLAY_MODES:
        return None
    # Credit requirement statuses for which user does not remain eligible to get credit.
    non_eligible_statuses = ['failed', 'declined']
    # Retrieve the status of the user for each eligibility requirement in the course.
    # For each requirement, the user's status is either "satisfied", "failed", or None.
    # In this context, `None` means that we don't know the user's status, either because
    # the user hasn't done something (for example, submitting photos for verification)
    # or we're waiting on more information (for example, a response from the photo
    # verification service).
    requirement_statuses = get_credit_requirement_status(course_key, student.username)
    # If the user has been marked as "eligible", then they are *always* eligible
    # unless someone manually intervenes. This could lead to some strange behavior
    # if the requirements change post-launch. For example, if the user was marked as eligible
    # for credit, then a new requirement was added, the user will see that they're eligible
    # AND that one of the requirements is still pending.
    # We're assuming here that (a) we can mitigate this by properly training course teams,
    # and (b) it's a better user experience to allow students who were at one time
    # marked as eligible to continue to be eligible.
    # If we need to, we can always manually move students back to ineligible by
    # deleting CreditEligibility records in the database.
    if is_user_eligible_for_credit(student.username, course_key):
        eligibility_status = "eligible"
    # If the user has *failed* any requirements (for example, if a photo verification is denied),
    # then the user is NOT eligible for credit.
    elif any(requirement['status'] in non_eligible_statuses for requirement in requirement_statuses):
        eligibility_status = "not_eligible"
    # Otherwise, the user may be eligible for credit, but the user has not
    # yet completed all the requirements.
    else:
        eligibility_status = "partial_eligible"
    return {
        'eligibility_status': eligibility_status,
        'requirements': requirement_statuses,
    }
def _course_home_redirect_enabled():
    """
    Return True if the user should be redirected to the course home page.

    The redirect is enabled only when both the `ENABLE_MKTG_SITE` and
    `ENABLE_COURSE_HOME_REDIRECT` feature flags are on. Site configuration
    values override the corresponding `settings.FEATURES` defaults.

    Returns:
        bool: True if both flags are enabled, False otherwise.
    """
    mktg_site_enabled = configuration_helpers.get_value(
        'ENABLE_MKTG_SITE', settings.FEATURES.get('ENABLE_MKTG_SITE', False)
    )
    course_home_redirect_enabled = configuration_helpers.get_value(
        'ENABLE_COURSE_HOME_REDIRECT', settings.FEATURES.get('ENABLE_COURSE_HOME_REDIRECT', True)
    )
    # The previous implementation implicitly returned None when either flag
    # was off; return an explicit bool to match the documented contract
    # (None and False are both falsy, so callers are unaffected).
    return bool(mktg_site_enabled and course_home_redirect_enabled)
@login_required
@ensure_valid_course_key
def submission_history(request, course_id, student_username, location):
    """Render an HTML fragment (meant for inclusion elsewhere) that renders a
    history of all state changes made by this user for this problem location.

    Right now this only works for problems because that's all
    StudentModuleHistory records. Non-staff users may only view their own
    submission history.
    """
    course_key = CourseKey.from_string(course_id)
    try:
        usage_key = UsageKey.from_string(location).map_into_course(course_key)
    except (InvalidKeyError, AssertionError):
        return HttpResponse(escape(_(u'Invalid location.')))
    course = get_course_overview_with_access(request.user, 'load', course_key)
    staff_access = bool(has_access(request.user, 'staff', course))
    # Permission Denied if they don't have staff access and are trying to see
    # somebody else's submission history.
    if (student_username != request.user.username) and (not staff_access):
        raise PermissionDenied
    user_state_client = DjangoXBlockUserStateClient()
    try:
        history_entries = list(user_state_client.get_history(student_username, usage_key))
    except DjangoXBlockUserStateClient.DoesNotExist:
        return HttpResponse(escape(_(u'User {username} has never accessed problem {location}').format(
            username=student_username,
            location=location
        )))
    # This is ugly, but until we have a proper submissions API that we can use to provide
    # the scores instead, it will have to do.
    csm = StudentModule.objects.filter(
        module_state_key=usage_key,
        student__username=student_username,
        course_id=course_key)
    scores = BaseStudentModuleHistory.get_history(csm)
    # When counts disagree, re-align scores to history entries by timestamp so
    # the template can zip them row-for-row.
    if len(scores) != len(history_entries):
        log.warning(
            u"Mismatch when fetching scores for student "
            u"history for course %s, user %s, xblock %s. "
            u"%d scores were found, and %d history entries were found. "
            u"Matching scores to history entries by date for display.",
            course_id,
            student_username,
            location,
            len(scores),
            len(history_entries),
        )
        scores_by_date = {
            score.created: score
            for score in scores
        }
        scores = [
            scores_by_date[history.updated]
            for history in history_entries
        ]
    context = {
        'history_entries': history_entries,
        'scores': scores,
        'username': student_username,
        'location': location,
        'course_id': text_type(course_key)
    }
    return render_to_response('courseware/submission_history.html', context)
def get_static_tab_fragment(request, course, tab):
    """
    Returns the fragment for the given static tab.

    Loads the tab's XBlock from the modulestore, renders its student view,
    and falls back to a generic error fragment if rendering raises.
    """
    loc = course.id.make_usage_key(
        tab.type,
        tab.url_slug,
    )
    field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
        course.id, request.user, modulestore().get_item(loc), depth=0
    )
    tab_module = get_module(
        request.user, request, loc, field_data_cache, static_asset_path=course.static_asset_path, course=course
    )
    # NOTE(review): this logs via the root `logging` module rather than the
    # module-level `log` logger — confirm whether that is intentional.
    logging.debug(u'course_module = %s', tab_module)
    fragment = Fragment()
    if tab_module is not None:
        try:
            fragment = tab_module.render(STUDENT_VIEW, {})
        except Exception:  # pylint: disable=broad-except
            fragment.content = render_to_string('courseware/error-message.html', None)
            # NOTE(review): `tab['url_slug']` uses item access while the code
            # above uses `tab.url_slug` attribute access — verify `tab`
            # supports both.
            log.exception(
                u"Error rendering course=%s, tab=%s", course, tab['url_slug']
            )
    return fragment
@require_GET
@ensure_valid_course_key
def get_course_lti_endpoints(request, course_id):
    """
    View that, given a course_id, returns a JSON object that enumerates all of the LTI endpoints for that course.

    The LTI 2.0 result service spec at
    http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html
    says "This specification document does not prescribe a method for discovering the endpoint URLs." This view
    function implements one way of discovering these endpoints, returning a JSON array when accessed.

    Arguments:
        request (django request object): the HTTP request object that triggered this view function
        course_id (unicode): id associated with the course

    Returns:
        (django response object): HTTP response. 404 if course is not found, otherwise 200 with JSON body.
    """
    course_key = CourseKey.from_string(course_id)
    try:
        course = get_course(course_key, depth=2)
    except ValueError:
        return HttpResponse(status=404)
    anonymous_user = AnonymousUser()
    anonymous_user.known = False  # make these "noauth" requests like module_render.handle_xblock_callback_noauth
    # Collect both the legacy 'lti' and the newer 'lti_consumer' block types.
    lti_descriptors = modulestore().get_items(course.id, qualifiers={'category': 'lti'})
    lti_descriptors.extend(modulestore().get_items(course.id, qualifiers={'category': 'lti_consumer'}))
    lti_noauth_modules = [
        get_module_for_descriptor(
            anonymous_user,
            request,
            descriptor,
            FieldDataCache.cache_for_descriptor_descendents(
                course_key,
                anonymous_user,
                descriptor
            ),
            course_key,
            course=course
        )
        for descriptor in lti_descriptors
    ]
    endpoints = [
        {
            'display_name': module.display_name,
            'lti_2_0_result_service_json_endpoint': module.get_outcome_service_url(
                service_name='lti_2_0_result_rest_handler') + "/user/{anon_user_id}",
            'lti_1_1_result_service_xml_endpoint': module.get_outcome_service_url(
                service_name='grade_handler'),
        }
        for module in lti_noauth_modules
    ]
    return HttpResponse(json.dumps(endpoints), content_type='application/json')  # lint-amnesty, pylint: disable=http-response-with-content-type-json, http-response-with-json-dumps
@login_required
def course_survey(request, course_id):
    """
    Present the survey associated with ``course_id``.

    Redirects back to the course home page when the course has no survey
    configured. The survey rendering itself is handled by the Survey
    Djangoapp's views.
    """
    course_key = CourseKey.from_string(course_id)
    course = get_course_with_access(request.user, 'load', course_key, check_survey_complete=False)
    course_home = reverse(course_home_url_name(course.id), args=[course_id])
    if not course.course_survey_name:
        # No survey configured for this course; nothing to show.
        return redirect(course_home)
    return survey_views.view_student_survey(
        request.user,
        course.course_survey_name,
        course=course,
        redirect_url=course_home,
        is_required=course.course_survey_required,
    )
def is_course_passed(student, course, course_grade=None):
    """
    Check the user's course passing status; return True if passed.

    Arguments:
        student: user object
        course: course object
        course_grade (CourseGrade): student grade details; read from
            CourseGradeFactory when not supplied.

    Returns:
        bool
    """
    grade = course_grade if course_grade is not None else CourseGradeFactory().read(student, course)
    return grade.passed
# Grades can potentially be written - if so, let grading manage the transaction.
@transaction.non_atomic_requests
@require_POST
def generate_user_cert(request, course_id):
    """Start generating a new certificate for the user.

    Certificate generation is allowed if:
    * The user has passed the course, and
    * The user does not already have a pending/completed certificate.

    Note that if an error occurs during certificate generation
    (for example, if the queue is down), then we simply mark the
    certificate generation task status as "error" and re-run
    the task with a management command. To students, the certificate
    will appear to be "generating" until it is re-run.

    Args:
        request (HttpRequest): The POST request to this view.
        course_id (unicode): The identifier for the course.

    Returns:
        HttpResponse: 200 on success, 400 if a new certificate cannot be generated.
    """
    if not request.user.is_authenticated:
        log.info(u"Anon user trying to generate certificate for %s", course_id)
        return HttpResponseBadRequest(
            _(u'You must be signed in to {platform_name} to create a certificate.').format(
                platform_name=configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)
            )
        )
    student = request.user
    course_key = CourseKey.from_string(course_id)
    course = modulestore().get_course(course_key, depth=2)
    if not course:
        return HttpResponseBadRequest(_("Course is not valid"))
    # Allowlisted learners bypass the passing check entirely.
    if certs_api.is_using_certificate_allowlist_and_is_on_allowlist(student, course_key):
        log.info(f'{course_key} is using allowlist certificates, and the user {student.id} is on its allowlist. '
                 f'Attempt will be made to generate an allowlist certificate.')
        certs_api.generate_allowlist_certificate_task(student, course_key)
        return HttpResponse()
    if not is_course_passed(student, course):
        log.info(u"User %s has not passed the course: %s", student.username, course_id)
        return HttpResponseBadRequest(_("Your certificate will be available when you pass the course."))
    certificate_status = certs_api.certificate_downloadable_status(student, course.id)
    log.info(
        u"User %s has requested for certificate in %s, current status: is_downloadable: %s, is_generating: %s",
        student.username,
        course_id,
        certificate_status["is_downloadable"],
        certificate_status["is_generating"],
    )
    if certificate_status["is_downloadable"]:
        return HttpResponseBadRequest(_("Certificate has already been created."))
    elif certificate_status["is_generating"]:
        return HttpResponseBadRequest(_("Certificate is being created."))
    else:
        # If the certificate is not already in-process or completed,
        # then create a new certificate generation task.
        # If the certificate cannot be added to the queue, this will
        # mark the certificate with "error" status, so it can be re-run
        # with a management command. From the user's perspective,
        # it will appear that the certificate task was submitted successfully.
        certs_api.generate_user_certificates(student, course.id, course=course, generation_mode='self')
        _track_successful_certificate_generation(student.id, course.id)
        return HttpResponse()
def _track_successful_certificate_generation(user_id, course_id):
    """
    Emit a tracking event recording that certificate generation was requested.

    Arguments:
        user_id (str): The ID of the user generating the certificate.
        course_id (CourseKey): Identifier for the course.

    Returns:
        None
    """
    segment.track(
        user_id,
        'edx.bi.user.certificate.generate',
        {
            'category': 'certificates',
            'label': text_type(course_id),
        },
    )
def enclosing_sequence_for_gating_checks(block):
    """
    Return the first ancestor of this block that is a SequenceDescriptor.

    Returns None if there is no such ancestor, and None when called on a
    sequence-type block directly (rendering a sequence already performs its
    own gating checks, and nesting sequences is not a supported layout).

    We explicitly test against the three known tag types that map to
    sequences (two of which are long deprecated) rather than against
    SequenceDescriptor itself, because (1) direct type comparison fails due
    to the dynamically mixed-in SequenceDescriptorWithMixins class, and
    (2) isinstance is wrong since Courses and Sections also subclass
    SequenceDescriptor. Note that some content (e.g. `static_tab`, `book`,
    `about`, learning-pathway content) lives outside any sequence hierarchy.
    """
    sequence_tags = ('sequential', 'problemset', 'videosequence')
    if block.location.block_type in sequence_tags:
        return None
    node = block
    # Walk up the parent chain until a sequence is found or the root is
    # passed (a course's parent is None).
    while node and node.location.block_type not in sequence_tags:
        node = node.get_parent()
    return node
@require_http_methods(["GET", "POST"])
@ensure_valid_usage_key
@xframe_options_exempt
@transaction.non_atomic_requests
@ensure_csrf_cookie
def render_xblock(request, usage_key_string, check_if_enrolled=True):
    """
    Returns an HttpResponse with HTML content for the xBlock with the given usage_key.

    The returned HTML is a chromeless rendering of the xBlock (excluding content of the containing courseware).

    GET parameters consumed here: ``view`` ('student_view' or 'public_view'),
    ``recheck_access``, ``show_bookmark_button``, ``show_title``; all other
    GET parameters are passed through to the block as view context.

    Args:
        request: the incoming HttpRequest.
        usage_key_string: serialized UsageKey of the block to render.
        check_if_enrolled: when True, the course access check also requires
            enrollment.
    """
    # Imported here (not at module level) -- presumably to avoid circular
    # imports between the LMS url modules and this view; TODO confirm.
    from lms.urls import RESET_COURSE_DEADLINES_NAME
    from openedx.features.course_experience.urls import COURSE_HOME_VIEW_NAME

    usage_key = UsageKey.from_string(usage_key_string)
    # Normalize old-style course keys by filling in the run.
    usage_key = usage_key.replace(course_key=modulestore().fill_in_run(usage_key.course_key))
    course_key = usage_key.course_key

    # Gathering metrics to make performance measurements easier.
    set_custom_attributes_for_course_key(course_key)
    set_custom_attribute('usage_key', usage_key_string)
    set_custom_attribute('block_type', usage_key.block_type)

    # Only the two learner-facing views may be rendered through this endpoint.
    # The requested view name is sanitized with bleach before echoing it back.
    requested_view = request.GET.get('view', 'student_view')
    if requested_view != 'student_view' and requested_view != 'public_view':  # lint-amnesty, pylint: disable=consider-using-in
        return HttpResponseBadRequest(
            u"Rendering of the xblock view '{}' is not supported.".format(bleach.clean(requested_view, strip=True))
        )

    staff_access = has_access(request.user, 'staff', course_key)
    # setup_masquerade may replace request.user with the masqueraded identity.
    _course_masquerade, request.user = setup_masquerade(request, course_key, staff_access)

    with modulestore().bulk_operations(course_key):
        # verify the user has access to the course, including enrollment check
        try:
            course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=check_if_enrolled)
        except CourseAccessRedirect:
            raise Http404("Course not found.")  # lint-amnesty, pylint: disable=raise-missing-from

        # get the block, which verifies whether the user has access to the block.
        recheck_access = request.GET.get('recheck_access') == '1'
        block, _ = get_module_by_usage_id(
            request, str(course_key), str(usage_key), disable_staff_debug_info=True, course=course,
            will_recheck_access=recheck_access
        )

        # Start the view context from the raw GET params, then overlay the
        # boolean flags this view interprets itself.
        student_view_context = request.GET.dict()
        student_view_context['show_bookmark_button'] = request.GET.get('show_bookmark_button', '0') == '1'
        student_view_context['show_title'] = request.GET.get('show_title', '1') == '1'

        is_learning_mfe = is_request_from_learning_mfe(request)
        # Right now, we only care about this in regards to the Learning MFE because it results
        # in a bad UX if we display blocks with access errors (repeated upgrade messaging).
        # If other use cases appear, consider removing the is_learning_mfe check or switching this
        # to be its own query parameter that can toggle the behavior.
        student_view_context['hide_access_error_blocks'] = is_learning_mfe and recheck_access

        # Enable view-based completion marking only when the completion
        # service says this block should be marked complete on view.
        enable_completion_on_view_service = False
        completion_service = block.runtime.service(block, 'completion')
        if completion_service and completion_service.completion_tracking_enabled():
            if completion_service.blocks_to_mark_complete_on_view({block}):
                enable_completion_on_view_service = True
                student_view_context['wrap_xblock_data'] = {
                    'mark-completed-on-view-after-delay': completion_service.get_complete_on_view_delay_ms()
                }

        missed_deadlines, missed_gated_content = dates_banner_should_display(course_key, request.user)

        # Some content gating happens only at the Sequence level (e.g. "has this
        # timed exam started?").
        ancestor_seq = enclosing_sequence_for_gating_checks(block)
        if ancestor_seq:
            seq_usage_key = ancestor_seq.location
            # We have a Descriptor, but I had trouble getting a SequenceModule
            # from it (even using ._xmodule to force the conversion) because the
            # runtime wasn't properly initialized. This view uses multiple
            # runtimes (including Blockstore), so I'm pulling it from scratch
            # based on the usage_key. We'll have to watch the performance impact
            # of this. :(
            seq_module_descriptor, _ = get_module_by_usage_id(
                request, str(course_key), str(seq_usage_key), disable_staff_debug_info=True, course=course
            )
            # I'm not at all clear why get_module_by_usage_id returns the
            # descriptor or why I need to manually force it to load the module
            # like this manually instead of the proxying working, but trial and
            # error has led me here. Hopefully all this weirdness goes away when
            # SequenceModule gets converted to an XBlock in:
            # https://github.com/edx/edx-platform/pull/25965
            seq_module = seq_module_descriptor._xmodule  # pylint: disable=protected-access

            # If the SequenceModule feels that gating is necessary, redirect
            # there so we can have some kind of error message at any rate.
            if seq_module.descendants_are_gated():
                return redirect(
                    reverse(
                        'render_xblock',
                        kwargs={'usage_key_string': str(seq_module.location)}
                    )
                )

        fragment = block.render(requested_view, context=student_view_context)
        optimization_flags = get_optimization_flags_for_content(block, fragment)

        # Template context for the chromeless page wrapper.
        context = {
            'fragment': fragment,
            'course': course,
            'disable_accordion': True,
            'allow_iframing': True,
            'disable_header': True,
            'disable_footer': True,
            'disable_window_wrap': True,
            'enable_completion_on_view_service': enable_completion_on_view_service,
            'edx_notes_enabled': is_feature_enabled(course, request.user),
            'staff_access': staff_access,
            'xqa_server': settings.FEATURES.get('XQA_SERVER', 'http://your_xqa_server.com'),
            'missed_deadlines': missed_deadlines,
            'missed_gated_content': missed_gated_content,
            'has_ended': course.has_ended(),
            'web_app_course_url': reverse(COURSE_HOME_VIEW_NAME, args=[course.id]),
            'on_courseware_page': True,
            'verified_upgrade_link': verified_upgrade_deadline_link(request.user, course=course),
            'is_learning_mfe': is_learning_mfe,
            'is_mobile_app': is_request_from_mobile_app(request),
            'reset_deadlines_url': reverse(RESET_COURSE_DEADLINES_NAME),
            **optimization_flags,
        }
        return render_to_response('courseware/courseware-chromeless.html', context)
def get_optimization_flags_for_content(block, fragment):
    """
    Return a dict with a set of display options appropriate for the block.

    This is going to start in a very limited way: only leaf HTMLBlock
    content is inspected, and only when the optimized-render flag is
    enabled for the course. Everything else gets the safe defaults.
    """
    # Conservative defaults: keep every front-end dependency enabled.
    safe_defaults = {
        'enable_mathjax': True
    }

    # Only run our optimizations on the leaf HTML and ProblemBlock nodes. The
    # mobile apps access these directly, and we don't have to worry about
    # XBlocks that dynamically load content, like inline discussions.
    # For now, confine ourselves to optimizing just the HTMLBlock.
    usage_key = block.location
    can_optimize = (
        usage_key.block_type == 'html'
        and COURSEWARE_OPTIMIZED_RENDER_XBLOCK.is_enabled(usage_key.course_key)
    )
    if not can_optimize:
        return safe_defaults

    inspector = XBlockContentInspector(block, fragment)
    return {**safe_defaults, 'enable_mathjax': inspector.has_mathjax_content()}
class XBlockContentInspector:
    """
    Inspects rendered XBlock content to determine front-end dependencies.

    A lot of content has been written with the assumption that certain
    JavaScript and assets are available. This has caused us to continue to
    include these assets in the render_xblock view, despite the fact that
    they are not used by the vast majority of content.

    In order to try to provide faster load times for most users on most
    content, this class has the job of detecting certain patterns in XBlock
    content that would imply these dependencies, so we know when to include
    them or not.
    """

    # Delimiter pairs used to mark MathJax syntax in XBlocks. There are
    # other options for the wiki, but we don't worry about those here.
    _MATHJAX_DELIMITERS = (
        (r"\(", r"\)"),
        (r"\[", r"\]"),
        ("[mathjaxinline]", "[/mathjaxinline]"),
        ("[mathjax]", "[/mathjax]"),
    )

    def __init__(self, block, fragment):
        self.block = block
        self.fragment = fragment

    def has_mathjax_content(self):
        """
        Returns whether we detect any MathJax in the fragment.

        Note that this only works for things that are rendered up front. If
        an XBlock is capable of modifying the DOM afterwards to inject math
        content into the page, this will not catch it.
        """
        markup = self.fragment.body_html()
        return any(
            opener in markup and closer in markup
            for (opener, closer) in self._MATHJAX_DELIMITERS
        )
# Intro text shown on the financial assistance pages. The {percent_sign} and
# {platform_name} placeholders are interpolated later via str.format, and the
# embedded '\n' characters split the text into separate paragraphs.
# Translators: "percent_sign" is the symbol "%". "platform_name" is a
# string identifying the name of this installation, such as "edX".
FINANCIAL_ASSISTANCE_HEADER = _(
    u'{platform_name} now offers financial assistance for learners who want to earn Verified Certificates but'
    u' who may not be able to pay the Verified Certificate fee. Eligible learners may receive up to 90{percent_sign} off'  # lint-amnesty, pylint: disable=line-too-long
    ' the Verified Certificate fee for a course.\nTo apply for financial assistance, enroll in the'
    ' audit track for a course that offers Verified Certificates, and then complete this application.'
    ' Note that you must complete a separate application for each course you take.\n We plan to use this'
    ' information to evaluate your application for financial assistance and to further develop our'
    ' financial assistance program.'
)
def _get_fa_header(header):
    """
    Interpolate platform values into ``header`` and split it into paragraphs.

    Returns a list of strings, one per newline-separated paragraph of the
    template.
    """
    platform_name = configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)
    formatted = header.format(percent_sign="%", platform_name=platform_name)
    return formatted.split('\n')
# Field labels and helper text for the financial assistance application form.
# ugettext_noop marks them for extraction; they are translated at render time.
FA_INCOME_LABEL = ugettext_noop('Annual Household Income')
FA_REASON_FOR_APPLYING_LABEL = ugettext_noop('Tell us about your current financial situation. Why do you need assistance?')  # lint-amnesty, pylint: disable=line-too-long
FA_GOALS_LABEL = ugettext_noop('Tell us about your learning or professional goals. How will a Verified Certificate in this course help you achieve these goals?')  # lint-amnesty, pylint: disable=line-too-long
FA_EFFORT_LABEL = ugettext_noop('Tell us about your plans for this course. What steps will you take to help you complete the course work and receive a certificate?')  # lint-amnesty, pylint: disable=line-too-long
FA_SHORT_ANSWER_INSTRUCTIONS = _('Use between 1250 and 2500 characters or so in your response.')
@login_required
def financial_assistance(_request):
    """Render the initial financial assistance page.

    Static landing page; the only dynamic content is the interpolated
    header text.
    """
    return render_to_response('financial-assistance/financial-assistance.html', {
        'header_text': _get_fa_header(FINANCIAL_ASSISTANCE_HEADER)
    })
@login_required
@require_POST
def financial_assistance_request(request):
    """Submit a request for financial assistance to Zendesk.

    Expects a JSON body with the applicant's details (username, course,
    name, email, country, income, short-answer responses, and the
    marketing-permission checkbox).

    Returns:
        204 on success, 400 for malformed JSON / bad course key / missing
        fields, 403 if the session user does not match the submitted
        username, and 500 when the Zendesk ticket could not be created.
    """
    try:
        data = json.loads(request.body.decode('utf8'))
        # Simple sanity check that the session belongs to the user
        # submitting an FA request
        username = data['username']
        if request.user.username != username:
            return HttpResponseForbidden()

        course_id = data['course']
        course = modulestore().get_course(CourseKey.from_string(course_id))
        legal_name = data['name']
        email = data['email']
        country = data['country']
        income = data['income']
        reason_for_applying = data['reason_for_applying']
        goals = data['goals']
        effort = data['effort']
        marketing_permission = data['mktg-permission']
        ip_address = get_client_ip(request)[0]
    except ValueError:
        # Thrown if JSON parsing fails
        return HttpResponseBadRequest(u'Could not parse request JSON.')
    except InvalidKeyError:
        # Thrown if course key parsing fails
        return HttpResponseBadRequest(u'Could not parse request course key.')
    except KeyError as err:
        # Thrown if fields are missing
        return HttpResponseBadRequest(u'The field {} is required.'.format(text_type(err)))

    zendesk_submitted = create_zendesk_ticket(
        legal_name,
        email,
        u'Financial assistance request for learner {username} in course {course_name}'.format(
            username=username,
            course_name=course.display_name
        ),
        u'Financial Assistance Request',
        tags={'course_id': course_id},
        # Send the application as additional info on the ticket so
        # that it is not shown when support replies. This uses
        # OrderedDict so that information is presented in the right
        # order.
        additional_info=OrderedDict((
            ('Username', username),
            ('Full Name', legal_name),
            ('Course ID', course_id),
            (FA_INCOME_LABEL, income),
            ('Country', country),
            ('Allowed for marketing purposes', 'Yes' if marketing_permission else 'No'),
            (FA_REASON_FOR_APPLYING_LABEL, '\n' + reason_for_applying + '\n\n'),
            (FA_GOALS_LABEL, '\n' + goals + '\n\n'),
            (FA_EFFORT_LABEL, '\n' + effort + '\n\n'),
            ('Client IP', ip_address),
        )),
        group='Financial Assistance',
    )
    # create_zendesk_ticket returns an HTTP status code here; anything other
    # than 200/201 is treated as failure.
    if not (zendesk_submitted == 200 or zendesk_submitted == 201):  # lint-amnesty, pylint: disable=consider-using-in
        # The call to Zendesk failed. The frontend will display a
        # message to the user.
        return HttpResponse(status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    return HttpResponse(status=status.HTTP_204_NO_CONTENT)
@login_required
def financial_assistance_form(request):
    """Render the financial assistance application form page.

    Builds the field descriptors (select / textarea / checkbox) consumed by
    the front-end form, pre-populated with the user's profile details and
    their financial-aid-eligible enrollments.
    """
    user = request.user
    enrolled_courses = get_financial_aid_courses(user)
    # Income brackets offered in the 'Annual Household Income' select.
    incomes = ['Less than $5,000', '$5,000 - $10,000', '$10,000 - $15,000', '$15,000 - $20,000', '$20,000 - $25,000',
               '$25,000 - $40,000', '$40,000 - $55,000', '$55,000 - $70,000', '$70,000 - $85,000',
               '$85,000 - $100,000', 'More than $100,000']
    annual_incomes = [
        {'name': _(income), 'value': income} for income in incomes  # lint-amnesty, pylint: disable=translation-of-non-string
    ]
    return render_to_response('financial-assistance/apply.html', {
        'header_text': _get_fa_header(FINANCIAL_ASSISTANCE_HEADER),
        'student_faq_url': marketing_link('FAQ'),
        'dashboard_url': reverse('dashboard'),
        'account_settings_url': reverse('account_settings'),
        'platform_name': configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME),
        'user_details': {
            'email': user.email,
            'username': user.username,
            'name': user.profile.name,
            'country': text_type(user.profile.country.name),
        },
        'submit_url': reverse('submit_financial_assistance_request'),
        # Each entry below describes one form field for the client-side form
        # renderer: name, widget type, label, validation restrictions, etc.
        'fields': [
            {
                'name': 'course',
                'type': 'select',
                'label': _('Course'),
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'options': enrolled_courses,
                'instructions': ugettext(
                    'Select the course for which you want to earn a verified certificate. If'
                    ' the course does not appear in the list, make sure that you have enrolled'
                    ' in the audit track for the course.'
                )
            },
            {
                'name': 'income',
                'type': 'select',
                'label': _(FA_INCOME_LABEL),  # lint-amnesty, pylint: disable=translation-of-non-string
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'options': annual_incomes,
                'instructions': _('Specify your annual household income in US Dollars.')
            },
            {
                'name': 'reason_for_applying',
                'type': 'textarea',
                'label': _(FA_REASON_FOR_APPLYING_LABEL),  # lint-amnesty, pylint: disable=translation-of-non-string
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'restrictions': {
                    'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
                    'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
                },
                'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
            },
            {
                'name': 'goals',
                'type': 'textarea',
                'label': _(FA_GOALS_LABEL),  # lint-amnesty, pylint: disable=translation-of-non-string
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'restrictions': {
                    'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
                    'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
                },
                'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
            },
            {
                'name': 'effort',
                'type': 'textarea',
                'label': _(FA_EFFORT_LABEL),  # lint-amnesty, pylint: disable=translation-of-non-string
                'placeholder': '',
                'defaultValue': '',
                'required': True,
                'restrictions': {
                    'min_length': settings.FINANCIAL_ASSISTANCE_MIN_LENGTH,
                    'max_length': settings.FINANCIAL_ASSISTANCE_MAX_LENGTH
                },
                'instructions': FA_SHORT_ANSWER_INSTRUCTIONS
            },
            {
                'placeholder': '',
                'name': 'mktg-permission',
                'label': _(
                    'I allow edX to use the information provided in this application '
                    '(except for financial information) for edX marketing purposes.'
                ),
                'defaultValue': '',
                'type': 'checkbox',
                'required': False,
                'instructions': '',
                'restrictions': {}
            }
        ],
    })
def get_financial_aid_courses(user):
    """Retrieve the courses eligible for financial assistance.

    Returns a list of {'name': ..., 'value': ...} dicts (display name and
    serialized course id), most recent enrollment first.
    """
    eligible_courses = []
    enrollments = CourseEnrollment.enrollments_for_user(user).order_by('-created')
    for enrollment in enrollments:
        # Learners already in the verified track don't need assistance.
        if enrollment.mode == CourseMode.VERIFIED:
            continue
        # Skip enrollments without an overview or explicitly ineligible ones.
        if not enrollment.course_overview:
            continue
        if not enrollment.course_overview.eligible_for_financial_aid:
            continue
        # The course must still be selling a verified seat: either no
        # expiration set, or an expiration that is still in the future.
        verified_seat_on_sale = CourseMode.objects.filter(
            Q(_expiration_datetime__isnull=True) | Q(_expiration_datetime__gt=datetime.now(UTC)),
            course_id=enrollment.course_id,
            mode_slug=CourseMode.VERIFIED).exists()
        if not verified_seat_on_sale:
            continue
        eligible_courses.append(
            {
                'name': enrollment.course_overview.display_name,
                'value': text_type(enrollment.course_id)
            }
        )
    return eligible_courses
| agpl-3.0 |
defionscode/ansible | lib/ansible/module_utils/facts/compat.py | 147 | 4115 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2017 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.namespace import PrefixFactNamespace
from ansible.module_utils.facts import default_collectors
from ansible.module_utils.facts import ansible_collector
def get_all_facts(module):
    """Compat api for ansible 2.2/2.3 module_utils.facts.get_all_facts method.

    Expects module to be an instance of AnsibleModule, with a
    'gather_subset' param.

    Returns a dict mapping the bare fact name ('default_ipv4' with no
    'ansible_' namespace) to the fact value.
    """
    return ansible_facts(module, gather_subset=module.params['gather_subset'])
def ansible_facts(module, gather_subset=None):
    '''Compat api for ansible 2.0/2.2/2.3 module_utils.facts.ansible_facts method

    2.2/2.3 expects a gather_subset arg.
    2.0/2.1 does not accept a gather_subset arg

    So make gather_subsets an optional arg, defaulting to configured DEFAULT_GATHER_TIMEOUT

    'module' should be an instance of an AnsibleModule.

    returns a dict mapping the bare fact name ('default_ipv4' with no 'ansible_' namespace) to
    the fact value.
    '''
    # Fall back to the module params (and then 'all') when no explicit
    # subset was passed in by the caller.
    gather_subset = gather_subset or module.params.get('gather_subset', ['all'])
    gather_timeout = module.params.get('gather_timeout', 10)
    filter_spec = module.params.get('filter', '*')

    # The minimal set of collectors that always run regardless of the
    # requested gather_subset.
    minimal_gather_subset = frozenset(['apparmor', 'caps', 'cmdline', 'date_time',
                                      'distribution', 'dns', 'env', 'fips', 'local',
                                      'lsb', 'pkg_mgr', 'platform', 'python', 'selinux',
                                      'service_mgr', 'ssh_pub_keys', 'user'])

    all_collector_classes = default_collectors.collectors

    # don't add a prefix
    namespace = PrefixFactNamespace(namespace_name='ansible', prefix='')

    fact_collector = \
        ansible_collector.get_ansible_collector(all_collector_classes=all_collector_classes,
                                                namespace=namespace,
                                                filter_spec=filter_spec,
                                                gather_subset=gather_subset,
                                                gather_timeout=gather_timeout,
                                                minimal_gather_subset=minimal_gather_subset)

    facts_dict = fact_collector.collect(module=module)

    return facts_dict
| gpl-3.0 |
CasataliaLabs/biscuit_drishtiman | v4l_capture_example.py | 1 | 1279 | import Image
import select
import v4l2capture
import numpy
import pylab
import time
# Open the video device.
#~ video = v4l2capture.Video_device("/dev/video0")
video = v4l2capture.Video_device("http://admin:@192.168.1.105/snapshot.cgi?.mjpeg")
# Suggest an image size to the device. The device may choose and
# return another size if it doesn't support the suggested one.
size_x, size_y = video.set_format(800, 448)
# Create a buffer to store image data in. This must be done before
# calling 'start' if v4l2capture is compiled with libv4l2. Otherwise
# raises IOError.
video.create_buffers(1)
# Send the buffer to the device. Some devices require this to be done
# before calling 'start'.
video.queue_all_buffers()
# Start the device. This lights the LED if it's a camera that has one.
video.start()
# Wait for the device to fill the buffer.
select.select((video,), (), ())
# The rest is easy :-)
image_data = video.read()
video.close()
#~ image = Image.fromstring("L", (size_x, size_y), image_data)
image = Image.fromstring("RGB", (size_x, size_y), image_data)
imageNumpy = numpy.asarray(image)
pylab.imshow(imageNumpy)
pylab.show()
#~ a = input('test')
#time.sleep(4)
#image.save("image.jpg")
#print "Saved image.jpg (Size: " + str(size_x) + " x " + str(size_y) + ")"
| gpl-3.0 |
zhaobj/MyHadoop | src/contrib/hod/testing/lib.py | 182 | 3458 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import unittest, re, sys
class BaseTestSuite():
  """Discovers and runs all test_* classes from a named module (Python 2).

  Returns 0 from runTests on success, otherwise the combined count of
  failures and errors.
  """

  def __init__(self, name, excludes):
    # name: importable module name to scan; excludes: class names to skip.
    self.name = name
    self.excludes = excludes
    pass

  def runTests(self):
    # Create a runner
    self.runner = unittest.TextTestRunner()

    # Get all the test-case classes
    # From module import *
    mod = __import__(self.name, fromlist=['*'])
    modItemsList = dir(mod)

    allsuites = []

    # Create all the test suites
    for modItem in modItemsList:
      if re.search(r"^test_", modItem):
        # Yes this is a test class
        if modItem not in self.excludes:
          test_class = getattr(mod, modItem)
          allsuites.append(unittest.makeSuite(test_class))

    # Create a master suite to be run.
    alltests = unittest.TestSuite(tuple(allsuites))

    # Run the master test suite.
    runner = self.runner.run(alltests)
    if(runner.wasSuccessful()): return 0
    # Report the totals to stderr and return the failure count.
    printLine( "%s test(s) failed." % runner.failures.__len__())
    printLine( "%s test(s) threw errors." % runner.errors.__len__())
    return runner.failures.__len__() + runner.errors.__len__()

  def cleanUp(self):
    # suite tearDown
    pass
def printLine(str):
  # Write a single line to stderr (Python 2 print syntax; 'str' shadows the
  # builtin but is kept for compatibility).
  print >>sys.stderr, str
def printSeparator():
  # Print a 79-asterisk separator line to stderr, padded with blank lines.
  str = ""
  for i in range(0,79):
    str = str + "*"
  print >>sys.stderr, "\n", str, "\n"
# This class captures all log messages logged by hodRunner and other classes.
# It is then used to verify that certain log messages have come. This is one
# way to validate that messages printed to the logger are correctly written.
class MockLogger:
  def __init__(self):
    # Maps message text -> severity level at which it was logged.
    self.__logLines = {}

  def info(self, message):
    self.__logLines[message] = 'info'

  def critical(self, message):
    self.__logLines[message] = 'critical'

  def warn(self, message):
    self.__logLines[message] = 'warn'

  def debug(self, message):
    # don't track debug lines.
    pass

  # verify a certain message has been logged at the defined level of severity.
  def hasMessage(self, message, level):
    if not self.__logLines.has_key(message):
      return False
    return self.__logLines[message] == level
# Stub class to test cluster manipulation operations.
class MockHadoopCluster:

  def __init__(self):
    # store the operations received.
    self.__operations = {}

  def delete_job(self, jobid):
    self.__operations['delete_job'] = [jobid]

  def is_cluster_deallocated(self, dummy):
    return False

  def wasOperationPerformed(self, operation, args):
    # True iff 'operation' was recorded and every recorded argument is
    # present in 'args' (the for/else fires only when no break occurred).
    if self.__operations.has_key(operation):
      actualArgs = self.__operations[operation]
      for arg in actualArgs:
        if arg not in args:
          break
      else:
        return True
    return False
| apache-2.0 |
vipod/pyzimbra | pyzimbra/soap_auth.py | 2 | 6473 | # -*- coding: utf-8 -*-
"""
################################################################################
# Copyright (c) 2010, Ilgar Mashayev
#
# E-mail: pyzimbra@lab.az
# Website: http://github.com/ilgarm/pyzimbra
################################################################################
# This file is part of pyzimbra.
#
# Pyzimbra is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyzimbra is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyzimbra. If not, see <http://www.gnu.org/licenses/>.
################################################################################
Soap related methods and classes.
@author: ilgar
"""
from pyzimbra import zconstant, sconstant, util
from pyzimbra.auth import AuthException, AuthToken, Authenticator
from pyzimbra.soap import SoapException
from time import time
import SOAPpy
import hashlib
import hmac
import logging
class SoapAuthenticator(Authenticator):
    """
    Soap authenticator.

    Authenticates Zimbra accounts and administrators over SOAP, either with
    an explicit password or (pre-auth) with a per-domain key.
    """
    # --------------------------------------------------------------- properties

    # -------------------------------------------------------------------- bound
    def __init__(self):
        Authenticator.__init__(self)
        self.log = logging.getLogger(__name__)

    # ------------------------------------------------------------------ unbound
    def authenticate_admin(self, transport, account_name, password):
        """
        Authenticates administrator using username and password.

        Returns an AuthToken carrying the admin token and session id;
        raises AuthException when the SOAP call fails.
        """
        Authenticator.authenticate_admin(self, transport, account_name, password)

        auth_token = AuthToken()
        auth_token.account_name = account_name

        params = {sconstant.E_NAME: account_name,
                  sconstant.E_PASSWORD: password}

        self.log.debug('Authenticating admin %s' % account_name)

        try:
            # Admin auth goes to the admin namespace URL.
            res = transport.invoke(zconstant.NS_ZIMBRA_ADMIN_URL,
                                   sconstant.AuthRequest,
                                   params,
                                   auth_token)
        except SoapException as exc:
            raise AuthException(unicode(exc), exc)

        auth_token.token = res.authToken
        auth_token.session_id = res.sessionId

        self.log.info('Authenticated admin %s, session id %s'
                      % (account_name, auth_token.session_id))

        return auth_token

    def authenticate(self, transport, account_name, password=None):
        """
        Authenticates account using soap method.

        Dispatches to password auth when a password is given, otherwise to
        domain-key pre-auth.
        """
        Authenticator.authenticate(self, transport, account_name, password)

        if password == None:
            return self.pre_auth(transport, account_name)
        else:
            return self.auth(transport, account_name, password)

    def auth(self, transport, account_name, password):
        """
        Authenticates using username and password.

        Raises AuthException when the SOAP call fails.
        """
        auth_token = AuthToken()
        auth_token.account_name = account_name

        attrs = {sconstant.A_BY: sconstant.V_NAME}
        account = SOAPpy.Types.stringType(data=account_name, attrs=attrs)
        params = {sconstant.E_ACCOUNT: account,
                  sconstant.E_PASSWORD: password}

        self.log.debug('Authenticating account %s' % account_name)

        try:
            res = transport.invoke(zconstant.NS_ZIMBRA_ACC_URL,
                                   sconstant.AuthRequest,
                                   params,
                                   auth_token)
        except SoapException as exc:
            raise AuthException(unicode(exc), exc)

        # The response may arrive as a tuple depending on the transport;
        # the auth token is always the first element in that case.
        if type(res) is tuple:
            auth_token.token = res[0].authToken
        else:
            auth_token.token = res.authToken

        if hasattr(res, 'sessionId'):
            auth_token.session_id = res.sessionId

        self.log.info('Authenticated account %s, session id %s'
                      % (account_name, auth_token.session_id))

        return auth_token

    def pre_auth(self, transport, account_name):
        """
        Authenticates using username and domain key.

        The pre-auth value is an HMAC-SHA1 over
        'account|by|expires|timestamp' keyed with the domain key registered
        in self.domains. Raises AuthException when the domain key is missing
        or the SOAP call fails.
        """
        auth_token = AuthToken()
        auth_token.account_name = account_name

        domain = util.get_domain(account_name)
        if domain == None:
            raise AuthException('Invalid auth token account')

        if domain in self.domains:
            domain_key = self.domains[domain]
        else:
            domain_key = None

        if domain_key == None:
            raise AuthException('Invalid domain key for domain %s' % domain)

        self.log.debug('Initialized domain key for account %s'
                       % account_name)

        # expires=0 means the server-side default expiry; timestamp is in
        # milliseconds since the epoch.
        expires = 0

        timestamp = int(time() * 1000)
        pak = hmac.new(domain_key, '%s|%s|%s|%s' %
                       (account_name, sconstant.E_NAME, expires, timestamp),
                       hashlib.sha1).hexdigest()

        attrs = {sconstant.A_BY: sconstant.V_NAME}
        account = SOAPpy.Types.stringType(data=account_name, attrs=attrs)

        attrs = {sconstant.A_TIMESTAMP: timestamp, sconstant.A_EXPIRES: expires}
        preauth = SOAPpy.Types.stringType(data=pak,
                                          name=sconstant.E_PREAUTH,
                                          attrs=attrs)

        params = {sconstant.E_ACCOUNT: account,
                  sconstant.E_PREAUTH: preauth}

        self.log.debug('Authenticating account %s using domain key'
                       % account_name)

        try:
            res = transport.invoke(zconstant.NS_ZIMBRA_ACC_URL,
                                   sconstant.AuthRequest,
                                   params,
                                   auth_token)
        except SoapException as exc:
            raise AuthException(unicode(exc), exc)

        auth_token.token = res.authToken
        auth_token.session_id = res.sessionId

        self.log.info('Authenticated account %s, session id %s'
                      % (account_name, auth_token.session_id))

        return auth_token
| lgpl-3.0 |
sarvex/tensorflow | tensorflow/python/keras/layers/noise_test.py | 6 | 2995 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for noise layers."""
import numpy as np
from tensorflow.python import keras
from tensorflow.python.framework import dtypes
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.platform import test
@keras_parameterized.run_all_keras_modes
class NoiseLayersTest(keras_parameterized.TestCase):
  """Smoke tests for the Keras noise layers across all Keras run modes."""

  def test_GaussianNoise(self):
    # layer_test builds, serializes and runs the layer, checking shapes.
    testing_utils.layer_test(
        keras.layers.GaussianNoise,
        kwargs={'stddev': 1.},
        input_shape=(3, 2, 3))

  def test_GaussianDropout(self):
    testing_utils.layer_test(
        keras.layers.GaussianDropout,
        kwargs={'rate': 0.5},
        input_shape=(3, 2, 3))

  def test_AlphaDropout(self):
    testing_utils.layer_test(
        keras.layers.AlphaDropout, kwargs={'rate': 0.2}, input_shape=(3, 2, 3))

  @staticmethod
  def _make_model(dtype, class_type):
    """Build a tiny Dense + noise-layer Sequential model of `dtype`."""
    assert dtype in (dtypes.float32, dtypes.float64)
    assert class_type in ('gaussian_noise', 'gaussian_dropout', 'alpha_noise')
    # Map the symbolic layer name to a constructor; 'alpha_noise' selects
    # AlphaDropout, matching the historical naming in this test.
    factories = {
        'gaussian_noise': lambda: keras.layers.GaussianNoise(
            0.0003, dtype=dtype),
        'gaussian_dropout': lambda: keras.layers.GaussianDropout(
            0.1, dtype=dtype),
        'alpha_noise': lambda: keras.layers.AlphaDropout(0.5, dtype=dtype),
    }
    model = keras.Sequential()
    model.add(keras.layers.Dense(8, input_shape=(32,), dtype=dtype))
    model.add(factories[class_type]())
    return model

  def _train_model(self, dtype, gtype):
    # One SGD step on zeros is enough to exercise forward + backward passes.
    net = self._make_model(dtype, gtype)
    net.compile(
        optimizer='sgd',
        loss='mse',
        run_eagerly=testing_utils.should_run_eagerly())
    net.train_on_batch(np.zeros((8, 32)), np.zeros((8, 8)))

  def test_noise_float32(self):
    self._train_model(dtypes.float32, 'gaussian_noise')

  def test_noise_float64(self):
    self._train_model(dtypes.float64, 'gaussian_noise')

  def test_dropout_float32(self):
    self._train_model(dtypes.float32, 'gaussian_dropout')

  def test_dropout_float64(self):
    self._train_model(dtypes.float64, 'gaussian_dropout')

  def test_alpha_dropout_float32(self):
    self._train_model(dtypes.float32, 'alpha_noise')

  def test_alpha_dropout_float64(self):
    self._train_model(dtypes.float64, 'alpha_noise')
# Standard entry point: run this module's tests when executed directly.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
supamii/QttpServer | lib/gmock/gtest/test/gtest_xml_output_unittest.py | 1815 | 14580 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module"""
__author__ = 'eefacm@gmail.com (Sean Mcafee)'
import datetime
import errno
import os
import re
import sys
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
# Command-line flags understood by Google Test binaries.
GTEST_FILTER_FLAG = '--gtest_filter'
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
GTEST_OUTPUT_FLAG = "--gtest_output"
# File name Google Test uses when --gtest_output=xml is given with no path.
GTEST_DEFAULT_OUTPUT_FILE = "test_detail.xml"
GTEST_PROGRAM_NAME = "gtest_xml_output_unittest_"

# Flip to True (and adjust expectations) on platforms where Google Test
# embeds stack traces in XML failure messages.
SUPPORTS_STACK_TRACES = False

if SUPPORTS_STACK_TRACES:
  STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
else:
  STACK_TRACE_TEMPLATE = ''
EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="23" failures="4" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
<testsuite name="FailedTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="Fails" status="run" time="*" classname="FailedTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="MixedResultTest"/>
<testcase name="Fails" status="run" time="*" classname="MixedResultTest">
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 2
Expected: 1%(stack)s]]></failure>
<failure message="gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Value of: 3
Expected: 2%(stack)s]]></failure>
</testcase>
<testcase name="DISABLED_test" status="notrun" time="*" classname="MixedResultTest"/>
</testsuite>
<testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="OutputsCData" status="run" time="*" classname="XmlQuotingTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" errors="0" time="*">
<testcase name="InvalidCharactersInMessage" status="run" time="*" classname="InvalidCharactersTest">
<failure message="gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []%(stack)s]]></failure>
</testcase>
</testsuite>
<testsuite name="DisabledTest" tests="1" failures="0" disabled="1" errors="0" time="*">
<testcase name="DISABLED_test_not_run" status="notrun" time="*" classname="DisabledTest"/>
</testsuite>
<testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" errors="0" time="*" SetUpTestCase="yes" TearDownTestCase="aye">
<testcase name="OneProperty" status="run" time="*" classname="PropertyRecordingTest" key_1="1"/>
<testcase name="IntValuedProperty" status="run" time="*" classname="PropertyRecordingTest" key_int="1"/>
<testcase name="ThreeProperties" status="run" time="*" classname="PropertyRecordingTest" key_1="1" key_2="2" key_3="3"/>
<testcase name="TwoValuesForOneKeyUsesLastValue" status="run" time="*" classname="PropertyRecordingTest" key_1="2"/>
</testsuite>
<testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" errors="0" time="*">
<testcase name="RecordProperty" status="run" time="*" classname="NoFixtureTest" key="1"/>
<testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_int="1"/>
<testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" time="*" classname="NoFixtureTest" key_for_utility_string="1"/>
</testsuite>
<testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="HasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" time="*" classname="Single/ValueParamTest" />
<testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" time="*" classname="Single/ValueParamTest" />
</testsuite>
<testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/0" />
</testsuite>
<testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="TypedTest/1" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/0" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/0" />
</testsuite>
<testsuite name="Single/TypeParameterizedTestCase/1" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="HasTypeParamAttribute" type_param="*" status="run" time="*" classname="Single/TypeParameterizedTestCase/1" />
</testsuite>
</testsuites>""" % {'stack': STACK_TRACE_TEMPLATE}
EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests" ad_hoc_property="42">
<testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0"
errors="0" time="*">
<testcase name="Succeeds" status="run" time="*" classname="SuccessfulTest"/>
</testsuite>
</testsuites>"""
EXPECTED_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="0" failures="0" disabled="0" errors="0" time="*"
timestamp="*" name="AllTests">
</testsuites>"""
GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
[GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase):
"""
Unit test for Google Test's XML output functionality.
"""
# This test currently breaks on platforms that do not support typed and
# type-parameterized tests, so we don't run it under them.
if SUPPORTS_TYPED_TESTS:
def testNonEmptyXmlOutput(self):
"""
Runs a test program that generates a non-empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1)
def testEmptyXmlOutput(self):
"""Verifies XML output for a Google Test binary without actual tests.
Runs a test program that generates an empty XML output, and
tests that the XML output is expected.
"""
self._TestXmlOutput('gtest_no_test_unittest', EXPECTED_EMPTY_XML, 0)
def testTimestampValue(self):
"""Checks whether the timestamp attribute in the XML output is valid.
Runs a test program that generates an empty XML output, and checks if
the timestamp attribute in the testsuites tag is valid.
"""
actual = self._GetXmlOutput('gtest_no_test_unittest', [], 0)
date_time_str = actual.documentElement.getAttributeNode('timestamp').value
# datetime.strptime() is only available in Python 2.5+ so we have to
# parse the expected datetime manually.
match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
self.assertTrue(
re.match,
'XML datettime string %s has incorrect format' % date_time_str)
date_time_from_xml = datetime.datetime(
year=int(match.group(1)), month=int(match.group(2)),
day=int(match.group(3)), hour=int(match.group(4)),
minute=int(match.group(5)), second=int(match.group(6)))
time_delta = abs(datetime.datetime.now() - date_time_from_xml)
# timestamp value should be near the current local time
self.assertTrue(time_delta < datetime.timedelta(seconds=600),
'time_delta is %s' % time_delta)
actual.unlink()
def testDefaultOutputFile(self):
"""
Confirms that Google Test produces an XML output file with the expected
default name if no name is explicitly specified.
"""
output_file = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_DEFAULT_OUTPUT_FILE)
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
'gtest_no_test_unittest')
try:
os.remove(output_file)
except OSError, e:
if e.errno != errno.ENOENT:
raise
p = gtest_test_utils.Subprocess(
[gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG],
working_dir=gtest_test_utils.GetTempDir())
self.assert_(p.exited)
self.assertEquals(0, p.exit_code)
self.assert_(os.path.isfile(output_file))
def testSuppressedXmlOutput(self):
"""
Tests that no XML file is generated if the default XML listener is
shut down before RUN_ALL_TESTS is invoked.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
GTEST_PROGRAM_NAME + 'out.xml')
if os.path.isfile(xml_path):
os.remove(xml_path)
command = [GTEST_PROGRAM_PATH,
'%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path),
'--shut_down_xml']
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
# p.signal is avalable only if p.terminated_by_signal is True.
self.assertFalse(
p.terminated_by_signal,
'%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(1, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, 1))
self.assert_(not os.path.isfile(xml_path))
def testFilteredTestXmlOutput(self):
"""Verifies XML output when a filter is applied.
Runs a test program that executes only some tests and verifies that
non-selected tests do not show up in the XML output.
"""
self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED_TEST_XML, 0,
extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])
def _GetXmlOutput(self, gtest_prog_name, extra_args, expected_exit_code):
"""
Returns the xml output generated by running the program gtest_prog_name.
Furthermore, the program's exit code must be expected_exit_code.
"""
xml_path = os.path.join(gtest_test_utils.GetTempDir(),
gtest_prog_name + 'out.xml')
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
command = ([gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)] +
extra_args)
p = gtest_test_utils.Subprocess(command)
if p.terminated_by_signal:
self.assert_(False,
'%s was killed by signal %d' % (gtest_prog_name, p.signal))
else:
self.assert_(p.exited)
self.assertEquals(expected_exit_code, p.exit_code,
"'%s' exited with code %s, which doesn't match "
'the expected exit code %s.'
% (command, p.exit_code, expected_exit_code))
actual = minidom.parse(xml_path)
return actual
def _TestXmlOutput(self, gtest_prog_name, expected_xml,
expected_exit_code, extra_args=None):
"""
Asserts that the XML document generated by running the program
gtest_prog_name matches expected_xml, a string containing another
XML document. Furthermore, the program's exit code must be
expected_exit_code.
"""
actual = self._GetXmlOutput(gtest_prog_name, extra_args or [],
expected_exit_code)
expected = minidom.parseString(expected_xml)
self.NormalizeXml(actual.documentElement)
self.AssertEquivalentNodes(expected.documentElement,
actual.documentElement)
expected.unlink()
actual.unlink()
# Keep stack traces shallow so the golden XML stays stable, then run.
if __name__ == '__main__':
  os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
  gtest_test_utils.Main()
| mit |
michalliu/OpenWrt-Firefly-Libraries | staging_dir/host/lib/python2.7/test/test_applesingle.py | 136 | 1810 | # Copyright (C) 2003 Python Software Foundation
import unittest
import os
from test import test_support
import struct
MacOS = test_support.import_module('MacOS')
# The following should exist if MacOS does.
import applesingle
# AppleSingle file-format magic number and version (big-endian header).
AS_MAGIC=0x00051600
AS_VERSION=0x00020000
# Payloads for the data fork and resource fork of the synthetic file.
dataforkdata = 'hello\r\0world\n'
resourceforkdata = 'goodbye\ncruel\0world\r'

# A complete AppleSingle blob: header, two entry descriptors
# (id 1 = data fork, id 2 = resource fork, offsets from byte 50), payloads.
applesingledata = struct.pack(">ll16sh", AS_MAGIC, AS_VERSION, "foo", 2) + \
    struct.pack(">llllll", 1, 50, len(dataforkdata),
        2, 50+len(dataforkdata), len(resourceforkdata)) + \
    dataforkdata + \
    resourceforkdata
TESTFN2 = test_support.TESTFN + '2'
class TestApplesingle(unittest.TestCase):
    """Tests for applesingle.decode() on a synthetic AppleSingle file."""

    def setUp(self):
        # Write the fixture AppleSingle blob for each test to decode.
        fp = open(test_support.TESTFN, 'w')
        fp.write(applesingledata)
        fp.close()

    def tearDown(self):
        # Best-effort cleanup: the files may legitimately not exist, but
        # anything other than OSError should propagate (was a bare except).
        try:
            os.unlink(test_support.TESTFN)
        except OSError:
            pass
        try:
            os.unlink(TESTFN2)
        except OSError:
            pass

    def compareData(self, isrf, data):
        """Assert that the requested fork of TESTFN2 contains exactly `data`.

        isrf selects the resource fork (True) or data fork (False).
        """
        if isrf:
            fp = MacOS.openrf(TESTFN2, '*rb')
        else:
            fp = open(TESTFN2, 'rb')
        # Close the handle even when the assertion fails (was leaked before).
        try:
            filedata = fp.read(1000)
            self.assertEqual(data, filedata)
        finally:
            fp.close()

    def test_applesingle(self):
        try:
            os.unlink(TESTFN2)
        except OSError:
            pass
        applesingle.decode(test_support.TESTFN, TESTFN2)
        self.compareData(False, dataforkdata)
        self.compareData(True, resourceforkdata)

    def test_applesingle_resonly(self):
        try:
            os.unlink(TESTFN2)
        except OSError:
            pass
        # resonly=True writes the resource fork into the *data* fork.
        applesingle.decode(test_support.TESTFN, TESTFN2, resonly=True)
        self.compareData(False, resourceforkdata)
def test_main():
    """regrtest entry point: run the AppleSingle test case."""
    test_support.run_unittest(TestApplesingle)
# Allow running this test file directly, outside of regrtest.
if __name__ == '__main__':
    test_main()
| gpl-2.0 |
sounay/flaminggo-test | onadata/apps/logger/migrations/0020_auto__chg_field_instance_uuid.py | 13 | 8321 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Widen ``Instance.uuid`` from 36 to 249 characters."""

    def forwards(self, orm):
        """Apply: grow the uuid column so it can hold longer identifiers."""

        # Changing field 'Instance.uuid'
        db.alter_column('odk_logger_instance', 'uuid', self.gf('django.db.models.fields.CharField')(max_length=249))

    def backwards(self, orm):
        """Revert: shrink the uuid column back to a plain 36-char UUID."""

        # Changing field 'Instance.uuid'
        db.alter_column('odk_logger_instance', 'uuid', self.gf('django.db.models.fields.CharField')(max_length=36))

    # Frozen ORM snapshot generated by South at migration-creation time.
    # Do not edit by hand; regenerate with `schemamigration` if models change.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'odk_logger.attachment': {
            'Meta': {'object_name': 'Attachment'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': "orm['odk_logger.Instance']"}),
            'media_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'})
        },
        'odk_logger.instance': {
            'Meta': {'object_name': 'Instance'},
            'date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'deleted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "u'submitted_via_web'", 'max_length': '20'}),
            'survey_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['odk_logger.SurveyType']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': "orm['auth.User']"}),
            'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '249'}),
            'xform': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': "orm['odk_logger.XForm']"}),
            'xml': ('django.db.models.fields.TextField', [], {})
        },
        'odk_logger.instancehistory': {
            'Meta': {'object_name': 'InstanceHistory'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'xform_instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'submission_history'", 'to': "orm['odk_logger.Instance']"}),
            'xml': ('django.db.models.fields.TextField', [], {})
        },
        'odk_logger.surveytype': {
            'Meta': {'object_name': 'SurveyType'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'odk_logger.xform': {
            'Meta': {'ordering': "('id_string',)", 'unique_together': "(('user', 'id_string'),)", 'object_name': 'XForm'},
            'bamboo_dataset': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '60'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'null': 'True'}),
            'downloadable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'has_start_time': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'id_string': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'is_crowd_form': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'json': ('django.db.models.fields.TextField', [], {'default': "u''"}),
            'shared': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'shared_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'xforms'", 'null': 'True', 'to': "orm['auth.User']"}),
            'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '32'}),
            'xls': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True'}),
            'xml': ('django.db.models.fields.TextField', [], {})
        }
    }

    complete_apps = ['logger']
| bsd-2-clause |
nzavagli/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/pip-7.1.0/pip/_vendor/requests/packages/urllib3/response.py | 478 | 16459 | try:
import http.client as httplib
except ImportError:
import httplib
import zlib
import io
from socket import timeout as SocketTimeout
from ._collections import HTTPHeaderDict
from .exceptions import (
ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
)
from .packages.six import string_types as basestring, binary_type, PY3
from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed
class DeflateDecoder(object):
    """Incremental deflate decoder that tolerates both stream flavors.

    The first decompress attempt assumes an RFC 1950 (zlib-wrapped)
    stream; if zlib rejects it, every byte seen so far is replayed
    through a raw-deflate (RFC 1951) decompressor instead.
    """

    def __init__(self):
        self._first_try = True   # still probing for the stream format
        self._data = b''         # input buffered until the format is known
        self._obj = zlib.decompressobj()

    def __getattr__(self, name):
        # Delegate flush(), unused_data, etc. to the underlying zlib object.
        return getattr(self._obj, name)

    def decompress(self, data):
        if not data:
            return data

        if self._first_try:
            # Remember the raw input in case we must restart in raw mode.
            self._data += data
            try:
                return self._obj.decompress(data)
            except zlib.error:
                # Not zlib-wrapped: switch to raw deflate and replay.
                self._first_try = False
                self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
                try:
                    return self.decompress(self._data)
                finally:
                    self._data = None

        return self._obj.decompress(data)
class GzipDecoder(object):
    """Thin incremental wrapper around zlib configured for gzip framing."""

    def __init__(self):
        # wbits = 16 + MAX_WBITS tells zlib to expect a gzip header/trailer.
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def __getattr__(self, name):
        # Delegate flush(), unused_data, etc. to the underlying zlib object.
        return getattr(self._obj, name)

    def decompress(self, data):
        # Pass empty chunks straight through rather than feeding zlib.
        return data if not data else self._obj.decompress(data)
def _get_decoder(mode):
    """Return a fresh content decoder for a Content-Encoding token.

    Anything other than 'gzip' is treated as (possibly raw) deflate.
    """
    return GzipDecoder() if mode == 'gzip' else DeflateDecoder()
class HTTPResponse(io.IOBase):
"""
HTTP Response container.
Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
loaded and decoded on-demand when the ``data`` property is accessed. This
class is also compatible with the Python standard library's :mod:`io`
module, and can hence be treated as a readable object in the context of that
framework.
Extra parameters for behaviour not present in httplib.HTTPResponse:
:param preload_content:
If True, the response's body will be preloaded during construction.
:param decode_content:
If True, attempts to decode specific content-encoding's based on headers
(like 'gzip' and 'deflate') will be skipped and raw data will be used
instead.
:param original_response:
When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
object, it's convenient to include the original for debug purposes. It's
otherwise unused.
"""
CONTENT_DECODERS = ['gzip', 'deflate']
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
    def __init__(self, body='', headers=None, status=0, version=0, reason=None,
                 strict=0, preload_content=True, decode_content=True,
                 original_response=None, pool=None, connection=None):
        # Normalize headers into an HTTPHeaderDict for case-insensitive access.
        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers
        else:
            self.headers = HTTPHeaderDict(headers)
        self.status = status
        self.version = version
        self.reason = reason
        self.strict = strict
        self.decode_content = decode_content

        self._decoder = None
        self._body = None
        self._fp = None
        self._original_response = original_response
        self._fp_bytes_read = 0

        # A string/bytes body is stored directly; a file-like body is kept
        # as the underlying fp and read on demand.
        if body and isinstance(body, (basestring, binary_type)):
            self._body = body

        self._pool = pool
        self._connection = connection

        if hasattr(body, 'read'):
            self._fp = body

        # Are we using the chunked-style of transfer encoding?
        self.chunked = False
        self.chunk_left = None
        tr_enc = self.headers.get('transfer-encoding', '').lower()
        # Don't incur the penalty of creating a list and then discarding it
        encodings = (enc.strip() for enc in tr_enc.split(","))
        if "chunked" in encodings:
            self.chunked = True

        # We certainly don't want to preload content when the response is chunked.
        if not self.chunked and preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
"""
Should we redirect and where to?
:returns: Truthy redirect location string if we got a redirect status
code and valid location. ``None`` if redirect status and no
location. ``False`` if not a redirect status code.
"""
if self.status in self.REDIRECT_STATUSES:
return self.headers.get('location')
return False
    def release_conn(self):
        # Return the underlying connection to its pool; a no-op when this
        # response was not created from a pooled connection.
        if not self._pool or not self._connection:
            return

        self._pool._put_conn(self._connection)
        self._connection = None
    @property
    def data(self):
        # For backwards-compat with urllib3 0.4 and earlier: expose the
        # (possibly cached) body as a simple attribute.
        if self._body:
            return self._body

        if self._fp:
            return self.read(cache_content=True)
    def tell(self):
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
        are encoded on the wire (e.g., compressed).
        """
        return self._fp_bytes_read
    def _init_decoder(self):
        """
        Set-up the _decoder attribute if necessary.
        """
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        content_encoding = self.headers.get('content-encoding', '').lower()
        if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
            self._decoder = _get_decoder(content_encoding)
    def _decode(self, data, decode_content, flush_decoder):
        """
        Decode the data passed in and potentially flush the decoder.

        Raises :class:`DecodeError` when the body cannot be decompressed
        according to its advertised content-encoding.
        """
        try:
            if decode_content and self._decoder:
                data = self._decoder.decompress(data)
        except (IOError, zlib.error) as e:
            content_encoding = self.headers.get('content-encoding', '').lower()
            raise DecodeError(
                "Received response with content-encoding: %s, but "
                "failed to decode it." % content_encoding, e)

        # At end-of-stream, drain whatever the decoder is still holding.
        if flush_decoder and decode_content and self._decoder:
            buf = self._decoder.decompress(binary_type())
            data += buf + self._decoder.flush()

        return data
    def read(self, amt=None, decode_content=None, cache_content=False):
        """
        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned despite of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)

        :returns: The (possibly decoded) bytes read, or ``None`` when there
            is no underlying file object to read from.
        """
        self._init_decoder()
        if decode_content is None:
            decode_content = self.decode_content

        if self._fp is None:
            return

        flush_decoder = False

        try:
            try:
                if amt is None:
                    # cStringIO doesn't like amt=None
                    data = self._fp.read()
                    flush_decoder = True
                else:
                    cache_content = False
                    data = self._fp.read(amt)
                    if amt != 0 and not data:  # Platform-specific: Buggy versions of Python.
                        # Close the connection when no data is returned
                        #
                        # This is redundant to what httplib/http.client _should_
                        # already do.  However, versions of python released before
                        # December 15, 2012 (http://bugs.python.org/issue16298) do
                        # not properly close the connection in all cases. There is
                        # no harm in redundantly calling close.
                        self._fp.close()
                        flush_decoder = True

            except SocketTimeout:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if 'read operation timed out' not in str(e):  # Defensive:
                    # This shouldn't happen but just in case we're missing an edge
                    # case, let's avoid swallowing SSL errors.
                    raise

                raise ReadTimeoutError(self._pool, None, 'Read timed out.')

            except HTTPException as e:
                # This includes IncompleteRead.
                raise ProtocolError('Connection broken: %r' % e, e)

            self._fp_bytes_read += len(data)

            data = self._decode(data, decode_content, flush_decoder)

            if cache_content:
                self._body = data

            return data

        finally:
            # Once the original httplib response is exhausted, hand the
            # connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()
    def stream(self, amt=2**16, decode_content=None):
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            much data per iteration, but may return less. This is particularly
            likely when using compressed data. However, the empty string will
            never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        # Chunked transfer encoding has its own framing-aware reader.
        if self.chunked:
            for line in self.read_chunked(amt, decode_content=decode_content):
                yield line
        else:
            while not is_fp_closed(self._fp):
                data = self.read(amt=amt, decode_content=decode_content)

                if data:
                    yield data
    @classmethod
    def from_httplib(ResponseCls, r, **response_kw):
        """
        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
        corresponding :class:`urllib3.response.HTTPResponse` object.

        Remaining parameters are passed to the HTTPResponse constructor, along
        with ``original_response=r``.
        """
        headers = r.msg

        # Wrap the stdlib header container in our case-insensitive dict.
        if not isinstance(headers, HTTPHeaderDict):
            if PY3:  # Python 3
                headers = HTTPHeaderDict(headers.items())
            else:  # Python 2
                headers = HTTPHeaderDict.from_httplib(headers)

        # HTTPResponse objects in Python 3 don't have a .strict attribute
        strict = getattr(r, 'strict', 0)
        resp = ResponseCls(body=r,
                           headers=headers,
                           status=r.status,
                           version=r.version,
                           reason=r.reason,
                           strict=strict,
                           original_response=r,
                           **response_kw)
        return resp
# Backwards-compatibility methods for httplib.HTTPResponse
    def getheaders(self):
        # httplib-compatible accessor: return the full header mapping.
        return self.headers
    def getheader(self, name, default=None):
        # httplib-compatible accessor: look up a single header value.
        return self.headers.get(name, default)
# Overrides from io.IOBase
    def close(self):
        # io.IOBase override: close the wrapped file object at most once.
        if not self.closed:
            self._fp.close()
@property
def closed(self):
if self._fp is None:
return True
elif hasattr(self._fp, 'closed'):
return self._fp.closed
elif hasattr(self._fp, 'isclosed'): # Python 2
return self._fp.isclosed()
else:
return True
def fileno(self):
if self._fp is None:
raise IOError("HTTPResponse has no file to get a fileno from")
elif hasattr(self._fp, "fileno"):
return self._fp.fileno()
else:
raise IOError("The file-like object this HTTPResponse is wrapped "
"around has no file descriptor")
    def flush(self):
        # io.IOBase override: delegate to the wrapped fp when it supports flush.
        if self._fp is not None and hasattr(self._fp, 'flush'):
            return self._fp.flush()
    def readable(self):
        """Required for `io` module compatibility; the body is always readable."""
        return True
def readinto(self, b):
# This method is required for `io` module compatibility.
temp = self.read(len(b))
if len(temp) == 0:
return 0
else:
b[:len(temp)] = temp
return len(temp)
    def _update_chunk_length(self):
        # First, we'll figure out length of a chunk and then
        # we'll try to read it from socket.
        if self.chunk_left is not None:
            # Still inside the current chunk; nothing to parse yet.
            return
        line = self._fp.fp.readline()
        # Discard any chunk extensions following ';' on the size line.
        line = line.split(b';', 1)[0]
        try:
            # The chunk size is transmitted as a hexadecimal number.
            self.chunk_left = int(line, 16)
        except ValueError:
            # Invalid chunked protocol response, abort.
            self.close()
            raise httplib.IncompleteRead(line)
    def _handle_chunk(self, amt):
        """Read up to ``amt`` bytes from the current chunk (all of it if None)."""
        returned_chunk = None
        if amt is None:
            # No limit: consume the whole remaining chunk plus its CRLF.
            chunk = self._fp._safe_read(self.chunk_left)
            returned_chunk = chunk
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        elif amt < self.chunk_left:
            # Partial read: leave the remainder (and the CRLF) for next time.
            value = self._fp._safe_read(amt)
            self.chunk_left = self.chunk_left - amt
            returned_chunk = value
        elif amt == self.chunk_left:
            # Exact read: the chunk is finished, so also consume the CRLF.
            value = self._fp._safe_read(amt)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
            returned_chunk = value
        else:  # amt > self.chunk_left
            # Request exceeds the chunk: return only what this chunk holds.
            returned_chunk = self._fp._safe_read(self.chunk_left)
            self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        return returned_chunk
    def read_chunked(self, amt=None, decode_content=None):
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked("Response is not chunked. "
                                     "Header 'transfer-encoding: chunked' is missing.")
        if self._original_response and self._original_response._method.upper() == 'HEAD':
            # Don't bother reading the body of a HEAD request.
            # FIXME: Can we do this somehow without accessing private httplib _method?
            self._original_response.close()
            return
        while True:
            self._update_chunk_length()
            # A zero-length chunk marks the end of the body.
            if self.chunk_left == 0:
                break
            chunk = self._handle_chunk(amt)
            yield self._decode(chunk, decode_content=decode_content,
                               flush_decoder=True)
        # Chunk content ends with \r\n: discard it.
        # This loop also skips any trailer lines preceding the final CRLF.
        while True:
            line = self._fp.fp.readline()
            if not line:
                # Some sites may not end with '\r\n'.
                break
            if line == b'\r\n':
                break
        # We read everything; close the "file".
        if self._original_response:
            self._original_response.close()
        self.release_conn()
| mit |
cysnake4713/odoo | addons/account/__openerp__.py | 33 | 7683 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'eInvoicing',
'version' : '1.1',
'author' : 'OpenERP SA',
'category' : 'Accounting & Finance',
'description' : """
Accounting and Financial Management.
====================================
Financial and accounting module that covers:
--------------------------------------------
* General Accounting
* Cost/Analytic accounting
* Third party accounting
* Taxes management
* Budgets
* Customer and Supplier Invoices
* Bank statements
* Reconciliation process by partner
Creates a dashboard for accountants that includes:
--------------------------------------------------
* List of Customer Invoices to Approve
* Company Analysis
* Graph of Treasury
Processes like maintaining general ledgers are done through the defined Financial Journals (entry move line or grouping is maintained through a journal)
for a particular financial year and for preparation of vouchers there is a module named account_voucher.
""",
'website': 'http://www.openerp.com',
'images' : ['images/accounts.jpeg','images/bank_statement.jpeg','images/cash_register.jpeg','images/chart_of_accounts.jpeg','images/customer_invoice.jpeg','images/journal_entries.jpeg'],
'depends' : ['base_setup', 'product', 'analytic', 'board', 'edi', 'report'],
'data': [
'security/account_security.xml',
'security/ir.model.access.csv',
'account_menuitem.xml',
'report/account_invoice_report_view.xml',
'report/account_entries_report_view.xml',
'report/account_treasury_report_view.xml',
'report/account_report_view.xml',
'report/account_analytic_entries_report_view.xml',
'wizard/account_move_bank_reconcile_view.xml',
'wizard/account_use_model_view.xml',
'account_installer.xml',
'wizard/account_period_close_view.xml',
'wizard/account_reconcile_view.xml',
'wizard/account_unreconcile_view.xml',
'wizard/account_statement_from_invoice_view.xml',
'account_view.xml',
'account_report.xml',
'account_financial_report_data.xml',
'wizard/account_report_common_view.xml',
'wizard/account_invoice_refund_view.xml',
'wizard/account_fiscalyear_close_state.xml',
'wizard/account_chart_view.xml',
'wizard/account_tax_chart_view.xml',
'wizard/account_move_line_reconcile_select_view.xml',
'wizard/account_open_closed_fiscalyear_view.xml',
'wizard/account_move_line_unreconcile_select_view.xml',
'wizard/account_vat_view.xml',
'wizard/account_report_print_journal_view.xml',
'wizard/account_report_general_journal_view.xml',
'wizard/account_report_central_journal_view.xml',
'wizard/account_subscription_generate_view.xml',
'wizard/account_fiscalyear_close_view.xml',
'wizard/account_state_open_view.xml',
'wizard/account_journal_select_view.xml',
'wizard/account_change_currency_view.xml',
'wizard/account_validate_move_view.xml',
'wizard/account_report_general_ledger_view.xml',
'wizard/account_invoice_state_view.xml',
'wizard/account_report_partner_balance_view.xml',
'wizard/account_report_account_balance_view.xml',
'wizard/account_report_aged_partner_balance_view.xml',
'wizard/account_report_partner_ledger_view.xml',
'wizard/account_reconcile_partner_process_view.xml',
'wizard/account_automatic_reconcile_view.xml',
'wizard/account_financial_report_view.xml',
'wizard/pos_box.xml',
'project/wizard/project_account_analytic_line_view.xml',
'account_end_fy.xml',
'account_invoice_view.xml',
'data/account_data.xml',
'data/data_account_type.xml',
'data/configurable_account_chart.xml',
'account_invoice_workflow.xml',
'project/project_view.xml',
'project/project_report.xml',
'project/wizard/account_analytic_balance_report_view.xml',
'project/wizard/account_analytic_cost_ledger_view.xml',
'project/wizard/account_analytic_inverted_balance_report.xml',
'project/wizard/account_analytic_journal_report_view.xml',
'project/wizard/account_analytic_cost_ledger_for_journal_report_view.xml',
'project/wizard/account_analytic_chart_view.xml',
'partner_view.xml',
'product_view.xml',
'account_assert_test.xml',
'ir_sequence_view.xml',
'company_view.xml',
'edi/invoice_action_data.xml',
'account_bank_view.xml',
'res_config_view.xml',
'account_pre_install.yml',
'views/report_vat.xml',
'views/report_invoice.xml',
'views/report_trialbalance.xml',
'views/report_centraljournal.xml',
'views/report_overdue.xml',
'views/report_generaljournal.xml',
'views/report_journal.xml',
'views/report_salepurchasejournal.xml',
'views/report_partnerbalance.xml',
'views/report_agedpartnerbalance.xml',
'views/report_partnerledger.xml',
'views/report_partnerledgerother.xml',
'views/report_financial.xml',
'views/report_generalledger.xml',
'project/views/report_analyticbalance.xml',
'project/views/report_analyticjournal.xml',
'project/views/report_analyticcostledgerquantity.xml',
'project/views/report_analyticcostledger.xml',
'project/views/report_invertedanalyticbalance.xml',
'views/account.xml',
],
'qweb' : [
"static/src/xml/account_move_reconciliation.xml",
"static/src/xml/account_move_line_quickadd.xml",
"static/src/xml/account_bank_statement_reconciliation.xml",
],
'demo': [
'demo/account_demo.xml',
'project/project_demo.xml',
'project/analytic_account_demo.xml',
'demo/account_minimal.xml',
'demo/account_invoice_demo.xml',
'demo/account_bank_statement.xml',
'account_unit_test.xml',
],
'test': [
'test/account_test_users.yml',
'test/account_customer_invoice.yml',
'test/account_supplier_invoice.yml',
'test/account_change_currency.yml',
'test/chart_of_account.yml',
'test/account_period_close.yml',
'test/account_use_model.yml',
'test/account_validate_account_move.yml',
'test/test_edi_invoice.yml',
'test/account_report.yml',
'test/account_fiscalyear_close.yml', #last test, as it will definitively close the demo fiscalyear
],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
alistairlow/tensorflow | tensorflow/python/ops/distributions/transformed_distribution.py | 8 | 26280 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A Transformed Distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
# Bijectors must be directly imported because `remove_undocumented` prevents
# individual file imports.
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import distribution as distribution_lib
from tensorflow.python.ops.distributions import identity_bijector
from tensorflow.python.ops.distributions import util as distribution_util
# Names exported by `from ... import *`; everything else in this module is
# an internal helper.
__all__ = [
    "TransformedDistribution",
]
# The following helper functions attempt to statically perform a TF operation.
# These functions make debugging easier since we can do more validation during
# graph construction.
def _static_value(x):
  """Returns the statically-known value of `x` as a numpy array, or `None`."""
  tensor = ops.convert_to_tensor(x)
  return tensor_util.constant_value(tensor)
def _logical_and(*args):
  """Convenience function which attempts to statically `reduce_all`."""
  static_vals = [_static_value(a) for a in args]
  # Any statically-False operand makes the conjunction statically False.
  if any(v is not None and not bool(v) for v in static_vals):
    return constant_op.constant(False)
  # All operands statically True makes the conjunction statically True.
  if all(v is not None and bool(v) for v in static_vals):
    return constant_op.constant(True)
  # Otherwise fall back to graph-time ops.
  if len(args) == 2:
    return math_ops.logical_and(*args)
  return math_ops.reduce_all(args)
def _logical_equal(x, y):
  """Convenience function which attempts to statically compute `x == y`."""
  x_static = _static_value(x)
  y_static = _static_value(y)
  if x_static is None or y_static is None:
    # At least one side is only known at graph-execution time.
    return math_ops.equal(x, y)
  return constant_op.constant(np.array_equal(x_static, y_static))
def _logical_not(x):
  """Convenience function which attempts to statically apply `logical_not`."""
  static_x = _static_value(x)
  if static_x is not None:
    # Value is known now; negate it eagerly with numpy.
    return constant_op.constant(np.logical_not(static_x))
  return math_ops.logical_not(x)
def _concat_vectors(*args):
  """Convenience function which concatenates input vectors."""
  static_args = [_static_value(v) for v in args]
  if all(v is not None for v in static_args):
    # Every vector is statically known: flatten them into one constant.
    return constant_op.constant([x for vec in static_args for x in vec])
  return array_ops.concat(args, 0)
def _pick_scalar_condition(pred, cond_true, cond_false):
  """Convenience function which chooses the condition based on the predicate."""
  # Note: only valid when pred, cond_true, and cond_false are all scalars,
  # which is what lets tf.select stand in for tf.cond here.
  static_pred = _static_value(pred)
  if static_pred is not None:
    return cond_true if static_pred else cond_false
  return array_ops.where(pred, cond_true, cond_false)
def _ones_like(x):
  """Convenience function attempts to statically construct `ones_like`."""
  # Should only be used for small vectors.
  static_shape = x.get_shape()
  if static_shape.is_fully_defined():
    return array_ops.ones(static_shape.as_list(), dtype=x.dtype)
  return array_ops.ones_like(x)
def _ndims_from_shape(shape):
  """Returns `Tensor`'s `rank` implied by a `Tensor` shape."""
  static_shape = shape.get_shape()
  if static_shape.ndims not in (None, 1):
    raise ValueError("input is not a valid shape: not 1D")
  if not shape.dtype.is_integer:
    raise TypeError("input is not a valid shape: wrong dtype")
  if static_shape.is_fully_defined():
    # Length of the shape vector is known statically.
    return constant_op.constant(static_shape.as_list()[0])
  return array_ops.shape(shape)[0]
def _is_scalar_from_shape(shape):
  """Returns `True` `Tensor` if `Tensor` shape implies a scalar."""
  # A scalar has rank 0, i.e. a zero-length shape vector.
  return _logical_equal(_ndims_from_shape(shape), 0)
class TransformedDistribution(distribution_lib.Distribution):
"""A Transformed Distribution.
A `TransformedDistribution` models `p(y)` given a base distribution `p(x)`,
and a deterministic, invertible, differentiable transform, `Y = g(X)`. The
transform is typically an instance of the `Bijector` class and the base
distribution is typically an instance of the `Distribution` class.
A `Bijector` is expected to implement the following functions:
- `forward`,
- `inverse`,
- `inverse_log_det_jacobian`.
The semantics of these functions are outlined in the `Bijector` documentation.
We now describe how a `TransformedDistribution` alters the input/outputs of a
`Distribution` associated with a random variable (rv) `X`.
Write `cdf(Y=y)` for an absolutely continuous cumulative distribution function
of random variable `Y`; write the probability density function `pdf(Y=y) :=
d^k / (dy_1,...,dy_k) cdf(Y=y)` for its derivative wrt to `Y` evaluated at
`y`. Assume that `Y = g(X)` where `g` is a deterministic diffeomorphism,
i.e., a non-random, continuous, differentiable, and invertible function.
Write the inverse of `g` as `X = g^{-1}(Y)` and `(J o g)(x)` for the Jacobian
of `g` evaluated at `x`.
A `TransformedDistribution` implements the following operations:
* `sample`
Mathematically: `Y = g(X)`
Programmatically: `bijector.forward(distribution.sample(...))`
* `log_prob`
Mathematically: `(log o pdf)(Y=y) = (log o pdf o g^{-1})(y)
+ (log o abs o det o J o g^{-1})(y)`
Programmatically: `(distribution.log_prob(bijector.inverse(y))
+ bijector.inverse_log_det_jacobian(y))`
* `log_cdf`
Mathematically: `(log o cdf)(Y=y) = (log o cdf o g^{-1})(y)`
Programmatically: `distribution.log_cdf(bijector.inverse(x))`
* and similarly for: `cdf`, `prob`, `log_survival_function`,
`survival_function`.
A simple example constructing a Log-Normal distribution from a Normal
distribution:
```python
ds = tf.contrib.distributions
log_normal = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
bijector=ds.bijectors.Exp(),
name="LogNormalTransformedDistribution")
```
A `LogNormal` made from callables:
```python
ds = tf.contrib.distributions
log_normal = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
bijector=ds.bijectors.Inline(
forward_fn=tf.exp,
inverse_fn=tf.log,
inverse_log_det_jacobian_fn=(
lambda y: -tf.reduce_sum(tf.log(y), axis=-1)),
name="LogNormalTransformedDistribution")
```
Another example constructing a Normal from a StandardNormal:
```python
ds = tf.contrib.distributions
normal = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
bijector=ds.bijectors.Affine(
shift=-1.,
scale_identity_multiplier=2.,
event_ndims=0),
name="NormalTransformedDistribution")
```
A `TransformedDistribution`'s batch- and event-shape are implied by the base
distribution unless explicitly overridden by `batch_shape` or `event_shape`
arguments. Specifying an overriding `batch_shape` (`event_shape`) is
permitted only if the base distribution has scalar batch-shape (event-shape).
The bijector is applied to the distribution as if the distribution possessed
the overridden shape(s). The following example demonstrates how to construct a
multivariate Normal as a `TransformedDistribution`.
```python
ds = tf.contrib.distributions
# We will create two MVNs with batch_shape = event_shape = 2.
mean = [[-1., 0], # batch:0
[0., 1]] # batch:1
chol_cov = [[[1., 0],
[0, 1]], # batch:0
[[1, 0],
[2, 2]]] # batch:1
mvn1 = ds.TransformedDistribution(
distribution=ds.Normal(loc=0., scale=1.),
bijector=ds.bijectors.Affine(shift=mean, scale_tril=chol_cov),
batch_shape=[2], # Valid because base_distribution.batch_shape == [].
event_shape=[2]) # Valid because base_distribution.event_shape == [].
mvn2 = ds.MultivariateNormalTriL(loc=mean, scale_tril=chol_cov)
# mvn1.log_prob(x) == mvn2.log_prob(x)
```
"""
def __init__(self,
distribution,
bijector=None,
batch_shape=None,
event_shape=None,
validate_args=False,
name=None):
"""Construct a Transformed Distribution.
Args:
distribution: The base distribution instance to transform. Typically an
instance of `Distribution`.
bijector: The object responsible for calculating the transformation.
Typically an instance of `Bijector`. `None` means `Identity()`.
batch_shape: `integer` vector `Tensor` which overrides `distribution`
`batch_shape`; valid only if `distribution.is_scalar_batch()`.
event_shape: `integer` vector `Tensor` which overrides `distribution`
`event_shape`; valid only if `distribution.is_scalar_event()`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
name: Python `str` name prefixed to Ops created by this class. Default:
`bijector.name + distribution.name`.
"""
parameters = locals()
name = name or (("" if bijector is None else bijector.name) +
distribution.name)
with ops.name_scope(name, values=[event_shape, batch_shape]):
# For convenience we define some handy constants.
self._zero = constant_op.constant(0, dtype=dtypes.int32, name="zero")
self._empty = constant_op.constant([], dtype=dtypes.int32, name="empty")
if bijector is None:
bijector = identity_bijector.Identity(validate_args=validate_args)
# We will keep track of a static and dynamic version of
# self._is_{batch,event}_override. This way we can do more prior to graph
# execution, including possibly raising Python exceptions.
self._override_batch_shape = self._maybe_validate_shape_override(
batch_shape, distribution.is_scalar_batch(), validate_args,
"batch_shape")
self._is_batch_override = _logical_not(_logical_equal(
_ndims_from_shape(self._override_batch_shape), self._zero))
self._is_maybe_batch_override = bool(
tensor_util.constant_value(self._override_batch_shape) is None or
tensor_util.constant_value(self._override_batch_shape).size != 0)
self._override_event_shape = self._maybe_validate_shape_override(
event_shape, distribution.is_scalar_event(), validate_args,
"event_shape")
self._is_event_override = _logical_not(_logical_equal(
_ndims_from_shape(self._override_event_shape), self._zero))
self._is_maybe_event_override = bool(
tensor_util.constant_value(self._override_event_shape) is None or
tensor_util.constant_value(self._override_event_shape).size != 0)
# To convert a scalar distribution into a multivariate distribution we
# will draw dims from the sample dims, which are otherwise iid. This is
# easy to do except in the case that the base distribution has batch dims
# and we're overriding event shape. When that case happens the event dims
# will incorrectly be to the left of the batch dims. In this case we'll
# cyclically permute left the new dims.
self._needs_rotation = _logical_and(
self._is_event_override,
_logical_not(self._is_batch_override),
_logical_not(distribution.is_scalar_batch()))
override_event_ndims = _ndims_from_shape(self._override_event_shape)
self._rotate_ndims = _pick_scalar_condition(
self._needs_rotation, override_event_ndims, 0)
# We'll be reducing the head dims (if at all), i.e., this will be []
# if we don't need to reduce.
self._reduce_event_indices = math_ops.range(
self._rotate_ndims - override_event_ndims, self._rotate_ndims)
self._distribution = distribution
self._bijector = bijector
super(TransformedDistribution, self).__init__(
dtype=self._distribution.dtype,
reparameterization_type=self._distribution.reparameterization_type,
validate_args=validate_args,
allow_nan_stats=self._distribution.allow_nan_stats,
parameters=parameters,
# We let TransformedDistribution access _graph_parents since this class
# is more like a baseclass than derived.
graph_parents=(distribution._graph_parents + # pylint: disable=protected-access
bijector.graph_parents),
name=name)
@property
def distribution(self):
"""Base distribution, p(x)."""
return self._distribution
@property
def bijector(self):
"""Function transforming x => y."""
return self._bijector
def _event_shape_tensor(self):
return self.bijector.forward_event_shape_tensor(
distribution_util.pick_vector(
self._is_event_override,
self._override_event_shape,
self.distribution.event_shape_tensor()))
def _event_shape(self):
# If there's a chance that the event_shape has been overridden, we return
# what we statically know about the `event_shape_override`. This works
# because: `_is_maybe_event_override` means `static_override` is `None` or a
# non-empty list, i.e., we don't statically know the `event_shape` or we do.
#
# Since the `bijector` may change the `event_shape`, we then forward what we
# know to the bijector. This allows the `bijector` to have final say in the
# `event_shape`.
static_override = tensor_util.constant_value_as_shape(
self._override_event_shape)
return self.bijector.forward_event_shape(
static_override
if self._is_maybe_event_override
else self.distribution.event_shape)
def _batch_shape_tensor(self):
return distribution_util.pick_vector(
self._is_batch_override,
self._override_batch_shape,
self.distribution.batch_shape_tensor())
def _batch_shape(self):
# If there's a chance that the batch_shape has been overridden, we return
# what we statically know about the `batch_shape_override`. This works
# because: `_is_maybe_batch_override` means `static_override` is `None` or a
# non-empty list, i.e., we don't statically know the `batch_shape` or we do.
#
# Notice that this implementation parallels the `_event_shape` except that
# the `bijector` doesn't get to alter the `batch_shape`. Recall that
# `batch_shape` is a property of a distribution while `event_shape` is
# shared between both the `distribution` instance and the `bijector`.
static_override = tensor_util.constant_value_as_shape(
self._override_batch_shape)
return (static_override
if self._is_maybe_batch_override
else self.distribution.batch_shape)
def _sample_n(self, n, seed=None):
sample_shape = _concat_vectors(
distribution_util.pick_vector(self._needs_rotation, self._empty, [n]),
self._override_batch_shape,
self._override_event_shape,
distribution_util.pick_vector(self._needs_rotation, [n], self._empty))
x = self.distribution.sample(sample_shape=sample_shape, seed=seed)
x = self._maybe_rotate_dims(x)
# We'll apply the bijector in the `_call_sample_n` function.
return x
def _call_sample_n(self, sample_shape, seed, name, **kwargs):
# We override `_call_sample_n` rather than `_sample_n` so we can ensure that
# the result of `self.bijector.forward` is not modified (and thus caching
# works).
with self._name_scope(name, values=[sample_shape]):
sample_shape = ops.convert_to_tensor(
sample_shape, dtype=dtypes.int32, name="sample_shape")
sample_shape, n = self._expand_sample_shape_to_vector(
sample_shape, "sample_shape")
# First, generate samples. We will possibly generate extra samples in the
# event that we need to reinterpret the samples as part of the
# event_shape.
x = self._sample_n(n, seed, **kwargs)
# Next, we reshape `x` into its final form. We do this prior to the call
# to the bijector to ensure that the bijector caching works.
batch_event_shape = array_ops.shape(x)[1:]
final_shape = array_ops.concat([sample_shape, batch_event_shape], 0)
x = array_ops.reshape(x, final_shape)
# Finally, we apply the bijector's forward transformation. For caching to
# work, it is imperative that this is the last modification to the
# returned result.
y = self.bijector.forward(x, **kwargs)
y = self._set_sample_static_shape(y, sample_shape)
return y
def _log_prob(self, y):
# For caching to work, it is imperative that the bijector is the first to
# modify the input.
x = self.bijector.inverse(y)
ildj = self.bijector.inverse_log_det_jacobian(y)
if self.bijector._is_injective: # pylint: disable=protected-access
return self._finish_log_prob_for_one_fiber(y, x, ildj)
lp_on_fibers = [
self._finish_log_prob_for_one_fiber(y, x_i, ildj_i)
for x_i, ildj_i in zip(x, ildj)]
return math_ops.reduce_logsumexp(array_ops.stack(lp_on_fibers), axis=0)
def _finish_log_prob_for_one_fiber(self, y, x, ildj):
"""Finish computation of log_prob on one element of the inverse image."""
x = self._maybe_rotate_dims(x, rotate_right=True)
log_prob = self.distribution.log_prob(x)
if self._is_maybe_event_override:
log_prob = math_ops.reduce_sum(log_prob, self._reduce_event_indices)
log_prob = ildj + log_prob
if self._is_maybe_event_override:
log_prob.set_shape(array_ops.broadcast_static_shape(
y.get_shape().with_rank_at_least(1)[:-1], self.batch_shape))
return log_prob
def _prob(self, y):
x = self.bijector.inverse(y)
ildj = self.bijector.inverse_log_det_jacobian(y)
if self.bijector._is_injective: # pylint: disable=protected-access
return self._finish_prob_for_one_fiber(y, x, ildj)
prob_on_fibers = [
self._finish_prob_for_one_fiber(y, x_i, ildj_i)
for x_i, ildj_i in zip(x, ildj)]
return sum(prob_on_fibers)
def _finish_prob_for_one_fiber(self, y, x, ildj):
"""Finish computation of prob on one element of the inverse image."""
x = self._maybe_rotate_dims(x, rotate_right=True)
prob = self.distribution.prob(x)
if self._is_maybe_event_override:
prob = math_ops.reduce_prod(prob, self._reduce_event_indices)
prob *= math_ops.exp(ildj)
if self._is_maybe_event_override:
prob.set_shape(array_ops.broadcast_static_shape(
y.get_shape().with_rank_at_least(1)[:-1], self.batch_shape))
return prob
def _log_cdf(self, y):
if self._is_maybe_event_override:
raise NotImplementedError("log_cdf is not implemented when overriding "
"event_shape")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("log_cdf is not implemented when "
"bijector is not injective.")
x = self.bijector.inverse(y)
return self.distribution.log_cdf(x)
def _cdf(self, y):
if self._is_maybe_event_override:
raise NotImplementedError("cdf is not implemented when overriding "
"event_shape")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("cdf is not implemented when "
"bijector is not injective.")
x = self.bijector.inverse(y)
return self.distribution.cdf(x)
def _log_survival_function(self, y):
if self._is_maybe_event_override:
raise NotImplementedError("log_survival_function is not implemented when "
"overriding event_shape")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("log_survival_function is not implemented when "
"bijector is not injective.")
x = self.bijector.inverse(y)
return self.distribution.log_survival_function(x)
def _survival_function(self, y):
if self._is_maybe_event_override:
raise NotImplementedError("survival_function is not implemented when "
"overriding event_shape")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("survival_function is not implemented when "
"bijector is not injective.")
x = self.bijector.inverse(y)
return self.distribution.survival_function(x)
def _quantile(self, value):
if self._is_maybe_event_override:
raise NotImplementedError("quantile is not implemented when overriding "
"event_shape")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("quantile is not implemented when "
"bijector is not injective.")
# x_q is the "qth quantile" of X iff q = P[X <= x_q]. Now, since X =
# g^{-1}(Y), q = P[X <= x_q] = P[g^{-1}(Y) <= x_q] = P[Y <= g(x_q)],
# implies the qth quantile of Y is g(x_q).
inv_cdf = self.distribution.quantile(value)
return self.bijector.forward(inv_cdf)
def _entropy(self):
if not self.bijector.is_constant_jacobian:
raise NotImplementedError("entropy is not implemented")
if not self.bijector._is_injective: # pylint: disable=protected-access
raise NotImplementedError("entropy is not implemented when "
"bijector is not injective.")
# Suppose Y = g(X) where g is a diffeomorphism and X is a continuous rv. It
# can be shown that:
# H[Y] = H[X] + E_X[(log o abs o det o J o g)(X)].
# If is_constant_jacobian then:
# E_X[(log o abs o det o J o g)(X)] = (log o abs o det o J o g)(c)
# where c can by anything.
entropy = self.distribution.entropy()
if self._is_maybe_event_override:
# H[X] = sum_i H[X_i] if X_i are mutually independent.
# This means that a reduce_sum is a simple rescaling.
entropy *= math_ops.cast(math_ops.reduce_prod(self._override_event_shape),
dtype=entropy.dtype.base_dtype)
if self._is_maybe_batch_override:
new_shape = array_ops.concat([
_ones_like(self._override_batch_shape),
self.distribution.batch_shape_tensor()
], 0)
entropy = array_ops.reshape(entropy, new_shape)
multiples = array_ops.concat([
self._override_batch_shape,
_ones_like(self.distribution.batch_shape_tensor())
], 0)
entropy = array_ops.tile(entropy, multiples)
dummy = array_ops.zeros([], self.dtype)
entropy -= self.bijector.inverse_log_det_jacobian(dummy)
entropy.set_shape(self.batch_shape)
return entropy
def _maybe_validate_shape_override(self, override_shape, base_is_scalar,
validate_args, name):
"""Helper to __init__ which ensures override batch/event_shape are valid."""
if override_shape is None:
override_shape = []
override_shape = ops.convert_to_tensor(override_shape, dtype=dtypes.int32,
name=name)
if not override_shape.dtype.is_integer:
raise TypeError("shape override must be an integer")
override_is_scalar = _is_scalar_from_shape(override_shape)
if tensor_util.constant_value(override_is_scalar):
return self._empty
dynamic_assertions = []
if override_shape.get_shape().ndims is not None:
if override_shape.get_shape().ndims != 1:
raise ValueError("shape override must be a vector")
elif validate_args:
dynamic_assertions += [check_ops.assert_rank(
override_shape, 1,
message="shape override must be a vector")]
if tensor_util.constant_value(override_shape) is not None:
if any(s <= 0 for s in tensor_util.constant_value(override_shape)):
raise ValueError("shape override must have positive elements")
elif validate_args:
dynamic_assertions += [check_ops.assert_positive(
override_shape,
message="shape override must have positive elements")]
is_both_nonscalar = _logical_and(_logical_not(base_is_scalar),
_logical_not(override_is_scalar))
if tensor_util.constant_value(is_both_nonscalar) is not None:
if tensor_util.constant_value(is_both_nonscalar):
raise ValueError("base distribution not scalar")
elif validate_args:
dynamic_assertions += [check_ops.assert_equal(
is_both_nonscalar, False,
message="base distribution not scalar")]
if not dynamic_assertions:
return override_shape
return control_flow_ops.with_dependencies(
dynamic_assertions, override_shape)
  def _maybe_rotate_dims(self, x, rotate_right=False):
    """Helper which rolls left event_dims left or right event_dims right."""
    # Fast path: when it is statically known that no rotation is needed,
    # return the input unchanged.
    needs_rotation_const = tensor_util.constant_value(self._needs_rotation)
    if needs_rotation_const is not None and not needs_rotation_const:
      return x
    ndims = array_ops.rank(x)
    # Split point for the rotation: `self._rotate_ndims` leading dims move
    # to the back (or, when rotate_right, trailing dims move to the front).
    n = (ndims - self._rotate_ndims) if rotate_right else self._rotate_ndims
    # Rotate via a transpose with permutation [n, ..., ndims-1, 0, ..., n-1].
    return array_ops.transpose(
        x, _concat_vectors(math_ops.range(n, ndims), math_ops.range(0, n)))
| apache-2.0 |
staticlibs/android-ndk-r9d-arm-linux-androideabi-4.8 | lib/python2.7/test/test_select.py | 84 | 2163 | from test import test_support
import unittest
import select
import os
import sys
@unittest.skipIf(sys.platform[:3] in ('win', 'os2', 'riscos'),
                 "can't easily test on this system")
class SelectTestCase(unittest.TestCase):
    """Tests for select.select().

    NOTE: Python 2 code (print statements, os.popen); skipped entirely on
    platforms where select() is hard to drive from a test.
    """

    class Nope:
        # Object with no fileno() at all -- select() must reject it.
        pass

    class Almost:
        # Object whose fileno() returns a non-integer -- also rejected.
        def fileno(self):
            return 'fileno'

    def test_error_conditions(self):
        # select() raises TypeError for non-sequence arguments, bad
        # file-descriptor objects, and a non-numeric timeout.
        self.assertRaises(TypeError, select.select, 1, 2, 3)
        self.assertRaises(TypeError, select.select, [self.Nope()], [], [])
        self.assertRaises(TypeError, select.select, [self.Almost()], [], [])
        self.assertRaises(TypeError, select.select, [], [], [], "not a number")

    def test_returned_list_identity(self):
        # See issue #8329
        # The three returned lists must be distinct objects even when all
        # of them are empty.
        r, w, x = select.select([], [], [], 1)
        self.assertIsNot(r, w)
        self.assertIsNot(r, x)
        self.assertIsNot(w, x)

    def test_select(self):
        # Poll a slowly-producing subprocess with a range of timeouts
        # until EOF is seen.
        cmd = 'for i in 0 1 2 3 4 5 6 7 8 9; do echo testing...; sleep 1; done'
        p = os.popen(cmd, 'r')
        for tout in (0, 1, 2, 4, 8, 16) + (None,)*10:
            if test_support.verbose:
                print 'timeout =', tout
            rfd, wfd, xfd = select.select([p], [], [], tout)
            if (rfd, wfd, xfd) == ([], [], []):
                # Timed out with no data yet; retry with the next timeout.
                continue
            if (rfd, wfd, xfd) == ([p], [], []):
                line = p.readline()
                if test_support.verbose:
                    print repr(line)
                if not line:
                    if test_support.verbose:
                        print 'EOF'
                    break
                continue
            self.fail('Unexpected return values from select():', rfd, wfd, xfd)
        p.close()

    # Issue 16230: Crash on select resized list
    def test_select_mutated(self):
        # Each F.fileno() call shrinks the list select() is iterating
        # over; select() must cope with that instead of crashing.
        a = []
        class F:
            def fileno(self):
                del a[-1]
                return sys.__stdout__.fileno()
        a[:] = [F()] * 10
        self.assertEqual(select.select([], a, []), ([], a[:5], []))
def test_main():
    # Run the suite through test_support so regrtest accounting works,
    # then reap any child processes left behind by os.popen.
    test_support.run_unittest(SelectTestCase)
    test_support.reap_children()

if __name__ == "__main__":
    test_main()
| gpl-2.0 |
kevinlondon/youtube-dl | youtube_dl/extractor/motorsport.py | 129 | 1797 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import (
compat_urlparse,
)
class MotorsportIE(InfoExtractor):
    IE_DESC = 'motorsport.com'
    _VALID_URL = r'http://www\.motorsport\.com/[^/?#]+/video/(?:[^/?#]+/)(?P<id>[^/]+)/?(?:$|[?#])'
    _TEST = {
        'url': 'http://www.motorsport.com/f1/video/main-gallery/red-bull-racing-2014-rules-explained/',
        'info_dict': {
            'id': '2-T3WuR-KMM',
            'ext': 'mp4',
            'title': 'Red Bull Racing: 2014 Rules Explained',
            'duration': 208,
            'description': 'A new clip from Red Bull sees Daniel Ricciardo and Sebastian Vettel explain the 2014 Formula One regulations – which are arguably the most complex the sport has ever seen.',
            'uploader': 'mcomstaff',
            'uploader_id': 'UC334JIYKkVnyFoNCclfZtHQ',
            'upload_date': '20140903',
            'thumbnail': r're:^https?://.+\.jpg$'
        },
        'add_ie': ['Youtube'],
        'params': {
            'skip_download': True,
        },
    }

    def _real_extract(self, url):
        display_id = self._match_id(url)
        page = self._download_webpage(url, display_id)

        # The page embeds the actual player in an iframe; resolve its URL
        # relative to the page and fetch it to discover the YouTube id.
        player_path = self._html_search_regex(
            r'<iframe id="player_iframe"[^>]+src="([^"]+)"', page,
            'iframe path')
        player_page = self._download_webpage(
            compat_urlparse.urljoin(url, player_path), display_id,
            'Downloading iframe')
        youtube_id = self._search_regex(
            r'www.youtube.com/embed/(.{11})', player_page, 'youtube id')

        # Hand off to the YouTube extractor via a transparent URL result.
        result = {
            '_type': 'url_transparent',
            'display_id': display_id,
            'url': 'https://youtube.com/watch?v=%s' % youtube_id,
        }
        return result
| unlicense |
rspavel/spack | var/spack/repos/builtin/packages/branson/package.py | 3 | 1683 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Branson(CMakePackage):
    """Branson's purpose is to study different algorithms for parallel Monte
    Carlo transport. Currently it contains particle passing and mesh passing
    methods for domain decomposition."""

    homepage = "https://github.com/lanl/branson"
    url = "https://github.com/lanl/branson/archive/0.82.tar.gz"
    git = "https://github.com/lanl/branson.git"

    tags = ['proxy-app']

    version('develop', branch='develop')
    version('0.82', sha256='7d83d41d0c7ab9c1c906a902165af31182da4604dd0b69aec28d709fe4d7a6ec',
            preferred=True)
    version('0.81', sha256='493f720904791f06b49ff48c17a681532c6a4d9fa59636522cf3f9700e77efe4')
    version('0.8', sha256='85ffee110f89be00c37798700508b66b0d15de1d98c54328b6d02a9eb2cf1cb8')

    depends_on('mpi@2:')
    depends_on('boost', when='@:0.81')
    depends_on('metis')
    depends_on('parmetis', when='@:0.82')

    root_cmakelists_dir = 'src'

    def cmake_args(self):
        # Point CMake at the MPI compiler wrappers for C, C++ and Fortran.
        mpi = self.spec['mpi']
        return [
            '-DCMAKE_C_COMPILER=%s' % mpi.mpicc,
            '-DCMAKE_CXX_COMPILER=%s' % mpi.mpicxx,
            '-DCMAKE_Fortran_COMPILER=%s' % mpi.mpifc,
        ]

    def install(self, spec, prefix):
        # No install target upstream: copy the binary and docs by hand.
        mkdir(prefix.bin)
        mkdir(prefix.doc)
        install('../spack-build/BRANSON', prefix.bin)
        install('LICENSE.md', prefix.doc)
        install('README.md', prefix.doc)
| lgpl-2.1 |
MiltosD/CEF-ELRC | metashare/repository/editor/editorutils.py | 3 | 2218 | '''
A number of utility functions and classes which do not easily fit into a single
place.
To avoid circular imports, this file must not import anything from
metashare.repository.editor.
'''
from django.db import models
from django.contrib.admin.views.main import ChangeList
from django.core.urlresolvers import reverse
# inline names included in fieldsets are prepended with an '_'
def encode_as_inline(name):
    """Mark *name* as an inline by prepending the underscore prefix."""
    marked = '_' + name
    return marked
def decode_inline(fieldname):
    """Strip the leading underscore marker from *fieldname*, if present."""
    return fieldname[1:] if fieldname.startswith('_') else fieldname
def is_inline(fieldname):
    """Return True if *fieldname* carries the inline marker (leading '_')."""
    # str.startswith already yields a bool; no need for an if/else ladder
    # returning True/False explicitly.
    return fieldname.startswith('_')
class FilteredChangeList(ChangeList):
    """
    A FilteredChangeList filters the result_list for request.user objects.

    This implementation always filters; use the superclass ChangeList for
    unfiltered views.
    """
    def __init__(self, request, model, list_display, list_display_links,
            list_filter, date_hierarchy, search_fields, list_select_related,
            list_per_page, list_editable, model_admin):
        # Call super constructor to initialise object instance.
        super(FilteredChangeList, self).__init__(request, model, list_display,
            list_display_links, list_filter, date_hierarchy, search_fields,
            list_select_related, list_per_page, list_editable, model_admin)
        # Check if the current model has an "owners" ManyToManyField.
        _has_owners_field = False
        if 'owners' in self.opts.get_all_field_names():
            _field = self.opts.get_field_by_name('owners')[0]
            _has_owners_field = isinstance(_field, models.ManyToManyField)
        # If "owners" are available, we
        # have to constrain the QuerySet using an additional filter so only
        # objects owned by the requesting user remain visible.
        # NOTE(review): root_query_set/get_query_set are the pre-Django-1.6
        # ChangeList attribute names -- confirm the Django version before
        # renaming.
        if _has_owners_field:
            _user = request.user
            self.root_query_set = self.root_query_set.filter(owners=_user)

        # Recompute the (filtered) queryset and materialize the results.
        self.query_set = self.get_query_set()
        self.get_results(request)
def url_for_result(self, result):
return reverse("editor:{}_{}_change".format(self.opts.app_label, self.opts.module_name), args=(getattr(result, self.pk_attname),)) | bsd-3-clause |
therealjumbo/python_summer | py31eg/print_unicode_ans.py | 2 | 1561 | #!/usr/bin/env python3
# Copyright (c) 2008-11 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. It is provided for educational
# purposes and is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import sys
import unicodedata
def print_unicode_table(words):
    """Print a table of Unicode characters whose names contain all *words*.

    Each row shows the code point in decimal and hex, the character
    itself, and its title-cased Unicode name.  *words* must be an
    iterable of lowercase search terms; a character is listed only when
    every term occurs in its (lowercased) Unicode name.  An empty
    *words* lists every named character.
    """
    print("decimal   hex   chr  {0:^40}".format("name"))
    print("-------  -----  ---  {0:-<40}".format(""))

    # Stop before the surrogate range, which contains no named characters.
    end = min(0xD800, sys.maxunicode)
    # Idiomatic for-loop over the code points instead of a manual while.
    for code in range(ord(" "), end):
        c = chr(code)
        name = unicodedata.name(c, "*** unknown ***")
        # Hoist the lowercasing out of the per-word test and collapse the
        # manual flag loop into all().
        lowered = name.lower()
        if all(word in lowered for word in words):
            print("{0:7}  {0:5X}  {0:^3c}  {1}".format(
                  code, name.title()))
# Command-line entry point: each argument is a lowercased search term.
# With -h/--help print usage and skip the table (words set to None);
# with no arguments print every named character.
words = []
if len(sys.argv) > 1:
    if sys.argv[1] in ("-h", "--help"):
        print("usage: {0} [string1 [string2 [... stringN]]]".format(
              sys.argv[0]))
        words = None
    else:
        for word in sys.argv[1:]:
            words.append(word.lower())
if words is not None:
    print_unicode_table(words)
| gpl-3.0 |
gcw/pycon2014-web2py-tutorial | applications/lemessage/routes.example.py | 105 | 1470 | # -*- coding: utf-8 -*-
# This is an app-specific example router
#
# This simple router is used for setting languages from app/languages directory
# as a part of the application path: app/<lang>/controller/function
# Language from default.py or 'en' (if the file is not found) is used as
# a default_language
#
# See <web2py-root-dir>/router.example.py for parameter's detail
#-------------------------------------------------------------------------------------
# To enable this route file you must do the steps:
#
# 1. rename <web2py-root-dir>/router.example.py to routes.py
# 2. rename this APP/routes.example.py to APP/routes.py
# (where APP - is your application directory)
# 3. restart web2py (or reload routes in web2py admin interface)
#
# YOU CAN COPY THIS FILE TO ANY APPLICATION'S ROOT DIRECTORY WITHOUT CHANGES!
from fileutils import abspath
from languages import read_possible_languages

# Read the languages available in this application's languages/ directory;
# the returned mapping also carries a special 'default' entry.
possible_languages = read_possible_languages(abspath('applications', app))
#NOTE! app - is an application based router's parameter with name of an
#      application. E.g.'welcome'

routers = {
    app: dict(
        # First element of the 'default' entry is the default language code.
        default_language = possible_languages['default'][0],
        # Every real language (everything except the 'default' marker) may
        # appear as the <lang> path segment.
        languages = [lang for lang in possible_languages
                     if lang != 'default']
    )
}
#NOTE! To change language in your application using these rules add this line
#in one of your models files:
# if request.uri_language: T.force(request.uri_language)
| bsd-2-clause |
astrofrog/ginga | ginga/gtkw/FileSelection.py | 5 | 1784 | #
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
import gtk
# TODO: this code should be shared with integgui and maybe others
class FileSelection(object):
    """File-open/-save dialog built on gtk.FileChooserDialog.

    Construct once, then call popup() with a callback; the callback is
    invoked with the chosen file path when the user confirms the dialog.
    """

    # Get the selected filename
    def file_ok_sel(self, w, rsp):
        # "response" signal handler: hide the dialog, then invoke the
        # stored callback unless the user cancelled (response code 0).
        self.close(w)
        if rsp == 0:
            return
        filepath = self.filew.get_filename()
        self.callfn(filepath)

    def __init__(self, action=gtk.FILE_CHOOSER_ACTION_OPEN):
        # Create a new file selection widget
        self.filew = gtk.FileChooserDialog(title="Select a file",
                                           action=action)
        self.filew.connect("destroy", self.close)
        # Confirm button is Save or Open depending on the dialog action;
        # in both cases the confirm response code is 1, cancel is 0.
        if action == gtk.FILE_CHOOSER_ACTION_SAVE:
            self.filew.add_buttons(gtk.STOCK_SAVE, 1, gtk.STOCK_CANCEL, 0)
        else:
            self.filew.add_buttons(gtk.STOCK_OPEN, 1, gtk.STOCK_CANCEL, 0)
        self.filew.set_default_response(1)

        # Connect the ok_button to file_ok_sel method
        #self.filew.ok_button.connect("clicked", self.file_ok_sel)
        self.filew.connect("response", self.file_ok_sel)

        # Connect the cancel_button to destroy the widget
        #self.filew.cancel_button.connect("clicked", self.close)

    def popup(self, title, callfn, initialdir=None,
              filename=None):
        """Show the dialog.

        Args:
            title: window title to display.
            callfn: one-argument callable invoked with the selected path.
            initialdir: directory to start browsing in, if given.
            filename: suggested file name (useful for save dialogs).
        """
        self.callfn = callfn
        self.filew.set_title(title)
        if initialdir:
            self.filew.set_current_folder(initialdir)

        if filename:
            #self.filew.set_filename(filename)
            self.filew.set_current_name(filename)

        self.filew.show()

    def close(self, widget):
        # Hide (not destroy) the dialog -- presumably so the same widget
        # can be shown again by a later popup() call.
        self.filew.hide()
#END
| bsd-3-clause |
danielrh/losslessh264 | build/mktargets.py | 75 | 6693 | #!/usr/bin/python
import sys
import argparse
import os
# Command-line interface.  Exactly one of --library/--binary/--prefix
# selects the kind of make target block to generate (see below).
parser = argparse.ArgumentParser(description="Make helper parser")
parser.add_argument("--directory", dest="directory", required=True)
parser.add_argument("--library", dest="library", help="Make a library")
parser.add_argument("--binary", dest="binary", help="Make a binary")
parser.add_argument("--prefix", dest="prefix", help="Make a set of objs")
parser.add_argument("--exclude", dest="exclude", help="Exclude file", action="append")
parser.add_argument("--include", dest="include", help="Include file", action="append")
parser.add_argument("--out", dest="out", help="Output file")
parser.add_argument("--cpp-suffix", dest="cpp_suffix", help="C++ file suffix")

# Module-level configuration; overwritten from the parsed args below.
PREFIX=None        # upper-cased make-variable prefix for all emitted rules
LIBRARY=None
BINARY=None
EXCLUDE=[]         # file names to skip when scanning for sources
INCLUDE=[]         # when non-empty, ONLY these file names are picked up
OUTFILE="targets.mk"
CPP_SUFFIX=".cpp"
def write_cpp_rule_pattern(f):
    """Emit the makefile pattern rule that compiles C++ sources."""
    target = "$(%s_SRCDIR)/%%.$(OBJ)"%(PREFIX)
    source = "$(%s_SRCDIR)/%%%s"%(PREFIX, CPP_SUFFIX)
    f.write("%s: %s\n"%(target, source))
    f.write('\t$(QUIET_CXX)$(CXX) $(CFLAGS) $(CXXFLAGS) $(INCLUDES) $(' + PREFIX + '_CFLAGS) $(' + PREFIX + '_INCLUDES) -c $(CXX_O) $<\n')
    f.write("\n")
def write_c_rule_pattern(f):
    """Emit the makefile pattern rule that compiles C sources."""
    target = "$(%s_SRCDIR)/%%.$(OBJ)"%(PREFIX)
    source = "$(%s_SRCDIR)/%%.c"%(PREFIX)
    f.write("%s: %s\n"%(target, source))
    f.write('\t$(QUIET_CC)$(CC) $(CFLAGS) $(INCLUDES) $(' + PREFIX + '_CFLAGS) $(' + PREFIX + '_INCLUDES) -c $(CXX_O) $<\n')
    f.write("\n")
def write_asm_rule_pattern(f):
    """Emit the makefile pattern rule that assembles .asm (x86) sources."""
    target = "$(%s_SRCDIR)/%%.$(OBJ)"%(PREFIX)
    source = "$(%s_SRCDIR)/%%.asm"%(PREFIX)
    f.write("%s: %s\n"%(target, source))
    f.write('\t$(QUIET_ASM)$(ASM) $(ASMFLAGS) $(ASM_INCLUDES) $(' + PREFIX + '_ASMFLAGS) $(' + PREFIX + '_ASM_INCLUDES) -o $@ $<\n')
    f.write("\n")
def write_asm_s_rule_pattern(f):
    """Emit the makefile pattern rule that assembles .S (ARM) sources."""
    target = "$(%s_SRCDIR)/%%.$(OBJ)"%(PREFIX)
    source = "$(%s_SRCDIR)/%%.S"%(PREFIX)
    f.write("%s: %s\n"%(target, source))
    f.write('\t$(QUIET_CCAS)$(CCAS) $(CCASFLAGS) $(ASMFLAGS) $(INCLUDES) $(' + PREFIX + '_CFLAGS) $(' + PREFIX + '_INCLUDES) -c -o $@ $<\n')
    f.write("\n")
def find_sources():
    """Walk the current directory and bucket source files by type.

    Honors the module-level INCLUDE/EXCLUDE filters: when INCLUDE is
    non-empty only the listed file names are taken, otherwise everything
    not listed in EXCLUDE.  Returns [cpp_files, asm_files, c_files,
    s_files].
    """
    cpp_files = []
    asm_files = []
    c_files = []
    s_files = []
    # Parenthesized so it works as a statement under both Python 2 and 3
    # (the original bare `print EXCLUDE` is a syntax error on Python 3;
    # with a single argument the output is identical on Python 2).
    print(EXCLUDE)
    # Unpack the os.walk tuple instead of indexing, and avoid shadowing
    # the `file` builtin.
    for dirpath, _dirnames, filenames in os.walk("."):
        for fname in filenames:
            if (len(INCLUDE) == 0 and fname not in EXCLUDE) or fname in INCLUDE:
                ext = os.path.splitext(fname)[1]
                # NOTE(review): str.strip('./') removes any leading/trailing
                # '.' and '/' characters, not just a './' prefix -- kept
                # for compatibility with the original behavior.
                path = os.path.join(dirpath.strip('./'), fname)
                if ext == CPP_SUFFIX:
                    cpp_files.append(path)
                if ext == '.asm':
                    asm_files.append(path)
                if ext == '.c':
                    c_files.append(path)
                if ext == '.S':
                    s_files.append(path)
    return [cpp_files, asm_files, c_files, s_files]
args = parser.parse_args()

# Derive the make-variable prefix from whichever target kind was given;
# exactly one of --library/--binary/--prefix is required.
if args.library is not None:
    PREFIX=args.library.upper()
elif args.binary is not None:
    PREFIX=args.binary.upper()
elif args.prefix is not None:
    PREFIX=args.prefix.upper()
else:
    sys.stderr.write("Must provide either library, binary or prefix")
    sys.exit(1)

if args.exclude is not None:
    EXCLUDE = args.exclude
if args.include is not None:
    INCLUDE = args.include
if args.out is not None:
    OUTFILE = args.out
else:
    # Default output lives next to the scanned sources.
    OUTFILE = os.path.join(args.directory, OUTFILE)
if args.cpp_suffix is not None:
    CPP_SUFFIX = args.cpp_suffix

# Resolve the output path BEFORE chdir'ing into the source directory.
OUTFILE = os.path.abspath(OUTFILE)
try:
    os.chdir(args.directory)
except OSError as e:
    sys.stderr.write("Error changing directory to %s\n" % e.filename)
    sys.exit(1)

(cpp, asm, cfiles, sfiles) = find_sources()

# Sort case-insensitively for stable, readable makefile output.
cpp = sorted(cpp, key=lambda s: s.lower())
asm = sorted(asm, key=lambda s: s.lower())
cfiles = sorted(cfiles, key=lambda s: s.lower())
sfiles = sorted(sfiles, key=lambda s: s.lower())

# Split .S assembly sources by architecture, judging by which path
# component ('arm' or 'arm64') they live under.
armfiles = []
arm64files = []
for file in sfiles:
    c = file.split('/')
    if 'arm64' in c:
        arm64files.append(file)
    elif 'arm' in c:
        armfiles.append(file)
f.write("%s_SRCDIR=%s\n"%(PREFIX, args.directory))
f.write("%s_CPP_SRCS=\\\n"%(PREFIX))
for c in cpp:
f.write("\t$(%s_SRCDIR)/%s\\\n"%(PREFIX, c))
f.write("\n")
f.write("%s_OBJS += $(%s_CPP_SRCS:%s=.$(OBJ))\n\n"%(PREFIX, PREFIX, CPP_SUFFIX))
if len(cfiles) > 0:
f.write("%s_C_SRCS=\\\n"%(PREFIX))
for cfile in cfiles:
f.write("\t$(%s_SRCDIR)/%s\\\n"%(PREFIX, cfile))
f.write("\n")
f.write("%s_OBJS += $(%s_C_SRCS:.c=.$(OBJ))\n"%(PREFIX, PREFIX))
if len(asm) > 0:
f.write("%s_ASM_SRCS=\\\n"%(PREFIX))
for c in asm:
f.write("\t$(%s_SRCDIR)/%s\\\n"%(PREFIX, c))
f.write("\n")
f.write("%s_OBJSASM += $(%s_ASM_SRCS:.asm=.$(OBJ))\n"%(PREFIX, PREFIX))
f.write("ifeq ($(ASM_ARCH), x86)\n")
f.write("%s_OBJS += $(%s_OBJSASM)\n"%(PREFIX,PREFIX))
f.write("endif\n")
f.write("OBJS += $(%s_OBJSASM)\n\n"%(PREFIX))
if len(armfiles) > 0:
f.write("%s_ASM_ARM_SRCS=\\\n"%(PREFIX))
for c in armfiles:
f.write("\t$(%s_SRCDIR)/%s\\\n"%(PREFIX, c))
f.write("\n")
f.write("%s_OBJSARM += $(%s_ASM_ARM_SRCS:.S=.$(OBJ))\n"%(PREFIX, PREFIX))
f.write("ifeq ($(ASM_ARCH), arm)\n")
f.write("%s_OBJS += $(%s_OBJSARM)\n"%(PREFIX,PREFIX))
f.write("endif\n")
f.write("OBJS += $(%s_OBJSARM)\n\n"%(PREFIX))
if len(arm64files) > 0:
f.write("%s_ASM_ARM64_SRCS=\\\n"%(PREFIX))
for c in arm64files:
f.write("\t$(%s_SRCDIR)/%s\\\n"%(PREFIX, c))
f.write("\n")
f.write("%s_OBJSARM64 += $(%s_ASM_ARM64_SRCS:.S=.$(OBJ))\n"%(PREFIX, PREFIX))
f.write("ifeq ($(ASM_ARCH), arm64)\n")
f.write("%s_OBJS += $(%s_OBJSARM64)\n"%(PREFIX,PREFIX))
f.write("endif\n")
f.write("OBJS += $(%s_OBJSARM64)\n\n"%(PREFIX))
f.write("OBJS += $(%s_OBJS)\n\n"%(PREFIX))
write_cpp_rule_pattern(f)
if len(cfiles) > 0:
write_c_rule_pattern(f)
if len(asm) > 0:
write_asm_rule_pattern(f)
if len(sfiles) > 0:
write_asm_s_rule_pattern(f)
if args.library is not None:
f.write("$(LIBPREFIX)%s.$(LIBSUFFIX): $(%s_OBJS)\n"%(args.library, PREFIX))
f.write("\t$(QUIET)rm -f $@\n")
f.write("\t$(QUIET_AR)$(AR) $(AR_OPTS) $+\n")
f.write("\n")
f.write("libraries: $(LIBPREFIX)%s.$(LIBSUFFIX)\n"%args.library)
f.write("LIBRARIES += $(LIBPREFIX)%s.$(LIBSUFFIX)\n"%args.library)
if args.binary is not None:
f.write("%s$(EXEEXT): $(%s_OBJS) $(%s_DEPS)\n"%(args.binary, PREFIX, PREFIX))
f.write("\t$(QUIET_CXX)$(CXX) $(CXX_LINK_O) $(%s_OBJS) $(%s_LDFLAGS) $(LDFLAGS)\n\n"%(PREFIX, PREFIX))
f.write("binaries: %s$(EXEEXT)\n"%args.binary)
f.write("BINARIES += %s$(EXEEXT)\n"%args.binary)
f.close()
| bsd-2-clause |
pcu4dros/pandora-core | workspace/lib/python3.5/site-packages/flask/cli.py | 45 | 18091 | # -*- coding: utf-8 -*-
"""
flask.cli
~~~~~~~~~
A simple command line application to run flask apps.
:copyright: (c) 2015 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
from threading import Lock, Thread
from functools import update_wrapper
import click
from ._compat import iteritems, reraise
from .helpers import get_debug_flag
from . import __version__
class NoAppException(click.UsageError):
    """Raised if an application cannot be found or loaded.

    Subclasses :class:`click.UsageError` so the message is rendered as a
    normal CLI usage error.
    """
def find_best_app(module):
    """Return the most likely Flask application exported by *module*.

    Attributes named ``app`` or ``application`` win outright; failing
    that, the module's single Flask instance is accepted.  Raises
    :class:`NoAppException` when no unambiguous application is found.
    """
    from . import Flask

    # The conventional names take precedence.
    for candidate_name in ('app', 'application'):
        candidate = getattr(module, candidate_name, None)
        if isinstance(candidate, Flask):
            return candidate

    # Fall back to scanning the module namespace for Flask instances.
    flask_instances = [value for _key, value in iteritems(module.__dict__)
                       if isinstance(value, Flask)]
    if len(flask_instances) == 1:
        return flask_instances[0]

    raise NoAppException('Failed to find application in module "%s". Are '
                         'you sure it contains a Flask application? Maybe '
                         'you wrapped it in a WSGI middleware or you are '
                         'using a factory function.' % module.__name__)
def prepare_exec_for_file(filename):
    """Turn *filename* into an importable dotted module path.

    Walks up from the file through any enclosing packages (directories
    containing an ``__init__.py``), prepends the top-most package's
    parent directory to ``sys.path``, and returns the dotted module name.
    """
    segments = []

    # Normalize to a path without extension or package marker.
    if os.path.split(filename)[1] == '__init__.py':
        filename = os.path.dirname(filename)
    elif filename.endswith('.py'):
        filename = filename[:-3]
    else:
        raise NoAppException('The file provided (%s) does exist but is not a '
                             'valid Python file. This means that it cannot '
                             'be used as application. Please change the '
                             'extension to .py' % filename)
    filename = os.path.realpath(filename)

    # Climb out of the package hierarchy, collecting name segments.
    dirpath = filename
    while True:
        dirpath, segment = os.path.split(dirpath)
        segments.append(segment)
        if not os.path.isfile(os.path.join(dirpath, '__init__.py')):
            break

    # Make the top-level package importable, then return the dotted name.
    sys.path.insert(0, dirpath)
    return '.'.join(reversed(segments))
def locate_app(app_id):
    """Attempts to locate the application."""
    __traceback_hide__ = True

    # An app id is either "module" or "module:attribute".
    if ':' in app_id:
        module, app_obj = app_id.split(':', 1)
    else:
        module, app_obj = app_id, None

    try:
        __import__(module)
    except ImportError:
        raise NoAppException('The file/path provided (%s) does not appear to '
                             'exist. Please verify the path is correct. If '
                             'app is not on PYTHONPATH, ensure the extension '
                             'is .py' % module)

    mod = sys.modules[module]
    if app_obj is None:
        # No attribute requested: pick the best candidate heuristically.
        return find_best_app(mod)

    app = getattr(mod, app_obj, None)
    if app is None:
        raise RuntimeError('Failed to find application in module "%s"'
                           % module)
    return app
def find_default_import_path():
    """Resolve the FLASK_APP environment variable to an import path.

    Returns None when the variable is unset; a module path derived from
    the file when it names an existing file; otherwise the raw value.
    """
    app = os.environ.get('FLASK_APP')
    if app is not None:
        return prepare_exec_for_file(app) if os.path.isfile(app) else app
    return None
def get_version(ctx, param, value):
    # Click eager-option callback: print Flask and Python versions, then
    # exit.  Do nothing when the flag was not given or the parse is
    # resilient (e.g. during shell completion).
    if not value or ctx.resilient_parsing:
        return
    message = 'Flask %(version)s\nPython %(python_version)s'
    click.echo(message % {
        'version': __version__,
        'python_version': sys.version,
    }, color=ctx.color)
    ctx.exit()


# Reusable --version option; eager and non-value-exposing so it runs
# before any command callback.
version_option = click.Option(['--version'],
                              help='Show the flask version',
                              expose_value=False,
                              callback=get_version,
                              is_flag=True, is_eager=True)
class DispatchingApp(object):
    """Special application that dispatches to a Flask application which
    is imported by name in a background thread.  If an error happens
    it is recorded and shown as part of the WSGI handling which in case
    of the Werkzeug debugger means that it shows up in the browser.
    """

    def __init__(self, loader, use_eager_loading=False):
        # loader: zero-argument callable returning the WSGI application.
        self.loader = loader
        self._app = None
        self._lock = Lock()
        # Holds sys.exc_info() from a failed background load until the
        # next request re-raises it.
        self._bg_loading_exc_info = None
        if use_eager_loading:
            self._load_unlocked()
        else:
            self._load_in_background()

    def _load_in_background(self):
        # Import the app on a worker thread so startup is not blocked;
        # failures are stashed in _bg_loading_exc_info for later re-raise.
        def _load_app():
            __traceback_hide__ = True
            with self._lock:
                try:
                    self._load_unlocked()
                except Exception:
                    self._bg_loading_exc_info = sys.exc_info()
        t = Thread(target=_load_app, args=())
        t.start()

    def _flush_bg_loading_exception(self):
        __traceback_hide__ = True
        exc_info = self._bg_loading_exc_info
        if exc_info is not None:
            # Re-raise the background-load failure exactly once.
            self._bg_loading_exc_info = None
            reraise(*exc_info)

    def _load_unlocked(self):
        # Expected to run with self._lock held (both the background loader
        # and __call__ do so); eager loading in __init__ runs before any
        # other thread exists.
        __traceback_hide__ = True
        self._app = rv = self.loader()
        self._bg_loading_exc_info = None
        return rv

    def __call__(self, environ, start_response):
        __traceback_hide__ = True
        if self._app is not None:
            return self._app(environ, start_response)
        self._flush_bg_loading_exception()
        with self._lock:
            # The background loader may have finished while we waited for
            # the lock; only load if the app is still missing.
            if self._app is not None:
                rv = self._app
            else:
                rv = self._load_unlocked()
            return rv(environ, start_response)
class ScriptInfo(object):
    """Help object to deal with Flask applications.  This is usually not
    necessary to interface with as it's used internally in the dispatching
    to click.  In future versions of Flask this object will most likely play
    a bigger role.  Typically it's created automatically by the
    :class:`FlaskGroup` but you can also manually create it and pass it
    onwards as click object.
    """

    def __init__(self, app_import_path=None, create_app=None):
        if create_app is None:
            # No factory given: fall back to the FLASK_APP environment
            # variable when no explicit import path was provided.
            if app_import_path is None:
                app_import_path = find_default_import_path()
            self.app_import_path = app_import_path
        else:
            # A factory callback takes precedence over any import path.
            app_import_path = None

        #: Optionally the import path for the Flask application.
        self.app_import_path = app_import_path
        #: Optionally a function that is passed the script info to create
        #: the instance of the application.
        self.create_app = create_app
        #: A dictionary with arbitrary data that can be associated with
        #: this script info.
        self.data = {}
        # Cached application instance; populated by load_app().
        self._loaded_app = None

    def load_app(self):
        """Loads the Flask app (if not yet loaded) and returns it.  Calling
        this multiple times will just result in the already loaded app to
        be returned.
        """
        __traceback_hide__ = True
        if self._loaded_app is not None:
            return self._loaded_app
        if self.create_app is not None:
            rv = self.create_app(self)
        else:
            if not self.app_import_path:
                raise NoAppException(
                    'Could not locate Flask application. You did not provide '
                    'the FLASK_APP environment variable.\n\nFor more '
                    'information see '
                    'http://flask.pocoo.org/docs/latest/quickstart/')
            rv = locate_app(self.app_import_path)
        # Override the app's debug setting when a debug flag is configured.
        debug = get_debug_flag()
        if debug is not None:
            rv.debug = debug
        self._loaded_app = rv
        return rv
# Click decorator that injects the ScriptInfo object (creating it on
# first use thanks to ensure=True) into the decorated callback.
pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True)
def with_appcontext(f):
    """Wraps a callback so that it's guaranteed to be executed with the
    script's application context.  If callbacks are registered directly
    to the ``app.cli`` object then they are wrapped with this function
    by default unless it's disabled.
    """
    @click.pass_context
    def decorator(__ctx, *args, **kwargs):
        # NOTE(review): the dunder parameter name presumably avoids
        # clashing with a user option named "ctx" -- confirm before
        # renaming.
        with __ctx.ensure_object(ScriptInfo).load_app().app_context():
            return __ctx.invoke(f, *args, **kwargs)
    return update_wrapper(decorator, f)
class AppGroup(click.Group):
    """This works similar to a regular click :class:`~click.Group` but it
    changes the behavior of the :meth:`command` decorator so that it
    automatically wraps the functions in :func:`with_appcontext`.

    Not to be confused with :class:`FlaskGroup`.
    """

    def command(self, *args, **kwargs):
        """This works exactly like the method of the same name on a regular
        :class:`click.Group` but it wraps callbacks in :func:`with_appcontext`
        unless it's disabled by passing ``with_appcontext=False``.
        """
        wrap_for_ctx = kwargs.pop('with_appcontext', True)
        def decorator(f):
            if wrap_for_ctx:
                f = with_appcontext(f)
            return click.Group.command(self, *args, **kwargs)(f)
        return decorator

    def group(self, *args, **kwargs):
        """This works exactly like the method of the same name on a regular
        :class:`click.Group` but it defaults the group class to
        :class:`AppGroup`.
        """
        # Subgroups inherit the app-context wrapping behavior by default.
        kwargs.setdefault('cls', AppGroup)
        return click.Group.group(self, *args, **kwargs)
class FlaskGroup(AppGroup):
    """Special subclass of the :class:`AppGroup` group that supports
    loading more commands from the configured Flask app.  Normally a
    developer does not have to interface with this class but there are
    some very advanced use cases for which it makes sense to create an
    instance of this.

    For information as of why this is useful see :ref:`custom-scripts`.

    :param add_default_commands: if this is True then the default run and
                                 shell commands will be added.
    :param add_version_option: adds the ``--version`` option.
    :param create_app: an optional callback that is passed the script info
                       and returns the loaded app.
    """

    def __init__(self, add_default_commands=True, create_app=None,
                 add_version_option=True, **extra):
        params = list(extra.pop('params', None) or ())

        if add_version_option:
            params.append(version_option)

        AppGroup.__init__(self, params=params, **extra)
        self.create_app = create_app

        if add_default_commands:
            self.add_command(run_command)
            self.add_command(shell_command)

        # Entry-point commands are discovered lazily, at most once.
        self._loaded_plugin_commands = False

    def _load_plugin_commands(self):
        if self._loaded_plugin_commands:
            return
        try:
            import pkg_resources
        except ImportError:
            # setuptools not available: no plugin commands to load.
            self._loaded_plugin_commands = True
            return

        # Third-party packages register extra commands via the
        # 'flask.commands' entry-point group.
        for ep in pkg_resources.iter_entry_points('flask.commands'):
            self.add_command(ep.load(), ep.name)
        self._loaded_plugin_commands = True

    def get_command(self, ctx, name):
        self._load_plugin_commands()

        # We load built-in commands first as these should always be the
        # same no matter what the app does.  If the app does want to
        # override this it needs to make a custom instance of this group
        # and not attach the default commands.
        #
        # This also means that the script stays functional in case the
        # application completely fails.
        rv = AppGroup.get_command(self, ctx, name)
        if rv is not None:
            return rv

        info = ctx.ensure_object(ScriptInfo)
        try:
            rv = info.load_app().cli.get_command(ctx, name)
            if rv is not None:
                return rv
        except NoAppException:
            # No app could be loaded; fall through to "command not found".
            pass

    def list_commands(self, ctx):
        self._load_plugin_commands()

        # The commands available is the list of both the application (if
        # available) plus the builtin commands.
        rv = set(click.Group.list_commands(self, ctx))
        info = ctx.ensure_object(ScriptInfo)
        try:
            rv.update(info.load_app().cli.list_commands(ctx))
        except Exception:
            # Here we intentionally swallow all exceptions as we don't
            # want the help page to break if the app does not exist.
            # If someone attempts to use the command we try to create
            # the app again and this will give us the error.
            pass
        return sorted(rv)

    def main(self, *args, **kwargs):
        obj = kwargs.get('obj')
        if obj is None:
            obj = ScriptInfo(create_app=self.create_app)
        kwargs['obj'] = obj
        # Allow FLASK_* environment variables to set options automatically.
        kwargs.setdefault('auto_envvar_prefix', 'FLASK')
        return AppGroup.main(self, *args, **kwargs)
@click.command('run', short_help='Runs a development server.')
@click.option('--host', '-h', default='127.0.0.1',
              help='The interface to bind to.')
@click.option('--port', '-p', default=5000,
              help='The port to bind to.')
@click.option('--reload/--no-reload', default=None,
              help='Enable or disable the reloader. By default the reloader '
              'is active if debug is enabled.')
@click.option('--debugger/--no-debugger', default=None,
              help='Enable or disable the debugger. By default the debugger '
              'is active if debug is enabled.')
@click.option('--eager-loading/--lazy-loader', default=None,
              help='Enable or disable eager loading. By default eager '
              'loading is enabled if the reloader is disabled.')
@click.option('--with-threads/--without-threads', default=False,
              help='Enable or disable multithreading.')
@pass_script_info
def run_command(info, host, port, reload, debugger, eager_loading,
                with_threads):
    """Runs a local development server for the Flask application.

    This local server is recommended for development purposes only but it
    can also be used for simple intranet deployments.  By default it will
    not support any sort of concurrency at all to simplify debugging.  This
    can be changed with the --with-threads option which will enable basic
    multithreading.

    The reloader and debugger are by default enabled if the debug flag of
    Flask is enabled and disabled otherwise.
    """
    from werkzeug.serving import run_simple

    # Options left at None are derived from the debug flag; eager loading
    # defaults to on whenever the reloader is off.
    debug = get_debug_flag()
    if reload is None:
        reload = bool(debug)
    if debugger is None:
        debugger = bool(debug)
    if eager_loading is None:
        eager_loading = not reload

    app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)

    # Extra startup messages.  This depends a bit on Werkzeug internals to
    # not double execute when the reloader kicks in.
    if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
        # If we have an import path we can print it out now which can help
        # people understand what's being served.  If we do not have an
        # import path because the app was loaded through a callback then
        # we won't print anything.
        if info.app_import_path is not None:
            print(' * Serving Flask app "%s"' % info.app_import_path)
        if debug is not None:
            print(' * Forcing debug mode %s' % (debug and 'on' or 'off'))

    run_simple(host, port, app, use_reloader=reload,
               use_debugger=debugger, threaded=with_threads)
@click.command('shell', short_help='Runs a shell in the app context.')
@with_appcontext
def shell_command():
    """Runs an interactive Python shell in the context of a given
    Flask application.  The application will populate the default
    namespace of this shell according to its configuration.

    This is useful for executing small snippets of management code
    without having to manually configuring the application.
    """
    import code
    from flask.globals import _app_ctx_stack
    app = _app_ctx_stack.top.app
    banner = 'Python %s on %s\nApp: %s%s\nInstance: %s' % (
        sys.version,
        sys.platform,
        app.import_name,
        app.debug and ' [debug]' or '',
        app.instance_path,
    )
    ctx = {}

    # Support the regular Python interpreter startup script if someone
    # is using it.  The startup file is user-controlled, trusted code,
    # matching the interactive interpreter's own behavior.
    startup = os.environ.get('PYTHONSTARTUP')
    if startup and os.path.isfile(startup):
        with open(startup, 'r') as f:
            eval(compile(f.read(), startup, 'exec'), ctx)

    # Merge in whatever names the app exposes for interactive shells.
    ctx.update(app.make_shell_context())

    code.interact(banner=banner, local=ctx)
cli = FlaskGroup(help="""\
This shell command acts as general utility script for Flask applications.
It loads the application configured (through the FLASK_APP environment
variable) and then provides commands either provided by the application or
Flask itself.
The most useful commands are the "run" and "shell" command.
Example usage:
\b
%(prefix)s%(cmd)s FLASK_APP=hello.py
%(prefix)s%(cmd)s FLASK_DEBUG=1
%(prefix)sflask run
""" % {
'cmd': os.name == 'posix' and 'export' or 'set',
'prefix': os.name == 'posix' and '$ ' or '',
})
def main(as_module=False):
    """Entry point for the ``flask`` command line utility.

    When *as_module* is true the displayed program name is rewritten to
    the ``python -m ...`` form and ``sys.argv`` is restored accordingly
    so the reloader can re-execute the process correctly.
    """
    module_path = __package__ + '.cli'
    cli_args = sys.argv[1:]

    if not as_module:
        prog = None
    else:
        # Python 2.7+ can execute a package directly, so drop the
        # trailing ``.cli`` from the displayed program name there.
        if sys.version_info >= (2, 7):
            shown_module = module_path.rsplit('.', 1)[0]
        else:
            shown_module = module_path
        prog = 'python -m ' + shown_module

        # This module is always executed as "python -m flask.run" and as
        # such we need to ensure that we restore the actual command line
        # so that the reloader can properly operate.
        sys.argv = ['-m', module_path] + cli_args

    cli.main(args=cli_args, prog_name=prog)
# Allow running this module directly (``python -m flask.cli``).
if __name__ == '__main__':
    main(as_module=True)
| mit |
beeftornado/sentry | src/sentry/api/endpoints/group_events.py | 1 | 5127 | from __future__ import absolute_import
import six
from datetime import timedelta
from django.utils import timezone
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from functools import partial
from sentry import eventstore
from sentry.api.base import EnvironmentMixin
from sentry.api.bases import GroupEndpoint
from sentry.api.event_search import get_filter, InvalidSearchQuery
from sentry.api.exceptions import ResourceDoesNotExist
from sentry.api.helpers.environments import get_environments
from sentry.api.helpers.events import get_direct_hit_response
from sentry.api.serializers import EventSerializer, serialize, SimpleEventSerializer
from sentry.api.paginator import GenericOffsetPaginator
from sentry.api.utils import get_date_range_from_params, InvalidParams
from sentry.search.utils import InvalidQuery, parse_query
class NoResults(Exception):
    """Raised internally when a query can be proven to match no events."""
    pass


class GroupEventsError(Exception):
    """Raised when the event query for a group cannot be executed."""
    pass
class GroupEventsEndpoint(GroupEndpoint, EnvironmentMixin):
    """Lists the events belonging to a single issue (group)."""

    def get(self, request, group):
        """
        List an Issue's Events
        ``````````````````````

        This endpoint lists an issue's events.

        :qparam bool full: if this is set to true then the event payload will
                           include the full event body, including the stacktrace.
                           Set to 1 to enable.

        :pparam string issue_id: the ID of the issue to retrieve.

        :auth: required
        """
        try:
            environments = get_environments(request, group.project.organization)
            query, tags = self._get_search_query_and_tags(request, group, environments)
        except InvalidQuery as exc:
            return Response({"detail": six.text_type(exc)}, status=400)
        except (NoResults, ResourceDoesNotExist):
            # The query provably matches nothing, so short-circuit with
            # an empty result set instead of querying the event store.
            return Response([])

        try:
            start, end = get_date_range_from_params(request.GET, optional=True)
        except InvalidParams as e:
            raise ParseError(detail=six.text_type(e))

        try:
            return self._get_events_snuba(request, group, environments, query, tags, start, end)
        except GroupEventsError as exc:
            raise ParseError(detail=six.text_type(exc))

    def _get_events_snuba(self, request, group, environments, query, tags, start, end):
        # Fall back to a 90-day window when the caller supplies no range.
        default_end = timezone.now()
        default_start = default_end - timedelta(days=90)
        params = {
            "group_ids": [group.id],
            "project_id": [group.project_id],
            "organization_id": group.project.organization_id,
            "start": start if start else default_start,
            "end": end if end else default_end,
        }

        # A "direct hit" helper may answer the query without a full
        # search — presumably an exact event-id lookup; see
        # get_direct_hit_response for the actual criteria.
        direct_hit_resp = get_direct_hit_response(request, query, params, "api.group-events")
        if direct_hit_resp:
            return direct_hit_resp

        if environments:
            params["environment"] = [env.name for env in environments]

        # ``full=1`` selects the serializer that includes the complete
        # event body (stacktrace etc.); default is the light-weight one.
        full = request.GET.get("full", False)
        try:
            snuba_filter = get_filter(request.GET.get("query", None), params)
        except InvalidSearchQuery as e:
            raise ParseError(detail=six.text_type(e))

        # Transactions are excluded from this listing.
        snuba_filter.conditions.append(["event.type", "!=", "transaction"])

        data_fn = partial(eventstore.get_events, referrer="api.group-events", filter=snuba_filter)
        serializer = EventSerializer() if full else SimpleEventSerializer()
        return self.paginate(
            request=request,
            on_results=lambda results: serialize(results, request.user, serializer),
            paginator=GenericOffsetPaginator(data_fn=data_fn),
        )

    def _get_search_query_and_tags(self, request, group, environments=None):
        # Parse the free-form search string into a query plus tag filters.
        raw_query = request.GET.get("query")

        if raw_query:
            query_kwargs = parse_query([group.project], raw_query, request.user, environments)
            query = query_kwargs.pop("query", None)
            tags = query_kwargs.pop("tags", {})
        else:
            query = None
            tags = {}

        if environments:
            env_names = set(env.name for env in environments)
            if "environment" in tags:
                # If a single environment was passed as part of the query, then
                # we'll just search for that individual environment in this
                # query, even if more are selected.
                if tags["environment"] not in env_names:
                    # An event can only be associated with a single
                    # environment, so if the environments associated with
                    # the request don't contain the environment provided as a
                    # tag lookup, the query cannot contain any valid results.
                    raise NoResults
            else:
                # XXX: Handle legacy backends here. Just store environment as a
                # single tag if we only have one so that we don't break existing
                # usage.
                tags["environment"] = list(env_names) if len(env_names) > 1 else env_names.pop()

        return query, tags
| bsd-3-clause |
baris/fullerite | src/diamond/collectors/xen_collector/test/testxen.py | 29 | 2970 | #!/usr/bin/python
# coding=utf-8
###############################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import Mock
from mock import patch
from diamond.collector import Collector
from xen_collector import XENCollector
###############################################################################
def run_only_if_libvirt_is_available(func):
    """Decorate *func* so it runs only when the libvirt bindings import."""
    try:
        import libvirt
    except ImportError:
        libvirt = None

    # PEP 8 (E731): use a named function instead of an assigned lambda.
    def predicate():
        return libvirt is not None

    return run_only(func, predicate)
class TestXENCollector(CollectorTestCase):
    # NOTE: Python 2 only (long literals such as ``49420888L``).

    def setUp(self):
        # Build the collector with an empty config override.
        config = get_collector_config('XENCollector', {
        })

        self.collector = XENCollector(config, None)

    def test_import(self):
        self.assertTrue(XENCollector)

    @run_only_if_libvirt_is_available
    @patch('os.statvfs')
    @patch('libvirt.openReadOnly')
    @patch.object(Collector, 'publish')
    def test_centos6(self, publish_mock, libvirt_mock, os_mock):
        # Fake libvirt domain whose info() returns a per-domain record,
        # presumably [state, maxMem(KiB), memory(KiB), vcpus, cpuTime] as
        # in libvirt's dominfo — TODO confirm.  Domain 0 is dom0.
        class info:
            def __init__(self, id):
                self.id = id

            def info(self):
                if self.id == 0:
                    return [1, 49420888L, 49420888L, 8, 911232000000000L]
                if self.id == 1:
                    return [1, 2097152L, 2097152L, 2, 310676150000000L]
                if self.id == 2:
                    return [1, 2097152L, 2097152L, 2, 100375300000000L]
                if self.id == 3:
                    return [1, 10485760L, 10485760L, 2, 335312040000000L]
                if self.id == 4:
                    return [1, 10485760L, 10485760L, 2, 351313480000000L]

        # Stub the hypervisor connection: host info plus 5 domains.
        libvirt_m = Mock()
        libvirt_m.getInfo.return_value = ['x86_64', 48262, 8, 1200, 2, 1, 4, 1]
        libvirt_m.listDomainsID.return_value = [0, 2, 1, 4, 3]

        def lookupByIdMock(id):
            lookup = info(id)
            return lookup

        libvirt_m.lookupByID = lookupByIdMock
        libvirt_mock.return_value = libvirt_m

        # Stub the filesystem stats used for the DiskFree metric.
        statsvfs_mock = Mock()
        statsvfs_mock.f_bavail = 74492145
        statsvfs_mock.f_frsize = 4096
        os_mock.return_value = statsvfs_mock

        self.collector.collect()

        metrics = {
            'TotalCores': 8.000000,
            'InstalledMem': 48262.000000,
            'MemAllocated': 24576.000000,
            'MemFree': 23686.000000,
            'DiskFree': 297968580.000000,
            'FreeCores': 0.000000,
            'AllocatedCores': 8.000000,
        }

        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['path'])
        self.assertPublishedMany(publish_mock, metrics)
###############################################################################
# Run the test suite when executed directly.
if __name__ == "__main__":
    unittest.main()
| apache-2.0 |
vygr/Python-PCB | pcb.py | 1 | 3869 | #!/opt/local/bin/pypy -tt
# -*- coding: utf-8 -*-
#Copyright (C) 2014 Chris Hinsley All Rights Reserved
import sys, argparse, router
from copy import deepcopy
from ast import literal_eval
from mymath import *
def main():
    # Parse options, read the board description from stdin/file, then run
    # the router several times (--s samples) keeping the cheapest layout.
    # NOTE: Python 2 only (xrange, bare ``print``).
    parser = argparse.ArgumentParser(description = 'Pcb layout optimizer.', formatter_class = argparse.RawTextHelpFormatter)
    parser.add_argument('infile', nargs = '?', type = argparse.FileType('r'), default = sys.stdin, help = 'filename, default stdin')
    parser.add_argument('--t', nargs = 1, type = int, default = [600], help = 'timeout in seconds, default 600')
    parser.add_argument('--v', nargs = 1, type = int, default = [0], choices = range(0, 2), help = 'verbosity level 0..1, default 0')
    parser.add_argument('--s', nargs = 1, type = int, default = [1], help = 'number of samples, default 1')
    parser.add_argument('--r', nargs = 1, type = int, default = [1], choices = range(1, 5), help = 'grid resolution 1..4, default 1')
    parser.add_argument('--z', nargs = 1, type = int, default = [0], choices = range(0, 2), help = 'minimize vias 0..1, default 0')
    parser.add_argument('--d', nargs = 1, type = int, default = [0], choices = range(0, 6), \
                        help = 'distance metric 0..5, default 0.\n' \
                        '0 -> manhattan\n1 -> squared_euclidean\n2 -> euclidean\n3 -> chebyshev\n4 -> reciprocal\n5 -> random')
    parser.add_argument('--fr', nargs = 1, type = int, default = [2], choices = range(1, 6), help = 'flood range 1..5, default 2')
    parser.add_argument('--xr', nargs = 1, type = int, default = [1], choices = range(0, 6), help = 'even layer x range 0..5, default 1')
    parser.add_argument('--yr', nargs = 1, type = int, default = [1], choices = range(0, 6), help = 'odd layer y range 0..5, default 1')
    args = parser.parse_args()

    flood_range = args.fr[0]
    flood_range_x_even_layer = args.xr[0]
    flood_range_y_odd_layer = args.yr[0]
    path_range = flood_range + 0
    path_range_x_even_layer = flood_range_x_even_layer + 0
    path_range_y_odd_layer = flood_range_y_odd_layer + 0

    # Neighbour offsets per layer parity: in-plane moves within the given
    # ranges plus the two via moves (0, 0, +/-1).
    routing_flood_vectors = [[(x, y, 0) for x in xrange(-flood_range_x_even_layer, flood_range_x_even_layer + 1) for y in xrange(-flood_range, flood_range + 1) \
                              if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= flood_range] + [(0, 0, -1), (0, 0, 1)], \
                             [(x, y, 0) for x in xrange(-flood_range, flood_range + 1) for y in xrange(-flood_range_y_odd_layer, flood_range_y_odd_layer + 1) \
                              if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= flood_range] + [(0, 0, -1), (0, 0, 1)]]
    routing_path_vectors = [[(x, y, 0) for x in xrange(-path_range_x_even_layer, path_range_x_even_layer + 1) for y in xrange(-path_range, path_range + 1) \
                             if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= path_range] + [(0, 0, -1), (0, 0, 1)], \
                            [(x, y, 0) for x in xrange(-path_range, path_range + 1) for y in xrange(-path_range_y_odd_layer, path_range_y_odd_layer + 1) \
                             if length_2d((x, y)) > 0.1 and length_2d((x, y)) <= path_range] + [(0, 0, -1), (0, 0, 1)]]

    # Map the --d option onto the corresponding distance function.
    dfunc = [manhattan_distance, squared_euclidean_distance, euclidean_distance, \
             chebyshev_distance, reciprical_distance, random_distance][args.d[0]]

    # First input line holds the board dimensions; the rest are tracks,
    # terminated by an empty (falsy) entry.
    dimensions = literal_eval(args.infile.readline().strip())
    pcb = router.Pcb(dimensions, routing_flood_vectors, routing_path_vectors, dfunc, args.r[0], args.v[0], args.z[0])
    for line in args.infile:
        track = literal_eval(line.strip())
        if not track:
            break
        pcb.add_track(track)
    args.infile.close()
    pcb.print_pcb()

    # Try several shuffled netlist orders, keep the cheapest routed board.
    best_cost = None
    best_pcb = None
    for i in xrange(args.s[0]):
        if not pcb.route(args.t[0]):
            pcb.shuffle_netlist()
            continue
        cost = pcb.cost()
        if best_cost == None or cost < best_cost:
            best_cost = cost
            best_pcb = deepcopy(pcb)
        pcb.shuffle_netlist()
    if best_pcb != None:
        best_pcb.print_netlist()
        best_pcb.print_stats()
    else:
        # No sample routed successfully: emit an empty netlist.
        print []
# Script entry point.
if __name__ == '__main__':
    main()
| gpl-2.0 |
abomyi/django | tests/utils_tests/test_termcolors.py | 337 | 6461 | import unittest
from django.utils.termcolors import (
DARK_PALETTE, DEFAULT_PALETTE, LIGHT_PALETTE, NOCOLOR_PALETTE, PALETTES,
colorize, parse_color_setting,
)
class TermColorTests(unittest.TestCase):
    """Tests for parse_color_setting() (DJANGO_COLORS grammar) and colorize()."""

    def test_empty_string(self):
        self.assertEqual(parse_color_setting(''), PALETTES[DEFAULT_PALETTE])

    def test_simple_palette(self):
        self.assertEqual(parse_color_setting('light'), PALETTES[LIGHT_PALETTE])
        self.assertEqual(parse_color_setting('dark'), PALETTES[DARK_PALETTE])
        self.assertEqual(parse_color_setting('nocolor'), None)

    def test_fg(self):
        self.assertEqual(parse_color_setting('error=green'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))

    def test_fg_bg(self):
        # ``fg/bg`` syntax.
        self.assertEqual(parse_color_setting('error=green/blue'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'}))

    def test_fg_opts(self):
        self.assertEqual(parse_color_setting('error=green,blink'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)}))
        self.assertEqual(parse_color_setting('error=green,bold,blink'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink', 'bold')}))

    def test_fg_bg_opts(self):
        self.assertEqual(parse_color_setting('error=green/blue,blink'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink',)}))
        self.assertEqual(parse_color_setting('error=green/blue,bold,blink'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink', 'bold')}))

    def test_override_palette(self):
        self.assertEqual(parse_color_setting('light;error=green'),
                         dict(PALETTES[LIGHT_PALETTE], ERROR={'fg': 'green'}))

    def test_override_nocolor(self):
        # A role definition re-enables color even after 'nocolor'.
        self.assertEqual(parse_color_setting('nocolor;error=green'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))

    def test_reverse_override(self):
        # A later palette name wins over earlier role definitions.
        self.assertEqual(parse_color_setting('error=green;light'), PALETTES[LIGHT_PALETTE])

    def test_multiple_roles(self):
        self.assertEqual(parse_color_setting('error=green;sql_field=blue'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}, SQL_FIELD={'fg': 'blue'}))

    def test_override_with_multiple_roles(self):
        self.assertEqual(parse_color_setting('light;error=green;sql_field=blue'),
                         dict(PALETTES[LIGHT_PALETTE], ERROR={'fg': 'green'}, SQL_FIELD={'fg': 'blue'}))

    def test_empty_definition(self):
        self.assertEqual(parse_color_setting(';'), None)
        self.assertEqual(parse_color_setting('light;'), PALETTES[LIGHT_PALETTE])
        self.assertEqual(parse_color_setting(';;;'), None)

    def test_empty_options(self):
        # Stray commas are ignored.
        self.assertEqual(parse_color_setting('error=green,'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
        self.assertEqual(parse_color_setting('error=green,,,'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
        self.assertEqual(parse_color_setting('error=green,,blink,,'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)}))

    def test_bad_palette(self):
        self.assertEqual(parse_color_setting('unknown'), None)

    def test_bad_role(self):
        # Unknown roles are silently dropped.
        self.assertEqual(parse_color_setting('unknown='), None)
        self.assertEqual(parse_color_setting('unknown=green'), None)
        self.assertEqual(parse_color_setting('unknown=green;sql_field=blue'),
                         dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={'fg': 'blue'}))

    def test_bad_color(self):
        # Unknown colors/extra segments are silently dropped.
        self.assertEqual(parse_color_setting('error='), None)
        self.assertEqual(parse_color_setting('error=;sql_field=blue'),
                         dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={'fg': 'blue'}))
        self.assertEqual(parse_color_setting('error=unknown'), None)
        self.assertEqual(parse_color_setting('error=unknown;sql_field=blue'),
                         dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={'fg': 'blue'}))
        self.assertEqual(parse_color_setting('error=green/unknown'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
        self.assertEqual(parse_color_setting('error=green/blue/something'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'}))
        self.assertEqual(parse_color_setting('error=green/blue/something,blink'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink',)}))

    def test_bad_option(self):
        self.assertEqual(parse_color_setting('error=green,unknown'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
        self.assertEqual(parse_color_setting('error=green,unknown,blink'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)}))

    def test_role_case(self):
        # Role, color and option names are case-insensitive.
        self.assertEqual(parse_color_setting('ERROR=green'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
        self.assertEqual(parse_color_setting('eRrOr=green'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))

    def test_color_case(self):
        self.assertEqual(parse_color_setting('error=GREEN'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
        self.assertEqual(parse_color_setting('error=GREEN/BLUE'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'}))
        self.assertEqual(parse_color_setting('error=gReEn'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
        self.assertEqual(parse_color_setting('error=gReEn/bLuE'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'}))

    def test_opts_case(self):
        self.assertEqual(parse_color_setting('error=green,BLINK'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)}))
        self.assertEqual(parse_color_setting('error=green,bLiNk'),
                         dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)}))

    def test_colorize_empty_text(self):
        self.assertEqual(colorize(text=None), '\x1b[m\x1b[0m')
        self.assertEqual(colorize(text=''), '\x1b[m\x1b[0m')
        # NOTE(review): ``('noreset')`` is a plain str, not a 1-tuple —
        # it still works because substring membership matches, but
        # ``('noreset',)`` was probably intended.
        self.assertEqual(colorize(text=None, opts=('noreset')), '\x1b[m')
        self.assertEqual(colorize(text='', opts=('noreset')), '\x1b[m')
| bsd-3-clause |
CanalTP/kirin | tests/integration/piv_worker_test.py | 1 | 6425 | # coding: utf8
#
# Copyright (c) 2020, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# [matrix] channel #navitia:matrix.org (https://app.element.io/#/room/#navitia:matrix.org)
# https://groups.google.com/d/forum/navitia
# www.navitia.io
from __future__ import absolute_import, print_function, unicode_literals, division
from kirin.command.piv_worker import PivWorker, PIV_LOCK_NAME, PIV_WORKER_REDIS_TIMEOUT_LOCK
from kirin.core.model import RealTimeUpdate
from kirin.piv.piv import get_piv_contributor
from tests.integration.conftest import PIV_CONTRIBUTOR_ID, PIV_EXCHANGE_NAME, PIV_QUEUE_NAME
from amqp.exceptions import NotFound
from kombu import Connection, Exchange
import pytest
import logging
import threading
from retrying import retry
from mock import patch
from datetime import datetime
import time
@retry(stop_max_delay=20000, wait_exponential_multiplier=100)
def wait_until(predicate):
    # Poll *predicate* until it holds: the AssertionError raised while it
    # is false makes ``retry`` re-invoke us (exponential back-off, 20s cap).
    assert predicate()
def is_exchange_created(connection, exchange_name, exchange_type="fanout"):
    """Return True if *exchange_name* exists on the broker.

    Uses a passive declare, which only checks existence and raises
    ``NotFound`` when the exchange is missing.  Any other broker error
    propagates to the caller (the previous ``except Exception: raise e``
    clause was a no-op and has been removed).
    """
    try:
        channel = connection.channel()
        channel.exchange_declare(exchange_name, exchange_type, nowait=False, passive=True)
    except NotFound:
        return False
    return True
def is_queue_created(connection, queue_name):
    """Return True if *queue_name* exists on the broker.

    Uses a passive declare, which only checks existence and raises
    ``NotFound`` when the queue is missing.  Any other broker error
    propagates to the caller (the previous ``except Exception: raise e``
    clause was a no-op and has been removed).
    """
    try:
        channel = connection.channel()
        channel.queue_declare(queue=queue_name, nowait=False, passive=True)
    except NotFound:
        return False
    return True
@pytest.fixture(scope="session", autouse=True)
def broker_connection(rabbitmq_docker_fixture):
    # Session-wide kombu connection to the dockerized RabbitMQ broker.
    return Connection(rabbitmq_docker_fixture.url)
@pytest.fixture(scope="session", autouse=True)
def mq_handler(rabbitmq_docker_fixture, broker_connection):
    # Handler the tests use to publish messages on the PIV fanout exchange.
    return rabbitmq_docker_fixture.create_rabbitmq_handler(PIV_EXCHANGE_NAME, "fanout")
def create_exchange(broker_connection, exchange_name):
    # Declare a durable fanout exchange; delivery_mode=2 marks messages
    # as persistent.
    exchange = Exchange(
        exchange_name,
        durable=True,
        delivery_mode=2,
        type=str("fanout"),
        auto_delete=False,
        no_declare=False,
    )
    exchange.declare(channel=broker_connection.channel())
# Use scope 'function' so the Exchange is recreated for every test.
# It is useful because some tests are deleting the Exchange.
@pytest.fixture(scope="function", autouse=True)
def init_piv_exchange(broker_connection):
    create_exchange(broker_connection, PIV_EXCHANGE_NAME)
    # Sanity check: the declare above must be visible on the broker.
    assert is_exchange_created(broker_connection, PIV_EXCHANGE_NAME)
def launch_piv_worker(pg_docker_fixture):
    # Runs in a background thread: binds the flask app to the test
    # database, then blocks inside PivWorker.run().
    from kirin import app
    from tests.conftest import init_flask_db
    from tests.integration.conftest import PIV_CONTRIBUTOR_ID

    with app.app_context():
        # re-init the db by overriding the db_url
        init_flask_db(pg_docker_fixture)
        contributor = get_piv_contributor(PIV_CONTRIBUTOR_ID)
        with PivWorker(contributor) as worker:
            worker.run()
class PivWorkerTest:
    """Context manager that runs a PivWorker in a background thread.

    On exit the PIV contributor is deleted, which makes the worker stop
    by itself; we then wait for the thread to die.
    """

    def __init__(self, test_client, broker_url, broker_connection, pg_docker_fixture):
        self.test_client = test_client
        self.broker_url = broker_url
        self.broker_connection = broker_connection
        self.pg_docker_fixture = pg_docker_fixture
        self.last_lock_update = datetime.now()

    def __enter__(self):
        # Launch a PivWorker
        self.thread = threading.Thread(target=launch_piv_worker, args=(self.pg_docker_fixture,))
        self.thread.start()
        wait_until(lambda: self.thread.is_alive())
        # Check that PivWorker is ready (a good hint is when queue is created)
        wait_until(lambda: is_queue_created(self.broker_connection, PIV_QUEUE_NAME))

    def __exit__(self, type, value, traceback):
        # Remove the contributor
        self.test_client.delete("/contributors/{}".format(PIV_CONTRIBUTOR_ID))
        # PivWorker should die eventually when no PIV contributors is available
        wait_until(lambda: not self.thread.is_alive())
def test_mq_message_received_and_stored(
    test_client, pg_docker_fixture, rabbitmq_docker_fixture, broker_connection, mq_handler
):
    # End-to-end check: a message published on the PIV exchange ends up
    # as a RealTimeUpdate row in the database.
    with PivWorkerTest(test_client, rabbitmq_docker_fixture.url, broker_connection, pg_docker_fixture):
        # Check that PivWorker is creating the queue
        wait_until(lambda: is_queue_created(broker_connection, PIV_QUEUE_NAME))
        # Check that MQ message is received and stored in DB
        mq_handler.publish(str('{"key": "Some valid JSON"}'), PIV_CONTRIBUTOR_ID)
        wait_until(lambda: RealTimeUpdate.query.count() == 1)
def test_redis_lock_update(
    test_client,
    pg_docker_fixture,
    rabbitmq_docker_fixture,
    broker_connection,
    mq_handler,
):
    # Verify the worker refreshes its redis lock while running: sleep past
    # a refresh interval, trigger activity, then check the debug log call.
    logger = logging.getLogger("kirin.command.piv_worker")
    with patch.object(logger, "debug") as mock_debug:
        with PivWorkerTest(test_client, rabbitmq_docker_fixture.url, broker_connection, pg_docker_fixture):
            # Check that PivWorker is creating the queue
            wait_until(lambda: is_queue_created(broker_connection, PIV_QUEUE_NAME))
            time.sleep((PIV_WORKER_REDIS_TIMEOUT_LOCK.total_seconds() // 5) + 1)
            mq_handler.publish(str('{"key": "Some valid JSON"}'), PIV_CONTRIBUTOR_ID)
            wait_until(lambda: RealTimeUpdate.query.count() == 1)
        # Check lock refreshed
        mock_debug.assert_any_call("lock {%s} updated", PIV_LOCK_NAME)
| agpl-3.0 |
class ParserError(Exception):
    """Base class for all errors raised by the Gherkin parser."""
    pass


class ParserException(ParserError):
    """A parser error annotated with the source location it occurred at."""

    def __init__(self, message, location):
        self.location = location
        line = location['line']
        # Tokens without an explicit column report column 0.
        column = location['column'] if 'column' in location else 0
        prefix = '(' + str(line) + ':' + str(column) + '): '
        super(ParserException, self).__init__(prefix + message)
class NoSuchLanguageException(ParserException):
    """Raised when a ``# language:`` header names an unsupported language."""
    def __init__(self, language, location):
        super(NoSuchLanguageException, self).__init__('Language not supported: ' + language,
                                                      location)
class AstBuilderException(ParserException):
    """Raised by the AST builder when tokens cannot form a valid node."""
    pass
class UnexpectedEOFException(ParserException):
    """Raised when the document ends while more tokens were expected."""
    def __init__(self, received_token, expected_token_types, state_comment):
        # ``state_comment`` is accepted for interface parity with
        # UnexpectedTokenException but is unused here.
        message = 'unexpected end of file, expected: ' + ', '.join(expected_token_types)
        super(UnexpectedEOFException, self).__init__(message, received_token.location)
class UnexpectedTokenException(ParserException):
    """Raised when the parser receives a token of an unexpected type."""
    def __init__(self, received_token, expected_token_types, state_comment):
        # ``state_comment`` is accepted for interface parity but unused.
        message = ("expected: " + ', '.join(expected_token_types) + ", got '" +
                   received_token.token_value().strip() + "'")
        column = received_token.location['column'] if 'column' in received_token.location else None
        # When the token carries no column (or column 0), point at the
        # first non-whitespace character of the line instead.
        location = (received_token.location if column else
                    {'line': received_token.location['line'],
                     'column': received_token.line.indent + 1})
        super(UnexpectedTokenException, self).__init__(message, location)
class CompositeParserException(ParserError):
    """Aggregates several parser errors into a single exception."""

    def __init__(self, errors):
        self.errors = errors
        messages = [error.args[0] for error in errors]
        super(CompositeParserException, self).__init__(
            "Parser errors:\n" + '\n'.join(messages))
| mit |
dxwu/BinderFilter | resources/android-toolchain-16/lib/python2.7/test/test_compare.py | 194 | 1488 | import unittest
from test import test_support
class Empty:
    # Old-style class with no comparison hooks: falls back to Python 2's
    # default, identity-based comparison.
    def __repr__(self):
        return '<Empty>'
class Coerce:
    # Old-style class implementing __coerce__ so comparisons are
    # performed on the wrapped value (Python 2 only).
    def __init__(self, arg):
        self.arg = arg

    def __repr__(self):
        return '<Coerce %s>' % self.arg

    def __coerce__(self, other):
        if isinstance(other, Coerce):
            return self.arg, other.arg
        else:
            return self.arg, other
class Cmp:
    # Old-style class implementing __cmp__ so comparisons delegate to the
    # wrapped value (Python 2 only).
    def __init__(self, arg):
        self.arg = arg

    def __repr__(self):
        return '<Cmp %s>' % self.arg

    def __cmp__(self, other):
        return cmp(self.arg, other)
class ComparisonTest(unittest.TestCase):
    # NOTE: Python 2 only (long literal ``2L`` and builtin ``cmp``).
    # Every member of set1 compares equal to 2 (via numeric coercion,
    # __coerce__ or __cmp__); set2 members are mutually distinct.
    set1 = [2, 2.0, 2L, 2+0j, Coerce(2), Cmp(2.0)]
    set2 = [[1], (3,), None, Empty()]
    candidates = set1 + set2

    def test_comparisons(self):
        # Two candidates are equal iff both come from set1 (all "2") or
        # they are the very same object.
        for a in self.candidates:
            for b in self.candidates:
                if ((a in self.set1) and (b in self.set1)) or a is b:
                    self.assertEqual(a, b)
                else:
                    self.assertNotEqual(a, b)

    def test_id_comparisons(self):
        # Ensure default comparison compares id() of args
        L = []
        for i in range(10):
            L.insert(len(L)//2, Empty())
        for a in L:
            for b in L:
                self.assertEqual(cmp(a, b), cmp(id(a), id(b)),
                                 'a=%r, b=%r' % (a, b))
def test_main():
    # Entry point used by Python 2's regrtest machinery.
    test_support.run_unittest(ComparisonTest)
# Allow direct execution of this test module.
if __name__ == '__main__':
    test_main()
| mit |
salguarnieri/intellij-community | python/lib/Lib/site-packages/django/shortcuts/__init__.py | 71 | 4248 | """
This module collects helper functions and classes that "span" multiple levels
of MVC. In other words, these functions/classes introduce controlled coupling
for convenience's sake.
"""
from django.template import loader, RequestContext
from django.http import HttpResponse, Http404
from django.http import HttpResponseRedirect, HttpResponsePermanentRedirect
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
from django.core import urlresolvers
def render_to_response(*args, **kwargs):
    """
    Returns a HttpResponse whose content is filled with the result of calling
    django.template.loader.render_to_string() with the passed arguments.
    """
    # Pull out the response-only keyword before forwarding the rest to
    # the template renderer.
    mimetype = kwargs.pop('mimetype', None)
    content = loader.render_to_string(*args, **kwargs)
    return HttpResponse(content, mimetype=mimetype)
def render(request, *args, **kwargs):
    """
    Returns a HttpResponse whose content is filled with the result of calling
    django.template.loader.render_to_string() with the passed arguments.
    Uses a RequestContext by default.
    """
    # Response-only keywords are stripped before forwarding to the
    # template renderer.
    httpresponse_kwargs = {
        'content_type': kwargs.pop('content_type', None),
        'status': kwargs.pop('status', None),
    }

    # Default to a RequestContext bound to *request* unless the caller
    # supplied an explicit context_instance.
    kwargs['context_instance'] = kwargs.get('context_instance', RequestContext(request))

    return HttpResponse(loader.render_to_string(*args, **kwargs),
                        **httpresponse_kwargs)
def redirect(to, *args, **kwargs):
    """
    Returns an HttpResponseRedirect to the apropriate URL for the arguments
    passed.

    The arguments could be:

        * A model: the model's `get_absolute_url()` function will be called.

        * A view name, possibly with arguments: `urlresolvers.reverse()` will
          be used to reverse-resolve the name.

        * A URL, which will be used as-is for the redirect location.

    By default issues a temporary redirect; pass permanent=True to issue a
    permanent redirect
    """
    if kwargs.pop('permanent', False):
        redirect_class = HttpResponsePermanentRedirect
    else:
        redirect_class = HttpResponseRedirect

    # If it's a model, use get_absolute_url()
    if hasattr(to, 'get_absolute_url'):
        return redirect_class(to.get_absolute_url())

    # Next try a reverse URL resolution.
    try:
        return redirect_class(urlresolvers.reverse(to, args=args, kwargs=kwargs))
    except urlresolvers.NoReverseMatch:
        # If this is a callable, re-raise.
        if callable(to):
            raise
        # If this doesn't "feel" like a URL, re-raise.
        if '/' not in to and '.' not in to:
            raise
    # Finally, fall back and assume it's a URL
    return redirect_class(to)
def _get_queryset(klass):
    """
    Returns a QuerySet from a Model, Manager, or QuerySet. Created to make
    get_object_or_404 and get_list_or_404 more DRY.
    """
    if isinstance(klass, QuerySet):
        return klass
    # A Manager is used directly; a Model contributes its default manager.
    manager = klass if isinstance(klass, Manager) else klass._default_manager
    return manager.all()
def get_object_or_404(klass, *args, **kwargs):
    """
    Uses get() to return an object, or raises a Http404 exception if the object
    does not exist.

    klass may be a Model, Manager, or QuerySet object. All other passed
    arguments and keyword arguments are used in the get() query.

    Note: Like with get(), an MultipleObjectsReturned will be raised if more than one
    object is found.
    """
    queryset = _get_queryset(klass)
    try:
        return queryset.get(*args, **kwargs)
    except queryset.model.DoesNotExist:
        # Translate the ORM's DoesNotExist into an HTTP 404.
        raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
def get_list_or_404(klass, *args, **kwargs):
    """
    Uses filter() to return a list of objects, or raise a Http404 exception if
    the list is empty.

    klass may be a Model, Manager, or QuerySet object. All other passed
    arguments and keyword arguments are used in the filter() query.
    """
    queryset = _get_queryset(klass)
    # Force evaluation so an empty result can be detected immediately.
    obj_list = list(queryset.filter(*args, **kwargs))
    if not obj_list:
        raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
    return obj_list
| apache-2.0 |
mountain213/neo4jworkshop | REST_clients/Python_client/requests/packages/chardet/sbcharsetprober.py | 2927 | 4793 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
from .compat import wrap_ord
# Number of character orders covered by the precedence matrix (it is a
# SAMPLE_SIZE x SAMPLE_SIZE table — see feed()).
SAMPLE_SIZE = 64
# Minimum number of observed sequences before shortcut decisions apply.
SB_ENOUGH_REL_THRESHOLD = 1024
# Confidence above which the prober declares an early win (eFoundIt).
POSITIVE_SHORTCUT_THRESHOLD = 0.95
# Confidence below which the prober gives up early (eNotMe).
NEGATIVE_SHORTCUT_THRESHOLD = 0.05
# Orders at or above this value are not counted as letters in feed().
SYMBOL_CAT_ORDER = 250
# Number of sequence likelihood categories in the precedence matrix.
NUMBER_OF_SEQ_CAT = 4
POSITIVE_CAT = NUMBER_OF_SEQ_CAT - 1
#NEGATIVE_CAT = 0
class SingleByteCharSetProber(CharSetProber):
    """Detects a single-byte charset by scoring adjacent-letter sequences
    against a per-charset frequency model."""

    def __init__(self, model, reversed=False, nameProber=None):
        CharSetProber.__init__(self)
        self._mModel = model
        # TRUE if we need to reverse every pair in the model lookup
        self._mReversed = reversed
        # Optional auxiliary prober for name decision
        self._mNameProber = nameProber
        self.reset()

    def reset(self):
        CharSetProber.reset(self)
        # char order of last character
        self._mLastOrder = 255
        self._mSeqCounters = [0] * NUMBER_OF_SEQ_CAT
        self._mTotalSeqs = 0
        self._mTotalChar = 0
        # characters that fall in our sampling range
        self._mFreqChar = 0

    def get_charset_name(self):
        # The name prober (when present) overrides the model's name.
        if self._mNameProber:
            return self._mNameProber.get_charset_name()
        else:
            return self._mModel['charsetName']

    def feed(self, aBuf):
        """Consume a chunk of bytes, updating sequence statistics and
        possibly settling the detection state early."""
        if not self._mModel['keepEnglishLetter']:
            aBuf = self.filter_without_english_letters(aBuf)
        aLen = len(aBuf)
        if not aLen:
            return self.get_state()
        for c in aBuf:
            # Map the byte to its frequency order in this charset.
            order = self._mModel['charToOrderMap'][wrap_ord(c)]
            if order < SYMBOL_CAT_ORDER:
                self._mTotalChar += 1
            if order < SAMPLE_SIZE:
                self._mFreqChar += 1
                if self._mLastOrder < SAMPLE_SIZE:
                    # Both characters of the pair are in the sampled
                    # range: score the pair against the precedence matrix.
                    self._mTotalSeqs += 1
                    if not self._mReversed:
                        i = (self._mLastOrder * SAMPLE_SIZE) + order
                        model = self._mModel['precedenceMatrix'][i]
                    else:  # reverse the order of the letters in the lookup
                        i = (order * SAMPLE_SIZE) + self._mLastOrder
                        model = self._mModel['precedenceMatrix'][i]
                    self._mSeqCounters[model] += 1
            self._mLastOrder = order

        if self.get_state() == constants.eDetecting:
            if self._mTotalSeqs > SB_ENOUGH_REL_THRESHOLD:
                # Enough evidence collected: settle early when confidence
                # is decisively high or low.
                cf = self.get_confidence()
                if cf > POSITIVE_SHORTCUT_THRESHOLD:
                    if constants._debug:
                        sys.stderr.write('%s confidence = %s, we have a'
                                         'winner\n' %
                                         (self._mModel['charsetName'], cf))
                    self._mState = constants.eFoundIt
                elif cf < NEGATIVE_SHORTCUT_THRESHOLD:
                    if constants._debug:
                        sys.stderr.write('%s confidence = %s, below negative'
                                         'shortcut threshhold %s\n' %
                                         (self._mModel['charsetName'], cf,
                                          NEGATIVE_SHORTCUT_THRESHOLD))
                    self._mState = constants.eNotMe

        return self.get_state()

    def get_confidence(self):
        # Ratio of "positive" sequences, normalized by the model's typical
        # positive ratio and scaled by the fraction of sampled characters.
        r = 0.01
        if self._mTotalSeqs > 0:
            r = ((1.0 * self._mSeqCounters[POSITIVE_CAT]) / self._mTotalSeqs
                 / self._mModel['mTypicalPositiveRatio'])
            r = r * self._mFreqChar / self._mTotalChar
        if r >= 1.0:
            # Cap below 1.0 so other probers can still compete.
            r = 0.99
        return r
| mit |
notnownikki/quatloo | quatloo/qanda/tests.py | 1 | 7688 | from django.test import TestCase, SimpleTestCase
from qanda.parser import parse_qa, ParserError
from qanda.models import Question, Answer
from qanda.factory import get_question, get_answer
class QAParserTestCase(SimpleTestCase):
    """Unit tests for parse_qa(): splitting raw text into the question,
    keywords, url and answer fields."""
    def _check(self, text, question, keywords, url, answer):
        """Parse *text* and assert every extracted field in one go."""
        self.assertEqual(
            parse_qa(text),
            {'question': question, 'keywords': keywords,
             'url': url, 'answer': answer})
    def test_extract_question_and_url(self):
        self._check(
            'How do I do the thing? http://learntodoathing.com/',
            'How do I do the thing?', '', 'http://learntodoathing.com/', '')
    def test_extract_question_and_answer(self):
        self._check(
            'How do I do the thing? You must believe you can do the thing.',
            'How do I do the thing?', '', '',
            'You must believe you can do the thing.')
    def test_extract_question_answer_url(self):
        self._check(
            'How do I do the thing? Believe you can! http://doathing.com/',
            'How do I do the thing?', '', 'http://doathing.com/',
            'Believe you can!')
    def test_questions_and_answers_can_talk_about_http(self):
        self._check(
            'How do I redirect from https to https? Just redirect from http to https. http://beggingthequestion.com/',
            'How do I redirect from https to https?', '',
            'http://beggingthequestion.com/',
            'Just redirect from http to https.')
    def test_keywords_are_added_to_the_question(self):
        self._check(
            'How do I change the theme? (themes, style, styles) Use our handy tool! https://example.com/',
            'How do I change the theme?', 'themes, style, styles',
            'https://example.com/', 'Use our handy tool!')
    def test_fields_stripped_of_whitespace(self):
        self._check(
            ' How do I do the thing ? Believe you can! http://doathing.com/ ',
            'How do I do the thing?', '', 'http://doathing.com/',
            'Believe you can!')
    def test_only_question(self):
        self._check(
            'How do I do the thing?', 'How do I do the thing?', '', '', '')
    def test_no_question(self):
        # Text with no question mark cannot be parsed at all.
        self.assertRaises(ParserError, parse_qa, 'I liek chocolate milk')
class QuestionFactoryTestCase(TestCase):
    """Behaviour of the get_question() factory."""
    def test_new_question(self):
        # An unknown question text creates a fresh Question row.
        created = get_question(
            question_txt='How do I do a thing?', keywords='stuff, things')
        self.assertEqual('How do I do a thing?', created.question)
        self.assertEqual('stuff, things', created.keywords)
    def test_update_question_keywords(self):
        # A known question text merges the new keywords into the old ones.
        Question.objects.create(
            question='How do I do a thing?', keywords='things, stuff')
        updated = get_question(
            question_txt='How do I do a thing?', keywords='jam, cakes')
        self.assertEqual('cakes, jam, stuff, things', updated.keywords)
class AnswerFactoryTestCase(TestCase):
    """Behaviour of the get_answer() factory: creating, updating and
    preserving Answer rows keyed on (question, url)."""
    def test_new_answer(self):
        # A fresh (question, url) pair creates a brand new Answer row.
        question = Question.objects.create(
            question='How do I do a thing?', keywords='things, stuff')
        answer = get_answer(
            question=question, url='http://example.com/',
            answer_txt='Here is an example.')
        self.assertEqual(answer.question, question)
        self.assertEqual(answer.url, 'http://example.com/')
        self.assertEqual(answer.answer, 'Here is an example.')
        self.assertEqual(Answer.objects.all().count(), 1)
    def test_new_answer_with_empty_url(self):
        # An empty URL is allowed and stored as-is.
        question = Question.objects.create(
            question='How do I do a thing?', keywords='things, stuff')
        answer = get_answer(
            question=question, answer_txt='Here is an example.', url='')
        self.assertEqual(answer.question, question)
        self.assertEqual(answer.url, '')
        self.assertEqual(answer.answer, 'Here is an example.')
        self.assertEqual(Answer.objects.all().count(), 1)
    def test_additional_answer(self):
        # A different URL for the same question adds a second Answer.
        question = Question.objects.create(
            question='How do I do a thing?', keywords='things, stuff')
        Answer.objects.create(
            question=question, url='http://example.com/',
            answer='Here is an example.')
        answer = get_answer(
            question=question, url='http://other-example.com/',
            answer_txt='Here is another example.')
        self.assertEqual(answer.question, question)
        self.assertEqual(answer.url, 'http://other-example.com/')
        self.assertEqual(answer.answer, 'Here is another example.')
        self.assertEqual(Answer.objects.all().count(), 2)
    def test_answer_text_updated(self):
        # Same (question, url): the existing row's text is replaced.
        question = Question.objects.create(
            question='How do I do a thing?', keywords='things, stuff')
        Answer.objects.create(
            question=question, url='http://example.com/',
            answer='Old answer')
        answer = get_answer(
            question=question, url='http://example.com/',
            answer_txt='Here is an example.')
        self.assertEqual(answer.question, question)
        self.assertEqual(answer.url, 'http://example.com/')
        self.assertEqual(answer.answer, 'Here is an example.')
        self.assertEqual(Answer.objects.all().count(), 1)
    def test_answer_text_not_updated_if_blank(self):
        # A blank answer_txt must not wipe out the stored text.
        question = Question.objects.create(
            question='How do I do a thing?', keywords='things, stuff')
        Answer.objects.create(
            question=question, url='http://example.com/',
            answer='Old answer')
        answer = get_answer(
            question=question, url='http://example.com/',
            answer_txt='')
        self.assertEqual(answer.question, question)
        self.assertEqual(answer.url, 'http://example.com/')
        self.assertEqual(answer.answer, 'Old answer')
        self.assertEqual(Answer.objects.all().count(), 1)
class QuestionMatchTestCase(SimpleTestCase):
    """
    We need to override Django's transaction handling to make sure
    the fulltext index is used when we insert test data.
    """
    # SimpleTestCase normally forbids DB access; opt back in without the
    # per-test transaction wrapping that TestCase would add.
    allow_database_queries = True
    def tearDown(self):
        super(QuestionMatchTestCase, self).tearDown()
        # No transaction rollback here, so remove inserted rows manually.
        Question.objects.all().delete()
    def test_match_against_question(self):
        # Words taken from the question text should produce a match.
        question = Question(
            question='How do I make a widget?',
            keywords='custom, widgets, easteregg')
        question.save()
        questions = Question.match('I want to make a widget.')
        self.assertEqual(1, len(list(questions)))
        self.assertEqual(question.id, questions[0].id)
    def test_match_against_keywords(self):
        # A keyword alone (not present in the question text) should match too.
        question = Question(
            question='How do I make a widget?',
            keywords='custom, widgets, easteregg')
        question.save()
        questions = Question.match('Show an easteregg please.')
        self.assertEqual(1, len(list(questions)))
        self.assertEqual(question.id, questions[0].id)
| gpl-3.0 |
googleads/google-ads-python | google/ads/googleads/v7/services/services/customer_client_link_service/transports/__init__.py | 2 | 1093 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import CustomerClientLinkServiceTransport
from .grpc import CustomerClientLinkServiceGrpcTransport
# Registry of available transport implementations, keyed by short name.
_transport_registry = OrderedDict(
    [("grpc", CustomerClientLinkServiceGrpcTransport)]
)  # type: Dict[str, Type[CustomerClientLinkServiceTransport]]
__all__ = (
    "CustomerClientLinkServiceTransport",
    "CustomerClientLinkServiceGrpcTransport",
)
| apache-2.0 |
AntonelliLab/seqcap_processor | bin/aTRAM-master/atram.py | 1 | 8421 | #!/usr/bin/env python3
"""
Start atram.
This wrapper module parses the input arguments and passes them to the module
that does the actual processing (core_atram.py).
"""
import os
import argparse
import textwrap
import lib.db as db
import lib.log as log
import lib.bio as bio
import lib.util as util
import lib.blast as blast
import lib.assembler as assembly
from lib.core_atram import assemble
def parse_command_line():
    """Process command-line arguments.

    Builds the argparse parser, parses sys.argv, then normalizes the
    resulting dict (defaults that depend on other options, PATH setup,
    program availability checks).  Returns the argument dict that drives
    the whole pipeline.
    """
    description = """
        This takes a query sequence and a blast database built with the
        atram_preprocessor.py script and builds assemblies.
        If you specify more than one query sequence and/or more than one blast
        database then aTRAM will build one assembly for each query/blast
        DB pair.
        NOTE: You may use a text file to hold the command-line arguments
        like: @/path/to/args.txt. This is particularly useful when specifying
        multiple blast databases or multiple query sequences.
        """
    parser = argparse.ArgumentParser(
        fromfile_prefix_chars='@',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=textwrap.dedent(description))
    parser.add_argument('--version', action='version',
                        version='%(prog)s {}'.format(db.ATRAM_VERSION))
    group = parser.add_argument_group('required arguments')
    group.add_argument(
        '-b', '--blast-db', '--sra', '--db', '--database',
        required=True, metavar='DB', nargs='+',
        help="""This needs to match the DB prefix you entered for
            atram_preprocessor.py. You may repeat this argument to run the
            --query sequence(s) against multiple blast databases.""")
    group.add_argument(
        '-q', '--query', '--target', '--probe', required=False, nargs='+',
        help="""The path to the fasta file with sequences of interest. You may
            repeat this argument. If you do then Each --query sequence file
            will be run against every --blast-db.""")
    group.add_argument(
        '-Q', '--query-split', '--target-split', required=False, nargs='+',
        help="""The path to the fasta file with multiple sequences of interest.
            This will take every sequence in the fasta file and treat it as if
            it were its own --query argument. So every sequence in
            --query-split will be run against every --blast-db.""")
    group.add_argument(
        '-o', '--output-prefix', required=True,
        help="""This is the prefix of all of the output files. So you can
            identify different blast output file sets. You may include a
            directory as part of the prefix. aTRAM will add suffixes to
            differentiate output files.""")
    group.add_argument(
        '-a', '--assembler', default='none',
        choices=['abyss', 'trinity', 'velvet', 'spades', 'none'],
        help="""Which assembler to use. Choosing "none" (the default) will do
            a single blast run and stop before any assembly.""")
    group.add_argument(
        '-i', '--iterations', type=int, default=5, metavar='N',
        help="""The number of pipeline iterations. The default is "5".""")
    group.add_argument(
        '-p', '--protein', action='store_true',
        help="""Are the query sequences protein? aTRAM will guess if you skip
            this argument.""")
    group.add_argument(
        '--fraction', type=float, default=1.0,
        help="""Use only the specified fraction of the aTRAM database. The
            default is 1.0.""")
    # Leave some CPUs free for the OS, capped at 10 workers.
    cpus = min(10, os.cpu_count() - 4 if os.cpu_count() > 4 else 1)
    group.add_argument(
        '--cpus', '--processes', '--max-processes', type=int, default=cpus,
        help="""Number of CPU processors to use. This will also be used for
            the assemblers when possible. We will use {} out of {} CPUs.
            """.format(cpus, os.cpu_count()))
    group.add_argument('--log-file', help="""Log file (full path)".""")
    group.add_argument(
        '--log-level', choices=['debug', 'info', 'error'], default='info',
        help="""Log messages of the given level (or above). 'debug' shows the
            most messages and 'error' shows the least. The default is
            'info'""")
    group.add_argument(
        '--path',
        help="""If the assembler or blast you want to use is not in your $PATH\
            then use this to prepend directories to your path.""")
    group.add_argument(
        '-t', '--temp-dir', metavar='DIR',
        help="""Place temporary files in this directory. All files will be
            deleted after aTRAM completes. The directory must exist.""")
    group.add_argument(
        '--keep-temp-dir', action='store_true',
        help="""This flag will keep the temporary files in the --temp-dir
            around for debugging.""")
    group.add_argument(
        '-T', '--timeout', metavar='SECONDS', default=600, type=int,
        help="""How many seconds to wait for an assembler or BLAST before
            stopping the run. To wait forever set this to 0. The default
            is "600" (10 minutes).""")
    group = parser.add_argument_group(
        'optional values for blast-filtering contigs')
    group.add_argument(
        '--no-filter', action='store_true',
        help="""Do not filter the assembled contigs. This will: set both the
            --bit-score and --contig-length to 0""")
    group.add_argument(
        '--bit-score', type=float, default=70.0, metavar='SCORE',
        help="""Remove contigs that have a value less than this. The default
            is "70.0". This is turned off by the --no-filter argument.""")
    group.add_argument(
        '--contig-length', '--length', type=int, default=100,
        help="""Remove blast hits that are shorter than this length. The
            default is "100". This is turned off by the --no-filter argument.
            """)
    # The blast and assembler modules register their own options.
    blast.command_line_args(parser)
    assembly.command_line_args(parser)
    args = vars(parser.parse_args())
    check_query_args(args)
    blast.check_args(args)
    # Set defaults and adjust arguments based on other arguments
    args['cov_cutoff'] = assembly.default_cov_cutoff(args['cov_cutoff'])
    args['blast_db'] = blast.touchup_blast_db_names(args['blast_db'])
    args['kmer'] = assembly.default_kmer(args['kmer'], args['assembler'])
    args['max_target_seqs'] = blast.default_max_target_seqs(
        args['max_target_seqs'], args['blast_db'], args['max_memory'])
    # Timeout: As always, None != 0
    args['timeout'] = max(0, args['timeout'])
    if not(args['timeout']):
        args['timeout'] = None
    setup_blast_args(args)
    set_protein_arg(args)
    setup_path_arg(args)
    find_programs(args)
    util.temp_dir_exists(args['temp_dir'], args.get('debug_dir'))
    util.set_blast_batch_size(args['batch_size'])
    return args
def setup_path_arg(args):
    """Prepend the user-supplied directory list to $PATH, if one was given."""
    extra = args['path']
    if extra:
        os.environ['PATH'] = extra + ':' + os.environ['PATH']
def setup_blast_args(args):
    """Zero out the contig-filter thresholds when --no-filter was given."""
    if args['no_filter']:
        args.update(bit_score=0, contig_length=0)
def check_query_args(args):
    """Abort with a fatal log message unless a query source was supplied."""
    has_query = args.get('query') or args.get('query_split')
    if not has_query:
        log.fatal(
            'You must have at least one --query or --query-split argument.')
def set_protein_arg(args):
    """When --protein was not given, guess it from the query fasta files."""
    if args['query'] and not args['protein']:
        args['protein'] = bio.fasta_file_has_protein(args['query'])
def find_programs(args):
    """Make sure we can find the programs needed by the assembler and blast.

    Each find_program call checks one external executable; the assembly
    checks are conditioned on which assembler (and options) were chosen.
    """
    # BLAST tools are always required.
    blast.find_program('makeblastdb')
    blast.find_program('tblastn')
    blast.find_program('blastn')
    # bwa is only needed by abyss when long reads are in play.
    assembly.find_program(
        'abyss', 'bwa', args['assembler'], not args['no_long_reads'])
    assembly.find_program('trinity', 'Trinity', args['assembler'])
    assembly.find_program(
        'trinity', 'Trinity', args['assembler'], args['bowtie2'])
    assembly.find_program('velvet', 'velveth', args['assembler'])
    assembly.find_program('velvet', 'velvetg', args['assembler'])
    assembly.find_program('spades', 'spades.py', args['assembler'])
if __name__ == '__main__':
    # Script entry point: parse/validate arguments, then run the pipeline.
    ARGS = parse_command_line()
    assemble(ARGS)
| mit |
jctanner/ansible | hacking/backport/backport_of_line_adder.py | 40 | 9977 | #!/usr/bin/env python
# (c) 2020, Red Hat, Inc. <relrod@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from github.PullRequest import PullRequest
from github import Github
import os
import re
import sys
# "user/repo#1234" shorthand references to a PR in any repository.
PULL_URL_RE = re.compile(r'(?P<user>\S+)/(?P<repo>\S+)#(?P<ticket>\d+)')
# Full http(s) github.com pull request URLs.
PULL_HTTP_URL_RE = re.compile(r'https?://(?:www\.|)github.com/(?P<user>\S+)/(?P<repo>\S+)/pull/(?P<ticket>\d+)')
# Backport hints in a PR title, e.g. "... (#123)" or "(backport of #123)".
PULL_BACKPORT_IN_TITLE = re.compile(r'.*\(#?(?P<ticket1>\d+)\)|\(backport of #?(?P<ticket2>\d+)\).*', re.I)
# "cherry picked from commit <hash>" lines (as added by `git cherry-pick -x`).
PULL_CHERRY_PICKED_FROM = re.compile(r'\(?cherry(?:\-| )picked from(?: ?commit|) (?P<hash>\w+)(?:\)|\.|$)')
# Bare "#1234" ticket references in free text.
TICKET_NUMBER = re.compile(r'(?:^|\s)#(\d+)')
def normalize_pr_url(pr, allow_non_ansible_ansible=False, only_number=False):
    '''
    Given a PullRequest, or a string containing a PR number, PR URL,
    or internal PR URL (e.g. ansible-collections/community.general#1234),
    return either a full github URL to the PR (if only_number is False),
    or an int containing the PR number (if only_number is True).

    Throws if it can't parse the input.
    '''
    # Already a PullRequest object: just use its URL.
    if isinstance(pr, PullRequest):
        return pr.html_url

    # A bare number is assumed to live in ansible/ansible.
    if pr.isnumeric():
        if only_number:
            return int(pr)
        return 'https://github.com/ansible/ansible/pull/{0}'.format(pr)

    # Allow for forcing ansible/ansible
    if not allow_non_ansible_ansible and 'ansible/ansible' not in pr:
        raise Exception('Non ansible/ansible repo given where not expected')

    # Full github.com URL: return it untouched (or just its number).
    http_match = PULL_HTTP_URL_RE.match(pr)
    if http_match:
        return int(http_match.group('ticket')) if only_number else pr

    # "user/repo#1234" shorthand: rebuild the canonical URL.
    short_match = PULL_URL_RE.match(pr)
    if short_match:
        if only_number:
            return int(short_match.group('ticket'))
        return 'https://github.com/{0}/{1}/pull/{2}'.format(
            short_match.group('user'),
            short_match.group('repo'),
            short_match.group('ticket'))

    raise Exception('Did not understand given PR')
def url_to_org_repo(url):
    '''
    Given a full Github PR URL, extract the user/org and repo name.
    Return them in the form: "user/repo" (empty string when not a PR URL).
    '''
    match = PULL_HTTP_URL_RE.match(url)
    if match is None:
        return ''
    return '{0}/{1}'.format(match.group('user'), match.group('repo'))
def generate_new_body(pr, source_pr):
    '''
    Build the replacement body text for a backport PR.

    When the body follows the usual ansible/ansible template, the
    "Backport of <source_pr>" line is inserted directly below the
    '##### SUMMARY'-style heading; otherwise it is appended at the very
    end.  Raises if the body already carries a backport reference.

    Pure function: returns the new body as a string without touching
    the PR itself.
    '''
    reference = '\nBackport of {0}\n'.format(source_pr)
    rebuilt = []
    inserted = False
    for line in pr.body.split('\n'):
        if 'Backport of http' in line:
            raise Exception('Already has a backport line, aborting.')
        rebuilt.append(line)
        is_summary_heading = (
            line.startswith('#') and line.strip().endswith('SUMMARY'))
        if is_summary_heading:
            rebuilt.append(reference)
            inserted = True
    if not inserted:
        # No SUMMARY heading found, so tack the reference onto the end.
        rebuilt.append(reference)
    return '\n'.join(rebuilt)
def get_prs_for_commit(g, commit):
    '''
    Given a commit hash, attempt to find the hash in any repo in the
    ansible orgs, and then use it to determine what, if any, PR it
    appeared in.  Returns a (possibly empty) list of pull requests.
    '''
    query = 'hash:{0} org:ansible org:ansible-collections is:public'.format(
        commit)
    commits = g.search_commits(query).get_page(0)
    if not commits:
        return []
    pulls = commits[0].get_pulls().get_page(0)
    if not pulls:
        return []
    return pulls
def search_backport(pr, g, ansible_ansible):
    '''
    Do magic. This is basically the "brain" of 'auto'.

    It will search the PR (the newest PR - the backport) and try to find
    where it originated.

    First it will search in the title. Some titles include things like
    "foo bar change (#12345)" or "foo bar change (backport of #54321)"
    so we search for those and pull them out.

    Next it will scan the body of the PR and look for:
      - cherry-pick reference lines (e.g. "cherry-picked from commit XXXXX")
      - other PRs (#nnnnnn) and (foo/bar#nnnnnnn)
      - full URLs to other PRs

    It will take all of the above, and return a list of "possibilities",
    which is a list of PullRequest objects.
    '''
    possibilities = []

    # 1. Try searching for it in the title.
    title_search = PULL_BACKPORT_IN_TITLE.match(pr.title)
    if title_search:
        ticket = title_search.group('ticket1') or title_search.group('ticket2')
        try:
            possibilities.append(ansible_ansible.get_pull(int(ticket)))
        except Exception:
            pass

    # 2. Search for clues in the body of the PR
    for line in pr.body.split('\n'):
        # a. Try searching for a `git cherry-pick` line
        cherrypick = PULL_CHERRY_PICKED_FROM.match(line)
        if cherrypick:
            possibilities.extend(
                get_prs_for_commit(g, cherrypick.group('hash')))
            continue

        # b. Try searching for other referenced PRs (by #nnnnn or full URL).
        # Normalize every hit to a (user, repo, ticket-number) tuple; the
        # URL regexes already produce 3-tuples from findall().
        tickets = [('ansible', 'ansible', num)
                   for num in TICKET_NUMBER.findall(line)]
        tickets.extend(PULL_HTTP_URL_RE.findall(line))
        tickets.extend(PULL_URL_RE.findall(line))
        for user, repo_name, number in tickets:
            # Is it a PR (even if not in ansible/ansible)?
            try:
                repo_path = '{0}/{1}'.format(user, repo_name)
                repo = ansible_ansible
                if repo_path != 'ansible/ansible':
                    repo = g.get_repo(repo_path)
                # BUG FIX: the previous code called int() on the whole
                # (user, repo, ticket) tuple, which always raised
                # TypeError and was silently swallowed below -- so body
                # references other than cherry-pick lines never matched.
                # Use the ticket-number element instead.
                possibilities.append(repo.get_pull(int(number)))
            except Exception:
                # Not a resolvable PR reference; ignore it.
                pass

    return possibilities
def prompt_add():
    '''
    Prompt the user and return whether or not they agree.
    An empty response counts as a "yes".
    '''
    answer = input('Shall I add the reference? [Y/n]: ').lower()
    return answer in ('', 'y', 'yes')
def commit_edit(new_pr, pr):
    '''
    Show the candidate originating PR, ask for confirmation, and -- if the
    user agrees -- rewrite the backport PR's body to reference it.

    This is the only step here that actually mutates the PR on GitHub.
    '''
    print('I think this PR might have come from:')
    print(pr.title)
    print('-' * 50)
    print(pr.html_url)
    if not prompt_add():
        return
    new_pr.edit(body=generate_new_body(new_pr, pr.html_url))
    print('I probably added the reference successfully.')
if __name__ == '__main__':
    # Usage: backport_of_line_adder.py <backport PR number> <source PR | auto>
    if (
            len(sys.argv) != 3 or
            not sys.argv[1].isnumeric()
    ):
        print('Usage: <new backport PR> <already merged PR, or "auto">')
        sys.exit(1)
    token = os.environ.get('GITHUB_TOKEN')
    if not token:
        print('Go to https://github.com/settings/tokens/new and generate a '
              'token with "repo" access, then set GITHUB_TOKEN to that token.')
        sys.exit(1)
    # https://github.com/settings/tokens/new
    g = Github(token)
    ansible_ansible = g.get_repo('ansible/ansible')
    # The first argument is always the backport PR in ansible/ansible.
    try:
        pr_num = normalize_pr_url(sys.argv[1], only_number=True)
        new_pr = ansible_ansible.get_pull(pr_num)
    except Exception:
        print('Could not load PR {0}'.format(sys.argv[1]))
        sys.exit(1)
    if sys.argv[2] == 'auto':
        # Let search_backport() guess the originating PR.
        print('Trying to find originating PR...')
        possibilities = search_backport(new_pr, g, ansible_ansible)
        if not possibilities:
            print('No match found, manual review required.')
            sys.exit(1)
        # TODO: Logic above can return multiple possibilities/guesses, but we
        # only handle one here. We can cycle/prompt through them or something.
        # For now, use the first match, which is also the most likely
        # candidate.
        pr = possibilities[0]
        commit_edit(new_pr, pr)
    else:
        # The user named the source PR explicitly; it may live in any repo.
        try:
            # TODO: Fix having to call this twice to save some regex evals
            pr_num = normalize_pr_url(sys.argv[2], only_number=True, allow_non_ansible_ansible=True)
            pr_url = normalize_pr_url(sys.argv[2], allow_non_ansible_ansible=True)
            pr_repo = g.get_repo(url_to_org_repo(pr_url))
            pr = pr_repo.get_pull(pr_num)
        except Exception as e:
            print(e)
            print('Could not load PR {0}'.format(sys.argv[2]))
            sys.exit(1)
        commit_edit(new_pr, pr)
| gpl-3.0 |
argivaitv/argivaitv | repo/plugin.video.phstreams/resources/lib/sources/xmovies_mv_tv.py | 7 | 6858 | # -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,json,random
from resources.lib.libraries import cleantitle
from resources.lib.libraries import cloudflare
from resources.lib.libraries import client
from resources.lib.resolvers import googleplus
class source:
    """Genesis stream source for megashare9.tv / xmovies8.tv.

    Locates movie and TV-episode pages through a Google custom-search
    index and then extracts stream URLs from the site's picasa player
    endpoint.  All methods return None (or an empty list) on failure.
    """
    def __init__(self):
        # Two mirrors of the same site; one is picked at random per request.
        self.base_link_1 = 'http://megashare9.tv'
        self.base_link_2 = 'http://xmovies8.tv'
        # Google custom search scoped to the site's own index.
        self.search_link = 'https://www.googleapis.com/customsearch/v1element?key=AIzaSyCVAXiUzRYsML1Pv6RwSG1gunmMikTzQqY&rsz=filtered_cse&num=10&hl=en&cx=010516920160860608720:7uiuzaiwcfg&googlehost=www.google.com&q=%s'
        self.headers = {'X-Requested-With': 'XMLHttpRequest'}
        # Endpoint that returns the player markup for a post id.
        self.player_link = '/lib/picasa.php'
        # POST bodies: movie, series part-list, and a single series part.
        self.player_post_1 = 'mx=%s&isseries=0&part=0'
        self.player_post_2 = 'mx=%s&isseries=1&part=0'
        self.player_post_3 = 'mx=%s&isseries=1&part=%s'
    def get_movie(self, imdb, title, year):
        """Return a site-relative URL for the movie page, or None."""
        try:
            query = self.search_link % (urllib.quote_plus(title))
            result = client.source(query)
            result = json.loads(result)
            result = result['results']
            title = cleantitle.movie(title)
            # Accept the requested year plus/minus one year.
            years = ['%s' % str(year), '%s' % str(int(year)+1), '%s' % str(int(year)-1)]
            result = [(i['url'], i['titleNoFormatting']) for i in result]
            result = [i for i in result if any(x in i[0] for x in years) or any(x in i[1] for x in years)]
            # Strip "Watch ..." prefixes, keeping the "<title> (<year>)" part.
            result = [(i[0], re.compile('(^Watch Full "|^Watch |)(.+? [(]\d{4}[)])').findall(i[1])) for i in result]
            result = [(i[0], i[1][0][-1]) for i in result if len(i[1]) > 0]
            result = [i[0] for i in result if title == cleantitle.movie(i[1])][0]
            # Reduce an absolute URL to a site-relative path when possible.
            try: url = re.compile('//.+?(/.+)').findall(result)[0]
            except: url = result
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
    def get_show(self, imdb, tvdb, tvshowtitle, year):
        """Encode show identity as "<title> (<year>)"; resolved later by
        get_episode."""
        try:
            url = '%s (%s)' % (tvshowtitle, year)
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
    def get_episode(self, url, imdb, tvdb, title, date, season, episode):
        """Find the season page and append a ?SxxEyy marker that
        get_sources later uses to pick the right part."""
        try:
            tvshowtitle, year = re.compile('(.+?) [(](\d{4})[)]$').findall(url)[0]
            season, episode = '%01d' % int(season), '%01d' % int(episode)
            query = '%s season %s' % (tvshowtitle, season)
            query = self.search_link % (urllib.quote_plus(query))
            result = client.source(query)
            result = json.loads(result)
            result = result['results']
            tvshowtitle = cleantitle.tv(tvshowtitle)
            years = ['%s' % str(year), '%s' % str(int(year)+1), '%s' % str(int(year)-1)]
            result = [(i['url'], i['titleNoFormatting']) for i in result]
            result = [(i[0], re.compile('(^Watch Full "|^Watch |)(.+?[(]\d{4}[)])').findall(i[1])) for i in result]
            result = [(i[0], i[1][0][-1].lower()) for i in result if len(i[1]) > 0]
            result = [(i[0], re.compile('(.+) season (\d+)\s*[(](\d{4})[)]').findall(i[1])) for i in result]
            result = [(i[0], cleantitle.tv(i[1][0][0]), i[1][0][1], i[1][0][2]) for i in result if len(i[1]) > 0]
            result = [i for i in result if tvshowtitle == cleantitle.tv(i[1])]
            result = [i for i in result if season == i[2]]
            # Back-calculate the show's start year from "season N (YYYY)".
            result = [(i[0], i[1], str(int(i[3]) - int(i[2]) + 1)) for i in result]
            result = [i[0] for i in result if any(x in i[2] for x in years)][0]
            result += '?S%02dE%02d' % (int(season), int(episode))
            try: url = re.compile('//.+?(/.+)').findall(result)[0]
            except: url = result
            url = client.replaceHTMLCodes(url)
            url = url.encode('utf-8')
            return url
        except:
            return
    def get_sources(self, url, hosthdDict, hostDict, locDict):
        """Query the picasa player endpoint and collect stream entries."""
        try:
            sources = []
            if url == None: return sources
            # A trailing ?SxxEyy marker means this URL is a TV episode.
            content = re.compile('(.+?)\?S\d*E\d*$').findall(url)
            try: url, season, episode = re.compile('(.+?)\?S(\d*)E(\d*)$').findall(url)[0]
            except: pass
            self.base_link = random.choice([self.base_link_1, self.base_link_2])
            post_id = re.compile('/.+?/(.+)').findall(url)[0].rsplit('/')[0]
            player = urlparse.urljoin(self.base_link, self.player_link)
            if len(content) == 0:
                # Movie: a single part.
                post = self.player_post_1 % post_id
            else:
                # Episode: fetch the part list, find the matching part id.
                post = cloudflare.source(player, post=self.player_post_2 % post_id, headers=self.headers)
                post = client.parseDOM(post, 'ul', attrs = {'class': 'movie-parts'})[0]
                post = client.parseDOM(post, 'li')
                post = [(client.parseDOM(i, 'a', ret='href'), client.parseDOM(i, 'a')) for i in post]
                post = [(i[0][0], i[1][0]) for i in post if len(i[0]) > 0 and len(i[1]) > 0]
                post = [i[0] for i in post if '%01d' % int(episode) == i[1]][0]
                post = urlparse.parse_qs(urlparse.urlparse(post).query)['part_id'][0]
                post = self.player_post_3 % (post_id, post)
            url = cloudflare.source(player, post=post, headers=self.headers)
            # Grab the <source src="..."> URL from the player markup.
            url = re.compile('<source\s+src="([^"]+)').findall(url)[0]
            url = client.replaceHTMLCodes(url)
            if 'google' in url: quality = googleplus.tag(url)[0]['quality']
            else: quality = 'HD'
            sources.append({'source': 'GVideo', 'quality': quality, 'provider': 'Xmovies', 'url': url})
            return sources
        except:
            return sources
    def resolve(self, url):
        """Follow redirects to the final stream URL and normalize scheme."""
        try:
            if url.startswith('stack://'): return url
            url = client.request(url, output='geturl')
            # Honor the host's SSL requirement flag in the URL.
            if 'requiressl=yes' in url: url = url.replace('http://', 'https://')
            else: url = url.replace('https://', 'http://')
            return url
        except:
            return
| gpl-2.0 |
AmericanResearchInstitute/poweru-server | pr_services/user_system/organization_email_domain_manager.py | 1 | 1902 | """
OrgEmailDomain manager class
@author Chris Church <cchurch@americanri.com>
@copyright Copyright 2011 American Research Institute, Inc.
"""
from pr_services.object_manager import ObjectManager
from pr_services.rpc.service import service_method
import facade
class OrgEmailDomainManager(ObjectManager):
    """
    Manage mappings between email domain and automatic organization and role
    assignment.
    """
    def __init__(self):
        """Register attribute accessors and bind the Django model."""
        ObjectManager.__init__(self)
        self.getters['email_domain'] = 'get_general'
        self.getters['organization'] = 'get_foreign_key'
        self.getters['role'] = 'get_foreign_key'
        self.getters['effective_role'] = 'get_foreign_key'
        self.getters['effective_role_name'] = 'get_general'
        self.setters['email_domain'] = 'set_general'
        self.setters['organization'] = 'set_foreign_key'
        self.setters['role'] = 'set_foreign_key'
        self.my_django_model = facade.models.OrgEmailDomain
    @service_method
    def create(self, auth_token, email_domain, organization, role=None):
        """
        Create a new OrgEmailDomain mapping

        @param email_domain     domain name to look for in user's email address
        @param organization     organization to be assigned
        @param role             role to be assigned within organization

        @return                 a reference to the newly created OrgEmailDomain
        """
        org = self._find_by_id(organization, facade.models.Organization)
        org_role = None
        if role:
            org_role = self._find_by_id(role, facade.models.OrgRole)
        mapping = self.my_django_model.objects.create(
            email_domain=email_domain, organization=org, role=org_role)
        # Verify the caller is allowed to create this mapping.
        self.authorizer.check_create_permissions(auth_token, mapping)
        return mapping
# vim:tabstop=4 shiftwidth=4 expandtab
| bsd-3-clause |
ldirer/scikit-learn | doc/sphinxext/sphinx_issues.py | 44 | 4076 | # -*- coding: utf-8 -*-
"""A Sphinx extension for linking to your project's issue tracker.
Copyright 2014 Steven Loria
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from docutils import nodes, utils
from sphinx.util.nodes import split_explicit_title
__version__ = '0.2.0'
__author__ = 'Steven Loria'
__license__ = 'MIT'
def user_role(name, rawtext, text, lineno,
              inliner, options=None, content=None):
    """Sphinx role for linking to a user profile. Defaults to linking to
    Github profiles, but the profile URIS can be configured via the
    ``issues_user_uri`` config value.

    Example: ::

        :user:`sloria`
    """
    options = options or {}
    content = content or []

    # Support the ``:user:`Title <target>``` explicit-title form.
    has_explicit_title, title, target = split_explicit_title(text)
    target = utils.unescape(target).strip()
    title = utils.unescape(title).strip()

    config = inliner.document.settings.env.app.config
    if config.issues_user_uri:
        ref = config.issues_user_uri.format(user=target)
    else:
        ref = 'https://github.com/{0}'.format(target)

    link_text = title if has_explicit_title else '@{0}'.format(target)
    link = nodes.reference(text=link_text, refuri=ref, **options)
    return [link], []
def _make_issue_node(issue_no, config, options=None):
    """Build a reference node linking to a single issue.

    Args:
        issue_no: The issue number as a string. The placeholders ``'-'``
            and ``'0'`` produce no link (``None`` is returned).
        config: The Sphinx config object; ``issues_uri`` (a format string
            with an ``{issue}`` placeholder) takes precedence over
            ``issues_github_path`` (a ``user/repo`` shorthand).
        options: Optional node options passed through to
            ``nodes.reference``.

    Returns:
        A ``docutils`` reference node, or ``None`` for placeholder values.

    Raises:
        ValueError: Neither ``issues_uri`` nor ``issues_github_path`` is
            configured. (Previously this fell through with ``ref`` unbound
            and crashed with a ``NameError`` instead.)
    """
    options = options or {}
    if issue_no in ('-', '0'):
        return None

    if config.issues_uri:
        ref = config.issues_uri.format(issue=issue_no)
    elif config.issues_github_path:
        ref = 'https://github.com/{0}/issues/{1}'.format(
            config.issues_github_path, issue_no
        )
    else:
        raise ValueError(
            'Neither issues_uri nor issues_github_path is configured')

    issue_text = '#{0}'.format(issue_no)
    return nodes.reference(text=issue_text, refuri=ref, **options)
def issue_role(name, rawtext, text, lineno,
               inliner, options=None, content=None):
    """Sphinx role for linking to an issue. Must have
    `issues_uri` or `issues_github_path` configured in ``conf.py``.

    Examples: ::

        :issue:`123`
        :issue:`42,45`
    """
    options = options or {}
    content = content or []

    # A single role may reference several comma-separated issues.
    issue_nos = [part.strip() for part in utils.unescape(text).split(',')]
    config = inliner.document.settings.env.app.config

    ret = []
    last_index = len(issue_nos) - 1
    for index, issue_no in enumerate(issue_nos):
        ret.append(_make_issue_node(issue_no, config, options=options))
        if index != last_index:
            # Join consecutive issue links with a literal ", ".
            ret.append(nodes.raw(text=', ', format='html'))

    return ret, []
def setup(app):
    """Register the extension's config values and roles with Sphinx.

    Config values:

    * ``issues_uri`` -- format template for issue URIs, e.g.
      ``'https://github.com/sloria/marshmallow/issues/{issue}'``
    * ``issues_github_path`` -- Github shorthand, e.g. ``'sloria/marshmallow'``
    * ``issues_user_uri`` -- format template for user profile URIs, e.g.
      ``'https://github.com/{user}'``
    """
    for config_name in ('issues_uri',
                        'issues_github_path',
                        'issues_user_uri'):
        app.add_config_value(config_name, default=None, rebuild='html')

    app.add_role('issue', issue_role)
    app.add_role('user', user_role)
| bsd-3-clause |
davidt/reviewboard | reviewboard/reviews/detail.py | 2 | 27814 | """Definitions for the review request detail view."""
from __future__ import unicode_literals
from collections import Counter, defaultdict
from datetime import datetime
from django.db.models import Q
from django.template.loader import render_to_string
from django.utils import six
from django.utils.timezone import utc
from django.utils.translation import ugettext as _
from reviewboard.reviews.builtin_fields import ReviewRequestPageDataMixin
from reviewboard.reviews.features import status_updates_feature
from reviewboard.reviews.fields import get_review_request_fieldsets
from reviewboard.reviews.models import (BaseComment,
Comment,
FileAttachmentComment,
GeneralComment,
ReviewRequest,
ScreenshotComment,
StatusUpdate)
class ReviewRequestPageData(object):
    """Data for the review request page.

    The review request detail page needs a lot of data from the database, and
    going through the standard model relations will result in a lot more
    queries than necessary. This class bundles all that data together and
    handles pre-fetching and re-associating as necessary to limit the required
    number of queries.

    All of the attributes within the class may not be available until both
    :py:meth:`query_data_pre_etag` and :py:meth:`query_data_post_etag` are
    called.

    This object is not meant to be public API, and may change at any time. You
    should not use it in extension code.

    Attributes:
        body_bottom_replies (dict):
            A mapping from a top-level review ID to a list of the
            :py:class:`~reviewboard.reviews.models.Review` objects which reply
            to its body-bottom text.

        body_top_replies (dict):
            A mapping from a top-level review ID to a list of the
            :py:class:`~reviewboard.reviews.models.Review` objects which reply
            to its body-top text.

        comments (list):
            A list of all comments associated with all reviews shown on the
            page.

        changedescs (list of reviewboard.changedescs.models.ChangeDescription):
            All the change descriptions to be shown on the page.

        diffsets (list of reviewboard.diffviewer.models.DiffSet):
            All of the diffsets associated with the review request.

        diffsets_by_id (dict):
            A mapping from ID to
            :py:class:`~reviewboard.diffviewer.models.DiffSet`.

        draft (reviewboard.reviews.models.ReviewRequestDraft):
            The active draft of the review request, if any. May be ``None``.

        active_file_attachments (list of
                                 reviewboard.attachments.models.FileAttachment):
            All the active file attachments associated with the review
            request.

        all_file_attachments (list of
                              reviewboard.attachments.models.FileAttachment):
            All the file attachments (active and inactive) associated with
            the review request.

        file_attachments_by_id (dict):
            A mapping from ID to
            :py:class:`~reviewboard.attachments.models.FileAttachment`.

        issues (list of reviewboard.reviews.models.BaseComment):
            A list of all the comments (of all types) which are marked as
            issues.

        issue_counts (dict):
            A dictionary storing counts of the various issue states
            throughout the page.

        latest_changedesc_timestamp (datetime.datetime):
            The timestamp of the most recent change description on the page.

        latest_review_timestamp (datetime.datetime):
            The timestamp of the most recent review on the page.

        latest_timestamps_by_review_id (dict):
            A mapping from top-level review ID to the latest timestamp of the
            thread.

        review_request (reviewboard.reviews.models.ReviewRequest):
            The review request.

        review_request_details (reviewboard.reviews.models.
                                base_review_request_details.
                                BaseReviewRequestDetails):
            The review request (or the active draft thereof). In practice
            this will either be a
            :py:class:`~reviewboard.reviews.models.ReviewRequest` or a
            :py:class:`~reviewboard.reviews.models.ReviewRequestDraft`.

        reviews (list of reviewboard.reviews.models.Review):
            All the reviews to be shown on the page. This includes any draft
            reviews owned by the requesting user but not drafts owned by
            others.

        reviews_by_id (dict):
            A mapping from ID to
            :py:class:`~reviewboard.reviews.models.Review`.

        active_screenshots (list of reviewboard.reviews.models.Screenshot):
            All the active screenshots associated with the review request.

        all_screenshots (list of reviewboard.reviews.models.Screenshot):
            All the screenshots (active and inactive) associated with the
            review request.

        screenshots_by_id (dict):
            A mapping from ID to
            :py:class:`~reviewboard.reviews.models.Screenshot`.
    """

    def __init__(self, review_request, request):
        """Initialize the data object.

        Args:
            review_request (reviewboard.reviews.models.ReviewRequest):
                The review request.

            request (django.http.HttpRequest):
                The HTTP request object.
        """
        self.review_request = review_request
        self.request = request

    def query_data_pre_etag(self):
        """Perform initial queries for the page.

        This method will populate only the data needed to compute the ETag. We
        avoid everything else until later so as to do the minimum amount
        possible before reporting to the client that they can just use their
        cached copy.
        """
        # Query for all the reviews that should be shown on the page (either
        # ones which are public or draft reviews owned by the current user).
        reviews_query = Q(public=True)

        if self.request.user.is_authenticated():
            reviews_query |= Q(user_id=self.request.user.pk)

        self.reviews = list(
            self.review_request.reviews
            .filter(reviews_query)
            .order_by('-timestamp')
            .select_related('user')
        )

        # Reviews are sorted newest-first, so element 0 (when present) holds
        # the most recent timestamp. The epoch is used as a "no reviews"
        # sentinel so ETag math always has a valid datetime.
        if len(self.reviews) == 0:
            self.latest_review_timestamp = datetime.fromtimestamp(0, utc)
        else:
            self.latest_review_timestamp = self.reviews[0].timestamp

        # Get all the public ChangeDescriptions.
        self.changedescs = list(
            self.review_request.changedescs.filter(public=True))

        if len(self.changedescs) == 0:
            self.latest_changedesc_timestamp = datetime.fromtimestamp(0, utc)
        else:
            self.latest_changedesc_timestamp = self.changedescs[0].timestamp

        # Get the active draft (if any).
        self.draft = self.review_request.get_draft(self.request.user)

        # Get diffsets.
        self.diffsets = self.review_request.get_diffsets()
        self.diffsets_by_id = self._build_id_map(self.diffsets)

    def query_data_post_etag(self):
        """Perform remaining queries for the page.

        This method will populate everything else needed for the display of the
        review request page other than that which was required to compute the
        ETag.
        """
        self.reviews_by_id = self._build_id_map(self.reviews)

        self.body_top_replies = defaultdict(list)
        self.body_bottom_replies = defaultdict(list)
        self.latest_timestamps_by_review_id = {}

        for r in self.reviews:
            r._body_top_replies = []
            r._body_bottom_replies = []

            if r.body_top_reply_to_id is not None:
                self.body_top_replies[r.body_top_reply_to_id].append(r)

            if r.body_bottom_reply_to_id is not None:
                self.body_bottom_replies[r.body_bottom_reply_to_id].append(r)

            # Find the latest reply timestamp for each top-level review.
            parent_id = r.base_reply_to_id

            if parent_id is not None:
                new_timestamp = r.timestamp.replace(tzinfo=utc)

                if parent_id in self.latest_timestamps_by_review_id:
                    old_timestamp = \
                        self.latest_timestamps_by_review_id[parent_id]

                    if old_timestamp < new_timestamp:
                        self.latest_timestamps_by_review_id[parent_id] = \
                            new_timestamp
                else:
                    self.latest_timestamps_by_review_id[parent_id] = \
                        new_timestamp

        # Link up all the review body replies. Since self.reviews is ordered
        # newest-first, reversing each reply list here presumably yields
        # chronological (oldest-first) order for display -- TODO confirm.
        for reply_id, replies in six.iteritems(self.body_top_replies):
            self.reviews_by_id[reply_id]._body_top_replies = reversed(replies)

        for reply_id, replies in six.iteritems(self.body_bottom_replies):
            self.reviews_by_id[reply_id]._body_bottom_replies = \
                reversed(replies)

        self.review_request_details = self.draft or self.review_request

        # Get all the file attachments and screenshots.
        #
        # Note that we fetch both active and inactive file attachments and
        # screenshots. We do this because even though they've been removed,
        # they still will be rendered in change descriptions.
        self.active_file_attachments = \
            list(self.review_request_details.get_file_attachments())
        self.all_file_attachments = (
            self.active_file_attachments +
            list(self.review_request_details.get_inactive_file_attachments()))
        self.file_attachments_by_id = \
            self._build_id_map(self.all_file_attachments)

        for attachment in self.all_file_attachments:
            attachment._comments = []

        self.active_screenshots = \
            list(self.review_request_details.get_screenshots())
        self.all_screenshots = (
            self.active_screenshots +
            list(self.review_request_details.get_inactive_screenshots()))
        self.screenshots_by_id = self._build_id_map(self.all_screenshots)

        for screenshot in self.all_screenshots:
            screenshot._comments = []

        review_ids = self.reviews_by_id.keys()

        # Get all status updates. Note that self.status_updates is only set
        # when the status-updates feature is enabled.
        if status_updates_feature.is_enabled(request=self.request):
            self.status_updates = list(
                self.review_request.status_updates.all()
                .select_related('review'))

        self.comments = []
        self.issues = []
        self.issue_counts = {
            'total': 0,
            'open': 0,
            'resolved': 0,
            'dropped': 0,
        }

        for model, key, ordering in (
            (Comment, 'diff_comments', ('comment__filediff',
                                        'comment__first_line',
                                        'comment__timestamp')),
            (ScreenshotComment, 'screenshot_comments', None),
            (FileAttachmentComment, 'file_attachment_comments', None),
            (GeneralComment, 'general_comments', None)):
            # Due to mistakes in how we initially made the schema, we have a
            # ManyToManyField in between comments and reviews, instead of
            # comments having a ForeignKey to the review. This makes it
            # difficult to easily go from a comment to a review ID.
            #
            # The solution to this is to not query the comment objects, but
            # rather the through table. This will let us grab the review and
            # comment in one go, using select_related.
            related_field = model.review.related.field
            comment_field_name = related_field.m2m_reverse_field_name()
            through = related_field.rel.through
            q = through.objects.filter(review__in=review_ids).select_related()

            if ordering:
                q = q.order_by(*ordering)

            objs = list(q)

            # We do two passes. One to build a mapping, and one to actually
            # process comments.
            comment_map = {}

            for obj in objs:
                comment = getattr(obj, comment_field_name)
                comment._type = key
                comment._replies = []
                comment_map[comment.pk] = comment

            for obj in objs:
                comment = getattr(obj, comment_field_name)

                self.comments.append(comment)

                # Short-circuit some object fetches for the comment by setting
                # some internal state on them.
                assert obj.review_id in self.reviews_by_id
                review = self.reviews_by_id[obj.review_id]
                comment.review_obj = review
                comment._review_request = self.review_request

                # If the comment has an associated object (such as a file
                # attachment) that we've already fetched, attach it to prevent
                # future queries.
                if isinstance(comment, FileAttachmentComment):
                    attachment_id = comment.file_attachment_id
                    f = self.file_attachments_by_id[attachment_id]
                    comment.file_attachment = f
                    f._comments.append(comment)

                    diff_against_id = comment.diff_against_file_attachment_id

                    if diff_against_id is not None:
                        f = self.file_attachments_by_id[diff_against_id]
                        comment.diff_against_file_attachment = f
                elif isinstance(comment, ScreenshotComment):
                    screenshot = self.screenshots_by_id[comment.screenshot_id]
                    comment.screenshot = screenshot
                    screenshot._comments.append(comment)

                # We've hit legacy database cases where there were entries that
                # weren't a reply, and were just orphaned. Ignore them.
                if review.is_reply() and comment.is_reply():
                    replied_comment = comment_map[comment.reply_to_id]
                    replied_comment._replies.append(comment)

                if review.public and comment.issue_opened:
                    status_key = \
                        comment.issue_status_to_string(comment.issue_status)
                    self.issue_counts[status_key] += 1
                    self.issue_counts['total'] += 1
                    self.issues.append(comment)

    def _build_id_map(self, objects):
        """Return an ID map from a list of objects.

        Args:
            objects (list):
                A list of objects queried via django.

        Returns:
            dict:
            A dictionary mapping each ID to the resulting object.
        """
        return {
            obj.pk: obj
            for obj in objects
        }
class BaseReviewRequestPageEntry(object):
    """An entry on the review detail page.

    This contains backend logic and frontend templates for one of the boxes
    that appears below the main review request box on the review request
    detail page.

    Attributes:
        timestamp (datetime.datetime):
            The timestamp of the entry.

        collapsed (bool):
            Whether the entry should be initially collapsed.
    """

    #: The template to render for the HTML.
    template_name = None

    #: The template to render for any JavaScript.
    js_template_name = None

    def __init__(self, timestamp, collapsed):
        """Initialize the entry.

        Args:
            timestamp (datetime.datetime):
                The timestamp of the entry.

            collapsed (bool):
                Whether the entry is collapsed by default.
        """
        self.timestamp = timestamp
        self.collapsed = collapsed

    def finalize(self):
        """Perform final computations after all comments have been added.

        The base implementation does nothing; subclasses override this to
        compute any state that depends on the full set of comments.
        """
        pass
class StatusUpdatesEntryMixin(object):
    """A mixin for any entries which can include status updates.

    This provides common functionality for the two entries that include status
    updates (the initial status updates entry and change description entries).

    Attributes:
        status_updates (list of reviewboard.reviews.models.StatusUpdate):
            The status updates in this entry.

        status_updates_by_review (dict):
            A mapping from review ID to the matching status update.
    """

    def __init__(self):
        """Initialize the entry."""
        self.status_updates = []
        self.status_updates_by_review = {}

    def add_update(self, update):
        """Add a status update to the entry.

        This annotates the update with per-type comment buckets, a header
        CSS class derived from its effective state, and pre-rendered summary
        HTML.

        Args:
            update (reviewboard.reviews.models.StatusUpdate):
                The status update to add.

        Raises:
            ValueError:
                The update's effective state is not one of the known states.
        """
        self.status_updates.append(update)
        self.status_updates_by_review[update.review_id] = update

        update.comments = {
            'diff_comments': [],
            'screenshot_comments': [],
            'file_attachment_comments': [],
            'general_comments': [],
        }

        state = update.effective_state

        # Map the effective state to the CSS class used for the update's
        # header.
        if state in (StatusUpdate.DONE_FAILURE,
                     StatusUpdate.ERROR,
                     StatusUpdate.TIMEOUT):
            update.header_class = 'status-update-state-failure'
        elif state == StatusUpdate.PENDING:
            update.header_class = 'status-update-state-pending'
        elif state == StatusUpdate.DONE_SUCCESS:
            update.header_class = 'status-update-state-success'
        else:
            raise ValueError('Unexpected state "%s"' % state)

        # A timed-out update has no meaningful description of its own.
        if state == StatusUpdate.TIMEOUT:
            description = _('timed out.')
        else:
            description = update.description

        update.summary_html = render_to_string(
            'reviews/status_update_summary.html',
            {
                'description': description,
                'header_class': update.header_class,
                'summary': update.summary,
                'url': update.url,
                'url_text': update.url_text,
            })

    def add_comment(self, comment_type, comment):
        """Add a comment to the entry.

        This will associate the comment with the correct status update.

        Args:
            comment_type (unicode):
                The type of comment (an index into the :py:attr:`comments`
                dictionary).

            comment (reviewboard.reviews.models.BaseComment):
                The comment to add.
        """
        update = self.status_updates_by_review[comment.review_obj.pk]
        update.comments[comment_type].append(comment)

    def finalize(self):
        """Perform final computations after all comments have been added.

        This tallies the updates by effective state and builds the
        human-readable state summary (e.g. "1 failed, 2 succeeded") along
        with the summary's CSS class.
        """
        self.state_counts = Counter()

        for update in self.status_updates:
            self.state_counts[update.effective_state] += 1

        summary_parts = []

        if self.state_counts[StatusUpdate.DONE_FAILURE] > 0:
            summary_parts.append(
                _('%s failed') % self.state_counts[StatusUpdate.DONE_FAILURE])

        if self.state_counts[StatusUpdate.DONE_SUCCESS] > 0:
            summary_parts.append(
                _('%s succeeded')
                % self.state_counts[StatusUpdate.DONE_SUCCESS])

        if self.state_counts[StatusUpdate.PENDING] > 0:
            summary_parts.append(
                _('%s pending') % self.state_counts[StatusUpdate.PENDING])

        if self.state_counts[StatusUpdate.ERROR] > 0:
            # Bug fix: this previously interpolated the PENDING count here,
            # reporting the wrong number of errored updates.
            summary_parts.append(
                _('%s failed with error')
                % self.state_counts[StatusUpdate.ERROR])

        if self.state_counts[StatusUpdate.TIMEOUT] > 0:
            summary_parts.append(
                _('%s timed out')
                % self.state_counts[StatusUpdate.TIMEOUT])

        # Failures of any kind dominate the summary class, followed by
        # pending, then success.
        if (self.state_counts[StatusUpdate.DONE_FAILURE] > 0 or
            self.state_counts[StatusUpdate.ERROR] > 0 or
            self.state_counts[StatusUpdate.TIMEOUT] > 0):
            self.state_summary_class = 'status-update-state-failure'
        elif self.state_counts[StatusUpdate.PENDING]:
            self.state_summary_class = 'status-update-state-pending'
        elif self.state_counts[StatusUpdate.DONE_SUCCESS]:
            self.state_summary_class = 'status-update-state-success'

        self.state_summary = ', '.join(summary_parts)
class InitialStatusUpdatesEntry(StatusUpdatesEntryMixin,
                                BaseReviewRequestPageEntry):
    """An entry for any status updates posted against the initial state.

    :py:class:`~reviewboard.reviews.models.StatusUpdate` reviews (those
    created by automated tools like static analysis checkers or CI systems)
    are shown separately from ordinary reviews. When status updates are
    related to a
    :py:class:`~reviewboard.changedescs.models.ChangeDescription`, they're
    displayed within the change description box. Otherwise, they're shown in
    their own box (immediately under the review request box), which is
    handled by this class.
    """

    template_name = 'reviews/boxes/initial_status_updates.html'
    js_template_name = 'reviews/boxes/initial_status_updates.js'

    def __init__(self, review_request, collapsed, data):
        """Initialize the entry.

        Args:
            review_request (reviewboard.reviews.models.ReviewRequest):
                The review request that the change is for.

            collapsed (bool):
                Whether the entry is collapsed by default.

            data (ReviewRequestPageData):
                Pre-queried data for the review request page.
        """
        # The entry's timestamp is the review request's creation time, since
        # these updates apply to the initial state.
        StatusUpdatesEntryMixin.__init__(self)
        BaseReviewRequestPageEntry.__init__(self, review_request.time_added,
                                            collapsed)

    @property
    def has_content(self):
        """Whether there are any items to display in the entry.

        Returns:
            bool:
            True if there are any initial status updates to display.
        """
        return bool(self.status_updates)
class ReviewEntry(BaseReviewRequestPageEntry):
    """A review box.

    Attributes:
        review (reviewboard.reviews.models.Review):
            The review for this entry.

        issue_open_count (int):
            The count of open issues within this review.

        has_issues (bool):
            Whether there are any issues (open or not).

        comments (dict):
            A dictionary of comments. Each key in this represents a comment
            type, and the values are lists of comment objects.
    """

    template_name = 'reviews/boxes/review.html'
    js_template_name = 'reviews/boxes/review.js'

    def __init__(self, request, review_request, review, collapsed, data):
        """Initialize the entry.

        Args:
            request (django.http.HttpRequest):
                The request object.

            review_request (reviewboard.reviews.models.ReviewRequest):
                The review request that the change is for.

            review (reviewboard.reviews.models.Review):
                The review.

            collapsed (bool):
                Whether the entry is collapsed by default.

            data (ReviewRequestPageData):
                Pre-queried data for the review request page.
        """
        super(ReviewEntry, self).__init__(review.timestamp, collapsed)

        self.request = request
        self.review_request = review_request
        self.review = review
        self.issue_open_count = 0
        self.has_issues = False
        self.comments = {
            key: []
            for key in ('diff_comments',
                        'screenshot_comments',
                        'file_attachment_comments',
                        'general_comments')
        }

    def add_comment(self, comment_type, comment):
        """Add a comment to this entry.

        Args:
            comment_type (unicode):
                The type of comment (an index into the :py:attr:`comments`
                dictionary).

            comment (reviewboard.reviews.models.BaseComment):
                The comment to add.
        """
        self.comments[comment_type].append(comment)

        if not comment.issue_opened:
            return

        self.has_issues = True

        if comment.issue_status == BaseComment.OPEN:
            self.issue_open_count += 1

            # Make sure the submitter always sees reviews that still have
            # open issues.
            if self.review_request.submitter == self.request.user:
                self.collapsed = False
class ChangeEntry(StatusUpdatesEntryMixin, BaseReviewRequestPageEntry):
    """A change description box.

    Attributes:
        changedesc (reviewboard.changedescs.models.ChangeDescription):
            The change description for this entry.
    """

    template_name = 'reviews/boxes/change.html'
    js_template_name = 'reviews/boxes/change.js'

    def __init__(self, request, review_request, changedesc, collapsed, data):
        """Initialize the entry.

        Args:
            request (django.http.HttpRequest):
                The request object.

            review_request (reviewboard.reviews.models.ReviewRequest):
                The review request that the change is for.

            changedesc (reviewboard.changedescs.models.ChangeDescription):
                The change description for this entry.

            collapsed (bool):
                Whether the entry is collapsed by default.

            data (ReviewRequestPageData):
                Pre-queried data for the review request page.
        """
        BaseReviewRequestPageEntry.__init__(self, changedesc.timestamp,
                                            collapsed)

        # The status-update attributes are only initialized when the feature
        # is enabled; with the feature off this entry carries no updates.
        if status_updates_feature.is_enabled(request=request):
            StatusUpdatesEntryMixin.__init__(self)

        self.changedesc = changedesc
        self.fields_changed_groups = []
        cur_field_changed_group = None

        # See if there was a review request status change.
        status_change = changedesc.fields_changed.get('status')

        if status_change:
            assert 'new' in status_change
            self.new_status = ReviewRequest.status_to_string(
                status_change['new'][0])
        else:
            self.new_status = None

        # Process the list of fields, in order by fieldset. These will be
        # put into groups composed of inline vs. full-width field values,
        # for render into the box.
        fieldsets = get_review_request_fieldsets(
            include_main=True,
            include_change_entries_only=True)

        for fieldset in fieldsets:
            for field_cls in fieldset.field_classes:
                field_id = field_cls.field_id

                if field_id not in changedesc.fields_changed:
                    continue

                inline = field_cls.change_entry_renders_inline

                # Consecutive fields with the same rendering mode (inline
                # vs. full-width) are grouped together; a mode switch starts
                # a new group.
                if (not cur_field_changed_group or
                    cur_field_changed_group['inline'] != inline):
                    # Begin a new group of fields.
                    cur_field_changed_group = {
                        'inline': inline,
                        'fields': [],
                    }
                    self.fields_changed_groups.append(cur_field_changed_group)

                # Fields that opt into page-data access get the pre-queried
                # data object to avoid redundant lookups.
                if issubclass(field_cls, ReviewRequestPageDataMixin):
                    field = field_cls(review_request, request=request,
                                      data=data)
                else:
                    field = field_cls(review_request, request=request)

                cur_field_changed_group['fields'] += \
                    field.get_change_entry_sections_html(
                        changedesc.fields_changed[field_id])
| mit |
tornadozou/tensorflow | tensorflow/contrib/distributions/python/kernel_tests/distribution_util_test.py | 9 | 13289 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for utility functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import numpy as np
from tensorflow.contrib.distributions.python.ops import distribution_util
from tensorflow.contrib.linalg.python.ops import linear_operator_diag
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
import tensorflow.python.ops.nn_grad # pylint: disable=unused-import
from tensorflow.python.platform import test
def _powerset(x):
s = list(x)
return itertools.chain.from_iterable(
itertools.combinations(s, r) for r in range(len(s) + 1))
def _matrix_diag(d):
"""Batch version of np.diag."""
orig_shape = d.shape
d = np.reshape(d, (int(np.prod(d.shape[:-1])), d.shape[-1]))
diag_list = []
for i in range(d.shape[0]):
diag_list.append(np.diag(d[i, ...]))
return np.reshape(diag_list, orig_shape + (d.shape[-1],))
def _make_tril_scale(
loc=None,
scale_tril=None,
scale_diag=None,
scale_identity_multiplier=None,
shape_hint=None):
if scale_tril is not None:
scale_tril = np.tril(scale_tril)
if scale_diag is not None:
scale_tril += _matrix_diag(np.array(scale_diag, dtype=np.float32))
if scale_identity_multiplier is not None:
scale_tril += (
scale_identity_multiplier * _matrix_diag(np.ones(
[scale_tril.shape[-1]], dtype=np.float32)))
return scale_tril
return _make_diag_scale(
loc, scale_diag, scale_identity_multiplier, shape_hint)
def _make_diag_scale(
loc=None,
scale_diag=None,
scale_identity_multiplier=None,
shape_hint=None):
if scale_diag is not None:
scale_diag = np.asarray(scale_diag)
if scale_identity_multiplier is not None:
scale_diag += scale_identity_multiplier
return _matrix_diag(scale_diag)
if loc is None and shape_hint is None:
return None
if shape_hint is None:
shape_hint = loc.shape[-1]
if scale_identity_multiplier is None:
scale_identity_multiplier = 1.
return scale_identity_multiplier * np.diag(np.ones(shape_hint))
class MakeTrilScaleTest(test.TestCase):
  """Tests distribution_util.make_tril_scale against the NumPy reference."""

  def _testLegalInputs(
      self, loc=None, shape_hint=None, scale_params=None):
    # Exercise every subset of the scale parameters; when no shape
    # information can be derived, a ValueError is expected instead.
    for args in _powerset(scale_params.items()):
      with self.test_session():
        args = dict(args)
        scale_args = dict({
            "loc": loc,
            "shape_hint": shape_hint}, **args)
        expected_scale = _make_tril_scale(**scale_args)
        if expected_scale is None:
          # Not enough shape information was specified.
          with self.assertRaisesRegexp(ValueError, ("is specified.")):
            scale = distribution_util.make_tril_scale(**scale_args)
            scale.to_dense().eval()
        else:
          scale = distribution_util.make_tril_scale(**scale_args)
          self.assertAllClose(expected_scale, scale.to_dense().eval())

  def testLegalInputs(self):
    # Scalar batch: 2x2 event matrices.
    self._testLegalInputs(
        loc=np.array([-1., -1.], dtype=np.float32),
        shape_hint=2,
        scale_params={
            "scale_identity_multiplier": 2.,
            "scale_diag": [2., 3.],
            "scale_tril": [[1., 0.],
                           [-3., 3.]],
        })

  def testLegalInputsMultidimensional(self):
    # Batched inputs: leading dimensions must broadcast correctly.
    self._testLegalInputs(
        loc=np.array([[[-1., -1., 2.], [-2., -3., 4.]]], dtype=np.float32),
        shape_hint=3,
        scale_params={
            "scale_identity_multiplier": 2.,
            "scale_diag": [[[2., 3., 4.], [3., 4., 5.]]],
            "scale_tril": [[[[1., 0., 0.],
                             [-3., 3., 0.],
                             [1., -2., 1.]],
                            [[2., 1., 0.],
                             [-4., 7., 0.],
                             [1., -1., 1.]]]]
        })

  def testZeroTriU(self):
    # Entries above the diagonal must be discarded, not incorporated.
    with self.test_session():
      scale = distribution_util.make_tril_scale(scale_tril=[[1., 1], [1., 1.]])
      self.assertAllClose([[1., 0], [1., 1.]], scale.to_dense().eval())

  def testValidateArgs(self):
    # A zero on the diagonal makes the operator singular and must be
    # rejected when validate_args is enabled.
    with self.test_session():
      with self.assertRaisesOpError("diagonal part must be non-zero"):
        scale = distribution_util.make_tril_scale(
            scale_tril=[[0., 1], [1., 1.]], validate_args=True)
        scale.to_dense().eval()

  def testAssertPositive(self):
    # assert_positive tightens the check from non-zero to strictly positive.
    with self.test_session():
      with self.assertRaisesOpError("diagonal part must be positive"):
        scale = distribution_util.make_tril_scale(
            scale_tril=[[-1., 1], [1., 1.]],
            validate_args=True,
            assert_positive=True)
        scale.to_dense().eval()
class MakeDiagScaleTest(test.TestCase):
  """Tests distribution_util.make_diag_scale against the NumPy reference."""

  def _testLegalInputs(
      self, loc=None, shape_hint=None, scale_params=None):
    # Exercise every subset of the scale parameters; when no shape
    # information can be derived, a ValueError is expected instead.
    for args in _powerset(scale_params.items()):
      with self.test_session():
        args = dict(args)
        scale_args = dict({
            "loc": loc,
            "shape_hint": shape_hint}, **args)
        expected_scale = _make_diag_scale(**scale_args)
        if expected_scale is None:
          # Not enough shape information was specified.
          with self.assertRaisesRegexp(ValueError, ("is specified.")):
            scale = distribution_util.make_diag_scale(**scale_args)
            scale.to_dense().eval()
        else:
          scale = distribution_util.make_diag_scale(**scale_args)
          self.assertAllClose(expected_scale, scale.to_dense().eval())

  def testLegalInputs(self):
    # Scalar batch: 2x2 diagonal event matrices.
    self._testLegalInputs(
        loc=np.array([-1., -1.], dtype=np.float32),
        shape_hint=2,
        scale_params={
            "scale_identity_multiplier": 2.,
            "scale_diag": [2., 3.]
        })

  def testLegalInputsMultidimensional(self):
    # Batched inputs: leading dimensions must broadcast correctly.
    self._testLegalInputs(
        loc=np.array([[[-1., -1., 2.], [-2., -3., 4.]]], dtype=np.float32),
        shape_hint=3,
        scale_params={
            "scale_identity_multiplier": 2.,
            "scale_diag": [[[2., 3., 4.], [3., 4., 5.]]]
        })

  def testValidateArgs(self):
    # A zero on the diagonal makes the operator singular and must be
    # rejected when validate_args is enabled.
    with self.test_session():
      with self.assertRaisesOpError("diagonal part must be non-zero"):
        scale = distribution_util.make_diag_scale(
            scale_diag=[[0., 1], [1., 1.]], validate_args=True)
        scale.to_dense().eval()

  def testAssertPositive(self):
    # assert_positive tightens the check from non-zero to strictly positive.
    with self.test_session():
      with self.assertRaisesOpError("diagonal part must be positive"):
        scale = distribution_util.make_diag_scale(
            scale_diag=[[-1., 1], [1., 1.]],
            validate_args=True,
            assert_positive=True)
        scale.to_dense().eval()
class ShapesFromLocAndScaleTest(test.TestCase):
  """Tests for `distribution_util.shapes_from_loc_and_scale`.

  Covers every combination of statically / dynamically shaped (or absent)
  `loc` against statically / dynamically shaped `scale`.
  """

  def test_static_loc_static_scale_non_matching_event_size_raises(self):
    mu = constant_op.constant(np.zeros((2, 4)))
    sigma = linear_operator_diag.LinearOperatorDiag(np.ones((5, 1, 3)))
    # Event sizes 4 (from loc) and 3 (from scale) cannot be reconciled.
    with self.assertRaisesRegexp(ValueError, "could not be broadcast"):
      distribution_util.shapes_from_loc_and_scale(mu, sigma)

  def test_static_loc_static_scale(self):
    mu = constant_op.constant(np.zeros((2, 3)))
    sigma = linear_operator_diag.LinearOperatorDiag(np.ones((5, 1, 3)))
    batch, event = distribution_util.shapes_from_loc_and_scale(mu, sigma)
    # Batch shape is [2] broadcast against [5, 1]; event shape is [3].
    self.assertEqual(tensor_shape.TensorShape([5, 2]), batch)
    self.assertEqual(tensor_shape.TensorShape([3]), event)

  def test_static_loc_dynamic_scale(self):
    mu = constant_op.constant(np.zeros((2, 3)))
    diag_ph = array_ops.placeholder(dtypes.float64)
    sigma = linear_operator_diag.LinearOperatorDiag(diag_ph)
    with self.test_session() as sess:
      batch, event = sess.run(
          distribution_util.shapes_from_loc_and_scale(mu, sigma),
          feed_dict={diag_ph: np.ones((5, 1, 3))})
      self.assertAllEqual([5, 2], batch)
      self.assertAllEqual([3], event)

  def test_dynamic_loc_static_scale(self):
    mu = array_ops.placeholder(dtypes.float64)
    sigma = linear_operator_diag.LinearOperatorDiag(
        constant_op.constant(np.ones((5, 2, 3))))
    with self.test_session():
      batch, event = distribution_util.shapes_from_loc_and_scale(mu, sigma)
      # batch_shape depends on both args, and so is dynamic. Since loc did
      # not have static shape, event shape is inferred entirely from scale
      # and is therefore available statically.
      self.assertAllEqual(
          [5, 2], batch.eval(feed_dict={mu: np.zeros((2, 3))}))
      self.assertAllEqual([3], event)

  def test_dynamic_loc_dynamic_scale(self):
    mu = array_ops.placeholder(dtypes.float64)
    diag_ph = array_ops.placeholder(dtypes.float64)
    sigma = linear_operator_diag.LinearOperatorDiag(diag_ph)
    with self.test_session() as sess:
      batch, event = sess.run(
          distribution_util.shapes_from_loc_and_scale(mu, sigma),
          feed_dict={diag_ph: np.ones((5, 2, 3)), mu: np.zeros((2, 3))})
      self.assertAllEqual([5, 2], batch)
      self.assertAllEqual([3], event)

  def test_none_loc_static_scale(self):
    sigma = linear_operator_diag.LinearOperatorDiag(np.ones((5, 1, 3)))
    batch, event = distribution_util.shapes_from_loc_and_scale(
        None, sigma)
    # With no loc, both shapes come straight from scale.
    self.assertEqual(tensor_shape.TensorShape([5, 1]), batch)
    self.assertEqual(tensor_shape.TensorShape([3]), event)

  def test_none_loc_dynamic_scale(self):
    diag_ph = array_ops.placeholder(dtypes.float64)
    sigma = linear_operator_diag.LinearOperatorDiag(diag_ph)
    with self.test_session() as sess:
      batch, event = sess.run(
          distribution_util.shapes_from_loc_and_scale(None, sigma),
          feed_dict={diag_ph: np.ones((5, 1, 3))})
      self.assertAllEqual([5, 1], batch)
      self.assertAllEqual([3], event)
class TridiagTest(test.TestCase):
  """Tests for `distribution_util.tridiag`."""

  def testWorksCorrectlyNoBatches(self):
    """Sub-, main- and super-diagonals assemble into one 4x4 matrix."""
    with self.test_session():
      below = [1., 2., 3.]
      diag = [4., 5., 6., 7.]
      above = [8., 9., 10.]
      expected = [[4., 8., 0., 0.],
                  [1., 5., 9., 0.],
                  [0., 2., 6., 10.],
                  [0., 0., 3, 7.]]
      self.assertAllEqual(
          expected,
          distribution_util.tridiag(below, diag, above).eval())

  def testWorksCorrectlyBatches(self):
    """A leading batch dimension produces a batch of tridiagonal matrices."""
    with self.test_session():
      below = [[1., 2., 3.],
               [0.8, 0.9, 1.]]
      diag = [[4., 5., 6., 7.],
              [0.7, 0.6, 0.5, 0.4]]
      above = [[8., 9., 10.],
               [0.1, 0.2, 0.3]]
      expected = [[[4., 8., 0., 0.],
                   [1., 5., 9., 0.],
                   [0., 2., 6., 10.],
                   [0., 0., 3, 7.]],
                  [[0.7, 0.1, 0.0, 0.0],
                   [0.8, 0.6, 0.2, 0.0],
                   [0.0, 0.9, 0.5, 0.3],
                   [0.0, 0.0, 1.0, 0.4]]]
      self.assertAllClose(
          expected,
          distribution_util.tridiag(below, diag, above).eval(),
          rtol=1e-5, atol=0.)

  def testHandlesNone(self):
    """Omitted off-diagonals default to zero, yielding diagonal matrices."""
    with self.test_session():
      expected = [[[4., 0., 0., 0.],
                   [0., 5., 0., 0.],
                   [0., 0., 6., 0.],
                   [0., 0., 0, 7.]],
                  [[0.7, 0.0, 0.0, 0.0],
                   [0.0, 0.6, 0.0, 0.0],
                   [0.0, 0.0, 0.5, 0.0],
                   [0.0, 0.0, 0.0, 0.4]]]
      self.assertAllClose(
          expected,
          distribution_util.tridiag(
              diag=[[4., 5., 6., 7.],
                    [0.7, 0.6, 0.5, 0.4]]).eval(),
          rtol=1e-5, atol=0.)
class MixtureStddevTest(test.TestCase):
  """Tests for `distribution_util.mixture_stddev`."""

  def test_mixture_dev(self):
    weights = np.array([[1.0/3, 1.0/3, 1.0/3],
                        [0.750, 0.250, 0.000]])
    means = np.array([[1.0, 1.0, 1.0],
                      [-5, 0, 1.25]])
    devs = np.array([[1.0, 1.0, 1.0],
                     [0.01, 2.0, 0.1]])
    # Row 0: all components are identical with stddev 1.0, so the mixture
    # trivially has stddev 1.0.  Row 1 was computed by hand.
    expected_devs = np.array([1.0, 2.3848637277])
    mix_dev = distribution_util.mixture_stddev(
        array_ops.constant(weights),
        array_ops.constant(means),
        array_ops.constant(devs))
    with self.test_session() as sess:
      self.assertAllClose(sess.run(mix_dev), expected_devs)
if __name__ == "__main__":
  # Run all TestCase classes in this module via the TF test runner.
  test.main()
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.