docstring
stringlengths 52
499
| function
stringlengths 67
35.2k
| __index_level_0__
int64 52.6k
1.16M
|
|---|---|---|
Retrieves the number of seconds from a FAT date time.
Args:
fat_date_time (int): FAT date time.
Returns:
int: number of seconds since January 1, 1980 00:00:00.
Raises:
ValueError: if the month, day of month, hours, minutes or seconds
value is out of bounds.
|
def _GetNumberOfSeconds(self, fat_date_time):
    """Retrieves the number of seconds from a FAT date time.

    Args:
        fat_date_time (int): FAT date time.

    Returns:
        int: number of seconds since January 1, 1980 00:00:00.

    Raises:
        ValueError: if the month, day of month, hours, minutes or seconds
            value is out of bounds.
    """
    # FAT date (lower 16 bits): bits 0-4 day of month, bits 5-8 month,
    # bits 9-15 number of years since 1980.
    day_of_month = (fat_date_time & 0x1f)
    month = ((fat_date_time >> 5) & 0x0f)
    year = (fat_date_time >> 9) & 0x7f

    # Use the full calendar year so leap years are determined correctly;
    # elsewhere in this module the same helper is called with full years.
    days_per_month = self._GetDaysPerMonth(1980 + year, month)
    if day_of_month < 1 or day_of_month > days_per_month:
        raise ValueError('Day of month value out of bounds.')

    number_of_days = self._GetDayOfYear(1980 + year, month, day_of_month)
    number_of_days -= 1
    for past_year in range(0, year):
        # Offset by the 1980 epoch so the leap-year rule is applied to the
        # actual calendar year (e.g. 2080 is a leap year, year 100 is not).
        number_of_days += self._GetNumberOfDaysInYear(1980 + past_year)

    # FAT time (upper 16 bits): bits 0-4 seconds / 2, bits 5-10 minutes,
    # bits 11-15 hours.
    fat_date_time >>= 16

    seconds = (fat_date_time & 0x1f) * 2
    minutes = (fat_date_time >> 5) & 0x3f
    hours = (fat_date_time >> 11) & 0x1f

    if hours not in range(0, 24):
        raise ValueError('Hours value out of bounds.')

    if minutes not in range(0, 60):
        raise ValueError('Minutes value out of bounds.')

    # seconds is stored as a 2-second count, so encoded values 30 and 31
    # yield 60 and 62 here and are rejected.
    if seconds not in range(0, 60):
        raise ValueError('Seconds value out of bounds.')

    number_of_seconds = (((hours * 60) + minutes) * 60) + seconds
    number_of_seconds += number_of_days * definitions.SECONDS_PER_DAY
    return number_of_seconds
| 786,579
|
Initializes a FILETIME timestamp.
Args:
timestamp (Optional[int]): FILETIME timestamp.
|
def __init__(self, timestamp=None):
    """Initializes a FILETIME timestamp.

    Args:
        timestamp (Optional[int]): FILETIME timestamp.
    """
    super(Filetime, self).__init__()
    self._timestamp = timestamp
    self._precision = definitions.PRECISION_100_NANOSECONDS
| 786,591
|
Initializes an OLE Automation date.
Args:
timestamp (Optional[float]): OLE Automation date.
|
def __init__(self, timestamp=None):
    """Initializes an OLE Automation date.

    Args:
        timestamp (Optional[float]): OLE Automation date.
    """
    super(OLEAutomationDate, self).__init__()
    self._timestamp = timestamp
    self._precision = definitions.PRECISION_1_MICROSECOND
| 786,594
|
Initializes an UUID version 1 timestamp.
Args:
timestamp (Optional[int]): UUID version 1 timestamp.
Raises:
ValueError: if the UUID version 1 timestamp is invalid.
|
def __init__(self, timestamp=None):
    """Initializes an UUID version 1 timestamp.

    Args:
        timestamp (Optional[int]): UUID version 1 timestamp.

    Raises:
        ValueError: if the UUID version 1 timestamp is invalid.
    """
    # Only validate truthy values; None (and 0, which is valid) pass through.
    if timestamp and not 0 <= timestamp <= self._UINT60_MAX:
        raise ValueError('Invalid UUID version 1 timestamp.')

    super(UUIDTime, self).__init__()
    self._precision = definitions.PRECISION_100_NANOSECONDS
    self._timestamp = timestamp
| 786,595
|
Copies the number of microseconds to a fraction of second value.
Args:
microseconds (int): number of microseconds.
Returns:
decimal.Decimal: fraction of second, which must be a value between 0.0 and
1.0.
Raises:
ValueError: if the number of microseconds is out of bounds.
|
def CopyMicrosecondsToFractionOfSecond(cls, microseconds):
    """Copies the number of microseconds to a fraction of second value.

    Args:
        microseconds (int): number of microseconds.

    Returns:
        decimal.Decimal: fraction of second, which must be a value between
            0.0 and 1.0, at millisecond precision.

    Raises:
        ValueError: if the number of microseconds is out of bounds.
    """
    if not 0 <= microseconds < definitions.MICROSECONDS_PER_SECOND:
        raise ValueError(
            'Number of microseconds value: {0:d} out of bounds.'.format(
                microseconds))

    # Truncate to whole milliseconds; sub-millisecond digits are dropped.
    milliseconds = microseconds // definitions.MICROSECONDS_PER_MILLISECOND
    return decimal.Decimal(milliseconds) / definitions.MILLISECONDS_PER_SECOND
| 786,605
|
Copies the number of microseconds to a fraction of second value.
Args:
microseconds (int): number of microseconds.
Returns:
decimal.Decimal: fraction of second, which must be a value between 0.0 and
1.0.
Raises:
ValueError: if the number of microseconds is out of bounds.
|
def CopyMicrosecondsToFractionOfSecond(cls, microseconds):
    """Copies the number of microseconds to a fraction of second value.

    Args:
        microseconds (int): number of microseconds.

    Returns:
        decimal.Decimal: fraction of second, which must be a value between
            0.0 and 1.0, at microsecond precision.

    Raises:
        ValueError: if the number of microseconds is out of bounds.
    """
    if not 0 <= microseconds < definitions.MICROSECONDS_PER_SECOND:
        raise ValueError(
            'Number of microseconds value: {0:d} out of bounds.'.format(
                microseconds))

    return decimal.Decimal(microseconds) / definitions.MICROSECONDS_PER_SECOND
| 786,607
|
Creates a precision helper.
Args:
precision (str): precision of the date and time value, which should
be one of the PRECISION_VALUES in definitions.
Returns:
class: date time precision helper class.
Raises:
ValueError: if the precision value is unsupported.
|
def CreatePrecisionHelper(cls, precision):
    """Creates a precision helper.

    Args:
        precision (str): precision of the date and time value, which should
            be one of the PRECISION_VALUES in definitions.

    Returns:
        class: date time precision helper class.

    Raises:
        ValueError: if the precision value is unsupported.
    """
    helper_class = cls._PRECISION_CLASSES.get(precision, None)
    if helper_class is None:
        raise ValueError('Unsupported precision: {0!s}'.format(precision))
    return helper_class
| 786,609
|
Copies a APFS timestamp from a date and time string.
Args:
time_string (str): date and time value formatted as:
YYYY-MM-DD hh:mm:ss.######[+-]##:##
Where # are numeric digits ranging from 0 to 9 and the seconds
fraction can be either 3 or 6 digits. The time of day, seconds
fraction and time zone offset are optional. The default time zone
is UTC.
Raises:
ValueError: if the date and time value is not supported.
|
def CopyFromDateTimeString(self, time_string):
    """Copies a APFS timestamp from a date and time string.

    Args:
        time_string (str): date and time value formatted as:
            YYYY-MM-DD hh:mm:ss.######[+-]##:##

            Where # are numeric digits ranging from 0 to 9 and the seconds
            fraction can be either 3 or 6 digits. The time of day, seconds
            fraction and time zone offset are optional. The default time
            zone is UTC.

    Raises:
        ValueError: if the date and time value is not supported.
    """
    super(APFSTime, self)._CopyFromDateTimeString(time_string)

    # APFS timestamps must fit in a signed 64-bit integer.
    timestamp = self._timestamp
    if (timestamp is None or timestamp < self._INT64_MIN or
            timestamp > self._INT64_MAX):
        raise ValueError('Date time value not supported.')
| 786,610
|
Initializes a Cocoa timestamp.
Args:
timestamp (Optional[float]): Cocoa timestamp.
|
def __init__(self, timestamp=None):
    """Initializes a Cocoa timestamp.

    Args:
        timestamp (Optional[float]): Cocoa timestamp.
    """
    super(CocoaTime, self).__init__()
    self._timestamp = timestamp
    self._precision = definitions.PRECISION_1_SECOND
| 786,615
|
Initializes time elements.
Args:
time_elements_tuple (Optional[tuple[int, int, int, int, int, int]]):
time elements, contains year, month, day of month, hours, minutes and
seconds.
Raises:
ValueError: if the time elements tuple is invalid.
|
def __init__(self, time_elements_tuple=None):
    """Initializes time elements.

    Args:
        time_elements_tuple (Optional[tuple[int, int, int, int, int, int]]):
            time elements, contains year, month, day of month, hours,
            minutes and seconds.

    Raises:
        ValueError: if the time elements tuple is invalid.
    """
    super(TimeElements, self).__init__()
    self._number_of_seconds = None
    self._precision = definitions.PRECISION_1_SECOND
    self._time_elements_tuple = time_elements_tuple

    if time_elements_tuple:
        number_of_elements = len(time_elements_tuple)
        if number_of_elements < 6:
            raise ValueError((
                'Invalid time elements tuple at least 6 elements required,'
                'got: {0:d}').format(number_of_elements))

        self._number_of_seconds = self._GetNumberOfSecondsFromElements(
            *time_elements_tuple)
| 786,626
|
Copies time elements from date and time values.
Args:
date_time_values (dict[str, int]): date and time values, such as year,
month, day of month, hours, minutes, seconds, microseconds.
|
def _CopyFromDateTimeValues(self, date_time_values):
    """Copies time elements from date and time values.

    Args:
        date_time_values (dict[str, int]): date and time values, such as
            year, month, day of month, hours, minutes, seconds,
            microseconds.
    """
    # Missing values default to 0.
    element_values = tuple(
        date_time_values.get(name, 0)
        for name in (
            'year', 'month', 'day_of_month', 'hours', 'minutes', 'seconds'))

    # Invalidate any cached normalized timestamp before updating state.
    self._normalized_timestamp = None
    self._number_of_seconds = self._GetNumberOfSecondsFromElements(
        *element_values)
    self._time_elements_tuple = element_values

    self.is_local_time = False
| 786,628
|
Copies time elements from a date and time string.
Args:
time_string (str): date and time value formatted as:
YYYY-MM-DD hh:mm:ss.######[+-]##:##
Where # are numeric digits ranging from 0 to 9 and the seconds
fraction can be either 3 or 6 digits. The time of day, seconds
fraction and time zone offset are optional. The default time zone
is UTC.
|
def CopyFromDateTimeString(self, time_string):
date_time_values = self._CopyDateTimeFromString(time_string)
self._CopyFromDateTimeValues(date_time_values)
| 786,630
|
Copies time elements from string-based time elements tuple.
Args:
time_elements_tuple (Optional[tuple[str, str, str, str, str, str]]):
time elements, contains year, month, day of month, hours, minutes and
seconds.
Raises:
ValueError: if the time elements tuple is invalid.
|
def CopyFromStringTuple(self, time_elements_tuple):
    """Copies time elements from string-based time elements tuple.

    Args:
        time_elements_tuple (Optional[tuple[str, str, str, str, str, str]]):
            time elements, contains year, month, day of month, hours,
            minutes and seconds.

    Raises:
        ValueError: if the time elements tuple is invalid.
    """
    if len(time_elements_tuple) < 6:
        raise ValueError((
            'Invalid time elements tuple at least 6 elements required,'
            'got: {0:d}').format(len(time_elements_tuple)))

    # Convert each string element to a base-10 integer; the emitted error
    # messages are identical to per-element handling.
    element_names = (
        'year', 'month', 'day of month', 'hours', 'minutes', 'seconds')
    converted_values = []
    for element_name, element_value in zip(element_names, time_elements_tuple):
        try:
            converted_values.append(int(element_value, 10))
        except (TypeError, ValueError):
            raise ValueError('Invalid {0:s} value: {1!s}'.format(
                element_name, element_value))

    year, month, day_of_month, hours, minutes, seconds = converted_values

    # Invalidate any cached normalized timestamp before updating state.
    self._normalized_timestamp = None
    self._number_of_seconds = self._GetNumberOfSecondsFromElements(
        year, month, day_of_month, hours, minutes, seconds)
    self._time_elements_tuple = (
        year, month, day_of_month, hours, minutes, seconds)
| 786,632
|
Initializes time elements.
Args:
fraction_of_second (Optional[decimal.Decimal]): fraction of second, which
must be a value between 0.0 and 1.0.
time_elements_tuple (Optional[tuple[int, int, int, int, int, int]]):
time elements, contains year, month, day of month, hours, minutes and
seconds.
Raises:
ValueError: if the time elements tuple is invalid or fraction of second
value is out of bounds.
|
def __init__(self, fraction_of_second=None, time_elements_tuple=None):
    """Initializes time elements.

    Args:
        fraction_of_second (Optional[decimal.Decimal]): fraction of second,
            which must be a value between 0.0 and 1.0.
        time_elements_tuple (Optional[tuple[int, int, int, int, int, int]]):
            time elements, contains year, month, day of month, hours,
            minutes and seconds.

    Raises:
        ValueError: if the time elements tuple is invalid or fraction of
            second value is out of bounds.
    """
    if fraction_of_second is not None and not (
            0.0 <= fraction_of_second < 1.0):
        raise ValueError(
            'Fraction of second value: {0:f} out of bounds.'.format(
                fraction_of_second))

    super(TimeElementsWithFractionOfSecond, self).__init__(
        time_elements_tuple=time_elements_tuple)
    self._precision = None
    self.fraction_of_second = fraction_of_second
| 786,634
|
Copies time elements from date and time values.
Args:
date_time_values (dict[str, int]): date and time values, such as year,
month, day of month, hours, minutes, seconds, microseconds.
Raises:
ValueError: if no helper can be created for the current precision.
|
def _CopyFromDateTimeValues(self, date_time_values):
    """Copies time elements from date and time values.

    Args:
        date_time_values (dict[str, int]): date and time values, such as
            year, month, day of month, hours, minutes, seconds,
            microseconds.

    Raises:
        ValueError: if no helper can be created for the current precision.
    """
    # Missing values default to 0.
    element_values = tuple(
        date_time_values.get(name, 0)
        for name in (
            'year', 'month', 'day_of_month', 'hours', 'minutes', 'seconds'))
    microseconds = date_time_values.get('microseconds', 0)

    # Convert microseconds to a fraction at the configured precision.
    precision_helper = precisions.PrecisionHelperFactory.CreatePrecisionHelper(
        self._precision)
    fraction_of_second = precision_helper.CopyMicrosecondsToFractionOfSecond(
        microseconds)

    # Invalidate any cached normalized timestamp before updating state.
    self._normalized_timestamp = None
    self._number_of_seconds = self._GetNumberOfSecondsFromElements(
        *element_values)
    self._time_elements_tuple = element_values
    self.fraction_of_second = fraction_of_second

    self.is_local_time = False
| 786,635
|
Copies time elements from string-based time elements tuple.
Args:
time_elements_tuple (Optional[tuple[str, str, str, str, str, str, str]]):
time elements, contains year, month, day of month, hours, minutes,
seconds and fraction of seconds.
Raises:
ValueError: if the time elements tuple is invalid.
|
def CopyFromStringTuple(self, time_elements_tuple):
    """Copies time elements from string-based time elements tuple.

    Args:
        time_elements_tuple
            (Optional[tuple[str, str, str, str, str, str, str]]):
            time elements, contains year, month, day of month, hours,
            minutes, seconds and fraction of seconds.

    Raises:
        ValueError: if the time elements tuple is invalid.
    """
    if len(time_elements_tuple) < 7:
        raise ValueError((
            'Invalid time elements tuple at least 7 elements required,'
            'got: {0:d}').format(len(time_elements_tuple)))

    super(TimeElementsWithFractionOfSecond, self).CopyFromStringTuple(
        time_elements_tuple)

    try:
        fraction_of_second = decimal.Decimal(time_elements_tuple[6])
    except (TypeError, ValueError, decimal.InvalidOperation):
        # Decimal() raises decimal.InvalidOperation (not ValueError) for
        # malformed strings such as 'abc'; without catching it the intended
        # error message below was never produced.
        raise ValueError('Invalid fraction of second value: {0!s}'.format(
            time_elements_tuple[6]))

    if fraction_of_second < 0.0 or fraction_of_second >= 1.0:
        raise ValueError('Fraction of second value: {0:f} out of bounds.'.format(
            fraction_of_second))

    self.fraction_of_second = fraction_of_second
| 786,636
|
Initializes time elements.
Args:
time_elements_tuple (Optional[tuple[int, int, int, int, int, int, int]]):
time elements, contains year, month, day of month, hours, minutes,
seconds and milliseconds.
Raises:
ValueError: if the time elements tuple is invalid.
|
def __init__(self, time_elements_tuple=None):
    """Initializes time elements.

    Args:
        time_elements_tuple
            (Optional[tuple[int, int, int, int, int, int, int]]):
            time elements, contains year, month, day of month, hours,
            minutes, seconds and milliseconds.

    Raises:
        ValueError: if the time elements tuple is invalid.
    """
    fraction_of_second = None
    if time_elements_tuple:
        if len(time_elements_tuple) < 7:
            raise ValueError((
                'Invalid time elements tuple at least 7 elements required,'
                'got: {0:d}').format(len(time_elements_tuple)))

        # Split off the milliseconds; the base class only takes 6 elements.
        milliseconds = time_elements_tuple[6]
        time_elements_tuple = time_elements_tuple[:6]

        if not 0 <= milliseconds < definitions.MILLISECONDS_PER_SECOND:
            raise ValueError('Invalid number of milliseconds.')

        fraction_of_second = (
            decimal.Decimal(milliseconds) / definitions.MILLISECONDS_PER_SECOND)

    super(TimeElementsInMilliseconds, self).__init__(
        fraction_of_second=fraction_of_second,
        time_elements_tuple=time_elements_tuple)
    self._precision = definitions.PRECISION_1_MILLISECOND
| 786,638
|
Copies time elements from string-based time elements tuple.
Args:
time_elements_tuple (Optional[tuple[str, str, str, str, str, str, str]]):
time elements, contains year, month, day of month, hours, minutes,
seconds and milliseconds.
Raises:
ValueError: if the time elements tuple is invalid.
|
def CopyFromStringTuple(self, time_elements_tuple):
    """Copies time elements from string-based time elements tuple.

    Args:
        time_elements_tuple
            (Optional[tuple[str, str, str, str, str, str, str]]):
            time elements, contains year, month, day of month, hours,
            minutes, seconds and milliseconds.

    Raises:
        ValueError: if the time elements tuple is invalid.
    """
    if len(time_elements_tuple) < 7:
        raise ValueError((
            'Invalid time elements tuple at least 7 elements required,'
            'got: {0:d}').format(len(time_elements_tuple)))

    (year, month, day_of_month, hours, minutes, seconds,
     milliseconds) = time_elements_tuple[:7]

    try:
        milliseconds = int(milliseconds, 10)
    except (TypeError, ValueError):
        raise ValueError('Invalid millisecond value: {0!s}'.format(milliseconds))

    if not 0 <= milliseconds < definitions.MILLISECONDS_PER_SECOND:
        raise ValueError('Invalid number of milliseconds.')

    # Replace the millisecond element with a stringified fraction of second
    # for the base class to parse.
    fraction_of_second = (
        decimal.Decimal(milliseconds) / definitions.MILLISECONDS_PER_SECOND)
    super(TimeElementsInMilliseconds, self).CopyFromStringTuple((
        year, month, day_of_month, hours, minutes, seconds,
        str(fraction_of_second)))
| 786,639
|
Initializes time elements.
Args:
time_elements_tuple (Optional[tuple[int, int, int, int, int, int, int]]):
time elements, contains year, month, day of month, hours, minutes,
seconds and microseconds.
Raises:
ValueError: if the time elements tuple is invalid.
|
def __init__(self, time_elements_tuple=None):
    """Initializes time elements.

    Args:
        time_elements_tuple
            (Optional[tuple[int, int, int, int, int, int, int]]):
            time elements, contains year, month, day of month, hours,
            minutes, seconds and microseconds.

    Raises:
        ValueError: if the time elements tuple is invalid.
    """
    fraction_of_second = None
    if time_elements_tuple:
        if len(time_elements_tuple) < 7:
            raise ValueError((
                'Invalid time elements tuple at least 7 elements required,'
                'got: {0:d}').format(len(time_elements_tuple)))

        # Split off the microseconds; the base class only takes 6 elements.
        microseconds = time_elements_tuple[6]
        time_elements_tuple = time_elements_tuple[:6]

        if not 0 <= microseconds < definitions.MICROSECONDS_PER_SECOND:
            raise ValueError('Invalid number of microseconds.')

        fraction_of_second = (
            decimal.Decimal(microseconds) / definitions.MICROSECONDS_PER_SECOND)

    super(TimeElementsInMicroseconds, self).__init__(
        fraction_of_second=fraction_of_second,
        time_elements_tuple=time_elements_tuple)
    self._precision = definitions.PRECISION_1_MICROSECOND
| 786,640
|
Copies time elements from string-based time elements tuple.
Args:
time_elements_tuple (Optional[tuple[str, str, str, str, str, str, str]]):
time elements, contains year, month, day of month, hours, minutes,
seconds and microseconds.
Raises:
ValueError: if the time elements tuple is invalid.
|
def CopyFromStringTuple(self, time_elements_tuple):
    """Copies time elements from string-based time elements tuple.

    Args:
        time_elements_tuple
            (Optional[tuple[str, str, str, str, str, str, str]]):
            time elements, contains year, month, day of month, hours,
            minutes, seconds and microseconds.

    Raises:
        ValueError: if the time elements tuple is invalid.
    """
    if len(time_elements_tuple) < 7:
        raise ValueError((
            'Invalid time elements tuple at least 7 elements required,'
            'got: {0:d}').format(len(time_elements_tuple)))

    (year, month, day_of_month, hours, minutes, seconds,
     microseconds) = time_elements_tuple[:7]

    try:
        microseconds = int(microseconds, 10)
    except (TypeError, ValueError):
        raise ValueError('Invalid microsecond value: {0!s}'.format(microseconds))

    if not 0 <= microseconds < definitions.MICROSECONDS_PER_SECOND:
        raise ValueError('Invalid number of microseconds.')

    # Replace the microsecond element with a stringified fraction of second
    # for the base class to parse.
    fraction_of_second = (
        decimal.Decimal(microseconds) / definitions.MICROSECONDS_PER_SECOND)
    super(TimeElementsInMicroseconds, self).CopyFromStringTuple((
        year, month, day_of_month, hours, minutes, seconds,
        str(fraction_of_second)))
| 786,641
|
Initializes a SYSTEMTIME structure.
Args:
system_time_tuple
(Optional[tuple[int, int, int, int, int, int, int, int]]):
system time, contains year, month, day of week, day of month,
hours, minutes, seconds and milliseconds.
Raises:
ValueError: if the system time is invalid.
|
def __init__(self, system_time_tuple=None):
    """Initializes a SYSTEMTIME structure.

    Args:
        system_time_tuple
            (Optional[tuple[int, int, int, int, int, int, int, int]]):
            system time, contains year, month, day of week, day of month,
            hours, minutes, seconds and milliseconds.

    Raises:
        ValueError: if the system time is invalid.
    """
    super(Systemtime, self).__init__()
    self._number_of_seconds = None
    self._precision = definitions.PRECISION_1_MILLISECOND
    self.day_of_month = None
    self.day_of_week = None
    self.hours = None
    self.milliseconds = None
    self.minutes = None
    self.month = None
    self.seconds = None
    self.year = None

    if system_time_tuple:
        if len(system_time_tuple) < 8:
            raise ValueError('Invalid system time tuple 8 elements required.')

        # SYSTEMTIME field order: wYear, wMonth, wDayOfWeek, wDay, wHour,
        # wMinute, wSecond, wMilliseconds.
        year, month, day_of_week, day_of_month = system_time_tuple[:4]
        hours, minutes, seconds, milliseconds = system_time_tuple[4:8]

        # SYSTEMTIME supports years 1601 through 30827.
        if year < 1601 or year > 30827:
            raise ValueError('Year value out of bounds.')

        if month not in range(1, 13):
            raise ValueError('Month value out of bounds.')

        if day_of_week not in range(0, 7):
            raise ValueError('Day of week value out of bounds.')

        days_per_month = self._GetDaysPerMonth(year, month)
        if day_of_month < 1 or day_of_month > days_per_month:
            raise ValueError('Day of month value out of bounds.')

        if hours not in range(0, 24):
            raise ValueError('Hours value out of bounds.')

        if minutes not in range(0, 60):
            raise ValueError('Minutes value out of bounds.')

        # TODO: support a leap second?
        if seconds not in range(0, 60):
            raise ValueError('Seconds value out of bounds.')

        if milliseconds < 0 or milliseconds > 999:
            raise ValueError('Milliseconds value out of bounds.')

        self.day_of_month = day_of_month
        self.day_of_week = day_of_week
        self.hours = hours
        self.milliseconds = milliseconds
        self.minutes = minutes
        self.month = month
        self.seconds = seconds
        self.year = year

        self._number_of_seconds = self._GetNumberOfSecondsFromElements(
            year, month, day_of_month, hours, minutes, seconds)
| 786,642
|
Copies a SYSTEMTIME structure from a date and time string.
Args:
time_string (str): date and time value formatted as:
YYYY-MM-DD hh:mm:ss.######[+-]##:##
Where # are numeric digits ranging from 0 to 9 and the seconds
fraction can be either 3 or 6 digits. The time of day, seconds
fraction and time zone offset are optional. The default time zone
is UTC.
Raises:
ValueError: if the date string is invalid or not supported.
|
def CopyFromDateTimeString(self, time_string):
    """Copies a SYSTEMTIME structure from a date and time string.

    Args:
        time_string (str): date and time value formatted as:
            YYYY-MM-DD hh:mm:ss.######[+-]##:##

            Where # are numeric digits ranging from 0 to 9 and the seconds
            fraction can be either 3 or 6 digits. The time of day, seconds
            fraction and time zone offset are optional. The default time
            zone is UTC.

    Raises:
        ValueError: if the date string is invalid or not supported.
    """
    date_time_values = self._CopyDateTimeFromString(time_string)

    year = date_time_values.get('year', 0)
    month = date_time_values.get('month', 0)
    day_of_month = date_time_values.get('day_of_month', 0)
    hours = date_time_values.get('hours', 0)
    minutes = date_time_values.get('minutes', 0)
    seconds = date_time_values.get('seconds', 0)
    microseconds = date_time_values.get('microseconds', 0)

    # SYSTEMTIME supports years 1601 through 30827.
    if year < 1601 or year > 30827:
        raise ValueError('Unsupported year value: {0:d}.'.format(year))

    # SYSTEMTIME stores milliseconds; sub-millisecond digits are dropped.
    milliseconds = microseconds // definitions.MICROSECONDS_PER_MILLISECOND

    self._normalized_timestamp = None
    self._number_of_seconds = self._GetNumberOfSecondsFromElements(
        year, month, day_of_month, hours, minutes, seconds)

    self.year = year
    self.month = month
    self.day_of_month = day_of_month
    # TODO: calculate day of week on demand.
    self.day_of_week = None
    self.hours = hours
    self.minutes = minutes
    self.seconds = seconds
    self.milliseconds = milliseconds

    self.is_local_time = False
| 786,644
|
Initializes the MeCab instance with the given options.
Args:
options: Optional string or dictionary of the MeCab options to be
used.
Kwargs:
debug (bool): Flag for outputting debug messages to stderr.
Raises:
SystemExit: An unrecognized option was passed in.
MeCabError: An error occurred in locating the MeCab library;
or the FFI handle to MeCab could not be created.
|
def __init__(self, options=None, **kwargs):
    """Initializes the MeCab instance with the given options.

    Args:
        options: Optional string or dictionary of the MeCab options to be
            used.

    Kwargs:
        debug (bool): Flag for outputting debug messages to stderr.

    Raises:
        SystemExit: An unrecognized option was passed in.
        MeCabError: An error occurred in locating the MeCab library;
            or the FFI handle to MeCab could not be created.
    """
    try:
        # Locate the MeCab shared library and open it via cffi.
        env = MeCabEnv(**kwargs)
        self.__ffi = _ffi_libmecab()
        self.__mecab = self.__ffi.dlopen(env.libpath)
        self.libpath = env.libpath
        # Python 2/3 string support
        self.__bytes2str, self.__str2bytes = string_support(env.charset)
        # Python 2/3 sentence splitter/tokenizer support
        self.__split_pattern, self.__split_features = splitter_support(env.charset)
        # Set up dictionary of MeCab options to use
        op = OptionParse(env.charset)
        self.options = op.parse_mecab_options(options)
        # Set up tagger pointer
        ostr = op.build_options_str(self.options)
        # Create the model/tagger/lattice triple; each NULL pointer is a
        # fatal initialization failure.
        self.model = self.__mecab.mecab_model_new2(ostr)
        if self.model == self.__ffi.NULL:
            logger.error(self._ERROR_NULLPTR.format('Model'))
            raise MeCabError(self._ERROR_NULLPTR.format('Model'))
        self.tagger = self.__mecab.mecab_model_new_tagger(self.model)
        if self.tagger == self.__ffi.NULL:
            logger.error(self._ERROR_NULLPTR.format('Tagger'))
            raise MeCabError(self._ERROR_NULLPTR.format('Tagger'))
        self.lattice = self.__mecab.mecab_model_new_lattice(self.model)
        if self.lattice == self.__ffi.NULL:
            logger.error(self._ERROR_NULLPTR.format('Lattice'))
            raise MeCabError(self._ERROR_NULLPTR.format('Lattice'))
        # Request N-best output only when more than one parse was asked for.
        n = self.options.get('nbest', 1)
        if n > 1:
            req_type = self.MECAB_LATTICE_NBEST
        else:
            req_type = self.MECAB_LATTICE_ONE_BEST
        self.__mecab.mecab_lattice_set_request_type(self.lattice, req_type)
        # Map the remaining boolean options onto lattice request-type flags.
        if 'partial' in self.options:
            self.__mecab.mecab_lattice_add_request_type(
                self.lattice, self.MECAB_LATTICE_PARTIAL)
        if 'marginal' in self.options:
            self.__mecab.mecab_lattice_add_request_type(
                self.lattice, self.MECAB_LATTICE_MARGINAL_PROB)
        if 'all_morphs' in self.options:
            # required when node parsing
            self.__mecab.mecab_lattice_add_request_type(
                self.lattice, self.MECAB_LATTICE_ALL_MORPHS)
        if 'allocate_sentence' in self.options:
            self.__mecab.mecab_lattice_add_request_type(
                self.lattice, self.MECAB_LATTICE_ALLOCATE_SENTENCE)
        # Prepare copy for list of MeCab dictionaries
        self.dicts = []
        dptr = self.__mecab.mecab_model_dictionary_info(self.model)
        # Walk the C linked list of dictionary-info structs.
        while dptr != self.__ffi.NULL:
            fpath = self.__bytes2str(self.__ffi.string(dptr.filename))
            fpath = os.path.abspath(fpath)
            chset = self.__bytes2str(self.__ffi.string(dptr.charset))
            self.dicts.append(DictionaryInfo(dptr, fpath, chset))
            # Advance to the next dictionary in the linked list.
            dptr = getattr(dptr, 'next')
        # Save value for MeCab's internal character encoding
        self.__enc = self.dicts[0].charset
        # Set MeCab version string
        self.version = self.__bytes2str(
            self.__ffi.string(self.__mecab.mecab_version()))
    except EnvironmentError as err:
        logger.error(self._ERROR_INIT.format(str(err)))
        raise MeCabError(err)
    except ValueError as verr:
        logger.error(self._ERROR_INIT.format(str(verr)))
        raise MeCabError(self._ERROR_INIT.format(str(verr)))
| 787,129
|
Builds and returns the MeCab function for parsing Unicode text.
Args:
fn_name: MeCab function name that determines the function
behavior, either 'mecab_sparse_tostr' or
'mecab_nbest_sparse_tostr'.
Returns:
A function definition, tailored to parsing Unicode text and
returning the result as a string suitable for display on stdout,
using either the default or N-best behavior.
|
def __parse_tostr(self, text, **kwargs):
    """Parses the given text and returns the result as a string.

    Honors the 'nbest' option, and optional boundary constraints
    (self._KW_BOUNDARY) or feature constraints (self._KW_FEATURE) passed
    via kwargs.

    Args:
        text: the sentence to parse.
        **kwargs: optional boundary or feature constraint specification.

    Returns:
        The parse result as a stripped string.

    Raises:
        MeCabError: if MeCab reports a lattice error.
    """
    n = self.options.get('nbest', 1)
    if self._KW_BOUNDARY in kwargs:
        # Tokenize with the user-supplied pattern, then mark morpheme
        # boundary constraints on the lattice at byte positions.
        patt = kwargs.get(self._KW_BOUNDARY, '.')
        tokens = list(self.__split_pattern(text, patt))
        text = ''.join([t[0] for t in tokens])
        btext = self.__str2bytes(text)
        self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
        bpos = 0
        self.__mecab.mecab_lattice_set_boundary_constraint(
            self.lattice, bpos, self.MECAB_TOKEN_BOUNDARY)
        for (token, match) in tokens:
            bpos += 1
            if match:
                mark = self.MECAB_INSIDE_TOKEN
            else:
                mark = self.MECAB_ANY_BOUNDARY
            # Mark every remaining byte inside the token, then close the
            # token with a boundary marker.
            for _ in range(1, len(self.__str2bytes(token))):
                self.__mecab.mecab_lattice_set_boundary_constraint(
                    self.lattice, bpos, mark)
                bpos += 1
            self.__mecab.mecab_lattice_set_boundary_constraint(
                self.lattice, bpos, self.MECAB_TOKEN_BOUNDARY)
    elif self._KW_FEATURE in kwargs:
        # Constrain given surface chunks to fixed feature strings.
        features = kwargs.get(self._KW_FEATURE, ())
        fd = {morph: self.__str2bytes(feat) for morph, feat in features}
        tokens = self.__split_features(text, [e[0] for e in features])
        text = ''.join([t[0] for t in tokens])
        btext = self.__str2bytes(text)
        self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
        bpos = 0
        for chunk, match in tokens:
            c = len(self.__str2bytes(chunk))
            # NOTE(review): `match == True` differs from the truthiness test
            # used in __parse_tonodes; for non-bool match values the two
            # behave differently — confirm which is intended.
            if match == True:
                self.__mecab.mecab_lattice_set_feature_constraint(
                    self.lattice, bpos, bpos+c, fd[chunk])
            bpos += c
    else:
        # No constraints: parse the plain sentence.
        btext = self.__str2bytes(text)
        self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
    self.__mecab.mecab_parse_lattice(self.tagger, self.lattice)
    if n > 1:
        res = self.__mecab.mecab_lattice_nbest_tostr(self.lattice, n)
    else:
        res = self.__mecab.mecab_lattice_tostr(self.lattice)
    if res != self.__ffi.NULL:
        raw = self.__ffi.string(res)
        return self.__bytes2str(raw).strip()
    else:
        # NULL result: surface MeCab's own error message.
        err = self.__mecab.mecab_lattice_strerror(self.lattice)
        logger.error(self.__bytes2str(self.__ffi.string(err)))
        raise MeCabError(self.__bytes2str(self.__ffi.string(err)))
| 787,131
|
Builds and returns the MeCab function for parsing to nodes using
morpheme boundary constraints.
Args:
format_feature: flag indicating whether or not to format the feature
value for each node yielded.
Returns:
A function which returns a Generator, tailored to using boundary
constraints and parsing as nodes, using either the default or
N-best behavior.
|
def __parse_tonodes(self, text, **kwargs):
    """Parses the given text, yielding one MeCabNode per morpheme.

    Honors the 'nbest' option, and optional boundary constraints
    (self._KW_BOUNDARY) or feature constraints (self._KW_FEATURE) passed
    via kwargs.

    Args:
        text: the sentence to parse.
        **kwargs: optional boundary or feature constraint specification.

    Yields:
        MeCabNode instances for each parsed (non-BOS) node.

    Raises:
        MeCabError: if MeCab reports a lattice error or a node could not
            be formatted.
    """
    n = self.options.get('nbest', 1)
    try:
        if self._KW_BOUNDARY in kwargs:
            # Tokenize with the user-supplied pattern, then mark morpheme
            # boundary constraints on the lattice at byte positions.
            patt = kwargs.get(self._KW_BOUNDARY, '.')
            tokens = list(self.__split_pattern(text, patt))
            text = ''.join([t[0] for t in tokens])
            btext = self.__str2bytes(text)
            self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
            bpos = 0
            self.__mecab.mecab_lattice_set_boundary_constraint(
                self.lattice, bpos, self.MECAB_TOKEN_BOUNDARY)
            for (token, match) in tokens:
                bpos += 1
                if match:
                    mark = self.MECAB_INSIDE_TOKEN
                else:
                    mark = self.MECAB_ANY_BOUNDARY
                # Mark every remaining byte inside the token, then close
                # the token with a boundary marker.
                for _ in range(1, len(self.__str2bytes(token))):
                    self.__mecab.mecab_lattice_set_boundary_constraint(
                        self.lattice, bpos, mark)
                    bpos += 1
                self.__mecab.mecab_lattice_set_boundary_constraint(
                    self.lattice, bpos, self.MECAB_TOKEN_BOUNDARY)
        elif self._KW_FEATURE in kwargs:
            # Constrain given surface chunks to fixed feature strings.
            features = kwargs.get(self._KW_FEATURE, ())
            fd = {morph: self.__str2bytes(feat) for morph, feat in features}
            tokens = self.__split_features(text, [e[0] for e in features])
            text = ''.join([t[0] for t in tokens])
            btext = self.__str2bytes(text)
            self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
            bpos = 0
            for chunk, match in tokens:
                c = len(self.__str2bytes(chunk))
                if match:
                    self.__mecab.mecab_lattice_set_feature_constraint(
                        self.lattice, bpos, bpos+c, fd[chunk])
                bpos += c
        else:
            # No constraints: parse the plain sentence.
            btext = self.__str2bytes(text)
            self.__mecab.mecab_lattice_set_sentence(self.lattice, btext)
        self.__mecab.mecab_parse_lattice(self.tagger, self.lattice)
        for _ in range(n):
            check = self.__mecab.mecab_lattice_next(self.lattice)
            if n == 1 or check:
                nptr = self.__mecab.mecab_lattice_get_bos_node(self.lattice)
                while nptr != self.__ffi.NULL:
                    # skip over any BOS nodes, since mecab does
                    if nptr.stat != MeCabNode.BOS_NODE:
                        raws = self.__ffi.string(
                            nptr.surface[0:nptr.length])
                        surf = self.__bytes2str(raws).strip()
                        if 'output_format_type' in self.options or \
                                'node_format' in self.options:
                            # Format the node with MeCab when an output
                            # format was configured.
                            sp = self.__mecab.mecab_format_node(
                                self.tagger, nptr)
                            if sp != self.__ffi.NULL:
                                rawf = self.__ffi.string(sp)
                            else:
                                err = self.__mecab.mecab_strerror(
                                    self.tagger)
                                err = self.__bytes2str(
                                    self.__ffi.string(err))
                                msg = self._ERROR_NODEFORMAT.format(
                                    surf, err)
                                raise MeCabError(msg)
                        else:
                            rawf = self.__ffi.string(nptr.feature)
                        feat = self.__bytes2str(rawf).strip()
                        mnode = MeCabNode(nptr, surf, feat)
                        yield mnode
                    nptr = getattr(nptr, 'next')
    except GeneratorExit:
        logger.debug('close invoked on generator')
    except MeCabError:
        raise
    except Exception:
        # Was a bare `except:`; narrowed so that KeyboardInterrupt and
        # SystemExit propagate instead of being re-raised as MeCabError.
        err = self.__mecab.mecab_lattice_strerror(self.lattice)
        logger.error(self.__bytes2str(self.__ffi.string(err)))
        raise MeCabError(self.__bytes2str(self.__ffi.string(err)))
| 787,132
|
Returns a string concatenation of the MeCab options.
Args:
options: dictionary of options to use when instantiating the MeCab
instance.
Returns:
A string concatenation of the options used when instantiating the
MeCab instance, in long-form.
|
def build_options_str(self, options):
    """Returns a byte-string concatenation of the MeCab options.

    Args:
        options: dictionary of options to use when instantiating the MeCab
            instance.

    Returns:
        A byte string of the long-form command-line options used when
        instantiating the MeCab instance.
    """
    parts = []
    for name in self._SUPPORTED_OPTS.values():
        if name not in options:
            continue
        key = name.replace('_', '-')
        if key not in self._BOOLEAN_OPTIONS:
            parts.append('--{}={}'.format(key, options[name]))
        elif options[name]:
            # Boolean flags are emitted bare, and only when truthy.
            parts.append('--{}'.format(key))
    return self.__str2bytes(' '.join(parts))
| 787,283
|
Return the default system encoding. If data is passed, try
to decode the data with the default system encoding or from a short
list of encoding types to test.
Args:
data - list of lists
Returns:
enc - system encoding
|
def _detect_encoding(data=None):
    """Return the default system encoding, or detect the encoding of data.

    If data is passed, try to decode every line with the preferred system
    encoding first, then with a short list of candidate encodings; the
    first encoding that decodes all lines wins.

    Args:
        data - list of lists (byte strings), or None
    Returns:
        enc - system encoding, or None (after printing a warning) when no
            candidate could decode the data
    """
    import locale
    candidates = ['utf-8', 'latin-1', 'iso8859-1', 'iso8859-2',
                  'utf-16', 'cp720']
    preferred = locale.getpreferredencoding(False)
    if data is None:
        return preferred
    if preferred.lower() not in candidates:
        candidates.insert(0, preferred.lower())
    for candidate in candidates:
        try:
            for line in data:
                line.decode(candidate)
        except (UnicodeDecodeError, UnicodeError, AttributeError):
            continue
        # All lines decoded cleanly with this candidate.
        return candidate
    print("Encoding not detected. Please pass encoding value manually")
| 788,661
|
Subscribe will express interest in the given subject. The subject can
have wildcards (partial:*, full:>). Messages will be delivered to the
associated callback.
Args:
subject (string): a string with the subject
callback (function): callback to be called
|
def subscribe(self, subject, callback, queue=''):
    """Subscribe will express interest in the given subject. The subject
    can have wildcards (partial:*, full:>). Messages will be delivered to
    the associated callback.

    Args:
        subject (string): a string with the subject
        callback (function): callback to be called
        queue (string): optional queue group name

    Returns:
        The created Subscription.
    """
    # NOTE(review): 'connetion' reproduces the (misspelled) keyword that
    # Subscription expects — confirm against the Subscription definition
    # before renaming it.
    subscription = Subscription(
        sid=self._next_sid,
        subject=subject,
        queue=queue,
        callback=callback,
        connetion=self,
    )
    self._subscriptions[subscription.sid] = subscription
    self._send('SUB %s %s %d' % (
        subscription.subject, subscription.queue, subscription.sid))
    self._next_sid += 1
    return subscription
| 789,150
|
Unsubscribe will remove interest in the given subject. If max is
provided, an automatic unsubscribe is processed by the server
after max messages have been received.
Args:
subscription (pynats.Subscription): a Subscription object
max (int=None): number of messages
|
def unsubscribe(self, subscription, max=None):
    """Unsubscribe will remove interest in the given subject. If max is
    provided, an automatic unsubscribe is processed by the server after
    max messages have been received.

    Args:
        subscription (pynats.Subscription): a Subscription object
        max (int=None): number of messages
    """
    if max is not None:
        # Defer removal: the server drops the subscription after max
        # messages; keep it registered locally until then.
        subscription.max = max
        self._send('UNSUB %d %s' % (subscription.sid, max))
    else:
        self._send('UNSUB %d' % subscription.sid)
        self._subscriptions.pop(subscription.sid)
| 789,151
|
Publish publishes the data argument to the given subject.
Args:
subject (string): a string with the subject
msg (string): payload string
reply (string): subject used in the reply
|
def publish(self, subject, msg, reply=None):
    """Publish publishes the data argument to the given subject.

    Args:
        subject (string): a string with the subject
        msg (string): payload string
        reply (string): subject used in the reply
    """
    payload = '' if msg is None else msg
    if reply is not None:
        header = 'PUB %s %s %d' % (subject, reply, len(payload))
    else:
        header = 'PUB %s %d' % (subject, len(payload))
    # Protocol: the PUB header line is followed by the payload line.
    self._send(header)
    self._send(payload)
| 789,152
|
Publish a message with an implicit inbox listener as the reply.
Message is optional.
Args:
subject (string): a string with the subject
callback (function): callback to be called
msg (string=None): payload string
|
def request(self, subject, callback, msg=None):
    """Publish a message with an implicit inbox listener as the reply.
    Message is optional.

    Args:
        subject (string): a string with the subject
        callback (function): callback to be called
        msg (string=None): payload string

    Returns:
        The inbox Subscription.
    """
    inbox = self._build_inbox()
    subscription = self.subscribe(inbox, callback)
    # Auto-unsubscribe after a single reply.
    self.unsubscribe(subscription, 1)
    self.publish(subject, msg, inbox)
    return subscription
| 789,153
|
Publish publishes the data argument to the given subject.
Args:
duration (float): will wait for the given number of seconds
count (count): stop of wait after n messages from any subject
|
def wait(self, duration=None, count=0):
    """Reads and dispatches incoming messages until a stop condition.

    Stops when `count` messages have arrived (if count > 0), when a
    message handler returns False, or after `duration` seconds (if given).

    Args:
        duration (float): will wait for the given number of seconds
        count (count): stop of wait after n messages from any subject
    """
    started_at = time.time()
    received = 0
    while True:
        msg_type, result = self._recv(MSG, PING, OK)
        if msg_type is MSG:
            received += 1
            # A handler returning False aborts the wait loop.
            if self._handle_msg(result) is False:
                break
            if count and received >= count:
                break
        elif msg_type is PING:
            self._handle_ping()
        # The duration check runs after every received protocol message.
        if duration and time.time() - started_at > duration:
            break
| 789,155
|
Initializes the `BackgroundSubtractorGMG`.
*Note:* Requires OpenCV to be built with `--contrib` as it uses the
`bgsegm` package.
Unless a custom `structuring_element` is specified, it uses:
`cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))`
Args:
structuring_element: The structuring element.
|
def __init__(self, structuring_element=None):
    """Initialize the `BackgroundSubtractorGMG`.

    *Note:* Requires OpenCV to be built with `--contrib` as it uses the
    `bgsegm` package. Unless a custom `structuring_element` is given,
    `cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))` is used.

    Args:
        structuring_element: The structuring element.
    """
    if structuring_element is not None:
        self.strel = structuring_element
    else:
        self.strel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, (3, 3))
    self.fgbg = cv2.bgsegm.createBackgroundSubtractorGMG()
| 790,414
|
Loads the hat from a picture at path.
Args:
path: The path to load from
Returns:
The hat data.
|
def load_hat(self, path):  # pylint: disable=no-self-use
    """Load the hat image from ``path``.

    Args:
        path: The path to load from

    Returns:
        The hat data with channels reordered from BGRA to RGBA.

    Raises:
        ValueError: if no image could be read from ``path``.
    """
    raw = cv2.imread(path, cv2.IMREAD_UNCHANGED)
    if raw is None:
        raise ValueError('No hat image found at `{}`'.format(path))
    # OpenCV loads BGRA; swap to RGBA for matplotlib-style rendering.
    blue, green, red, alpha = cv2.split(raw)
    return cv2.merge((red, green, blue, alpha))
| 790,417
|
Uses a haarcascade to detect faces inside an image.
Args:
image: The image.
draw_box: If True, the image will be marked with a rectangle.
Return:
The faces as returned by OpenCV's detectMultiScale method for
cascades.
|
def find_faces(self, image, draw_box=False):
    """Detect faces in an image using a haarcascade.

    Args:
        image: The image.
        draw_box: If True, the image is marked with a rectangle per face.

    Returns:
        The faces as returned by OpenCV's detectMultiScale method for
        cascades.
    """
    gray = cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)
    faces = self.cascade.detectMultiScale(
        gray,
        scaleFactor=1.3,
        minNeighbors=5,
        minSize=(50, 50),
        flags=0)
    if draw_box:
        for left, top, width, height in faces:
            cv2.rectangle(image, (left, top),
                          (left + width, top + height), (0, 255, 0), 2)
    return faces
| 790,418
|
Draws a hat on top of detected faces inside the image.
Args:
image: The image.
Returns:
The image with a hat.
|
def __call__(self, image):  # pylint: disable=too-many-locals
    """Draw a hat on top of every detected face inside the image.

    The hat is scaled relative to the face width, placed above the face,
    clipped against the frame borders, and alpha-blended onto the image.

    Args:
        image: The image (RGB array; assumed HxWx3 — TODO confirm).

    Returns:
        The image with a hat.
    """
    frame_height = image.shape[0]
    frame_width = image.shape[1]
    faces = self.find_faces(image, self.draw_box)
    for x, y, w, h in faces:  # pylint: disable=unused-variable
        hat = self.hat.copy()
        # Scale hat to fit face.
        hat_width = int(w * self.w_offset)
        hat_height = int(hat_width * hat.shape[0] / hat.shape[1])
        hat = cv2.resize(hat, (hat_width, hat_height))
        # Clip hat if outside frame. hat_* track the region of the hat
        # image that remains visible; x0/x1/y0/y1 the frame region.
        hat_left = 0
        hat_top = 0
        hat_bottom = hat_height
        hat_right = hat_width
        y0 = y - hat_height + self.y_offset
        if y0 < 0:  # If the hat starts above the frame, clip it.
            hat_top = abs(y0)  # Find beginning of hat ROI.
            y0 = 0
        y1 = y0 + hat_height - hat_top
        if y1 > frame_height:
            hat_bottom = hat_height - (y1 - frame_height)
            y1 = frame_height
        x0 = x + self.x_offset
        if x0 < 0:
            hat_left = abs(x0)
            x0 = 0
        x1 = x0 + hat_width - hat_left
        if x1 > frame_width:
            hat_right = hat_width - (x1 - frame_width)
            x1 = frame_width
        # Remove background from hat image.
        # Per-channel alpha blend: channel 3 of the hat weights the hat
        # pixels, (1 - alpha) weights the underlying image.
        for c in range(0, 3):
            hat_slice = hat[hat_top:hat_bottom, hat_left:hat_right, c] * \
                (hat[hat_top:hat_bottom, hat_left:hat_right, 3] / 255.0)
            bg_slice = image[y0:y1, x0:x1, c] * \
                (1.0 - hat[hat_top:hat_bottom, hat_left:hat_right, 3]
                 / 255.0)
            image[y0:y1, x0:x1, c] = hat_slice + bg_slice
    return image
| 790,419
|
Checks if a function in a module was declared in that module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod: the module
fun: the function
|
def is_mod_function(mod, fun):
    """Check whether ``fun`` is a function declared in module ``mod``.

    http://stackoverflow.com/a/1107150/3004221

    Args:
        mod: the module
        fun: the function
    """
    if not inspect.isfunction(fun):
        return False
    return inspect.getmodule(fun) == mod
| 790,573
|
Checks if a class in a module was declared in that module.
Args:
mod: the module
cls: the class
|
def is_mod_class(mod, cls):
    """Check whether ``cls`` is a class declared in module ``mod``.

    Args:
        mod: the module
        cls: the class
    """
    if not inspect.isclass(cls):
        return False
    return inspect.getmodule(cls) == mod
| 790,574
|
Lists all functions declared in a module.
http://stackoverflow.com/a/1107150/3004221
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
|
def list_functions(mod_name):
    """List all functions declared in a module.

    http://stackoverflow.com/a/1107150/3004221

    Args:
        mod_name: the module name

    Returns:
        A list of names of functions declared in that module.
    """
    module = sys.modules[mod_name]
    return [obj.__name__
            for obj in module.__dict__.values()
            if is_mod_function(module, obj)]
| 790,575
|
Lists all classes declared in a module.
Args:
mod_name: the module name
Returns:
A list of functions declared in that module.
|
def list_classes(mod_name):
    """List all classes declared in a module.

    Args:
        mod_name: the module name

    Returns:
        A list of names of classes declared in that module.
    """
    module = sys.modules[mod_name]
    return [obj.__name__
            for obj in module.__dict__.values()
            if is_mod_class(module, obj)]
| 790,576
|
Returns a dictionary which maps function names to line numbers.
Args:
functions: a list of function names
module: the module to look the functions up
searchstr: the string to search for
Returns:
A dictionary with functions as keys and their line numbers as values.
|
def get_linenumbers(functions, module, searchstr='def {}(image):\n'):
    """Map function names to their line numbers in ``module``'s source.

    Args:
        functions: a list of function names
        module: the module to look the functions up
        searchstr: the string to search for

    Returns:
        A dictionary with functions as keys and their line numbers as
        values (0 when a function could not be located).
    """
    source_lines = inspect.getsourcelines(module)[0]
    numbers = {}
    for name in functions:
        needle = searchstr.format(name)
        try:
            # list.index is 0-based; line numbers are 1-based.
            numbers[name] = source_lines.index(needle) + 1
        except ValueError:
            print(r'Can not find `{}`'.format(needle))
            numbers[name] = 0
    return numbers
| 790,577
|
Prints information about the unprocessed image.
Reads one frame from the source to determine image colors, dimensions
and data types.
Args:
capture: the source to read from.
|
def print_info(self, capture):
    """Print information about the unprocessed image source.

    Reads one frame from the source to determine image colors,
    dimensions and data types.

    Args:
        capture: the source to read from.
    """
    # The consumed frame must be accounted for in the offset.
    self.frame_offset += 1
    ret, frame = capture.read()
    if not ret:
        print('No source found.')
        return
    channels = frame.shape[2] if len(frame.shape) > 2 else 1
    print('Capture Information')
    print('\tDimensions (HxW): {}x{}'.format(*frame.shape[0:2]))
    print('\tColor channels: {}'.format(channels))
    print('\tColor range: {}-{}'.format(np.min(frame),
                                        np.max(frame)))
    print('\tdtype: {}'.format(frame.dtype))
| 790,605
|
Annotates the processed axis with given annotations for
the provided framedata.
Args:
framedata: The current frame number.
|
def annotate(self, framedata):
    """Annotate the processed axis with the annotations for this frame.

    Previously drawn annotation artists are removed first. Annotations
    are assumed sorted by frame number: the loop returns as soon as it
    sees an annotation scheduled for a later frame — TODO confirm the
    list is kept sorted by its producer.

    Args:
        framedata: The current frame number.
    """
    # Clear artists from the previous frame before drawing new ones.
    for artist in self.annotation_artists:
        artist.remove()
    self.annotation_artists = []
    for annotation in self.annotations:
        # annotation layout: (x, y, frame[, overrides-dict])
        if annotation[2] > framedata:
            return
        if annotation[2] == framedata:
            pos = annotation[0:2]
            shape = self.annotations_default['shape']
            color = self.annotations_default['color']
            size = self.annotations_default['size']
            line = self.annotations_default['line']
            if len(annotation) > 3:
                shape = annotation[3].get('shape', shape)
                color = annotation[3].get('color', color)
                size = annotation[3].get('size', size)
                line = annotation[3].get('line', line)
            # Circles need a scalar radius; fall back when (w, h) given.
            if shape == 'CIRC' and hasattr(size, '__len__'):
                size = 30
            # Broadcast a scalar gray value to an RGB triple.
            if not hasattr(color, '__len__'):
                color = (color,) * 3
            if shape == 'RECT':
                patch = patches.Rectangle((pos[0] - size[0] // 2,
                                           pos[1] - size[1] // 2),
                                          size[0], size[1], fill=False,
                                          lw=line, fc='none', ec=color)
            elif shape == 'CIRC':
                patch = patches.CirclePolygon(pos, radius=size, fc='none',
                                              ec=color, lw=line)
            self.annotation_artists.append(patch)
            self.axes_processed.add_artist(self.annotation_artists[-1])
| 790,609
|
Reads, processes and draws the frames.
If needed for color maps, conversions to gray scale are performed. In
case the images are no color images and no custom color maps are
defined, the colormap `gray` is applied.
This function is called by TimedAnimation.
Args:
framedata: The frame data.
|
def _draw_frame(self, framedata):
    """Read, process and draw one frame (called by TimedAnimation).

    If needed for color maps, conversions to gray scale are performed.
    In case the images are no color images and no custom color maps are
    defined, the colormap `gray` is applied.

    Args:
        framedata: The frame data.
    """
    original = self.read_frame()
    if original is None:
        # Source exhausted: show a final status line and stop drawing.
        self.update_info(self.info_string(message='Finished.',
                                          frame=framedata))
        return
    if self.original is not None:
        # Two-axes mode: keep the raw image for the left axis, so the
        # processing step must work on a copy.
        processed = self.process_frame(original.copy())
        if self.cmap_original is not None:
            original = to_gray(original)
        elif not is_color_image(original):
            self.original.set_cmap('gray')
        self.original.set_data(original)
    else:
        # Single-axes mode: only the processed image is displayed.
        processed = self.process_frame(original)
    if self.cmap_processed is not None:
        processed = to_gray(processed)
    elif not is_color_image(processed):
        self.processed.set_cmap('gray')
    if self.annotations:
        self.annotate(framedata)
    self.processed.set_data(processed)
    self.update_info(self.info_string(frame=framedata))
| 790,610
|
Updates the figure's suptitle.
Calls self.info_string() unless custom is provided.
Args:
custom: Overwrite it with this string, unless None.
|
def update_info(self, custom=None):
    """Update the figure's suptitle.

    Calls ``self.info_string()`` unless ``custom`` is provided.

    Args:
        custom: Overwrite the title with this string, unless None.
    """
    title = custom if custom is not None else self.info_string()
    self.figure.suptitle(title)
| 790,611
|
Iterator yielding unprefixed events.
Parameters:
- response: a stream response from requests
|
def basic_parse(response, buf_size=ijson.backend.BUFSIZE):
    """Iterator yielding unprefixed JSON parse events.

    Args:
        response: a stream response from requests
        buf_size: lexer buffer size

    Raises:
        ijson.common.JSONError: if data remains after the parsed value.
    """
    token_stream = iter(IncrementalJsonParser.lexer(response, buf_size))
    for event in ijson.backend.parse_value(token_stream):
        yield event
    # A well-formed document leaves the lexer exhausted.
    try:
        next(token_stream)
    except StopIteration:
        return
    raise ijson.common.JSONError('Additional data')
| 791,954
|
Method to insert the Output Config.
app_name, release_version, pset_hash, global_tag and output_module_label are
required.
args:
businput(dic): input dictionary.
Updated Oct 12, 2011
|
def insertOutputConfig(self, businput):
    """Insert an output-module configuration row.

    app_name, release_version, pset_hash, global_tag and
    output_module_label are required.

    Args:
        businput (dict): input dictionary.
    """
    if not ("app_name" in businput and "release_version" in businput\
            and "pset_hash" in businput and "output_module_label" in businput
            and "global_tag" in businput):
        dbsExceptionHandler('dbsException-invalid-input', "business/DBSOutputConfig/insertOutputConfig require:\
                            app_name, release_version, pset_hash, output_module_label and global_tag")
    conn = self.dbi.connection()
    tran = conn.begin()
    try:
        # Proceed with o/p module insertion
        businput['scenario'] = businput.get("scenario", None)
        businput['pset_name'] = businput.get("pset_name", None)
        self.outmodin.execute(conn, businput, tran)
        tran.commit()
        # tran=None marks the transaction as finished for the cleanup below.
        tran = None
    except SQLAlchemyIntegrityError as ex:
        if str(ex).find("unique constraint") != -1 or str(ex).lower().find("duplicate") != -1:
            #if the validation is due to a unique constrain break in OUTPUT_MODULE_CONFIGS
            if str(ex).find("TUC_OMC_1") != -1: pass
            #otherwise, try again
            else:
                try:
                    self.outmodin.execute(conn, businput, tran)
                    tran.commit()
                    tran = None
                except SQLAlchemyIntegrityError as ex1:
                    # Only a duplicate on the TUC_OMC_1 constraint is benign.
                    if str(ex1).find("unique constraint") != -1 and str(ex1).find("TUC_OMC_1") != -1: pass
                except Exception as e1:
                    if tran:
                        tran.rollback()
                        tran = None
                    raise
        else:
            raise
    except Exception as e:
        if tran:
            tran.rollback()
        raise
    finally:
        # Roll back anything left uncommitted and release the connection.
        if tran:
            tran.rollback()
        if conn:
            conn.close()
| 793,374
|
Constructor
Args:
client_id (str): Client ID provided by Box.
client_secret (str): Client Secret provided by Box.
|
def __init__(self, client_id, client_secret):
    """Constructor.

    Args:
        client_id (str): Client ID provided by Box.
        client_secret (str): Client Secret provided by Box.
    """
    self.client_id = client_id
    self.client_secret = client_secret
    self.box_request = BoxRestRequest(client_id, client_secret)
| 793,778
|
Syncs entities
Args:
model_objs (List[Model]): The model objects to sync. If empty, all entities will be synced
|
def sync_entities(*model_objs):
    """Sync entities.

    Args:
        model_objs (List[Model]): The model objects to sync. If empty,
            all entities will be synced.

    Returns:
        False when processing was deferred into the buffer; None after a
        real sync.
    """
    if sync_entities.defer:
        if not model_objs:
            # A None key flags "sync everything" for the deferred run.
            sync_entities.buffer[None] = None
        else:
            for obj in model_objs:
                sync_entities.buffer[(obj.__class__, obj.pk)] = obj
        return False
    # Not deferring: sync immediately.
    EntitySyncer(*model_objs).sync()
| 793,814
|
Delete an existing folder
Args:
folder_id (int): ID of the folder to delete.
recursive (bool): Delete all subfolder if True.
Returns:
dict. Response from Box.
Raises:
BoxError: An error response is returned from Box (status_code >= 400).
BoxHttpResponseError: Response from Box is malformed.
requests.exceptions.*: Any connection related problem.
|
def delete_folder(self, folder_id, recursive=True):
    """Delete an existing folder.

    Args:
        folder_id (int): ID of the folder to delete.
        recursive (bool): Delete all subfolders if True.

    Returns:
        dict. Response from Box.

    Raises:
        BoxError: An error response is returned from Box (status_code >= 400).
        BoxHttpResponseError: Response from Box is malformed.
        requests.exceptions.*: Any connection related problem.
    """
    # Box expects a lowercase "true"/"false" string in the querystring.
    query = {'recursive': unicode(recursive).lower()}
    return self.__request("DELETE", "folders/%s" % (folder_id, ),
                          querystring=query)
| 793,877
|
Nhap dai han
Args:
cucSo (TYPE): Description
gioiTinh (TYPE): Description
Returns:
TYPE: Description
|
def nhapDaiHan(self, cucSo, gioiTinh):
    """Assign the 10-year periods (dai han) to all twelve palaces.

    Args:
        cucSo: the "cuc" number used as the starting age.
        gioiTinh: gender flag used for palace distance computation.

    Returns:
        self, for chaining.
    """
    for palace in self.thapNhiCung:
        distance = khoangCachCung(palace.cungSo, self.cungMenh, gioiTinh)
        palace.daiHan(cucSo + distance * 10)
    return self
| 795,158
|
Summary
Args:
nn (TYPE): ngay
tt (TYPE): thang
nnnn (TYPE): nam
duongLich (bool, optional): bool
timeZone (int, optional): +7 Vietnam
Returns:
TYPE: Description
Raises:
Exception: Description
|
def ngayThangNam(nn, tt, nnnn, duongLich=True, timeZone=7):
    """Validate a date and convert solar dates to lunar dates.

    Args:
        nn (int): ngay (day)
        tt (int): thang (month)
        nnnn (int): nam (year)
        duongLich (bool, optional): True when the input is solar
        timeZone (int, optional): +7 Vietnam

    Returns:
        [day, month, year, leap-month flag]

    Raises:
        Exception: when day or month is out of range.
    """
    thangNhuan = 0
    if not (0 < nn < 32 and 0 < tt < 13):
        raise Exception("Ngày, tháng, năm không chính xác.")
    if duongLich is True:
        # Solar input: convert to the lunar calendar first.
        [nn, tt, nnnn, thangNhuan] = S2L(nn, tt, nnnn, timeZone=timeZone)
    return [nn, tt, nnnn, thangNhuan]
| 795,164
|
Summary
Args:
nn (int): ngày
tt (int): tháng
nnnn (int): năm
duongLich (bool, optional): True nếu là dương lịch, False âm lịch
timeZone (int, optional): Múi giờ
thangNhuan (bool, optional): Có phải là tháng nhuận không?
Returns:
TYPE: Description
|
def canChiNgay(nn, tt, nnnn, duongLich=True, timeZone=7, thangNhuan=False):
    """Compute the stem (can) and branch (chi) of a day.

    Args:
        nn (int): day
        tt (int): month
        nnnn (int): year
        duongLich (bool, optional): True for solar, False for lunar input
        timeZone (int, optional): time zone offset
        thangNhuan (bool, optional): whether the lunar month is a leap month

    Returns:
        [canNgay, chiNgay] as 1-based indices.
    """
    if duongLich is False:
        # Lunar input must be converted to solar before the Julian day.
        [nn, tt, nnnn] = L2S(nn, tt, nnnn, thangNhuan, timeZone)
    julian_day = jdFromDate(nn, tt, nnnn)
    canNgay = (julian_day + 9) % 10 + 1
    chiNgay = (julian_day + 1) % 12 + 1
    return [canNgay, chiNgay]
| 795,165
|
chuyển đổi năm, tháng âm/dương lịch sang Can, Chi trong tiếng Việt.
Không tính đến can ngày vì phải chuyển đổi qua lịch Julius.
Hàm tìm can ngày là hàm canChiNgay(nn, tt, nnnn, duongLich=True,\
timeZone=7, thangNhuan=False)
Args:
nn (int): Ngày
tt (int): Tháng
nnnn (int): Năm
Returns:
TYPE: Description
|
def ngayThangNamCanChi(nn, tt, nnnn, duongLich=True, timeZone=7):
if duongLich is True:
[nn, tt, nnnn, thangNhuan] = \
ngayThangNam(nn, tt, nnnn, timeZone=timeZone)
# Can của tháng
canThang = (nnnn * 12 + tt + 3) % 10 + 1
# Can chi của năm
canNamSinh = (nnnn + 6) % 10 + 1
chiNam = (nnnn + 8) % 12 + 1
return [canThang, canNamSinh, chiNam]
| 795,166
|
Sử dụng Ngũ Hành nạp âm để tính Hành của năm.
Args:
diaChi (integer): Số thứ tự của địa chi (Tý=1, Sửu=2,...)
thienCan (integer): Số thứ tự của thiên can (Giáp=1, Ất=2,...)
Returns:
Trả về chữ viết tắt Hành của năm (K, T, H, O, M)
|
def nguHanhNapAm(diaChi, thienCan, xuatBanMenh=False):
    """Look up the "nap am" element of a year from its stem and branch.

    Args:
        diaChi (int): 1-based index of the earthly branch (Tý=1, Sửu=2, ...)
        thienCan (int): 1-based index of the heavenly stem (Giáp=1, Ất=2, ...)
        xuatBanMenh (bool): if True return the full destiny name instead
            of the single-letter element code.

    Returns:
        The element abbreviation (K, T, H, O, M) or, with
        ``xuatBanMenh=True``, the full destiny name. Returns None for a
        cell whose code is not an element (row/column 0 headers),
        mirroring the original behaviour.

    Raises:
        Exception: when the (diaChi, thienCan) combination is invalid.
    """
    banMenh = {
        "K1": "HẢI TRUNG KIM",
        "T1": "GIÁNG HẠ THỦY",
        "H1": "TÍCH LỊCH HỎA",
        "O1": "BÍCH THƯỢNG THỔ",
        "M1": "TANG ÐỐ MỘC",
        "T2": "ÐẠI KHÊ THỦY",
        "H2": "LƯ TRUNG HỎA",
        "O2": "THÀNH ÐẦU THỔ",
        "M2": "TÒNG BÁ MỘC",
        "K2": "KIM BẠCH KIM",
        "H3": "PHÚ ÐĂNG HỎA",
        "O3": "SA TRUNG THỔ",
        "M3": "ÐẠI LÂM MỘC",
        "K3": "BẠCH LẠP KIM",
        "T3": "TRƯỜNG LƯU THỦY",
        "K4": "SA TRUNG KIM",
        "T4": "THIÊN HÀ THỦY",
        "H4": "THIÊN THƯỢNG HỎA",
        "O4": "LỘ BÀN THỔ",
        "M4": "DƯƠNG LIỄU MỘC",
        "T5": "TRUYỀN TRUNG THỦY",
        "H5": "SƠN HẠ HỎA",
        "O5": "ÐẠI TRẠCH THỔ",
        "M5": "THẠCH LỰU MỘC",
        "K5": "KIẾM PHONG KIM",
        "H6": "SƠN ÐẦU HỎA",
        "O6": "ỐC THƯỢNG THỔ",
        "M6": "BÌNH ÐỊA MỘC",
        "K6": "XOA XUYẾN KIM",
        "T6": "ÐẠI HẢI THỦY"}
    # Row = branch, column = stem. False marks impossible combinations.
    matranNapAm = [
        [0, "G", "Ất", "Bính", "Đinh", "Mậu", "Kỷ", "Canh", "Tân", "N", "Q"],
        [1, "K1", False, "T1", False, "H1", False, "O1", False, "M1", False],
        [2, False, "K1", False, "T1", False, "H1", False, "O1", False, "M1"],
        [3, "T2", False, "H2", False, "O2", False, "M2", False, "K2", False],
        [4, False, "T2", False, "H2", False, "O2", False, "M2", False, "K2"],
        [5, "H3", False, "O3", False, "M3", False, "K3", False, "T3", False],
        [6, False, "H3", False, "O3", False, "M3", False, "K3", False, "T3"],
        [7, "K4", False, "T4", False, "H4", False, "O4", False, "M4", False],
        [8, False, "K4", False, "T4", False, "H4", False, "O4", False, "M4"],
        [9, "T5", False, "H5", False, "O5", False, "M5", False, "K5", False],
        [10, False, "T5", False, "H5", False, "O5", False, "M5", False, "K5"],
        [11, "H6", False, "O6", False, "M6", False, "K6", False, "T6", False],
        [12, False, "H6", False, "O6", False, "M6", False, "K6", False, "T6"]
    ]
    # BUG FIX: the original used a bare `except:` which also swallowed
    # KeyboardInterrupt/SystemExit; only lookup failures are expected here:
    # IndexError (out-of-range indices), TypeError (False cell subscripted),
    # KeyError (missing destiny name).
    try:
        nh = matranNapAm[diaChi][thienCan]
        if nh[0] in ["K", "M", "T", "H", "O"]:
            if xuatBanMenh is True:
                return banMenh[nh]
            else:
                return nh[0]
    except (IndexError, TypeError, KeyError):
        raise Exception(nguHanhNapAm.__doc__)
| 795,168
|
Tìm vị trí của sao Tử vi
Args:
cuc (TYPE): Description
ngaySinhAmLich (TYPE): Description
Returns:
TYPE: Description
Raises:
Exception: Description
|
def timTuVi(cuc, ngaySinhAmLich):
    """Find the palace position of the Tu Vi star.

    Args:
        cuc (int): the "cuc" number; must be one of 2, 3, 4, 5, 6.
        ngaySinhAmLich (int): lunar day of birth.

    Returns:
        The palace returned by ``dichCung``.

    Raises:
        Exception: if ``cuc`` is not one of 2, 3, 4, 5, 6.
    """
    cungDan = 3  # Initial position of the Dan palace.
    cucBanDau = cuc
    if cuc not in [2, 3, 4, 5, 6]:  # Guard against an infinite loop below.
        raise Exception("Số cục phải là 2, 3, 4, 5, 6")
    while cuc < ngaySinhAmLich:
        cuc += cucBanDau
        cungDan += 1  # Shift the Dan palace forward.
    saiLech = cuc - ngaySinhAmLich
    # BUG FIX: the original compared with `is 1`, an identity check on an
    # int that only works due to CPython's small-int cache (and emits a
    # SyntaxWarning on modern Python); use equality instead.
    if saiLech % 2 == 1:
        saiLech = -saiLech  # Even offset moves forward, odd moves backward.
    return dichCung(cungDan, saiLech)
| 795,171
|
Register a new handler for a specific :class:`slack.actions.Action` `callback_id`.
Optional routing based on the action name too.
The name argument is useful for actions of type `interactive_message` to provide
a different handler for each individual action.
Args:
callback_id: Callback_id the handler is interested in
handler: Callback
name: Name of the action (optional).
|
def register(self, callback_id: str, handler: Any, name: str = "*") -> None:
    """Register a handler for a :class:`slack.actions.Action` `callback_id`.

    Optional routing based on the action name too. The name argument is
    useful for actions of type `interactive_message` to provide a
    different handler for each individual action.

    Args:
        callback_id: Callback_id the handler is interested in
        handler: Callback
        name: Name of the action (optional).
    """
    LOG.info("Registering %s, %s to %s", callback_id, name, handler)
    self._routes[callback_id].setdefault(name, []).append(handler)
| 796,000
|
Yields handlers matching the incoming :class:`slack.actions.Action` `callback_id`.
Args:
action: :class:`slack.actions.Action`
Yields:
handler
|
def dispatch(self, action: Action) -> Any:
    """Yield handlers matching the incoming action's `callback_id`.

    Args:
        action: :class:`slack.actions.Action`

    Yields:
        handler

    Raises:
        UnknownActionType: for an unrecognized action type.
    """
    LOG.debug("Dispatching action %s, %s", action["type"], action["callback_id"])
    action_type = action["type"]
    if action_type == "interactive_message":
        yield from self._dispatch_interactive_message(action)
    elif action_type in ("dialog_submission", "message_action"):
        yield from self._dispatch_action(action)
    else:
        raise UnknownActionType(action)
| 796,001
|
Check request response status
Args:
status: Response status
headers: Response headers
data: Response data
Raises:
:class:`slack.exceptions.RateLimited`: For 429 status code
:class:`slack.exceptions:HTTPException`:
|
def raise_for_status(
    status: int, headers: MutableMapping, data: MutableMapping
) -> None:
    """Check a request's response status.

    Args:
        status: Response status
        headers: Response headers
        data: Response data

    Raises:
        :class:`slack.exceptions.RateLimited`: For 429 status code
        :class:`slack.exceptions.HTTPException`: for any other non-200 status
    """
    if status == 200:
        return
    if status != 429:
        raise exceptions.HTTPException(status, headers, data)
    # Rate-limited: extract the error message and the retry delay.
    if isinstance(data, str):
        error = data
    else:
        error = data.get("error", "ratelimited")
    try:
        retry_after = int(headers.get("Retry-After", 1))
    except ValueError:
        retry_after = 1
    raise exceptions.RateLimited(retry_after, error, status, headers, data)
| 796,011
|
Check request response for Slack API error
Args:
headers: Response headers
data: Response data
Raises:
:class:`slack.exceptions.SlackAPIError`
|
def raise_for_api_error(headers: MutableMapping, data: MutableMapping) -> None:
    """Check a request's response for a Slack API error.

    Args:
        headers: Response headers
        data: Response data

    Raises:
        :class:`slack.exceptions.SlackAPIError`
    """
    ok = data["ok"]
    if not ok:
        raise exceptions.SlackAPIError(data.get("error", "unknow_error"), headers, data)
    if "warning" in data:
        LOG.warning("Slack API WARNING: %s", data["warning"])
| 796,012
|
Decode the response body
For 'application/json' content-type load the body as a dictionary
Args:
headers: Response headers
body: Response body
Returns:
decoded body
|
def decode_body(headers: MutableMapping, body: bytes) -> dict:
    """Decode the response body.

    For 'application/json' content-type load the body as a dictionary.

    Args:
        headers: Response headers
        body: Response body

    Returns:
        decoded body
    """
    content_type, encoding = parse_content_type(headers)
    text = body.decode(encoding)
    if content_type == "application/json":
        return json.loads(text)
    # One endpoint answers a bare `ok`; normalize everything to a dict so
    # the API stays consistent.
    if text == "ok":
        return {"ok": True}
    return {"ok": False, "data": text}
| 796,013
|
Find content-type and encoding of the response
Args:
headers: Response headers
Returns:
:py:class:`tuple` (content-type, encoding)
|
def parse_content_type(headers: MutableMapping) -> Tuple[Optional[str], str]:
    """Find the content-type and encoding of the response.

    Args:
        headers: Response headers

    Returns:
        :py:class:`tuple` (content-type, encoding); defaults to
        (None, "utf-8") when no content-type header is present.
    """
    raw = headers.get("content-type")
    if not raw:
        return None, "utf-8"
    type_, parameters = cgi.parse_header(raw)
    return type_, parameters.get("charset", "utf-8")
| 796,014
|
Prepare outgoing request
Create url, headers, add token to the body and if needed json encode it
Args:
url: :class:`slack.methods` item or string of url
data: Outgoing data
headers: Custom headers
global_headers: Global headers
token: Slack API token
as_json: Post JSON to the slack API
Returns:
:py:class:`tuple` (url, body, headers)
|
def prepare_request(
    url: Union[str, methods],
    data: Optional[MutableMapping],
    headers: Optional[MutableMapping],
    global_headers: MutableMapping,
    token: str,
    as_json: Optional[bool] = None,
) -> Tuple[str, Union[str, MutableMapping], MutableMapping]:
    """Prepare an outgoing request.

    Create the url and headers, add the token to the body and JSON
    encode it if needed.

    Args:
        url: :class:`slack.methods` item or string of url
        data: Outgoing data
        headers: Custom headers
        global_headers: Global headers
        token: Slack API token
        as_json: Post JSON to the slack API

    Returns:
        :py:class:`tuple` (url, body, headers)
    """
    if isinstance(url, methods):
        # Enum members carry (url, itermode, iterkey, as_json).
        as_json = as_json or url.value[3]
        real_url = url.value[0]
    else:
        real_url = url
        as_json = False

    merged_headers = {**global_headers, **headers} if headers else {**global_headers}

    payload: Optional[Union[str, MutableMapping]] = None
    if real_url.startswith(HOOK_URL) or (real_url.startswith(ROOT_URL) and as_json):
        payload, merged_headers = _prepare_json_request(data, token, merged_headers)
    elif real_url.startswith(ROOT_URL) and not as_json:
        payload = _prepare_form_encoded_request(data, token)
    else:
        # Bare endpoint name: anchor it on the API root.
        real_url = ROOT_URL + real_url
        payload = _prepare_form_encoded_request(data, token)
    return real_url, payload, merged_headers
| 796,015
|
Decode incoming response
Args:
status: Response status
headers: Response headers
body: Response body
Returns:
Response data
|
def decode_response(status: int, headers: MutableMapping, body: bytes) -> dict:
    """Decode an incoming response and validate it.

    Args:
        status: Response status
        headers: Response headers
        body: Response body

    Returns:
        Response data
    """
    payload = decode_body(headers, body)
    # HTTP-level errors first, then Slack API-level errors.
    raise_for_status(status, headers, payload)
    raise_for_api_error(headers, payload)
    return payload
| 796,018
|
Find iteration mode and iteration key for a given :class:`slack.methods`
Args:
url: :class:`slack.methods` or string url
itermode: Custom iteration mode
iterkey: Custom iteration key
Returns:
:py:class:`tuple` (itermode, iterkey)
|
def find_iteration(
    url: Union[methods, str],
    itermode: Optional[str] = None,
    iterkey: Optional[str] = None,
) -> Tuple[str, str]:
    """Find the iteration mode and key for a given :class:`slack.methods`.

    Args:
        url: :class:`slack.methods` or string url
        itermode: Custom iteration mode
        iterkey: Custom iteration key

    Returns:
        :py:class:`tuple` (itermode, iterkey)

    Raises:
        ValueError: when no valid iteration mode/key can be determined.
    """
    if isinstance(url, methods):
        # Fall back to the enum's declared iteration metadata.
        itermode = itermode or url.value[1]
        iterkey = iterkey or url.value[2]
    if not iterkey or not itermode:
        raise ValueError("Iteration not supported for: {}".format(url))
    if itermode not in ITERMODE:
        raise ValueError("Iteration not supported for: {}".format(itermode))
    return itermode, iterkey
| 796,019
|
Decode incoming response from an iteration request
Args:
data: Response data
Returns:
Next itervalue
|
def decode_iter_request(data: dict) -> Optional[Union[str, int]]:
    """Decode an incoming response from an iteration request.

    Args:
        data: Response data

    Returns:
        The next iteration value (cursor string, page number or
        timestamp), or None when iteration is finished.
    """
    # Cursor-based pagination takes precedence.
    if "response_metadata" in data:
        return data["response_metadata"].get("next_cursor")
    # Page-based pagination.
    if "paging" in data:
        page = int(data["paging"].get("page", 1))
        pages = int(data["paging"].get("pages", 1))
        return page + 1 if page < pages else None
    # Timestamp-based pagination (e.g. channel history).
    if data.get("has_more") and "latest" in data:
        return data["messages"][-1]["ts"]
    return None
| 796,021
|
Check if the incoming event needs to be discarded
Args:
event: Incoming :class:`slack.events.Event`
bot_id: Id of connected bot
Returns:
boolean
|
def discard_event(event: events.Event, bot_id: str = None) -> bool:
    """Check if the incoming event needs to be discarded.

    Args:
        event: Incoming :class:`slack.events.Event`
        bot_id: Id of connected bot

    Returns:
        boolean
    """
    if event["type"] in SKIP_EVENTS:
        return True
    if not bot_id or not isinstance(event, events.Message):
        return False
    # Drop messages emitted by the connected bot itself (directly or
    # wrapped in a message envelope) to avoid feedback loops.
    if event.get("bot_id") == bot_id:
        LOG.debug("Ignoring event: %s", event)
        return True
    if "message" in event and event["message"].get("bot_id") == bot_id:
        LOG.debug("Ignoring event: %s", event)
        return True
    return False
| 796,022
|
Register a new handler for a specific slash command
Args:
command: Slash command
handler: Callback
|
def register(self, command: str, handler: Any):
    """Register a new handler for a specific slash command.

    Args:
        command: Slash command (leading slash added when missing)
        handler: Callback
    """
    # Normalize so "/foo" and "foo" register under the same key.
    command = command if command.startswith("/") else f"/{command}"
    LOG.info("Registering %s to %s", command, handler)
    self._routes[command].append(handler)
| 796,040
|
Yields handlers matching the incoming :class:`slack.actions.Command`.
Args:
command: :class:`slack.actions.Command`
Yields:
handler
|
def dispatch(self, command: Command) -> Iterator[Any]:
    """Yield handlers matching the incoming :class:`slack.actions.Command`.

    Args:
        command: :class:`slack.actions.Command`

    Yields:
        handler
    """
    LOG.debug("Dispatching command %s", command["command"])
    yield from self._routes[command["command"]]
| 796,041
|
Create an event with data coming from the RTM API.
If the event type is a message a :class:`slack.events.Message` is returned.
Args:
raw_event: JSON decoded data from the RTM API
Returns:
:class:`slack.events.Event` or :class:`slack.events.Message`
|
def from_rtm(cls, raw_event: MutableMapping) -> "Event":
    """Create an event with data coming from the RTM API.

    If the event type is a message a :class:`slack.events.Message` is
    returned.

    Args:
        raw_event: JSON decoded data from the RTM API

    Returns:
        :class:`slack.events.Event` or :class:`slack.events.Message`
    """
    klass = Message if raw_event["type"].startswith("message") else Event
    return klass(raw_event)
| 796,127
|
Create a response message.
Depending on the incoming message the response can be in a thread. By default the response follow where the
incoming message was posted.
Args:
in_thread (boolean): Overwrite the `threading` behaviour
Returns:
a new :class:`slack.event.Message`
|
def response(self, in_thread: Optional[bool] = None) -> "Message":
    """Create a response message.

    Depending on the incoming message the response can be in a thread.
    By default the response follows where the incoming message was
    posted.

    Args:
        in_thread (boolean): Overwrite the `threading` behaviour

    Returns:
        a new :class:`slack.event.Message`
    """
    payload = {"channel": self["channel"]}
    if in_thread:
        # Force threading: anchor on the existing thread, or start one.
        if "message" in self:
            payload["thread_ts"] = (
                self["message"].get("thread_ts") or self["message"]["ts"]
            )
        else:
            payload["thread_ts"] = self.get("thread_ts") or self["ts"]
    elif in_thread is None:
        # Default: stay in a thread only if the incoming message was in one.
        if "message" in self and "thread_ts" in self["message"]:
            payload["thread_ts"] = self["message"]["thread_ts"]
        elif "thread_ts" in self:
            payload["thread_ts"] = self["thread_ts"]
    return Message(payload)
| 796,130
|
Register a new handler for a specific :class:`slack.events.Event` `type` (See `slack event types documentation
<https://api.slack.com/events>`_ for a list of event types).
The arbitrary keyword argument is used as a key/value pair to compare against what is in the incoming
:class:`slack.events.Event`
Args:
event_type: Event type the handler is interested in
handler: Callback
**detail: Additional key for routing
|
def register(self, event_type: str, handler: Any, **detail: Any) -> None:
    """Register a handler for a :class:`slack.events.Event` `type`.

    See the slack event types documentation <https://api.slack.com/events>
    for a list of event types. The arbitrary keyword argument is used as
    a key/value pair to compare against the incoming event.

    Args:
        event_type: Event type the handler is interested in
        handler: Callback
        **detail: Additional key for routing

    Raises:
        ValueError: when more than one detail is given.
    """
    LOG.info("Registering %s, %s to %s", event_type, detail, handler)
    if len(detail) > 1:
        raise ValueError("Only one detail can be provided for additional routing")
    if detail:
        detail_key, detail_value = detail.popitem()
    else:
        detail_key, detail_value = "*", "*"
    by_key = self._routes[event_type].setdefault(detail_key, {})
    by_key.setdefault(detail_value, []).append(handler)
| 796,132
|
Yields handlers matching the routing of the incoming :class:`slack.events.Event`.
Args:
event: :class:`slack.events.Event`
Yields:
handler
|
def dispatch(self, event: Event) -> Iterator[Any]:
    """Yield handlers matching the routing of the incoming event.

    Args:
        event: :class:`slack.events.Event`

    Yields:
        handler
    """
    LOG.debug('Dispatching event "%s"', event.get("type"))
    if event["type"] not in self._routes:
        return
    for detail_key, handlers_by_value in self._routes.get(
        event["type"], {}
    ).items():
        # Events missing the routing key fall back to the wildcard bucket.
        yield from handlers_by_value.get(event.get(detail_key, "*"), [])
| 796,133
|
Register a new handler for a specific :class:`slack.events.Message`.
The routing is based on regex pattern matching the message text and the incoming slack channel.
Args:
pattern: Regex pattern matching the message text.
handler: Callback
flags: Regex flags.
channel: Slack channel ID. Use * for any.
subtype: Message subtype
|
def register(
    self,
    pattern: str,
    handler: Any,
    flags: int = 0,
    channel: str = "*",
    subtype: Optional[str] = None,
) -> None:
    """Register a new handler for a specific :class:`slack.events.Message`.

    The routing is based on regex pattern matching the message text and
    the incoming slack channel.

    Args:
        pattern: Regex pattern matching the message text.
        handler: Callback
        flags: Regex flags.
        channel: Slack channel ID. Use * for any.
        subtype: Message subtype
    """
    LOG.debug('Registering message endpoint "%s: %s"', pattern, handler)
    compiled = re.compile(pattern, flags)
    by_subtype = self._routes[channel].setdefault(subtype, dict())
    by_subtype.setdefault(compiled, []).append(handler)
| 796,134
|
Yields handlers matching the routing of the incoming :class:`slack.events.Message`
Args:
message: :class:`slack.events.Message`
Yields:
handler
|
def dispatch(self, message: Message) -> Iterator[Any]:
    """Yield handlers matching the routing of the incoming message.

    Args:
        message: :class:`slack.events.Message`

    Yields:
        handler
    """
    # The matchable text is either on the message itself or nested in a
    # wrapped `message` payload.
    if "text" in message:
        text = message["text"] or ""
    elif "message" in message:
        text = message["message"].get("text", "")
    else:
        text = ""
    incoming_subtype = message.get("subtype")
    candidates = itertools.chain(
        self._routes[message["channel"]].items(), self._routes["*"].items()
    )
    for subtype, patterns in candidates:
        if subtype is not None and subtype != incoming_subtype:
            continue
        for pattern, handlers in patterns.items():
            if pattern.search(text):
                yield from handlers
| 796,135
|
Query the slack API
When using :class:`slack.methods` the request is made `as_json` if available
Args:
url: :class:`slack.methods` or url string
data: JSON encodable MutableMapping
headers: Custom headers
as_json: Post JSON to the slack API
Returns:
dictionary of slack API response data
|
def query(  # type: ignore
    self,
    url: Union[str, methods],
    data: Optional[MutableMapping] = None,
    headers: Optional[MutableMapping] = None,
    as_json: Optional[bool] = None,
) -> dict:
    """Query the slack API.

    When using :class:`slack.methods` the request is made `as_json` if
    available.

    Args:
        url: :class:`slack.methods` or url string
        data: JSON encodable MutableMapping
        headers: Custom headers
        as_json: Post JSON to the slack API

    Returns:
        dictionary of slack API response data
    """
    url, body, headers = sansio.prepare_request(
        url=url,
        data=data,
        headers=headers,
        global_headers=self._headers,
        token=self._token,
        # BUG FIX: `as_json` was accepted but never forwarded, so an
        # explicit as_json=True from the caller was silently ignored.
        as_json=as_json,
    )
    return self._make_query(url, body, headers)
| 796,139
|
Iterate over event from the RTM API
Args:
url: Websocket connection url
bot_id: Connecting bot ID
Returns:
:class:`slack.events.Event` or :class:`slack.events.Message`
|
def rtm(  # type: ignore
    self, url: Optional[str] = None, bot_id: Optional[str] = None
) -> Iterator[events.Event]:
    """Iterate over events from the RTM API, reconnecting on disconnect.

    Args:
        url: Websocket connection url
        bot_id: Connecting bot ID

    Yields:
        :class:`slack.events.Event` or :class:`slack.events.Message`
    """
    while True:
        bot_id = bot_id or self._find_bot_id()
        url = url or self._find_rtm_url()
        yield from self._incoming_from_rtm(url, bot_id)
        # Force a fresh websocket URL on the next reconnect attempt.
        url = None
| 796,140
|
Read values.
Args:
vals (list): list of strings representing values
|
def read(self, vals):
    """Read location fields from a list of raw string values.

    Empty strings are stored as None; assignments go through the
    corresponding property setters, so their validation still applies.
    Replaces nine copy-pasted if/else blocks with a data-driven loop.

    Args:
        vals (list): list of strings representing values

    Raises:
        IndexError: if ``vals`` has fewer than nine elements (same as
            the original positional implementation).
    """
    # Field order must match the record layout the values come from.
    field_names = (
        "city",
        "state_province_region",
        "country",
        "source",
        "wmo",
        "latitude",
        "longitude",
        "timezone",
        "elevation",
    )
    for i, name in enumerate(field_names):
        raw = vals[i]
        setattr(self, name, raw if len(raw) != 0 else None)
| 797,381
|
Corresponds to IDD Field `city`
Args:
value (str): value for IDD Field `city`
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def city(self, value=None):
    """Corresponds to IDD Field `city`.

    Args:
        value (str): value for IDD Field `city`. If `value` is None it
            is stored unchecked and treated as a missing value.

    Raises:
        ValueError: if `value` is not a valid value
    """
    if value is None:
        self._city = None
        return
    try:
        value = str(value)
    except ValueError:
        raise ValueError('value {} need to be of type str '
                         'for field `city`'.format(value))
    # Commas would corrupt the comma-separated export format.
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `city`')
    self._city = value
| 797,382
|
Corresponds to IDD Field `state_province_region`
Args:
value (str): value for IDD Field `state_province_region`
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def state_province_region(self, value=None):
    """Corresponds to IDD Field `state_province_region`.

    Args:
        value (str): value for IDD Field `state_province_region`. If
            `value` is None it is stored unchecked and treated as a
            missing value.

    Raises:
        ValueError: if `value` is not a valid value
    """
    if value is None:
        self._state_province_region = None
        return
    try:
        value = str(value)
    except ValueError:
        raise ValueError(
            'value {} need to be of type str '
            'for field `state_province_region`'.format(value))
    # Commas would corrupt the comma-separated export format.
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `state_province_region`')
    self._state_province_region = value
| 797,383
|
Corresponds to IDD Field `country`
Args:
value (str): value for IDD Field `country`
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def country(self, value=None):
    """Corresponds to IDD Field `country`.

    Args:
        value (str): value for IDD Field `country`. If `value` is None
            it is stored unchecked and treated as a missing value.

    Raises:
        ValueError: if `value` is not a valid value
    """
    if value is None:
        self._country = None
        return
    try:
        value = str(value)
    except ValueError:
        raise ValueError('value {} need to be of type str '
                         'for field `country`'.format(value))
    # Commas would corrupt the comma-separated export format.
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `country`')
    self._country = value
| 797,384
|
Corresponds to IDD Field `source`
Args:
value (str): value for IDD Field `source`
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def source(self, value=None):
    """Corresponds to IDD Field `source`.

    Args:
        value (str): value for IDD Field `source`. If `value` is None
            it is stored unchecked and treated as a missing value.

    Raises:
        ValueError: if `value` is not a valid value
    """
    if value is None:
        self._source = None
        return
    try:
        value = str(value)
    except ValueError:
        raise ValueError('value {} need to be of type str '
                         'for field `source`'.format(value))
    # Commas would corrupt the comma-separated export format.
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `source`')
    self._source = value
| 797,385
|
Corresponds to IDD Field `wmo` usually a 6 digit field. Used as
alpha in EnergyPlus.
Args:
value (str): value for IDD Field `wmo`
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def wmo(self, value=None):
    """Corresponds to IDD Field `wmo`, usually a 6 digit field.

    Used as alpha in EnergyPlus.

    Args:
        value (str): value for IDD Field `wmo`. If `value` is None it
            is stored unchecked and treated as a missing value.

    Raises:
        ValueError: if `value` is not a valid value
    """
    if value is None:
        self._wmo = None
        return
    try:
        value = str(value)
    except ValueError:
        raise ValueError('value {} need to be of type str '
                         'for field `wmo`'.format(value))
    # Commas would corrupt the comma-separated export format.
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `wmo`')
    self._wmo = value
| 797,386
|
Corresponds to IDD Field `timezone` Time relative to GMT.
Args:
value (float): value for IDD Field `timezone`
Unit: hr - not on standard units list???
Default value: 0.0
value >= -12.0
value <= 12.0
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def timezone(self, value=0.0):
        """Set IDD Field `timezone` (hours relative to GMT).

        Args:
            value (float): value for IDD Field `timezone`, -12.0..12.0
                inclusive (default 0.0); `None` clears the field without
                validation.

        Raises:
            ValueError: if `value` is not coercible to float or is out of
                bounds.
        """
        if value is None:
            self._timezone = None
            return
        try:
            hours = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `timezone`'.format(value))
        if hours < -12.0:
            raise ValueError('value need to be greater or equal -12.0 '
                             'for field `timezone`')
        if hours > 12.0:
            raise ValueError('value need to be smaller 12.0 '
                             'for field `timezone`')
        self._timezone = hours
| 797,389
|
Corresponds to IDD Field `elevation`
Args:
value (float): value for IDD Field `elevation`
Unit: m
Default value: 0.0
value >= -1000.0
value < 9999.9
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def elevation(self, value=0.0):
        """Set IDD Field `elevation` (metres).

        Args:
            value (float): value for IDD Field `elevation`,
                -1000.0 <= value < 9999.9 (default 0.0); `None` clears the
                field without validation.

        Raises:
            ValueError: if `value` is not coercible to float or is out of
                bounds.
        """
        if value is None:
            self._elevation = None
            return
        try:
            metres = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `elevation`'.format(value))
        if metres < -1000.0:
            raise ValueError('value need to be greater or equal -1000.0 '
                             'for field `elevation`')
        if metres >= 9999.9:
            raise ValueError('value need to be smaller 9999.9 '
                             'for field `elevation`')
        self._elevation = metres
| 797,390
|
Exports object to its string representation.
Args:
top (bool): if True appends `internal_name` before values.
All non list objects should be exported with value top=True,
all list objects, that are embedded in as fields inlist objects
should be exported with `top`=False
Returns:
str: The objects string representation
|
def export(self, top=True):
        """Export this object to its comma-separated string representation.

        Args:
            top (bool): if True, prepend `internal_name` to the values.
                Non-list objects export with top=True; list objects embedded
                as fields in list objects export with top=False.

        Returns:
            str: the object's string representation
        """
        # Field order matches the EPW LOCATION record layout.
        field_names = ('city', 'state_province_region', 'country', 'source',
                       'wmo', 'latitude', 'longitude', 'timezone',
                       'elevation')
        parts = [self._to_str(getattr(self, name)) for name in field_names]
        if top:
            parts.insert(0, self._internal_name)
        return ",".join(parts)
| 797,391
|
Read values.
Args:
vals (list): list of strings representing values
|
def read(self, vals):
        """Populate all design-condition fields from a list of raw strings.

        Each entry of `vals` is assigned, in record order, through the
        corresponding property setter; an empty string is stored as `None`
        (the setters skip validation for `None`). Replaces 68 copy-pasted
        if/else blocks with one data-driven loop — behavior is unchanged,
        including the IndexError raised when `vals` is shorter than the
        record.

        Args:
            vals (list): list of strings representing values
        """
        # Field names in the exact order they appear in the data record.
        field_names = (
            'title_of_design_condition', 'unkown_field',
            'design_stat_heating', 'coldestmonth', 'db996', 'db990',
            'dp996', 'hr_dp996', 'db_dp996', 'dp990', 'hr_dp990',
            'db_dp990', 'ws004c', 'db_ws004c', 'ws010c', 'db_ws010c',
            'ws_db996', 'wd_db996', 'design_stat_cooling', 'hottestmonth',
            'dbr', 'db004', 'wb_db004', 'db010', 'wb_db010', 'db020',
            'wb_db020', 'wb004', 'db_wb004', 'wb010', 'db_wb010', 'wb020',
            'db_wb020', 'ws_db004', 'wd_db004', 'dp004', 'hr_dp004',
            'db_dp004', 'dp010', 'hr_dp010', 'db_dp010', 'dp020',
            'hr_dp020', 'db_dp020', 'en004', 'db_en004', 'en010',
            'db_en010', 'en020', 'db_en020', 'hrs_84_and_db12_8_or_20_6',
            'design_stat_extremes', 'ws010', 'ws025', 'ws050', 'wbmax',
            'dbmin_mean', 'dbmax_mean', 'dbmin_stddev', 'dbmax_stddev',
            'dbmin05years', 'dbmax05years', 'dbmin10years', 'dbmax10years',
            'dbmin20years', 'dbmax20years', 'dbmin50years', 'dbmax50years',
        )
        for i, name in enumerate(field_names):
            # Empty field -> None; otherwise assign the raw string and let
            # the property setter coerce/validate it.
            setattr(self, name, vals[i] if len(vals[i]) != 0 else None)
| 797,393
|
Corresponds to IDD Field `title_of_design_condition`
Args:
value (str): value for IDD Field `title_of_design_condition`
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def title_of_design_condition(self, value=None):
        """Set IDD Field `title_of_design_condition`.

        Args:
            value (str): value for IDD Field `title_of_design_condition`;
                `None` clears the field without validation.

        Raises:
            ValueError: if `value` cannot be coerced to str or contains a
                comma.
        """
        if value is None:
            self._title_of_design_condition = None
            return
        try:
            text = str(value)
        except ValueError:
            raise ValueError(
                'value {} need to be of type str '
                'for field `title_of_design_condition`'.format(value))
        if ',' in text:
            raise ValueError('value should not contain a comma '
                             'for field `title_of_design_condition`')
        self._title_of_design_condition = text
| 797,394
|
Corresponds to IDD Field `unkown_field` Empty field in data.
Args:
value (str): value for IDD Field `unkown_field`
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def unkown_field(self, value=None):
        """Set IDD Field `unkown_field` (empty placeholder field in data).

        Args:
            value (str): value for IDD Field `unkown_field`; `None` clears
                the field without validation.

        Raises:
            ValueError: if `value` cannot be coerced to str or contains a
                comma.
        """
        if value is None:
            self._unkown_field = None
            return
        try:
            text = str(value)
        except ValueError:
            raise ValueError('value {} need to be of type str '
                             'for field `unkown_field`'.format(value))
        if ',' in text:
            raise ValueError('value should not contain a comma '
                             'for field `unkown_field`')
        self._unkown_field = text
| 797,395
|
Corresponds to IDD Field `design_stat_heating`
Args:
value (str): value for IDD Field `design_stat_heating`
Accepted values are:
- Heating
Default value: Heating
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def design_stat_heating(self, value="Heating"):
        """Set IDD Field `design_stat_heating`.

        Only the literal value "Heating" is accepted.

        Args:
            value (str): value for IDD Field `design_stat_heating`
                (default "Heating"); `None` clears the field without
                validation.

        Raises:
            ValueError: if `value` cannot be coerced to str, contains a
                comma, or is not an accepted value.
        """
        if value is None:
            self._design_stat_heating = None
            return
        try:
            text = str(value)
        except ValueError:
            raise ValueError(
                'value {} need to be of type str '
                'for field `design_stat_heating`'.format(value))
        if ',' in text:
            raise ValueError('value should not contain a comma '
                             'for field `design_stat_heating`')
        if text not in ("Heating",):
            raise ValueError('value {} is not an accepted value for '
                             'field `design_stat_heating`'.format(text))
        self._design_stat_heating = text
| 797,396
|
Corresponds to IDD Field `coldestmonth`
Args:
value (int): value for IDD Field `coldestmonth`
value >= 1
value <= 12
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def coldestmonth(self, value=None):
        """Set IDD Field `coldestmonth` (month number, 1..12).

        Args:
            value (int): value for IDD Field `coldestmonth`, 1 <= value
                <= 12; `None` clears the field without validation.

        Raises:
            ValueError: if `value` is not coercible to int or is out of
                bounds.
        """
        if value is None:
            self._coldestmonth = None
            return
        try:
            month = int(value)
        except ValueError:
            raise ValueError('value {} need to be of type int '
                             'for field `coldestmonth`'.format(value))
        if month < 1:
            raise ValueError('value need to be greater or equal 1 '
                             'for field `coldestmonth`')
        if month > 12:
            raise ValueError('value need to be smaller 12 '
                             'for field `coldestmonth`')
        self._coldestmonth = month
| 797,397
|
Corresponds to IDD Field `db996`
Dry-bulb temperature corresponding to 99.6% annual cumulative
frequency of occurrence (cold conditions)
Args:
value (float): value for IDD Field `db996`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def db996(self, value=None):
        """Set IDD Field `db996`: dry-bulb temperature (C) at the 99.6%
        annual cumulative frequency of occurrence (cold conditions).

        Args:
            value (float): value for IDD Field `db996`; `None` clears the
                field without validation.

        Raises:
            ValueError: if `value` is not coercible to float.
        """
        if value is None:
            self._db996 = None
            return
        try:
            temperature = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `db996`'.format(value))
        self._db996 = temperature
| 797,398
|
Corresponds to IDD Field `db990`
    Dry-bulb temperature corresponding to 99.0% annual cumulative
    frequency of occurrence (cold conditions)
Args:
value (float): value for IDD Field `db990`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def db990(self, value=None):
        """Set IDD Field `db990`: dry-bulb temperature (C) at the 99.0%
        annual cumulative frequency of occurrence (cold conditions).

        Args:
            value (float): value for IDD Field `db990`; `None` clears the
                field without validation.

        Raises:
            ValueError: if `value` is not coercible to float.
        """
        if value is None:
            self._db990 = None
            return
        try:
            temperature = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `db990`'.format(value))
        self._db990 = temperature
| 797,399
|
Corresponds to IDD Field `dp996`
Dew-point temperature corresponding to 99.6% annual cumulative
frequency of occurrence (cold conditions)
Args:
value (float): value for IDD Field `dp996`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def dp996(self, value=None):
        """Set IDD Field `dp996`: dew-point temperature (C) at the 99.6%
        annual cumulative frequency of occurrence (cold conditions).

        Args:
            value (float): value for IDD Field `dp996`; `None` clears the
                field without validation.

        Raises:
            ValueError: if `value` is not coercible to float.
        """
        if value is None:
            self._dp996 = None
            return
        try:
            temperature = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `dp996`'.format(value))
        self._dp996 = temperature
| 797,400
|
Corresponds to IDD Field `dp990`
    Dew-point temperature corresponding to 99.0% annual cumulative
    frequency of occurrence (cold conditions)
Args:
value (float): value for IDD Field `dp990`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def dp990(self, value=None):
        """Set IDD Field `dp990`: dew-point temperature (C) at the 99.0%
        annual cumulative frequency of occurrence (cold conditions).

        Args:
            value (float): value for IDD Field `dp990`; `None` clears the
                field without validation.

        Raises:
            ValueError: if `value` is not coercible to float.
        """
        if value is None:
            self._dp990 = None
            return
        try:
            temperature = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `dp990`'.format(value))
        self._dp990 = temperature
| 797,403
|
Corresponds to IDD Field `ws004c`
Args:
value (float): value for IDD Field `ws004c`
Unit: m/s
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def ws004c(self, value=None):
        """Set IDD Field `ws004c`: wind speed (m/s) at 0.4% cumulative
        frequency of occurrence for the coldest month.

        Args:
            value (float): value for IDD Field `ws004c`; `None` clears the
                field without validation.

        Raises:
            ValueError: if `value` is not coercible to float.
        """
        if value is None:
            self._ws004c = None
            return
        try:
            speed = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `ws004c`'.format(value))
        self._ws004c = speed
| 797,406
|
Corresponds to IDD Field `db_ws004c`
Mean coincident dry-bulb temperature to wind speed corresponding to 0.40% cumulative frequency for coldest month
Args:
value (float): value for IDD Field `db_ws004c`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def db_ws004c(self, value=None):
        """Set IDD Field `db_ws004c`: mean coincident dry-bulb temperature
        (C) for the 0.40% coldest-month wind speed.

        Args:
            value (float): value for IDD Field `db_ws004c`; `None` clears
                the field without validation.

        Raises:
            ValueError: if `value` is not coercible to float.
        """
        if value is None:
            self._db_ws004c = None
            return
        try:
            temperature = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `db_ws004c`'.format(value))
        self._db_ws004c = temperature
| 797,407
|
Corresponds to IDD Field `ws010c`
Wind speed corresponding to 1.0% cumulative frequency
of occurrence for coldest month;
Args:
value (float): value for IDD Field `ws010c`
Unit: m/s
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def ws010c(self, value=None):
        """Set IDD Field `ws010c`: wind speed (m/s) at 1.0% cumulative
        frequency of occurrence for the coldest month.

        Args:
            value (float): value for IDD Field `ws010c`; `None` clears the
                field without validation.

        Raises:
            ValueError: if `value` is not coercible to float.
        """
        if value is None:
            self._ws010c = None
            return
        try:
            speed = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `ws010c`'.format(value))
        self._ws010c = speed
| 797,408
|
Corresponds to IDD Field `db_ws010c`
Mean coincident dry-bulb temperature to wind speed corresponding to 1.0% cumulative frequency for coldest month
Args:
value (float): value for IDD Field `db_ws010c`
Unit: C
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def db_ws010c(self, value=None):
        """Set IDD Field `db_ws010c`: mean coincident dry-bulb temperature
        (C) for the 1.0% coldest-month wind speed.

        Args:
            value (float): value for IDD Field `db_ws010c`; `None` clears
                the field without validation.

        Raises:
            ValueError: if `value` is not coercible to float.
        """
        if value is None:
            self._db_ws010c = None
            return
        try:
            temperature = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `db_ws010c`'.format(value))
        self._db_ws010c = temperature
| 797,409
|
Corresponds to IDD Field `ws_db996`
Mean wind speed coincident with 99.6% dry-bulb temperature
Args:
value (float): value for IDD Field `ws_db996`
Unit: m/s
if `value` is None it will not be checked against the
specification and is assumed to be a missing value
Raises:
ValueError: if `value` is not a valid value
|
def ws_db996(self, value=None):
        """Set IDD Field `ws_db996`: mean wind speed (m/s) coincident with
        the 99.6% dry-bulb temperature.

        Args:
            value (float): value for IDD Field `ws_db996`; `None` clears
                the field without validation.

        Raises:
            ValueError: if `value` is not coercible to float.
        """
        if value is None:
            self._ws_db996 = None
            return
        try:
            speed = float(value)
        except ValueError:
            raise ValueError('value {} need to be of type float '
                             'for field `ws_db996`'.format(value))
        self._ws_db996 = speed
| 797,410
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.