repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
rueckstiess/mtools | mtools/util/profile_collection.py | ProfileCollection.num_events | def num_events(self):
"""Lazy evaluation of the number of events."""
if not self._num_events:
self._num_events = self.coll_handle.count()
return self._num_events | python | def num_events(self):
"""Lazy evaluation of the number of events."""
if not self._num_events:
self._num_events = self.coll_handle.count()
return self._num_events | [
"def",
"num_events",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_num_events",
":",
"self",
".",
"_num_events",
"=",
"self",
".",
"coll_handle",
".",
"count",
"(",
")",
"return",
"self",
".",
"_num_events"
] | Lazy evaluation of the number of events. | [
"Lazy",
"evaluation",
"of",
"the",
"number",
"of",
"events",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/profile_collection.py#L88-L92 | train | 225,100 |
rueckstiess/mtools | mtools/util/profile_collection.py | ProfileCollection.next | def next(self):
"""Make iterators."""
if not self.cursor:
self.cursor = self.coll_handle.find().sort([("ts", ASCENDING)])
doc = self.cursor.next()
doc['thread'] = self.name
le = LogEvent(doc)
return le | python | def next(self):
"""Make iterators."""
if not self.cursor:
self.cursor = self.coll_handle.find().sort([("ts", ASCENDING)])
doc = self.cursor.next()
doc['thread'] = self.name
le = LogEvent(doc)
return le | [
"def",
"next",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"cursor",
":",
"self",
".",
"cursor",
"=",
"self",
".",
"coll_handle",
".",
"find",
"(",
")",
".",
"sort",
"(",
"[",
"(",
"\"ts\"",
",",
"ASCENDING",
")",
"]",
")",
"doc",
"=",
"se... | Make iterators. | [
"Make",
"iterators",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/profile_collection.py#L94-L102 | train | 225,101 |
rueckstiess/mtools | mtools/util/profile_collection.py | ProfileCollection._calculate_bounds | def _calculate_bounds(self):
"""Calculate beginning and end of log events."""
# get start datetime
first = self.coll_handle.find_one(None, sort=[("ts", ASCENDING)])
last = self.coll_handle.find_one(None, sort=[("ts", DESCENDING)])
self._start = first['ts']
if self._start.tzinfo is None:
self._start = self._start.replace(tzinfo=tzutc())
self._end = last['ts']
if self._end.tzinfo is None:
self._end = self._end.replace(tzinfo=tzutc())
return True | python | def _calculate_bounds(self):
"""Calculate beginning and end of log events."""
# get start datetime
first = self.coll_handle.find_one(None, sort=[("ts", ASCENDING)])
last = self.coll_handle.find_one(None, sort=[("ts", DESCENDING)])
self._start = first['ts']
if self._start.tzinfo is None:
self._start = self._start.replace(tzinfo=tzutc())
self._end = last['ts']
if self._end.tzinfo is None:
self._end = self._end.replace(tzinfo=tzutc())
return True | [
"def",
"_calculate_bounds",
"(",
"self",
")",
":",
"# get start datetime",
"first",
"=",
"self",
".",
"coll_handle",
".",
"find_one",
"(",
"None",
",",
"sort",
"=",
"[",
"(",
"\"ts\"",
",",
"ASCENDING",
")",
"]",
")",
"last",
"=",
"self",
".",
"coll_hand... | Calculate beginning and end of log events. | [
"Calculate",
"beginning",
"and",
"end",
"of",
"log",
"events",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/profile_collection.py#L117-L131 | train | 225,102 |
rueckstiess/mtools | mtools/mloginfo/sections/distinct_section.py | DistinctSection.run | def run(self):
"""Run each line through log2code and group by matched pattern."""
if ProfileCollection and isinstance(self.mloginfo.logfile,
ProfileCollection):
print("\n not available for system.profile collections\n")
return
codelines = defaultdict(lambda: 0)
non_matches = 0
# get log file information
logfile = self.mloginfo.logfile
if logfile.start and logfile.end and not self.mloginfo.args['verbose']:
progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
progress_start)
else:
self.mloginfo.progress_bar_enabled = False
for i, logevent in enumerate(self.mloginfo.logfile):
cl, _ = self.log2code(logevent.line_str)
# update progress bar every 1000 lines
if self.mloginfo.progress_bar_enabled and (i % 1000 == 0):
if logevent.datetime:
progress_curr = self.mloginfo._datetime_to_epoch(logevent
.datetime)
(self.mloginfo
.update_progress(float(progress_curr - progress_start) /
progress_total))
if cl:
codelines[cl.pattern] += 1
else:
if logevent.operation:
# skip operations (command, insert, update, delete,
# query, getmore)
continue
if not logevent.thread:
# skip the lines that don't have a thread name
# (usually map/reduce or assertions)
continue
if len(logevent.split_tokens) - logevent.datetime_nextpos <= 1:
# skip empty log messages (after thread name)
continue
if ("warning: log line attempted" in logevent.line_str and
"over max size" in logevent.line_str):
# skip lines that are too long
continue
# everything else is a real non-match
non_matches += 1
if self.mloginfo.args['verbose']:
print("couldn't match:" + logevent)
# clear progress bar again
if self.mloginfo.progress_bar_enabled:
self.mloginfo.update_progress(1.0)
if self.mloginfo.args['verbose']:
print('')
for cl in sorted(codelines, key=lambda x: codelines[x], reverse=True):
print("%8i %s" % (codelines[cl], " ... ".join(cl)))
print('')
if non_matches > 0:
print("distinct couldn't match %i lines" % non_matches)
if not self.mloginfo.args['verbose']:
print("to show non-matched lines, run with --verbose.") | python | def run(self):
"""Run each line through log2code and group by matched pattern."""
if ProfileCollection and isinstance(self.mloginfo.logfile,
ProfileCollection):
print("\n not available for system.profile collections\n")
return
codelines = defaultdict(lambda: 0)
non_matches = 0
# get log file information
logfile = self.mloginfo.logfile
if logfile.start and logfile.end and not self.mloginfo.args['verbose']:
progress_start = self.mloginfo._datetime_to_epoch(logfile.start)
progress_total = (self.mloginfo._datetime_to_epoch(logfile.end) -
progress_start)
else:
self.mloginfo.progress_bar_enabled = False
for i, logevent in enumerate(self.mloginfo.logfile):
cl, _ = self.log2code(logevent.line_str)
# update progress bar every 1000 lines
if self.mloginfo.progress_bar_enabled and (i % 1000 == 0):
if logevent.datetime:
progress_curr = self.mloginfo._datetime_to_epoch(logevent
.datetime)
(self.mloginfo
.update_progress(float(progress_curr - progress_start) /
progress_total))
if cl:
codelines[cl.pattern] += 1
else:
if logevent.operation:
# skip operations (command, insert, update, delete,
# query, getmore)
continue
if not logevent.thread:
# skip the lines that don't have a thread name
# (usually map/reduce or assertions)
continue
if len(logevent.split_tokens) - logevent.datetime_nextpos <= 1:
# skip empty log messages (after thread name)
continue
if ("warning: log line attempted" in logevent.line_str and
"over max size" in logevent.line_str):
# skip lines that are too long
continue
# everything else is a real non-match
non_matches += 1
if self.mloginfo.args['verbose']:
print("couldn't match:" + logevent)
# clear progress bar again
if self.mloginfo.progress_bar_enabled:
self.mloginfo.update_progress(1.0)
if self.mloginfo.args['verbose']:
print('')
for cl in sorted(codelines, key=lambda x: codelines[x], reverse=True):
print("%8i %s" % (codelines[cl], " ... ".join(cl)))
print('')
if non_matches > 0:
print("distinct couldn't match %i lines" % non_matches)
if not self.mloginfo.args['verbose']:
print("to show non-matched lines, run with --verbose.") | [
"def",
"run",
"(",
"self",
")",
":",
"if",
"ProfileCollection",
"and",
"isinstance",
"(",
"self",
".",
"mloginfo",
".",
"logfile",
",",
"ProfileCollection",
")",
":",
"print",
"(",
"\"\\n not available for system.profile collections\\n\"",
")",
"return",
"codelin... | Run each line through log2code and group by matched pattern. | [
"Run",
"each",
"line",
"through",
"log2code",
"and",
"group",
"by",
"matched",
"pattern",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/mloginfo/sections/distinct_section.py#L39-L108 | train | 225,103 |
rueckstiess/mtools | mtools/util/pattern.py | shell2json | def shell2json(s):
"""Convert shell syntax to json."""
replace = {
r'BinData\(.+?\)': '1',
r'(new )?Date\(.+?\)': '1',
r'Timestamp\(.+?\)': '1',
r'ObjectId\(.+?\)': '1',
r'DBRef\(.+?\)': '1',
r'undefined': '1',
r'MinKey': '1',
r'MaxKey': '1',
r'NumberLong\(.+?\)': '1',
r'/.+?/\w*': '1'
}
for key, value in replace.items():
s = re.sub(key, value, s)
return s | python | def shell2json(s):
"""Convert shell syntax to json."""
replace = {
r'BinData\(.+?\)': '1',
r'(new )?Date\(.+?\)': '1',
r'Timestamp\(.+?\)': '1',
r'ObjectId\(.+?\)': '1',
r'DBRef\(.+?\)': '1',
r'undefined': '1',
r'MinKey': '1',
r'MaxKey': '1',
r'NumberLong\(.+?\)': '1',
r'/.+?/\w*': '1'
}
for key, value in replace.items():
s = re.sub(key, value, s)
return s | [
"def",
"shell2json",
"(",
"s",
")",
":",
"replace",
"=",
"{",
"r'BinData\\(.+?\\)'",
":",
"'1'",
",",
"r'(new )?Date\\(.+?\\)'",
":",
"'1'",
",",
"r'Timestamp\\(.+?\\)'",
":",
"'1'",
",",
"r'ObjectId\\(.+?\\)'",
":",
"'1'",
",",
"r'DBRef\\(.+?\\)'",
":",
"'1'",
... | Convert shell syntax to json. | [
"Convert",
"shell",
"syntax",
"to",
"json",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/pattern.py#L52-L70 | train | 225,104 |
rueckstiess/mtools | mtools/util/pattern.py | json2pattern | def json2pattern(s):
"""
Convert JSON format to a query pattern.
Includes even mongo shell notation without quoted key names.
"""
# make valid JSON by wrapping field names in quotes
s, _ = re.subn(r'([{,])\s*([^,{\s\'"]+)\s*:', ' \\1 "\\2" : ', s)
# handle shell values that are not valid JSON
s = shell2json(s)
# convert to 1 where possible, to get rid of things like new Date(...)
s, n = re.subn(r'([:,\[])\s*([^{}\[\]"]+?)\s*([,}\]])', '\\1 1 \\3', s)
# now convert to dictionary, converting unicode to ascii
try:
doc = json.loads(s, object_hook=_decode_pattern_dict)
return json.dumps(doc, sort_keys=True, separators=(', ', ': '))
except ValueError as ex:
return None | python | def json2pattern(s):
"""
Convert JSON format to a query pattern.
Includes even mongo shell notation without quoted key names.
"""
# make valid JSON by wrapping field names in quotes
s, _ = re.subn(r'([{,])\s*([^,{\s\'"]+)\s*:', ' \\1 "\\2" : ', s)
# handle shell values that are not valid JSON
s = shell2json(s)
# convert to 1 where possible, to get rid of things like new Date(...)
s, n = re.subn(r'([:,\[])\s*([^{}\[\]"]+?)\s*([,}\]])', '\\1 1 \\3', s)
# now convert to dictionary, converting unicode to ascii
try:
doc = json.loads(s, object_hook=_decode_pattern_dict)
return json.dumps(doc, sort_keys=True, separators=(', ', ': '))
except ValueError as ex:
return None | [
"def",
"json2pattern",
"(",
"s",
")",
":",
"# make valid JSON by wrapping field names in quotes",
"s",
",",
"_",
"=",
"re",
".",
"subn",
"(",
"r'([{,])\\s*([^,{\\s\\'\"]+)\\s*:'",
",",
"' \\\\1 \"\\\\2\" : '",
",",
"s",
")",
"# handle shell values that are not valid JSON",
... | Convert JSON format to a query pattern.
Includes even mongo shell notation without quoted key names. | [
"Convert",
"JSON",
"format",
"to",
"a",
"query",
"pattern",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/pattern.py#L73-L90 | train | 225,105 |
rueckstiess/mtools | mtools/util/print_table.py | print_table | def print_table(rows, override_headers=None, uppercase_headers=True):
"""All rows need to be a list of dictionaries, all with the same keys."""
if len(rows) == 0:
return
keys = list(rows[0].keys())
headers = override_headers or keys
if uppercase_headers:
rows = [dict(zip(keys,
map(lambda x: x.upper(), headers))), None] + rows
else:
rows = [dict(zip(keys, headers)), None] + rows
lengths = [max(len(str(row[k]))
for row in rows if hasattr(row, '__iter__')) for k in keys]
tmp = ['{%s:%i}' % (h, l) for h, l in zip(keys[: -1], lengths[: -1])]
tmp.append('{%s}' % keys[-1])
template = (' ' * 4).join(tmp)
for row in rows:
if type(row) == str:
print(row)
elif row is None:
print()
elif isinstance(row, dict):
row = {k: v if v is not None else 'None' for k, v in row.items()}
print(template.format(**row))
else:
print("Unhandled row type:", row) | python | def print_table(rows, override_headers=None, uppercase_headers=True):
"""All rows need to be a list of dictionaries, all with the same keys."""
if len(rows) == 0:
return
keys = list(rows[0].keys())
headers = override_headers or keys
if uppercase_headers:
rows = [dict(zip(keys,
map(lambda x: x.upper(), headers))), None] + rows
else:
rows = [dict(zip(keys, headers)), None] + rows
lengths = [max(len(str(row[k]))
for row in rows if hasattr(row, '__iter__')) for k in keys]
tmp = ['{%s:%i}' % (h, l) for h, l in zip(keys[: -1], lengths[: -1])]
tmp.append('{%s}' % keys[-1])
template = (' ' * 4).join(tmp)
for row in rows:
if type(row) == str:
print(row)
elif row is None:
print()
elif isinstance(row, dict):
row = {k: v if v is not None else 'None' for k, v in row.items()}
print(template.format(**row))
else:
print("Unhandled row type:", row) | [
"def",
"print_table",
"(",
"rows",
",",
"override_headers",
"=",
"None",
",",
"uppercase_headers",
"=",
"True",
")",
":",
"if",
"len",
"(",
"rows",
")",
"==",
"0",
":",
"return",
"keys",
"=",
"list",
"(",
"rows",
"[",
"0",
"]",
".",
"keys",
"(",
")... | All rows need to be a list of dictionaries, all with the same keys. | [
"All",
"rows",
"need",
"to",
"be",
"a",
"list",
"of",
"dictionaries",
"all",
"with",
"the",
"same",
"keys",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/print_table.py#L3-L30 | train | 225,106 |
rueckstiess/mtools | mtools/util/logevent.py | LogEvent.set_line_str | def set_line_str(self, line_str):
"""
Set line_str.
Line_str is only writeable if LogEvent was created from a string,
not from a system.profile documents.
"""
if not self.from_string:
raise ValueError("can't set line_str for LogEvent created from "
"system.profile documents.")
if line_str != self._line_str:
self._line_str = line_str.rstrip()
self._reset() | python | def set_line_str(self, line_str):
"""
Set line_str.
Line_str is only writeable if LogEvent was created from a string,
not from a system.profile documents.
"""
if not self.from_string:
raise ValueError("can't set line_str for LogEvent created from "
"system.profile documents.")
if line_str != self._line_str:
self._line_str = line_str.rstrip()
self._reset() | [
"def",
"set_line_str",
"(",
"self",
",",
"line_str",
")",
":",
"if",
"not",
"self",
".",
"from_string",
":",
"raise",
"ValueError",
"(",
"\"can't set line_str for LogEvent created from \"",
"\"system.profile documents.\"",
")",
"if",
"line_str",
"!=",
"self",
".",
"... | Set line_str.
Line_str is only writeable if LogEvent was created from a string,
not from a system.profile documents. | [
"Set",
"line_str",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/logevent.py#L141-L154 | train | 225,107 |
rueckstiess/mtools | mtools/util/logevent.py | LogEvent.get_line_str | def get_line_str(self):
"""Return line_str depending on source, logfile or system.profile."""
if self.from_string:
return ' '.join([s for s in [self.merge_marker_str,
self._datetime_str,
self._line_str] if s])
else:
return ' '.join([s for s in [self._datetime_str,
self._line_str] if s]) | python | def get_line_str(self):
"""Return line_str depending on source, logfile or system.profile."""
if self.from_string:
return ' '.join([s for s in [self.merge_marker_str,
self._datetime_str,
self._line_str] if s])
else:
return ' '.join([s for s in [self._datetime_str,
self._line_str] if s]) | [
"def",
"get_line_str",
"(",
"self",
")",
":",
"if",
"self",
".",
"from_string",
":",
"return",
"' '",
".",
"join",
"(",
"[",
"s",
"for",
"s",
"in",
"[",
"self",
".",
"merge_marker_str",
",",
"self",
".",
"_datetime_str",
",",
"self",
".",
"_line_str",
... | Return line_str depending on source, logfile or system.profile. | [
"Return",
"line_str",
"depending",
"on",
"source",
"logfile",
"or",
"system",
".",
"profile",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/logevent.py#L156-L164 | train | 225,108 |
rueckstiess/mtools | mtools/util/logevent.py | LogEvent._match_datetime_pattern | def _match_datetime_pattern(self, tokens):
"""
Match the datetime pattern at the beginning of the token list.
There are several formats that this method needs to understand
and distinguish between (see MongoDB's SERVER-7965):
ctime-pre2.4 Wed Dec 31 19:00:00
ctime Wed Dec 31 19:00:00.000
iso8601-utc 1970-01-01T00:00:00.000Z
iso8601-local 1969-12-31T19:00:00.000+0500
"""
# first check: less than 4 tokens can't be ctime
assume_iso8601_format = len(tokens) < 4
# check for ctime-pre-2.4 or ctime format
if not assume_iso8601_format:
weekday, month, day, time = tokens[:4]
if (len(tokens) < 4 or (weekday not in self.weekdays) or
(month not in self.months) or not day.isdigit()):
assume_iso8601_format = True
if assume_iso8601_format:
# sanity check, because the dateutil parser could interpret
# any numbers as a valid date
if not re.match(r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}',
tokens[0]):
return None
# convinced that this is a ISO-8601 format, the dateutil parser
# will do the rest
dt = dateutil.parser.parse(tokens[0])
self._datetime_format = "iso8601-utc" \
if tokens[0].endswith('Z') else "iso8601-local"
else:
# assume current year unless self.year_rollover
# is set (from LogFile)
year = datetime.now().year
dt = dateutil.parser.parse(' '.join(tokens[: 4]),
default=datetime(year, 1, 1))
if dt.tzinfo is None:
dt = dt.replace(tzinfo=tzutc())
if self._year_rollover and dt > self._year_rollover:
dt = dt.replace(year=year - 1)
self._datetime_format = "ctime" \
if '.' in tokens[3] else "ctime-pre2.4"
return dt | python | def _match_datetime_pattern(self, tokens):
"""
Match the datetime pattern at the beginning of the token list.
There are several formats that this method needs to understand
and distinguish between (see MongoDB's SERVER-7965):
ctime-pre2.4 Wed Dec 31 19:00:00
ctime Wed Dec 31 19:00:00.000
iso8601-utc 1970-01-01T00:00:00.000Z
iso8601-local 1969-12-31T19:00:00.000+0500
"""
# first check: less than 4 tokens can't be ctime
assume_iso8601_format = len(tokens) < 4
# check for ctime-pre-2.4 or ctime format
if not assume_iso8601_format:
weekday, month, day, time = tokens[:4]
if (len(tokens) < 4 or (weekday not in self.weekdays) or
(month not in self.months) or not day.isdigit()):
assume_iso8601_format = True
if assume_iso8601_format:
# sanity check, because the dateutil parser could interpret
# any numbers as a valid date
if not re.match(r'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}.\d{3}',
tokens[0]):
return None
# convinced that this is a ISO-8601 format, the dateutil parser
# will do the rest
dt = dateutil.parser.parse(tokens[0])
self._datetime_format = "iso8601-utc" \
if tokens[0].endswith('Z') else "iso8601-local"
else:
# assume current year unless self.year_rollover
# is set (from LogFile)
year = datetime.now().year
dt = dateutil.parser.parse(' '.join(tokens[: 4]),
default=datetime(year, 1, 1))
if dt.tzinfo is None:
dt = dt.replace(tzinfo=tzutc())
if self._year_rollover and dt > self._year_rollover:
dt = dt.replace(year=year - 1)
self._datetime_format = "ctime" \
if '.' in tokens[3] else "ctime-pre2.4"
return dt | [
"def",
"_match_datetime_pattern",
"(",
"self",
",",
"tokens",
")",
":",
"# first check: less than 4 tokens can't be ctime",
"assume_iso8601_format",
"=",
"len",
"(",
"tokens",
")",
"<",
"4",
"# check for ctime-pre-2.4 or ctime format",
"if",
"not",
"assume_iso8601_format",
... | Match the datetime pattern at the beginning of the token list.
There are several formats that this method needs to understand
and distinguish between (see MongoDB's SERVER-7965):
ctime-pre2.4 Wed Dec 31 19:00:00
ctime Wed Dec 31 19:00:00.000
iso8601-utc 1970-01-01T00:00:00.000Z
iso8601-local 1969-12-31T19:00:00.000+0500 | [
"Match",
"the",
"datetime",
"pattern",
"at",
"the",
"beginning",
"of",
"the",
"token",
"list",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/logevent.py#L282-L333 | train | 225,109 |
rueckstiess/mtools | mtools/util/logevent.py | LogEvent._extract_operation_and_namespace | def _extract_operation_and_namespace(self):
"""
Helper method to extract both operation and namespace from a logevent.
It doesn't make sense to only extract one as they appear back to back
in the token list.
"""
split_tokens = self.split_tokens
if not self._datetime_nextpos:
# force evaluation of thread to get access to datetime_offset and
# to protect from changes due to line truncation.
_ = self.thread
if not self._datetime_nextpos or (len(split_tokens) <=
self._datetime_nextpos + 2):
return
op = split_tokens[self._datetime_nextpos + 1].lower()
if op == 'warning:':
# check if this log line got truncated
if ("warning: log line attempted" in self._line_str and
"over max size" in self._line_str):
self._datetime_nextpos = split_tokens.index('...')
op = split_tokens[self._datetime_nextpos + 1]
else:
# unknown warning, bail out
return
if op in self.log_operations:
self._operation = op
self._namespace = split_tokens[self._datetime_nextpos + 2] | python | def _extract_operation_and_namespace(self):
"""
Helper method to extract both operation and namespace from a logevent.
It doesn't make sense to only extract one as they appear back to back
in the token list.
"""
split_tokens = self.split_tokens
if not self._datetime_nextpos:
# force evaluation of thread to get access to datetime_offset and
# to protect from changes due to line truncation.
_ = self.thread
if not self._datetime_nextpos or (len(split_tokens) <=
self._datetime_nextpos + 2):
return
op = split_tokens[self._datetime_nextpos + 1].lower()
if op == 'warning:':
# check if this log line got truncated
if ("warning: log line attempted" in self._line_str and
"over max size" in self._line_str):
self._datetime_nextpos = split_tokens.index('...')
op = split_tokens[self._datetime_nextpos + 1]
else:
# unknown warning, bail out
return
if op in self.log_operations:
self._operation = op
self._namespace = split_tokens[self._datetime_nextpos + 2] | [
"def",
"_extract_operation_and_namespace",
"(",
"self",
")",
":",
"split_tokens",
"=",
"self",
".",
"split_tokens",
"if",
"not",
"self",
".",
"_datetime_nextpos",
":",
"# force evaluation of thread to get access to datetime_offset and",
"# to protect from changes due to line trun... | Helper method to extract both operation and namespace from a logevent.
It doesn't make sense to only extract one as they appear back to back
in the token list. | [
"Helper",
"method",
"to",
"extract",
"both",
"operation",
"and",
"namespace",
"from",
"a",
"logevent",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/logevent.py#L395-L427 | train | 225,110 |
rueckstiess/mtools | mtools/util/logevent.py | LogEvent._extract_counters | def _extract_counters(self):
"""Extract counters like nscanned and nreturned from the logevent."""
# extract counters (if present)
counters = ['nscanned', 'nscannedObjects', 'ntoreturn', 'nreturned',
'ninserted', 'nupdated', 'ndeleted', 'r', 'w', 'numYields',
'planSummary', 'writeConflicts', 'keyUpdates']
# TODO: refactor mtools to use current counter names throughout
# Transitionary hack: mapping of current names into prior equivalents
counter_equiv = {
'docsExamined': 'nscannedObjects',
'keysExamined': 'nscanned',
'nDeleted': 'ndeleted',
'nInserted': 'ninserted',
'nMatched': 'nreturned',
'nModified': 'nupdated'
}
counters.extend(counter_equiv.keys())
split_tokens = self.split_tokens
# trigger operation evaluation to get access to offset
if self.operation:
for t, token in enumerate(split_tokens[self.datetime_nextpos +
2:]):
for counter in counters:
if token.startswith('%s:' % counter):
try:
# Remap counter to standard name, if applicable
counter = counter_equiv.get(counter, counter)
vars(self)['_' + counter] = int((token.split(':')
[-1]).replace(',',
''))
except ValueError:
# see if this is a pre-2.5.2 numYields with space
# in between (e.g. "numYields: 2")
# https://jira.mongodb.org/browse/SERVER-10101
if (counter == 'numYields' and
token.startswith('numYields')):
try:
self._numYields = int((split_tokens[t + 1 + self.datetime_nextpos + 2]).replace(',', ''))
except ValueError:
pass
if (counter == 'planSummary' and
token.startswith('planSummary')):
try:
self._planSummary = split_tokens[t + 1 + self.datetime_nextpos + 2]
if self._planSummary:
if split_tokens[t + 1 + self.datetime_nextpos + 3] != '{':
self._actualPlanSummary = self._planSummary
else:
self._actualPlanSummary = '%s %s' % (
self._planSummary,
self._find_pattern('planSummary: %s' % self._planSummary, actual=True)
)
except ValueError:
pass
# token not parsable, skip
break | python | def _extract_counters(self):
"""Extract counters like nscanned and nreturned from the logevent."""
# extract counters (if present)
counters = ['nscanned', 'nscannedObjects', 'ntoreturn', 'nreturned',
'ninserted', 'nupdated', 'ndeleted', 'r', 'w', 'numYields',
'planSummary', 'writeConflicts', 'keyUpdates']
# TODO: refactor mtools to use current counter names throughout
# Transitionary hack: mapping of current names into prior equivalents
counter_equiv = {
'docsExamined': 'nscannedObjects',
'keysExamined': 'nscanned',
'nDeleted': 'ndeleted',
'nInserted': 'ninserted',
'nMatched': 'nreturned',
'nModified': 'nupdated'
}
counters.extend(counter_equiv.keys())
split_tokens = self.split_tokens
# trigger operation evaluation to get access to offset
if self.operation:
for t, token in enumerate(split_tokens[self.datetime_nextpos +
2:]):
for counter in counters:
if token.startswith('%s:' % counter):
try:
# Remap counter to standard name, if applicable
counter = counter_equiv.get(counter, counter)
vars(self)['_' + counter] = int((token.split(':')
[-1]).replace(',',
''))
except ValueError:
# see if this is a pre-2.5.2 numYields with space
# in between (e.g. "numYields: 2")
# https://jira.mongodb.org/browse/SERVER-10101
if (counter == 'numYields' and
token.startswith('numYields')):
try:
self._numYields = int((split_tokens[t + 1 + self.datetime_nextpos + 2]).replace(',', ''))
except ValueError:
pass
if (counter == 'planSummary' and
token.startswith('planSummary')):
try:
self._planSummary = split_tokens[t + 1 + self.datetime_nextpos + 2]
if self._planSummary:
if split_tokens[t + 1 + self.datetime_nextpos + 3] != '{':
self._actualPlanSummary = self._planSummary
else:
self._actualPlanSummary = '%s %s' % (
self._planSummary,
self._find_pattern('planSummary: %s' % self._planSummary, actual=True)
)
except ValueError:
pass
# token not parsable, skip
break | [
"def",
"_extract_counters",
"(",
"self",
")",
":",
"# extract counters (if present)",
"counters",
"=",
"[",
"'nscanned'",
",",
"'nscannedObjects'",
",",
"'ntoreturn'",
",",
"'nreturned'",
",",
"'ninserted'",
",",
"'nupdated'",
",",
"'ndeleted'",
",",
"'r'",
",",
"... | Extract counters like nscanned and nreturned from the logevent. | [
"Extract",
"counters",
"like",
"nscanned",
"and",
"nreturned",
"from",
"the",
"logevent",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/logevent.py#L626-L685 | train | 225,111 |
rueckstiess/mtools | mtools/util/logevent.py | LogEvent.parse_all | def parse_all(self):
"""
Trigger extraction of all information.
These values are usually evaluated lazily.
"""
tokens = self.split_tokens
duration = self.duration
datetime = self.datetime
thread = self.thread
operation = self.operation
namespace = self.namespace
pattern = self.pattern
nscanned = self.nscanned
nscannedObjects = self.nscannedObjects
ntoreturn = self.ntoreturn
nreturned = self.nreturned
ninserted = self.ninserted
ndeleted = self.ndeleted
nupdated = self.nupdated
numYields = self.numYields
w = self.w
r = self.r | python | def parse_all(self):
"""
Trigger extraction of all information.
These values are usually evaluated lazily.
"""
tokens = self.split_tokens
duration = self.duration
datetime = self.datetime
thread = self.thread
operation = self.operation
namespace = self.namespace
pattern = self.pattern
nscanned = self.nscanned
nscannedObjects = self.nscannedObjects
ntoreturn = self.ntoreturn
nreturned = self.nreturned
ninserted = self.ninserted
ndeleted = self.ndeleted
nupdated = self.nupdated
numYields = self.numYields
w = self.w
r = self.r | [
"def",
"parse_all",
"(",
"self",
")",
":",
"tokens",
"=",
"self",
".",
"split_tokens",
"duration",
"=",
"self",
".",
"duration",
"datetime",
"=",
"self",
".",
"datetime",
"thread",
"=",
"self",
".",
"thread",
"operation",
"=",
"self",
".",
"operation",
"... | Trigger extraction of all information.
These values are usually evaluated lazily. | [
"Trigger",
"extraction",
"of",
"all",
"information",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/logevent.py#L721-L743 | train | 225,112 |
rueckstiess/mtools | mtools/util/logevent.py | LogEvent.to_dict | def to_dict(self, labels=None):
"""Convert LogEvent object to a dictionary."""
output = {}
if labels is None:
labels = ['line_str', 'split_tokens', 'datetime', 'operation',
'thread', 'namespace', 'nscanned', 'ntoreturn',
'nreturned', 'ninserted', 'nupdated', 'ndeleted',
'duration', 'r', 'w', 'numYields']
for label in labels:
value = getattr(self, label, None)
if value is not None:
output[label] = value
return output | python | def to_dict(self, labels=None):
"""Convert LogEvent object to a dictionary."""
output = {}
if labels is None:
labels = ['line_str', 'split_tokens', 'datetime', 'operation',
'thread', 'namespace', 'nscanned', 'ntoreturn',
'nreturned', 'ninserted', 'nupdated', 'ndeleted',
'duration', 'r', 'w', 'numYields']
for label in labels:
value = getattr(self, label, None)
if value is not None:
output[label] = value
return output | [
"def",
"to_dict",
"(",
"self",
",",
"labels",
"=",
"None",
")",
":",
"output",
"=",
"{",
"}",
"if",
"labels",
"is",
"None",
":",
"labels",
"=",
"[",
"'line_str'",
",",
"'split_tokens'",
",",
"'datetime'",
",",
"'operation'",
",",
"'thread'",
",",
"'nam... | Convert LogEvent object to a dictionary. | [
"Convert",
"LogEvent",
"object",
"to",
"a",
"dictionary",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/logevent.py#L823-L837 | train | 225,113 |
rueckstiess/mtools | mtools/util/logevent.py | LogEvent.to_json | def to_json(self, labels=None):
"""Convert LogEvent object to valid JSON."""
output = self.to_dict(labels)
return json.dumps(output, cls=DateTimeEncoder, ensure_ascii=False) | python | def to_json(self, labels=None):
"""Convert LogEvent object to valid JSON."""
output = self.to_dict(labels)
return json.dumps(output, cls=DateTimeEncoder, ensure_ascii=False) | [
"def",
"to_json",
"(",
"self",
",",
"labels",
"=",
"None",
")",
":",
"output",
"=",
"self",
".",
"to_dict",
"(",
"labels",
")",
"return",
"json",
".",
"dumps",
"(",
"output",
",",
"cls",
"=",
"DateTimeEncoder",
",",
"ensure_ascii",
"=",
"False",
")"
] | Convert LogEvent object to valid JSON. | [
"Convert",
"LogEvent",
"object",
"to",
"valid",
"JSON",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/logevent.py#L839-L842 | train | 225,114 |
rueckstiess/mtools | mtools/mlogfilter/mlogfilter.py | MLogFilterTool.addFilter | def addFilter(self, filterclass):
"""Add a filter class to the parser."""
if filterclass not in self.filters:
self.filters.append(filterclass) | python | def addFilter(self, filterclass):
"""Add a filter class to the parser."""
if filterclass not in self.filters:
self.filters.append(filterclass) | [
"def",
"addFilter",
"(",
"self",
",",
"filterclass",
")",
":",
"if",
"filterclass",
"not",
"in",
"self",
".",
"filters",
":",
"self",
".",
"filters",
".",
"append",
"(",
"filterclass",
")"
] | Add a filter class to the parser. | [
"Add",
"a",
"filter",
"class",
"to",
"the",
"parser",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/mlogfilter/mlogfilter.py#L71-L74 | train | 225,115 |
rueckstiess/mtools | mtools/mlogfilter/mlogfilter.py | MLogFilterTool._outputLine | def _outputLine(self, logevent, length=None, human=False):
"""
Print the final line.
Provides various options (length, human, datetime changes, ...).
"""
# adapt timezone output if necessary
if self.args['timestamp_format'] != 'none':
logevent._reformat_timestamp(self.args['timestamp_format'],
force=True)
if any(self.args['timezone']):
if self.args['timestamp_format'] == 'none':
self.args['timestamp_format'] = logevent.datetime_format
logevent._reformat_timestamp(self.args['timestamp_format'],
force=True)
if self.args['json']:
print(logevent.to_json())
return
line = logevent.line_str
if length:
if len(line) > length:
line = (line[:int(length / 2 - 2)] + '...' +
line[int(-length / 2 + 1):])
if human:
line = self._changeMs(line)
line = self._formatNumbers(line)
print(line) | python | def _outputLine(self, logevent, length=None, human=False):
"""
Print the final line.
Provides various options (length, human, datetime changes, ...).
"""
# adapt timezone output if necessary
if self.args['timestamp_format'] != 'none':
logevent._reformat_timestamp(self.args['timestamp_format'],
force=True)
if any(self.args['timezone']):
if self.args['timestamp_format'] == 'none':
self.args['timestamp_format'] = logevent.datetime_format
logevent._reformat_timestamp(self.args['timestamp_format'],
force=True)
if self.args['json']:
print(logevent.to_json())
return
line = logevent.line_str
if length:
if len(line) > length:
line = (line[:int(length / 2 - 2)] + '...' +
line[int(-length / 2 + 1):])
if human:
line = self._changeMs(line)
line = self._formatNumbers(line)
print(line) | [
"def",
"_outputLine",
"(",
"self",
",",
"logevent",
",",
"length",
"=",
"None",
",",
"human",
"=",
"False",
")",
":",
"# adapt timezone output if necessary",
"if",
"self",
".",
"args",
"[",
"'timestamp_format'",
"]",
"!=",
"'none'",
":",
"logevent",
".",
"_r... | Print the final line.
Provides various options (length, human, datetime changes, ...). | [
"Print",
"the",
"final",
"line",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/mlogfilter/mlogfilter.py#L83-L112 | train | 225,116 |
rueckstiess/mtools | mtools/mlogfilter/mlogfilter.py | MLogFilterTool._msToString | def _msToString(self, ms):
"""Change milliseconds to hours min sec ms format."""
hr, ms = divmod(ms, 3600000)
mins, ms = divmod(ms, 60000)
secs, mill = divmod(ms, 1000)
return "%ihr %imin %isecs %ims" % (hr, mins, secs, mill) | python | def _msToString(self, ms):
"""Change milliseconds to hours min sec ms format."""
hr, ms = divmod(ms, 3600000)
mins, ms = divmod(ms, 60000)
secs, mill = divmod(ms, 1000)
return "%ihr %imin %isecs %ims" % (hr, mins, secs, mill) | [
"def",
"_msToString",
"(",
"self",
",",
"ms",
")",
":",
"hr",
",",
"ms",
"=",
"divmod",
"(",
"ms",
",",
"3600000",
")",
"mins",
",",
"ms",
"=",
"divmod",
"(",
"ms",
",",
"60000",
")",
"secs",
",",
"mill",
"=",
"divmod",
"(",
"ms",
",",
"1000",
... | Change milliseconds to hours min sec ms format. | [
"Change",
"milliseconds",
"to",
"hours",
"min",
"sec",
"ms",
"format",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/mlogfilter/mlogfilter.py#L114-L119 | train | 225,117 |
rueckstiess/mtools | mtools/mlogfilter/mlogfilter.py | MLogFilterTool._changeMs | def _changeMs(self, line):
"""Change the ms part in the string if needed."""
# use the position of the last space instead
try:
last_space_pos = line.rindex(' ')
except ValueError:
return line
else:
end_str = line[last_space_pos:]
new_string = line
if end_str[-2:] == 'ms' and int(end_str[:-2]) >= 1000:
# isolate the number of milliseconds
ms = int(end_str[:-2])
# create the new string with the beginning part of the
# log with the new ms part added in
new_string = (line[:last_space_pos] +
' (' + self._msToString(ms) + ')' +
line[last_space_pos:])
return new_string | python | def _changeMs(self, line):
"""Change the ms part in the string if needed."""
# use the position of the last space instead
try:
last_space_pos = line.rindex(' ')
except ValueError:
return line
else:
end_str = line[last_space_pos:]
new_string = line
if end_str[-2:] == 'ms' and int(end_str[:-2]) >= 1000:
# isolate the number of milliseconds
ms = int(end_str[:-2])
# create the new string with the beginning part of the
# log with the new ms part added in
new_string = (line[:last_space_pos] +
' (' + self._msToString(ms) + ')' +
line[last_space_pos:])
return new_string | [
"def",
"_changeMs",
"(",
"self",
",",
"line",
")",
":",
"# use the position of the last space instead",
"try",
":",
"last_space_pos",
"=",
"line",
".",
"rindex",
"(",
"' '",
")",
"except",
"ValueError",
":",
"return",
"line",
"else",
":",
"end_str",
"=",
"line... | Change the ms part in the string if needed. | [
"Change",
"the",
"ms",
"part",
"in",
"the",
"string",
"if",
"needed",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/mlogfilter/mlogfilter.py#L121-L139 | train | 225,118 |
rueckstiess/mtools | mtools/mlogfilter/mlogfilter.py | MLogFilterTool._formatNumbers | def _formatNumbers(self, line):
"""
Format the numbers so that there are commas inserted.
For example: 1200300 becomes 1,200,300.
"""
# below thousands separator syntax only works for
# python 2.7, skip for 2.6
if sys.version_info < (2, 7):
return line
last_index = 0
try:
# find the index of the last } character
last_index = (line.rindex('}') + 1)
end = line[last_index:]
except ValueError:
return line
else:
# split the string on numbers to isolate them
splitted = re.split("(\d+)", end)
for index, val in enumerate(splitted):
converted = 0
try:
converted = int(val)
# if it's not an int pass and don't change the string
except ValueError:
pass
else:
if converted > 1000:
splitted[index] = format(converted, ",d")
return line[:last_index] + ("").join(splitted) | python | def _formatNumbers(self, line):
"""
Format the numbers so that there are commas inserted.
For example: 1200300 becomes 1,200,300.
"""
# below thousands separator syntax only works for
# python 2.7, skip for 2.6
if sys.version_info < (2, 7):
return line
last_index = 0
try:
# find the index of the last } character
last_index = (line.rindex('}') + 1)
end = line[last_index:]
except ValueError:
return line
else:
# split the string on numbers to isolate them
splitted = re.split("(\d+)", end)
for index, val in enumerate(splitted):
converted = 0
try:
converted = int(val)
# if it's not an int pass and don't change the string
except ValueError:
pass
else:
if converted > 1000:
splitted[index] = format(converted, ",d")
return line[:last_index] + ("").join(splitted) | [
"def",
"_formatNumbers",
"(",
"self",
",",
"line",
")",
":",
"# below thousands separator syntax only works for",
"# python 2.7, skip for 2.6",
"if",
"sys",
".",
"version_info",
"<",
"(",
"2",
",",
"7",
")",
":",
"return",
"line",
"last_index",
"=",
"0",
"try",
... | Format the numbers so that there are commas inserted.
For example: 1200300 becomes 1,200,300. | [
"Format",
"the",
"numbers",
"so",
"that",
"there",
"are",
"commas",
"inserted",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/mlogfilter/mlogfilter.py#L141-L172 | train | 225,119 |
rueckstiess/mtools | mtools/mlogfilter/mlogfilter.py | MLogFilterTool._datetime_key_for_merge | def _datetime_key_for_merge(self, logevent):
"""Helper method for ordering log lines correctly during merge."""
if not logevent:
# if logfile end is reached, return max datetime to never
# pick this line
return datetime(MAXYEAR, 12, 31, 23, 59, 59, 999999, tzutc())
# if no datetime present (line doesn't have one) return mindate
# to pick this line immediately
return logevent.datetime or datetime(MINYEAR, 1, 1, 0, 0, 0, 0,
tzutc()) | python | def _datetime_key_for_merge(self, logevent):
"""Helper method for ordering log lines correctly during merge."""
if not logevent:
# if logfile end is reached, return max datetime to never
# pick this line
return datetime(MAXYEAR, 12, 31, 23, 59, 59, 999999, tzutc())
# if no datetime present (line doesn't have one) return mindate
# to pick this line immediately
return logevent.datetime or datetime(MINYEAR, 1, 1, 0, 0, 0, 0,
tzutc()) | [
"def",
"_datetime_key_for_merge",
"(",
"self",
",",
"logevent",
")",
":",
"if",
"not",
"logevent",
":",
"# if logfile end is reached, return max datetime to never",
"# pick this line",
"return",
"datetime",
"(",
"MAXYEAR",
",",
"12",
",",
"31",
",",
"23",
",",
"59",... | Helper method for ordering log lines correctly during merge. | [
"Helper",
"method",
"for",
"ordering",
"log",
"lines",
"correctly",
"during",
"merge",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/mlogfilter/mlogfilter.py#L174-L184 | train | 225,120 |
rueckstiess/mtools | mtools/mlogfilter/mlogfilter.py | MLogFilterTool._merge_logfiles | def _merge_logfiles(self):
"""Helper method to merge several files together by datetime."""
# open files, read first lines, extract first dates
lines = [next(iter(logfile), None) for logfile in self.args['logfile']]
# adjust lines by timezone
for i in range(len(lines)):
if lines[i] and lines[i].datetime:
lines[i]._datetime = (lines[i].datetime +
timedelta(hours=self.args['timezone']
[i]))
while any(lines):
min_line = min(lines, key=self._datetime_key_for_merge)
min_idx = lines.index(min_line)
if self.args['markers'][min_idx]:
min_line.merge_marker_str = self.args['markers'][min_idx]
yield min_line
# update lines array with a new line from the min_idx'th logfile
lines[min_idx] = next(iter(self.args['logfile'][min_idx]), None)
if lines[min_idx] and lines[min_idx].datetime:
lines[min_idx]._datetime = (
lines[min_idx].datetime +
timedelta(hours=self.args['timezone'][min_idx])) | python | def _merge_logfiles(self):
"""Helper method to merge several files together by datetime."""
# open files, read first lines, extract first dates
lines = [next(iter(logfile), None) for logfile in self.args['logfile']]
# adjust lines by timezone
for i in range(len(lines)):
if lines[i] and lines[i].datetime:
lines[i]._datetime = (lines[i].datetime +
timedelta(hours=self.args['timezone']
[i]))
while any(lines):
min_line = min(lines, key=self._datetime_key_for_merge)
min_idx = lines.index(min_line)
if self.args['markers'][min_idx]:
min_line.merge_marker_str = self.args['markers'][min_idx]
yield min_line
# update lines array with a new line from the min_idx'th logfile
lines[min_idx] = next(iter(self.args['logfile'][min_idx]), None)
if lines[min_idx] and lines[min_idx].datetime:
lines[min_idx]._datetime = (
lines[min_idx].datetime +
timedelta(hours=self.args['timezone'][min_idx])) | [
"def",
"_merge_logfiles",
"(",
"self",
")",
":",
"# open files, read first lines, extract first dates",
"lines",
"=",
"[",
"next",
"(",
"iter",
"(",
"logfile",
")",
",",
"None",
")",
"for",
"logfile",
"in",
"self",
".",
"args",
"[",
"'logfile'",
"]",
"]",
"#... | Helper method to merge several files together by datetime. | [
"Helper",
"method",
"to",
"merge",
"several",
"files",
"together",
"by",
"datetime",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/mlogfilter/mlogfilter.py#L186-L212 | train | 225,121 |
rueckstiess/mtools | mtools/mlogfilter/mlogfilter.py | MLogFilterTool.logfile_generator | def logfile_generator(self):
"""Yield each line of the file, or the next line if several files."""
if not self.args['exclude']:
# ask all filters for a start_limit and fast-forward to the maximum
start_limits = [f.start_limit for f in self.filters
if hasattr(f, 'start_limit')]
if start_limits:
for logfile in self.args['logfile']:
logfile.fast_forward(max(start_limits))
if len(self.args['logfile']) > 1:
# merge log files by time
for logevent in self._merge_logfiles():
yield logevent
else:
# only one file
for logevent in self.args['logfile'][0]:
if self.args['timezone'][0] != 0 and logevent.datetime:
logevent._datetime = (logevent.datetime +
timedelta(hours=self
.args['timezone'][0]))
yield logevent | python | def logfile_generator(self):
"""Yield each line of the file, or the next line if several files."""
if not self.args['exclude']:
# ask all filters for a start_limit and fast-forward to the maximum
start_limits = [f.start_limit for f in self.filters
if hasattr(f, 'start_limit')]
if start_limits:
for logfile in self.args['logfile']:
logfile.fast_forward(max(start_limits))
if len(self.args['logfile']) > 1:
# merge log files by time
for logevent in self._merge_logfiles():
yield logevent
else:
# only one file
for logevent in self.args['logfile'][0]:
if self.args['timezone'][0] != 0 and logevent.datetime:
logevent._datetime = (logevent.datetime +
timedelta(hours=self
.args['timezone'][0]))
yield logevent | [
"def",
"logfile_generator",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"args",
"[",
"'exclude'",
"]",
":",
"# ask all filters for a start_limit and fast-forward to the maximum",
"start_limits",
"=",
"[",
"f",
".",
"start_limit",
"for",
"f",
"in",
"self",
"."... | Yield each line of the file, or the next line if several files. | [
"Yield",
"each",
"line",
"of",
"the",
"file",
"or",
"the",
"next",
"line",
"if",
"several",
"files",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/mlogfilter/mlogfilter.py#L214-L236 | train | 225,122 |
rueckstiess/mtools | mtools/mlogfilter/filters/mask_filter.py | MaskFilter.setup | def setup(self):
"""
Create mask list.
Consists of all tuples between which this filter accepts lines.
"""
# get start and end of the mask and set a start_limit
if not self.mask_source.start:
raise SystemExit("Can't parse format of %s. Is this a log file or "
"system.profile collection?"
% self.mlogfilter.args['mask'])
self.mask_half_td = timedelta(seconds=self.mlogfilter.args
['mask_size'] / 2)
# load filter mask file
logevent_list = list(self.mask_source)
# define start and end of total mask
self.mask_start = self.mask_source.start - self.mask_half_td
self.mask_end = self.mask_source.end + self.mask_half_td
# consider --mask-center
if self.mlogfilter.args['mask_center'] in ['start', 'both']:
if logevent_list[0].duration:
self.mask_start -= timedelta(milliseconds=logevent_list[0]
.duration)
if self.mlogfilter.args['mask_center'] == 'start':
if logevent_list[-1].duration:
self.mask_end -= timedelta(milliseconds=logevent_list[-1]
.duration)
self.start_limit = self.mask_start
# different center points
if 'mask_center' in self.mlogfilter.args:
if self.mlogfilter.args['mask_center'] in ['start', 'both']:
starts = ([(le.datetime - timedelta(milliseconds=le.duration))
if le.duration is not None else le.datetime
for le in logevent_list if le.datetime])
if self.mlogfilter.args['mask_center'] in ['end', 'both']:
ends = [le.datetime for le in logevent_list if le.datetime]
if self.mlogfilter.args['mask_center'] == 'start':
event_list = sorted(starts)
elif self.mlogfilter.args['mask_center'] == 'end':
event_list = sorted(ends)
elif self.mlogfilter.args['mask_center'] == 'both':
event_list = sorted(zip(starts, ends))
mask_list = []
if len(event_list) == 0:
return
start_point = end_point = None
for e in event_list:
if start_point is None:
start_point, end_point = self._pad_event(e)
continue
next_start = (e[0] if type(e) == tuple else e) - self.mask_half_td
if next_start <= end_point:
end_point = ((e[1] if type(e) == tuple else e) +
self.mask_half_td)
else:
mask_list.append((start_point, end_point))
start_point, end_point = self._pad_event(e)
if start_point:
mask_list.append((start_point, end_point))
self.mask_list = mask_list | python | def setup(self):
"""
Create mask list.
Consists of all tuples between which this filter accepts lines.
"""
# get start and end of the mask and set a start_limit
if not self.mask_source.start:
raise SystemExit("Can't parse format of %s. Is this a log file or "
"system.profile collection?"
% self.mlogfilter.args['mask'])
self.mask_half_td = timedelta(seconds=self.mlogfilter.args
['mask_size'] / 2)
# load filter mask file
logevent_list = list(self.mask_source)
# define start and end of total mask
self.mask_start = self.mask_source.start - self.mask_half_td
self.mask_end = self.mask_source.end + self.mask_half_td
# consider --mask-center
if self.mlogfilter.args['mask_center'] in ['start', 'both']:
if logevent_list[0].duration:
self.mask_start -= timedelta(milliseconds=logevent_list[0]
.duration)
if self.mlogfilter.args['mask_center'] == 'start':
if logevent_list[-1].duration:
self.mask_end -= timedelta(milliseconds=logevent_list[-1]
.duration)
self.start_limit = self.mask_start
# different center points
if 'mask_center' in self.mlogfilter.args:
if self.mlogfilter.args['mask_center'] in ['start', 'both']:
starts = ([(le.datetime - timedelta(milliseconds=le.duration))
if le.duration is not None else le.datetime
for le in logevent_list if le.datetime])
if self.mlogfilter.args['mask_center'] in ['end', 'both']:
ends = [le.datetime for le in logevent_list if le.datetime]
if self.mlogfilter.args['mask_center'] == 'start':
event_list = sorted(starts)
elif self.mlogfilter.args['mask_center'] == 'end':
event_list = sorted(ends)
elif self.mlogfilter.args['mask_center'] == 'both':
event_list = sorted(zip(starts, ends))
mask_list = []
if len(event_list) == 0:
return
start_point = end_point = None
for e in event_list:
if start_point is None:
start_point, end_point = self._pad_event(e)
continue
next_start = (e[0] if type(e) == tuple else e) - self.mask_half_td
if next_start <= end_point:
end_point = ((e[1] if type(e) == tuple else e) +
self.mask_half_td)
else:
mask_list.append((start_point, end_point))
start_point, end_point = self._pad_event(e)
if start_point:
mask_list.append((start_point, end_point))
self.mask_list = mask_list | [
"def",
"setup",
"(",
"self",
")",
":",
"# get start and end of the mask and set a start_limit",
"if",
"not",
"self",
".",
"mask_source",
".",
"start",
":",
"raise",
"SystemExit",
"(",
"\"Can't parse format of %s. Is this a log file or \"",
"\"system.profile collection?\"",
"%... | Create mask list.
Consists of all tuples between which this filter accepts lines. | [
"Create",
"mask",
"list",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/mlogfilter/filters/mask_filter.py#L60-L135 | train | 225,123 |
rueckstiess/mtools | mtools/util/parse_sourcecode.py | source_files | def source_files(mongodb_path):
"""Find source files."""
for root, dirs, files in os.walk(mongodb_path):
for filename in files:
# skip files in dbtests folder
if 'dbtests' in root:
continue
if filename.endswith(('.cpp', '.c', '.h')):
yield os.path.join(root, filename) | python | def source_files(mongodb_path):
"""Find source files."""
for root, dirs, files in os.walk(mongodb_path):
for filename in files:
# skip files in dbtests folder
if 'dbtests' in root:
continue
if filename.endswith(('.cpp', '.c', '.h')):
yield os.path.join(root, filename) | [
"def",
"source_files",
"(",
"mongodb_path",
")",
":",
"for",
"root",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"mongodb_path",
")",
":",
"for",
"filename",
"in",
"files",
":",
"# skip files in dbtests folder",
"if",
"'dbtests'",
"in",
"root",
... | Find source files. | [
"Find",
"source",
"files",
"."
] | a6a22910c3569c0c8a3908660ca218a4557e4249 | https://github.com/rueckstiess/mtools/blob/a6a22910c3569c0c8a3908660ca218a4557e4249/mtools/util/parse_sourcecode.py#L23-L31 | train | 225,124 |
ansible-community/ara | ara/views/result.py | index | def index():
"""
This is not served anywhere in the web application.
It is used explicitly in the context of generating static files since
flask-frozen requires url_for's to crawl content.
url_for's are not used with result.show_result directly and are instead
dynamically generated through javascript for performance purposes.
"""
if current_app.config['ARA_PLAYBOOK_OVERRIDE'] is not None:
override = current_app.config['ARA_PLAYBOOK_OVERRIDE']
results = (models.TaskResult.query
.join(models.Task)
.filter(models.Task.playbook_id.in_(override)))
else:
results = models.TaskResult.query.all()
return render_template('task_result_index.html', results=results) | python | def index():
"""
This is not served anywhere in the web application.
It is used explicitly in the context of generating static files since
flask-frozen requires url_for's to crawl content.
url_for's are not used with result.show_result directly and are instead
dynamically generated through javascript for performance purposes.
"""
if current_app.config['ARA_PLAYBOOK_OVERRIDE'] is not None:
override = current_app.config['ARA_PLAYBOOK_OVERRIDE']
results = (models.TaskResult.query
.join(models.Task)
.filter(models.Task.playbook_id.in_(override)))
else:
results = models.TaskResult.query.all()
return render_template('task_result_index.html', results=results) | [
"def",
"index",
"(",
")",
":",
"if",
"current_app",
".",
"config",
"[",
"'ARA_PLAYBOOK_OVERRIDE'",
"]",
"is",
"not",
"None",
":",
"override",
"=",
"current_app",
".",
"config",
"[",
"'ARA_PLAYBOOK_OVERRIDE'",
"]",
"results",
"=",
"(",
"models",
".",
"TaskRes... | This is not served anywhere in the web application.
It is used explicitly in the context of generating static files since
flask-frozen requires url_for's to crawl content.
url_for's are not used with result.show_result directly and are instead
dynamically generated through javascript for performance purposes. | [
"This",
"is",
"not",
"served",
"anywhere",
"in",
"the",
"web",
"application",
".",
"It",
"is",
"used",
"explicitly",
"in",
"the",
"context",
"of",
"generating",
"static",
"files",
"since",
"flask",
"-",
"frozen",
"requires",
"url_for",
"s",
"to",
"crawl",
... | 15e2d0133c23b6d07438a553bb8149fadff21547 | https://github.com/ansible-community/ara/blob/15e2d0133c23b6d07438a553bb8149fadff21547/ara/views/result.py#L28-L44 | train | 225,125 |
ansible-community/ara | ara/models.py | content_sha1 | def content_sha1(context):
"""
Used by the FileContent model to automatically compute the sha1
hash of content before storing it to the database.
"""
try:
content = context.current_parameters['content']
except AttributeError:
content = context
return hashlib.sha1(encodeutils.to_utf8(content)).hexdigest() | python | def content_sha1(context):
"""
Used by the FileContent model to automatically compute the sha1
hash of content before storing it to the database.
"""
try:
content = context.current_parameters['content']
except AttributeError:
content = context
return hashlib.sha1(encodeutils.to_utf8(content)).hexdigest() | [
"def",
"content_sha1",
"(",
"context",
")",
":",
"try",
":",
"content",
"=",
"context",
".",
"current_parameters",
"[",
"'content'",
"]",
"except",
"AttributeError",
":",
"content",
"=",
"context",
"return",
"hashlib",
".",
"sha1",
"(",
"encodeutils",
".",
"... | Used by the FileContent model to automatically compute the sha1
hash of content before storing it to the database. | [
"Used",
"by",
"the",
"FileContent",
"model",
"to",
"automatically",
"compute",
"the",
"sha1",
"hash",
"of",
"content",
"before",
"storing",
"it",
"to",
"the",
"database",
"."
] | 15e2d0133c23b6d07438a553bb8149fadff21547 | https://github.com/ansible-community/ara/blob/15e2d0133c23b6d07438a553bb8149fadff21547/ara/models.py#L53-L62 | train | 225,126 |
ansible-community/ara | ara/views/about.py | main | def main():
""" Returns the about page """
files = models.File.query
hosts = models.Host.query
facts = models.HostFacts.query
playbooks = models.Playbook.query
records = models.Data.query
tasks = models.Task.query
results = models.TaskResult.query
if current_app.config['ARA_PLAYBOOK_OVERRIDE'] is not None:
override = current_app.config['ARA_PLAYBOOK_OVERRIDE']
files = files.filter(models.File.playbook_id.in_(override))
facts = (facts
.join(models.Host)
.filter(models.Host.playbook_id.in_(override)))
hosts = hosts.filter(models.Host.playbook_id.in_(override))
playbooks = playbooks.filter(models.Playbook.id.in_(override))
records = records.filter(models.Data.playbook_id.in_(override))
tasks = tasks.filter(models.Task.playbook_id.in_(override))
results = (results
.join(models.Task)
.filter(models.Task.playbook_id.in_(override)))
return render_template(
'about.html',
active='about',
files=fast_count(files),
hosts=fast_count(hosts),
facts=fast_count(facts),
playbooks=fast_count(playbooks),
records=fast_count(records),
tasks=fast_count(tasks),
results=fast_count(results)
) | python | def main():
""" Returns the about page """
files = models.File.query
hosts = models.Host.query
facts = models.HostFacts.query
playbooks = models.Playbook.query
records = models.Data.query
tasks = models.Task.query
results = models.TaskResult.query
if current_app.config['ARA_PLAYBOOK_OVERRIDE'] is not None:
override = current_app.config['ARA_PLAYBOOK_OVERRIDE']
files = files.filter(models.File.playbook_id.in_(override))
facts = (facts
.join(models.Host)
.filter(models.Host.playbook_id.in_(override)))
hosts = hosts.filter(models.Host.playbook_id.in_(override))
playbooks = playbooks.filter(models.Playbook.id.in_(override))
records = records.filter(models.Data.playbook_id.in_(override))
tasks = tasks.filter(models.Task.playbook_id.in_(override))
results = (results
.join(models.Task)
.filter(models.Task.playbook_id.in_(override)))
return render_template(
'about.html',
active='about',
files=fast_count(files),
hosts=fast_count(hosts),
facts=fast_count(facts),
playbooks=fast_count(playbooks),
records=fast_count(records),
tasks=fast_count(tasks),
results=fast_count(results)
) | [
"def",
"main",
"(",
")",
":",
"files",
"=",
"models",
".",
"File",
".",
"query",
"hosts",
"=",
"models",
".",
"Host",
".",
"query",
"facts",
"=",
"models",
".",
"HostFacts",
".",
"query",
"playbooks",
"=",
"models",
".",
"Playbook",
".",
"query",
"re... | Returns the about page | [
"Returns",
"the",
"about",
"page"
] | 15e2d0133c23b6d07438a553bb8149fadff21547 | https://github.com/ansible-community/ara/blob/15e2d0133c23b6d07438a553bb8149fadff21547/ara/views/about.py#L29-L63 | train | 225,127 |
ansible-community/ara | ara/views/host.py | index | def index():
"""
This is not served anywhere in the web application.
It is used explicitly in the context of generating static files since
flask-frozen requires url_for's to crawl content.
url_for's are not used with host.show_host directly and are instead
dynamically generated through javascript for performance purposes.
"""
if current_app.config['ARA_PLAYBOOK_OVERRIDE'] is not None:
override = current_app.config['ARA_PLAYBOOK_OVERRIDE']
hosts = (models.Host.query
.filter(models.Host.playbook_id.in_(override)))
else:
hosts = models.Host.query.all()
return render_template('host_index.html', hosts=hosts) | python | def index():
"""
This is not served anywhere in the web application.
It is used explicitly in the context of generating static files since
flask-frozen requires url_for's to crawl content.
url_for's are not used with host.show_host directly and are instead
dynamically generated through javascript for performance purposes.
"""
if current_app.config['ARA_PLAYBOOK_OVERRIDE'] is not None:
override = current_app.config['ARA_PLAYBOOK_OVERRIDE']
hosts = (models.Host.query
.filter(models.Host.playbook_id.in_(override)))
else:
hosts = models.Host.query.all()
return render_template('host_index.html', hosts=hosts) | [
"def",
"index",
"(",
")",
":",
"if",
"current_app",
".",
"config",
"[",
"'ARA_PLAYBOOK_OVERRIDE'",
"]",
"is",
"not",
"None",
":",
"override",
"=",
"current_app",
".",
"config",
"[",
"'ARA_PLAYBOOK_OVERRIDE'",
"]",
"hosts",
"=",
"(",
"models",
".",
"Host",
... | This is not served anywhere in the web application.
It is used explicitly in the context of generating static files since
flask-frozen requires url_for's to crawl content.
url_for's are not used with host.show_host directly and are instead
dynamically generated through javascript for performance purposes. | [
"This",
"is",
"not",
"served",
"anywhere",
"in",
"the",
"web",
"application",
".",
"It",
"is",
"used",
"explicitly",
"in",
"the",
"context",
"of",
"generating",
"static",
"files",
"since",
"flask",
"-",
"frozen",
"requires",
"url_for",
"s",
"to",
"crawl",
... | 15e2d0133c23b6d07438a553bb8149fadff21547 | https://github.com/ansible-community/ara/blob/15e2d0133c23b6d07438a553bb8149fadff21547/ara/views/host.py#L31-L46 | train | 225,128 |
ansible-community/ara | ara/config/webapp.py | WebAppConfig.config | def config(self):
""" Returns a dictionary for the loaded configuration """
return {
key: self.__dict__[key]
for key in dir(self)
if key.isupper()
} | python | def config(self):
""" Returns a dictionary for the loaded configuration """
return {
key: self.__dict__[key]
for key in dir(self)
if key.isupper()
} | [
"def",
"config",
"(",
"self",
")",
":",
"return",
"{",
"key",
":",
"self",
".",
"__dict__",
"[",
"key",
"]",
"for",
"key",
"in",
"dir",
"(",
"self",
")",
"if",
"key",
".",
"isupper",
"(",
")",
"}"
] | Returns a dictionary for the loaded configuration | [
"Returns",
"a",
"dictionary",
"for",
"the",
"loaded",
"configuration"
] | 15e2d0133c23b6d07438a553bb8149fadff21547 | https://github.com/ansible-community/ara/blob/15e2d0133c23b6d07438a553bb8149fadff21547/ara/config/webapp.py#L58-L64 | train | 225,129 |
ansible-community/ara | ara/views/file.py | index | def index():
"""
This is not served anywhere in the web application.
It is used explicitly in the context of generating static files since
flask-frozen requires url_for's to crawl content.
url_for's are not used with file.show_file directly and are instead
dynamically generated through javascript for performance purposes.
"""
if current_app.config['ARA_PLAYBOOK_OVERRIDE'] is not None:
override = current_app.config['ARA_PLAYBOOK_OVERRIDE']
files = (models.File.query
.filter(models.File.playbook_id.in_(override)))
else:
files = models.File.query.all()
return render_template('file_index.html', files=files) | python | def index():
"""
This is not served anywhere in the web application.
It is used explicitly in the context of generating static files since
flask-frozen requires url_for's to crawl content.
url_for's are not used with file.show_file directly and are instead
dynamically generated through javascript for performance purposes.
"""
if current_app.config['ARA_PLAYBOOK_OVERRIDE'] is not None:
override = current_app.config['ARA_PLAYBOOK_OVERRIDE']
files = (models.File.query
.filter(models.File.playbook_id.in_(override)))
else:
files = models.File.query.all()
return render_template('file_index.html', files=files) | [
"def",
"index",
"(",
")",
":",
"if",
"current_app",
".",
"config",
"[",
"'ARA_PLAYBOOK_OVERRIDE'",
"]",
"is",
"not",
"None",
":",
"override",
"=",
"current_app",
".",
"config",
"[",
"'ARA_PLAYBOOK_OVERRIDE'",
"]",
"files",
"=",
"(",
"models",
".",
"File",
... | This is not served anywhere in the web application.
It is used explicitly in the context of generating static files since
flask-frozen requires url_for's to crawl content.
url_for's are not used with file.show_file directly and are instead
dynamically generated through javascript for performance purposes. | [
"This",
"is",
"not",
"served",
"anywhere",
"in",
"the",
"web",
"application",
".",
"It",
"is",
"used",
"explicitly",
"in",
"the",
"context",
"of",
"generating",
"static",
"files",
"since",
"flask",
"-",
"frozen",
"requires",
"url_for",
"s",
"to",
"crawl",
... | 15e2d0133c23b6d07438a553bb8149fadff21547 | https://github.com/ansible-community/ara/blob/15e2d0133c23b6d07438a553bb8149fadff21547/ara/views/file.py#L28-L43 | train | 225,130 |
ansible-community/ara | ara/views/file.py | show_file | def show_file(file_):
"""
Returns details of a file
"""
file_ = (models.File.query.get(file_))
if file_ is None:
abort(404)
return render_template('file.html', file_=file_) | python | def show_file(file_):
"""
Returns details of a file
"""
file_ = (models.File.query.get(file_))
if file_ is None:
abort(404)
return render_template('file.html', file_=file_) | [
"def",
"show_file",
"(",
"file_",
")",
":",
"file_",
"=",
"(",
"models",
".",
"File",
".",
"query",
".",
"get",
"(",
"file_",
")",
")",
"if",
"file_",
"is",
"None",
":",
"abort",
"(",
"404",
")",
"return",
"render_template",
"(",
"'file.html'",
",",
... | Returns details of a file | [
"Returns",
"details",
"of",
"a",
"file"
] | 15e2d0133c23b6d07438a553bb8149fadff21547 | https://github.com/ansible-community/ara/blob/15e2d0133c23b6d07438a553bb8149fadff21547/ara/views/file.py#L47-L55 | train | 225,131 |
ansible-community/ara | ara/webapp.py | configure_db | def configure_db(app):
"""
0.10 is the first version of ARA that ships with a stable database schema.
We can identify a database that originates from before this by checking if
there is an alembic revision available.
If there is no alembic revision available, assume we are running the first
revision which contains the latest state of the database prior to this.
"""
models.db.init_app(app)
log = logging.getLogger('ara.webapp.configure_db')
log.debug('Setting up database...')
if app.config.get('ARA_AUTOCREATE_DATABASE'):
with app.app_context():
migrations = app.config['DB_MIGRATIONS']
flask_migrate.Migrate(app, models.db, directory=migrations)
config = app.extensions['migrate'].migrate.get_config(migrations)
# Verify if the database tables have been created at all
inspector = Inspector.from_engine(models.db.engine)
if len(inspector.get_table_names()) == 0:
log.info('Initializing new DB from scratch')
flask_migrate.upgrade(directory=migrations)
# Get current alembic head revision
script = ScriptDirectory.from_config(config)
head = script.get_current_head()
# Get current revision, if available
connection = models.db.engine.connect()
context = MigrationContext.configure(connection)
current = context.get_current_revision()
if not current:
log.info('Unstable DB schema, stamping original revision')
flask_migrate.stamp(directory=migrations,
revision='da9459a1f71c')
if head != current:
log.info('DB schema out of date, upgrading')
flask_migrate.upgrade(directory=migrations) | python | def configure_db(app):
"""
0.10 is the first version of ARA that ships with a stable database schema.
We can identify a database that originates from before this by checking if
there is an alembic revision available.
If there is no alembic revision available, assume we are running the first
revision which contains the latest state of the database prior to this.
"""
models.db.init_app(app)
log = logging.getLogger('ara.webapp.configure_db')
log.debug('Setting up database...')
if app.config.get('ARA_AUTOCREATE_DATABASE'):
with app.app_context():
migrations = app.config['DB_MIGRATIONS']
flask_migrate.Migrate(app, models.db, directory=migrations)
config = app.extensions['migrate'].migrate.get_config(migrations)
# Verify if the database tables have been created at all
inspector = Inspector.from_engine(models.db.engine)
if len(inspector.get_table_names()) == 0:
log.info('Initializing new DB from scratch')
flask_migrate.upgrade(directory=migrations)
# Get current alembic head revision
script = ScriptDirectory.from_config(config)
head = script.get_current_head()
# Get current revision, if available
connection = models.db.engine.connect()
context = MigrationContext.configure(connection)
current = context.get_current_revision()
if not current:
log.info('Unstable DB schema, stamping original revision')
flask_migrate.stamp(directory=migrations,
revision='da9459a1f71c')
if head != current:
log.info('DB schema out of date, upgrading')
flask_migrate.upgrade(directory=migrations) | [
"def",
"configure_db",
"(",
"app",
")",
":",
"models",
".",
"db",
".",
"init_app",
"(",
"app",
")",
"log",
"=",
"logging",
".",
"getLogger",
"(",
"'ara.webapp.configure_db'",
")",
"log",
".",
"debug",
"(",
"'Setting up database...'",
")",
"if",
"app",
".",... | 0.10 is the first version of ARA that ships with a stable database schema.
We can identify a database that originates from before this by checking if
there is an alembic revision available.
If there is no alembic revision available, assume we are running the first
revision which contains the latest state of the database prior to this. | [
"0",
".",
"10",
"is",
"the",
"first",
"version",
"of",
"ARA",
"that",
"ships",
"with",
"a",
"stable",
"database",
"schema",
".",
"We",
"can",
"identify",
"a",
"database",
"that",
"originates",
"from",
"before",
"this",
"by",
"checking",
"if",
"there",
"i... | 15e2d0133c23b6d07438a553bb8149fadff21547 | https://github.com/ansible-community/ara/blob/15e2d0133c23b6d07438a553bb8149fadff21547/ara/webapp.py#L248-L288 | train | 225,132 |
ansible-community/ara | ara/webapp.py | configure_cache | def configure_cache(app):
""" Sets up an attribute to cache data in the app context """
log = logging.getLogger('ara.webapp.configure_cache')
log.debug('Configuring cache')
if not getattr(app, '_cache', None):
app._cache = {} | python | def configure_cache(app):
""" Sets up an attribute to cache data in the app context """
log = logging.getLogger('ara.webapp.configure_cache')
log.debug('Configuring cache')
if not getattr(app, '_cache', None):
app._cache = {} | [
"def",
"configure_cache",
"(",
"app",
")",
":",
"log",
"=",
"logging",
".",
"getLogger",
"(",
"'ara.webapp.configure_cache'",
")",
"log",
".",
"debug",
"(",
"'Configuring cache'",
")",
"if",
"not",
"getattr",
"(",
"app",
",",
"'_cache'",
",",
"None",
")",
... | Sets up an attribute to cache data in the app context | [
"Sets",
"up",
"an",
"attribute",
"to",
"cache",
"data",
"in",
"the",
"app",
"context"
] | 15e2d0133c23b6d07438a553bb8149fadff21547 | https://github.com/ansible-community/ara/blob/15e2d0133c23b6d07438a553bb8149fadff21547/ara/webapp.py#L318-L324 | train | 225,133 |
orbingol/NURBS-Python | geomdl/convert.py | bspline_to_nurbs | def bspline_to_nurbs(obj):
""" Converts non-rational parametric shapes to rational ones.
:param obj: B-Spline shape
:type obj: BSpline.Curve, BSpline.Surface or BSpline.Volume
:return: NURBS shape
:rtype: NURBS.Curve, NURBS.Surface or NURBS.Volume
:raises: TypeError
"""
# B-Spline -> NURBS
if isinstance(obj, BSpline.Curve):
return _convert.convert_curve(obj, NURBS)
elif isinstance(obj, BSpline.Surface):
return _convert.convert_surface(obj, NURBS)
elif isinstance(obj, BSpline.Volume):
return _convert.convert_volume(obj, NURBS)
else:
raise TypeError("Input must be an instance of B-Spline curve, surface or volume") | python | def bspline_to_nurbs(obj):
""" Converts non-rational parametric shapes to rational ones.
:param obj: B-Spline shape
:type obj: BSpline.Curve, BSpline.Surface or BSpline.Volume
:return: NURBS shape
:rtype: NURBS.Curve, NURBS.Surface or NURBS.Volume
:raises: TypeError
"""
# B-Spline -> NURBS
if isinstance(obj, BSpline.Curve):
return _convert.convert_curve(obj, NURBS)
elif isinstance(obj, BSpline.Surface):
return _convert.convert_surface(obj, NURBS)
elif isinstance(obj, BSpline.Volume):
return _convert.convert_volume(obj, NURBS)
else:
raise TypeError("Input must be an instance of B-Spline curve, surface or volume") | [
"def",
"bspline_to_nurbs",
"(",
"obj",
")",
":",
"# B-Spline -> NURBS",
"if",
"isinstance",
"(",
"obj",
",",
"BSpline",
".",
"Curve",
")",
":",
"return",
"_convert",
".",
"convert_curve",
"(",
"obj",
",",
"NURBS",
")",
"elif",
"isinstance",
"(",
"obj",
","... | Converts non-rational parametric shapes to rational ones.
:param obj: B-Spline shape
:type obj: BSpline.Curve, BSpline.Surface or BSpline.Volume
:return: NURBS shape
:rtype: NURBS.Curve, NURBS.Surface or NURBS.Volume
:raises: TypeError | [
"Converts",
"non",
"-",
"rational",
"parametric",
"shapes",
"to",
"rational",
"ones",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/convert.py#L14-L31 | train | 225,134 |
orbingol/NURBS-Python | geomdl/convert.py | nurbs_to_bspline | def nurbs_to_bspline(obj, **kwargs):
""" Extracts the non-rational components from rational parametric shapes, if possible.
The possibility of converting a rational shape to a non-rational one depends on the weights vector.
:param obj: NURBS shape
:type obj: NURBS.Curve, NURBS.Surface or NURBS.Volume
:return: B-Spline shape
:rtype: BSpline.Curve, BSpline.Surface or BSpline.Volume
:raises: TypeError
"""
if not obj.rational:
raise TypeError("The input must be a rational shape")
# Get keyword arguments
tol = kwargs.get('tol', 10e-8)
# Test for non-rational component extraction
for w in obj.weights:
if abs(w - 1.0) > tol:
print("Cannot extract non-rational components")
return obj
# NURBS -> B-Spline
if isinstance(obj, NURBS.Curve):
return _convert.convert_curve(obj, BSpline)
elif isinstance(obj, NURBS.Surface):
return _convert.convert_surface(obj, BSpline)
elif isinstance(obj, NURBS.Volume):
return _convert.convert_volume(obj, BSpline)
else:
raise TypeError("Input must be an instance of NURBS curve, surface or volume") | python | def nurbs_to_bspline(obj, **kwargs):
""" Extracts the non-rational components from rational parametric shapes, if possible.
The possibility of converting a rational shape to a non-rational one depends on the weights vector.
:param obj: NURBS shape
:type obj: NURBS.Curve, NURBS.Surface or NURBS.Volume
:return: B-Spline shape
:rtype: BSpline.Curve, BSpline.Surface or BSpline.Volume
:raises: TypeError
"""
if not obj.rational:
raise TypeError("The input must be a rational shape")
# Get keyword arguments
tol = kwargs.get('tol', 10e-8)
# Test for non-rational component extraction
for w in obj.weights:
if abs(w - 1.0) > tol:
print("Cannot extract non-rational components")
return obj
# NURBS -> B-Spline
if isinstance(obj, NURBS.Curve):
return _convert.convert_curve(obj, BSpline)
elif isinstance(obj, NURBS.Surface):
return _convert.convert_surface(obj, BSpline)
elif isinstance(obj, NURBS.Volume):
return _convert.convert_volume(obj, BSpline)
else:
raise TypeError("Input must be an instance of NURBS curve, surface or volume") | [
"def",
"nurbs_to_bspline",
"(",
"obj",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"obj",
".",
"rational",
":",
"raise",
"TypeError",
"(",
"\"The input must be a rational shape\"",
")",
"# Get keyword arguments",
"tol",
"=",
"kwargs",
".",
"get",
"(",
"'to... | Extracts the non-rational components from rational parametric shapes, if possible.
The possibility of converting a rational shape to a non-rational one depends on the weights vector.
:param obj: NURBS shape
:type obj: NURBS.Curve, NURBS.Surface or NURBS.Volume
:return: B-Spline shape
:rtype: BSpline.Curve, BSpline.Surface or BSpline.Volume
:raises: TypeError | [
"Extracts",
"the",
"non",
"-",
"rational",
"components",
"from",
"rational",
"parametric",
"shapes",
"if",
"possible",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/convert.py#L34-L65 | train | 225,135 |
orbingol/NURBS-Python | geomdl/_linalg.py | doolittle | def doolittle(matrix_a):
""" Doolittle's Method for LU-factorization.
:param matrix_a: Input matrix (must be a square matrix)
:type matrix_a: list, tuple
:return: a tuple containing matrices (L,U)
:rtype: tuple
"""
# Initialize L and U matrices
matrix_u = [[0.0 for _ in range(len(matrix_a))] for _ in range(len(matrix_a))]
matrix_l = [[0.0 for _ in range(len(matrix_a))] for _ in range(len(matrix_a))]
# Doolittle Method
for i in range(0, len(matrix_a)):
for k in range(i, len(matrix_a)):
# Upper triangular (U) matrix
matrix_u[i][k] = float(matrix_a[i][k] - sum([matrix_l[i][j] * matrix_u[j][k] for j in range(0, i)]))
# Lower triangular (L) matrix
if i == k:
matrix_l[i][i] = 1.0
else:
matrix_l[k][i] = float(matrix_a[k][i] - sum([matrix_l[k][j] * matrix_u[j][i] for j in range(0, i)]))
# Handle zero division error
try:
matrix_l[k][i] /= float(matrix_u[i][i])
except ZeroDivisionError:
matrix_l[k][i] = 0.0
return matrix_l, matrix_u | python | def doolittle(matrix_a):
""" Doolittle's Method for LU-factorization.
:param matrix_a: Input matrix (must be a square matrix)
:type matrix_a: list, tuple
:return: a tuple containing matrices (L,U)
:rtype: tuple
"""
# Initialize L and U matrices
matrix_u = [[0.0 for _ in range(len(matrix_a))] for _ in range(len(matrix_a))]
matrix_l = [[0.0 for _ in range(len(matrix_a))] for _ in range(len(matrix_a))]
# Doolittle Method
for i in range(0, len(matrix_a)):
for k in range(i, len(matrix_a)):
# Upper triangular (U) matrix
matrix_u[i][k] = float(matrix_a[i][k] - sum([matrix_l[i][j] * matrix_u[j][k] for j in range(0, i)]))
# Lower triangular (L) matrix
if i == k:
matrix_l[i][i] = 1.0
else:
matrix_l[k][i] = float(matrix_a[k][i] - sum([matrix_l[k][j] * matrix_u[j][i] for j in range(0, i)]))
# Handle zero division error
try:
matrix_l[k][i] /= float(matrix_u[i][i])
except ZeroDivisionError:
matrix_l[k][i] = 0.0
return matrix_l, matrix_u | [
"def",
"doolittle",
"(",
"matrix_a",
")",
":",
"# Initialize L and U matrices",
"matrix_u",
"=",
"[",
"[",
"0.0",
"for",
"_",
"in",
"range",
"(",
"len",
"(",
"matrix_a",
")",
")",
"]",
"for",
"_",
"in",
"range",
"(",
"len",
"(",
"matrix_a",
")",
")",
... | Doolittle's Method for LU-factorization.
:param matrix_a: Input matrix (must be a square matrix)
:type matrix_a: list, tuple
:return: a tuple containing matrices (L,U)
:rtype: tuple | [
"Doolittle",
"s",
"Method",
"for",
"LU",
"-",
"factorization",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_linalg.py#L14-L42 | train | 225,136 |
orbingol/NURBS-Python | setup.py | read_files | def read_files(project, ext):
""" Reads files inside the input project directory. """
project_path = os.path.join(os.path.dirname(__file__), project)
file_list = os.listdir(project_path)
flist = []
flist_path = []
for f in file_list:
f_path = os.path.join(project_path, f)
if os.path.isfile(f_path) and f.endswith(ext) and f != "__init__.py":
flist.append(f.split('.')[0])
flist_path.append(f_path)
return flist, flist_path | python | def read_files(project, ext):
""" Reads files inside the input project directory. """
project_path = os.path.join(os.path.dirname(__file__), project)
file_list = os.listdir(project_path)
flist = []
flist_path = []
for f in file_list:
f_path = os.path.join(project_path, f)
if os.path.isfile(f_path) and f.endswith(ext) and f != "__init__.py":
flist.append(f.split('.')[0])
flist_path.append(f_path)
return flist, flist_path | [
"def",
"read_files",
"(",
"project",
",",
"ext",
")",
":",
"project_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"project",
")",
"file_list",
"=",
"os",
".",
"listdir",
"(",
"project_p... | Reads files inside the input project directory. | [
"Reads",
"files",
"inside",
"the",
"input",
"project",
"directory",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/setup.py#L141-L152 | train | 225,137 |
orbingol/NURBS-Python | setup.py | copy_files | def copy_files(src, ext, dst):
""" Copies files with extensions "ext" from "src" to "dst" directory. """
src_path = os.path.join(os.path.dirname(__file__), src)
dst_path = os.path.join(os.path.dirname(__file__), dst)
file_list = os.listdir(src_path)
for f in file_list:
if f == '__init__.py':
continue
f_path = os.path.join(src_path, f)
if os.path.isfile(f_path) and f.endswith(ext):
shutil.copy(f_path, dst_path) | python | def copy_files(src, ext, dst):
""" Copies files with extensions "ext" from "src" to "dst" directory. """
src_path = os.path.join(os.path.dirname(__file__), src)
dst_path = os.path.join(os.path.dirname(__file__), dst)
file_list = os.listdir(src_path)
for f in file_list:
if f == '__init__.py':
continue
f_path = os.path.join(src_path, f)
if os.path.isfile(f_path) and f.endswith(ext):
shutil.copy(f_path, dst_path) | [
"def",
"copy_files",
"(",
"src",
",",
"ext",
",",
"dst",
")",
":",
"src_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"src",
")",
"dst_path",
"=",
"os",
".",
"path",
".",
"join",
... | Copies files with extensions "ext" from "src" to "dst" directory. | [
"Copies",
"files",
"with",
"extensions",
"ext",
"from",
"src",
"to",
"dst",
"directory",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/setup.py#L155-L165 | train | 225,138 |
orbingol/NURBS-Python | setup.py | make_dir | def make_dir(project):
""" Creates the project directory for compiled modules. """
project_path = os.path.join(os.path.dirname(__file__), project)
# Delete the directory and the files inside it
if os.path.exists(project_path):
shutil.rmtree(project_path)
# Create the directory
os.mkdir(project_path)
# We need a __init__.py file inside the directory
with open(os.path.join(project_path, '__init__.py'), 'w') as fp:
fp.write('__version__ = "' + str(get_property('__version__', 'geomdl')) + '"\n')
fp.write('__author__ = "' + str(get_property('__author__', 'geomdl')) + '"\n')
fp.write('__license__ = "' + str(get_property('__license__', 'geomdl')) + '"\n') | python | def make_dir(project):
""" Creates the project directory for compiled modules. """
project_path = os.path.join(os.path.dirname(__file__), project)
# Delete the directory and the files inside it
if os.path.exists(project_path):
shutil.rmtree(project_path)
# Create the directory
os.mkdir(project_path)
# We need a __init__.py file inside the directory
with open(os.path.join(project_path, '__init__.py'), 'w') as fp:
fp.write('__version__ = "' + str(get_property('__version__', 'geomdl')) + '"\n')
fp.write('__author__ = "' + str(get_property('__author__', 'geomdl')) + '"\n')
fp.write('__license__ = "' + str(get_property('__license__', 'geomdl')) + '"\n') | [
"def",
"make_dir",
"(",
"project",
")",
":",
"project_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"project",
")",
"# Delete the directory and the files inside it",
"if",
"os",
".",
"path",
... | Creates the project directory for compiled modules. | [
"Creates",
"the",
"project",
"directory",
"for",
"compiled",
"modules",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/setup.py#L168-L180 | train | 225,139 |
orbingol/NURBS-Python | setup.py | in_argv | def in_argv(arg_list):
""" Checks if any of the elements of the input list is in sys.argv array. """
for arg in sys.argv:
for parg in arg_list:
if parg == arg or arg.startswith(parg):
return True
return False | python | def in_argv(arg_list):
""" Checks if any of the elements of the input list is in sys.argv array. """
for arg in sys.argv:
for parg in arg_list:
if parg == arg or arg.startswith(parg):
return True
return False | [
"def",
"in_argv",
"(",
"arg_list",
")",
":",
"for",
"arg",
"in",
"sys",
".",
"argv",
":",
"for",
"parg",
"in",
"arg_list",
":",
"if",
"parg",
"==",
"arg",
"or",
"arg",
".",
"startswith",
"(",
"parg",
")",
":",
"return",
"True",
"return",
"False"
] | Checks if any of the elements of the input list is in sys.argv array. | [
"Checks",
"if",
"any",
"of",
"the",
"elements",
"of",
"the",
"input",
"list",
"is",
"in",
"sys",
".",
"argv",
"array",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/setup.py#L183-L189 | train | 225,140 |
orbingol/NURBS-Python | geomdl/knotvector.py | generate | def generate(degree, num_ctrlpts, **kwargs):
""" Generates an equally spaced knot vector.
It uses the following equality to generate knot vector: :math:`m = n + p + 1`
where;
* :math:`p`, degree
* :math:`n + 1`, number of control points
* :math:`m + 1`, number of knots
Keyword Arguments:
* ``clamped``: Flag to choose from clamped or unclamped knot vector options. *Default: True*
:param degree: degree
:type degree: int
:param num_ctrlpts: number of control points
:type num_ctrlpts: int
:return: knot vector
:rtype: list
"""
if degree == 0 or num_ctrlpts == 0:
raise ValueError("Input values should be different than zero.")
# Get keyword arguments
clamped = kwargs.get('clamped', True)
# Number of repetitions at the start and end of the array
num_repeat = degree
# Number of knots in the middle
num_segments = num_ctrlpts - (degree + 1)
if not clamped:
# No repetitions at the start and end
num_repeat = 0
# Should conform the rule: m = n + p + 1
num_segments = degree + num_ctrlpts - 1
# First knots
knot_vector = [0.0 for _ in range(0, num_repeat)]
# Middle knots
knot_vector += linspace(0.0, 1.0, num_segments + 2)
# Last knots
knot_vector += [1.0 for _ in range(0, num_repeat)]
# Return auto-generated knot vector
return knot_vector | python | def generate(degree, num_ctrlpts, **kwargs):
""" Generates an equally spaced knot vector.
It uses the following equality to generate knot vector: :math:`m = n + p + 1`
where;
* :math:`p`, degree
* :math:`n + 1`, number of control points
* :math:`m + 1`, number of knots
Keyword Arguments:
* ``clamped``: Flag to choose from clamped or unclamped knot vector options. *Default: True*
:param degree: degree
:type degree: int
:param num_ctrlpts: number of control points
:type num_ctrlpts: int
:return: knot vector
:rtype: list
"""
if degree == 0 or num_ctrlpts == 0:
raise ValueError("Input values should be different than zero.")
# Get keyword arguments
clamped = kwargs.get('clamped', True)
# Number of repetitions at the start and end of the array
num_repeat = degree
# Number of knots in the middle
num_segments = num_ctrlpts - (degree + 1)
if not clamped:
# No repetitions at the start and end
num_repeat = 0
# Should conform the rule: m = n + p + 1
num_segments = degree + num_ctrlpts - 1
# First knots
knot_vector = [0.0 for _ in range(0, num_repeat)]
# Middle knots
knot_vector += linspace(0.0, 1.0, num_segments + 2)
# Last knots
knot_vector += [1.0 for _ in range(0, num_repeat)]
# Return auto-generated knot vector
return knot_vector | [
"def",
"generate",
"(",
"degree",
",",
"num_ctrlpts",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"degree",
"==",
"0",
"or",
"num_ctrlpts",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"\"Input values should be different than zero.\"",
")",
"# Get keyword arguments",
... | Generates an equally spaced knot vector.
It uses the following equality to generate knot vector: :math:`m = n + p + 1`
where;
* :math:`p`, degree
* :math:`n + 1`, number of control points
* :math:`m + 1`, number of knots
Keyword Arguments:
* ``clamped``: Flag to choose from clamped or unclamped knot vector options. *Default: True*
:param degree: degree
:type degree: int
:param num_ctrlpts: number of control points
:type num_ctrlpts: int
:return: knot vector
:rtype: list | [
"Generates",
"an",
"equally",
"spaced",
"knot",
"vector",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/knotvector.py#L15-L65 | train | 225,141 |
orbingol/NURBS-Python | geomdl/knotvector.py | check | def check(degree, knot_vector, num_ctrlpts):
""" Checks the validity of the input knot vector.
Please refer to The NURBS Book (2nd Edition), p.50 for details.
:param degree: degree of the curve or the surface
:type degree: int
:param knot_vector: knot vector to be checked
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points
:type num_ctrlpts: int
:return: True if the knot vector is valid, False otherwise
:rtype: bool
"""
try:
if knot_vector is None or len(knot_vector) == 0:
raise ValueError("Input knot vector cannot be empty")
except TypeError as e:
print("An error occurred: {}".format(e.args[-1]))
raise TypeError("Knot vector must be a list or tuple")
except Exception:
raise
# Check the formula; m = p + n + 1
if len(knot_vector) != degree + num_ctrlpts + 1:
return False
# Check ascending order
prev_knot = knot_vector[0]
for knot in knot_vector:
if prev_knot > knot:
return False
prev_knot = knot
return True | python | def check(degree, knot_vector, num_ctrlpts):
""" Checks the validity of the input knot vector.
Please refer to The NURBS Book (2nd Edition), p.50 for details.
:param degree: degree of the curve or the surface
:type degree: int
:param knot_vector: knot vector to be checked
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points
:type num_ctrlpts: int
:return: True if the knot vector is valid, False otherwise
:rtype: bool
"""
try:
if knot_vector is None or len(knot_vector) == 0:
raise ValueError("Input knot vector cannot be empty")
except TypeError as e:
print("An error occurred: {}".format(e.args[-1]))
raise TypeError("Knot vector must be a list or tuple")
except Exception:
raise
# Check the formula; m = p + n + 1
if len(knot_vector) != degree + num_ctrlpts + 1:
return False
# Check ascending order
prev_knot = knot_vector[0]
for knot in knot_vector:
if prev_knot > knot:
return False
prev_knot = knot
return True | [
"def",
"check",
"(",
"degree",
",",
"knot_vector",
",",
"num_ctrlpts",
")",
":",
"try",
":",
"if",
"knot_vector",
"is",
"None",
"or",
"len",
"(",
"knot_vector",
")",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"\"Input knot vector cannot be empty\"",
")",
"e... | Checks the validity of the input knot vector.
Please refer to The NURBS Book (2nd Edition), p.50 for details.
:param degree: degree of the curve or the surface
:type degree: int
:param knot_vector: knot vector to be checked
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points
:type num_ctrlpts: int
:return: True if the knot vector is valid, False otherwise
:rtype: bool | [
"Checks",
"the",
"validity",
"of",
"the",
"input",
"knot",
"vector",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/knotvector.py#L99-L133 | train | 225,142 |
orbingol/NURBS-Python | geomdl/fitting.py | interpolate_curve | def interpolate_curve(points, degree, **kwargs):
""" Curve interpolation through the data points.
Please refer to Algorithm A9.1 on The NURBS Book (2nd Edition), pp.369-370 for details.
Keyword Arguments:
* ``centripetal``: activates centripetal parametrization method. *Default: False*
:param points: data points
:type points: list, tuple
:param degree: degree of the output parametric curve
:type degree: int
:return: interpolated B-Spline curve
:rtype: BSpline.Curve
"""
# Keyword arguments
use_centripetal = kwargs.get('centripetal', False)
# Number of control points
num_points = len(points)
# Get uk
uk = compute_params_curve(points, use_centripetal)
# Compute knot vector
kv = compute_knot_vector(degree, num_points, uk)
# Do global interpolation
matrix_a = _build_coeff_matrix(degree, kv, uk, points)
ctrlpts = ginterp(matrix_a, points)
# Generate B-spline curve
curve = BSpline.Curve()
curve.degree = degree
curve.ctrlpts = ctrlpts
curve.knotvector = kv
return curve | python | def interpolate_curve(points, degree, **kwargs):
""" Curve interpolation through the data points.
Please refer to Algorithm A9.1 on The NURBS Book (2nd Edition), pp.369-370 for details.
Keyword Arguments:
* ``centripetal``: activates centripetal parametrization method. *Default: False*
:param points: data points
:type points: list, tuple
:param degree: degree of the output parametric curve
:type degree: int
:return: interpolated B-Spline curve
:rtype: BSpline.Curve
"""
# Keyword arguments
use_centripetal = kwargs.get('centripetal', False)
# Number of control points
num_points = len(points)
# Get uk
uk = compute_params_curve(points, use_centripetal)
# Compute knot vector
kv = compute_knot_vector(degree, num_points, uk)
# Do global interpolation
matrix_a = _build_coeff_matrix(degree, kv, uk, points)
ctrlpts = ginterp(matrix_a, points)
# Generate B-spline curve
curve = BSpline.Curve()
curve.degree = degree
curve.ctrlpts = ctrlpts
curve.knotvector = kv
return curve | [
"def",
"interpolate_curve",
"(",
"points",
",",
"degree",
",",
"*",
"*",
"kwargs",
")",
":",
"# Keyword arguments",
"use_centripetal",
"=",
"kwargs",
".",
"get",
"(",
"'centripetal'",
",",
"False",
")",
"# Number of control points",
"num_points",
"=",
"len",
"("... | Curve interpolation through the data points.
Please refer to Algorithm A9.1 on The NURBS Book (2nd Edition), pp.369-370 for details.
Keyword Arguments:
* ``centripetal``: activates centripetal parametrization method. *Default: False*
:param points: data points
:type points: list, tuple
:param degree: degree of the output parametric curve
:type degree: int
:return: interpolated B-Spline curve
:rtype: BSpline.Curve | [
"Curve",
"interpolation",
"through",
"the",
"data",
"points",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/fitting.py#L16-L53 | train | 225,143 |
orbingol/NURBS-Python | geomdl/fitting.py | interpolate_surface | def interpolate_surface(points, size_u, size_v, degree_u, degree_v, **kwargs):
""" Surface interpolation through the data points.
Please refer to the Algorithm A9.4 on The NURBS Book (2nd Edition), pp.380 for details.
Keyword Arguments:
* ``centripetal``: activates centripetal parametrization method. *Default: False*
:param points: data points
:type points: list, tuple
:param size_u: number of data points on the u-direction
:type size_u: int
:param size_v: number of data points on the v-direction
:type size_v: int
:param degree_u: degree of the output surface for the u-direction
:type degree_u: int
:param degree_v: degree of the output surface for the v-direction
:type degree_v: int
:return: interpolated B-Spline surface
:rtype: BSpline.Surface
"""
# Keyword arguments
use_centripetal = kwargs.get('centripetal', False)
# Get uk and vl
uk, vl = compute_params_surface(points, size_u, size_v, use_centripetal)
# Compute knot vectors
kv_u = compute_knot_vector(degree_u, size_u, uk)
kv_v = compute_knot_vector(degree_v, size_v, vl)
# Do global interpolation on the u-direction
ctrlpts_r = []
for v in range(size_v):
pts = [points[v + (size_v * u)] for u in range(size_u)]
matrix_a = _build_coeff_matrix(degree_u, kv_u, uk, pts)
ctrlpts_r += ginterp(matrix_a, pts)
# Do global interpolation on the v-direction
ctrlpts = []
for u in range(size_u):
pts = [ctrlpts_r[u + (size_u * v)] for v in range(size_v)]
matrix_a = _build_coeff_matrix(degree_v, kv_v, vl, pts)
ctrlpts += ginterp(matrix_a, pts)
# Generate B-spline surface
surf = BSpline.Surface()
surf.degree_u = degree_u
surf.degree_v = degree_v
surf.ctrlpts_size_u = size_u
surf.ctrlpts_size_v = size_v
surf.ctrlpts = ctrlpts
surf.knotvector_u = kv_u
surf.knotvector_v = kv_v
return surf | python | def interpolate_surface(points, size_u, size_v, degree_u, degree_v, **kwargs):
""" Surface interpolation through the data points.
Please refer to the Algorithm A9.4 on The NURBS Book (2nd Edition), pp.380 for details.
Keyword Arguments:
* ``centripetal``: activates centripetal parametrization method. *Default: False*
:param points: data points
:type points: list, tuple
:param size_u: number of data points on the u-direction
:type size_u: int
:param size_v: number of data points on the v-direction
:type size_v: int
:param degree_u: degree of the output surface for the u-direction
:type degree_u: int
:param degree_v: degree of the output surface for the v-direction
:type degree_v: int
:return: interpolated B-Spline surface
:rtype: BSpline.Surface
"""
# Keyword arguments
use_centripetal = kwargs.get('centripetal', False)
# Get uk and vl
uk, vl = compute_params_surface(points, size_u, size_v, use_centripetal)
# Compute knot vectors
kv_u = compute_knot_vector(degree_u, size_u, uk)
kv_v = compute_knot_vector(degree_v, size_v, vl)
# Do global interpolation on the u-direction
ctrlpts_r = []
for v in range(size_v):
pts = [points[v + (size_v * u)] for u in range(size_u)]
matrix_a = _build_coeff_matrix(degree_u, kv_u, uk, pts)
ctrlpts_r += ginterp(matrix_a, pts)
# Do global interpolation on the v-direction
ctrlpts = []
for u in range(size_u):
pts = [ctrlpts_r[u + (size_u * v)] for v in range(size_v)]
matrix_a = _build_coeff_matrix(degree_v, kv_v, vl, pts)
ctrlpts += ginterp(matrix_a, pts)
# Generate B-spline surface
surf = BSpline.Surface()
surf.degree_u = degree_u
surf.degree_v = degree_v
surf.ctrlpts_size_u = size_u
surf.ctrlpts_size_v = size_v
surf.ctrlpts = ctrlpts
surf.knotvector_u = kv_u
surf.knotvector_v = kv_v
return surf | [
"def",
"interpolate_surface",
"(",
"points",
",",
"size_u",
",",
"size_v",
",",
"degree_u",
",",
"degree_v",
",",
"*",
"*",
"kwargs",
")",
":",
"# Keyword arguments",
"use_centripetal",
"=",
"kwargs",
".",
"get",
"(",
"'centripetal'",
",",
"False",
")",
"# G... | Surface interpolation through the data points.
Please refer to the Algorithm A9.4 on The NURBS Book (2nd Edition), pp.380 for details.
Keyword Arguments:
* ``centripetal``: activates centripetal parametrization method. *Default: False*
:param points: data points
:type points: list, tuple
:param size_u: number of data points on the u-direction
:type size_u: int
:param size_v: number of data points on the v-direction
:type size_v: int
:param degree_u: degree of the output surface for the u-direction
:type degree_u: int
:param degree_v: degree of the output surface for the v-direction
:type degree_v: int
:return: interpolated B-Spline surface
:rtype: BSpline.Surface | [
"Surface",
"interpolation",
"through",
"the",
"data",
"points",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/fitting.py#L57-L112 | train | 225,144 |
orbingol/NURBS-Python | geomdl/fitting.py | compute_knot_vector | def compute_knot_vector(degree, num_points, params):
""" Computes a knot vector from the parameter list using averaging method.
Please refer to the Equation 9.8 on The NURBS Book (2nd Edition), pp.365 for details.
:param degree: degree
:type degree: int
:param num_points: number of data points
:type num_points: int
:param params: list of parameters, :math:`\\overline{u}_{k}`
:type params: list, tuple
:return: knot vector
:rtype: list
"""
# Start knot vector
kv = [0.0 for _ in range(degree + 1)]
# Use averaging method (Eqn 9.8) to compute internal knots in the knot vector
for i in range(num_points - degree - 1):
temp_kv = (1.0 / degree) * sum([params[j] for j in range(i + 1, i + degree + 1)])
kv.append(temp_kv)
# End knot vector
kv += [1.0 for _ in range(degree + 1)]
return kv | python | def compute_knot_vector(degree, num_points, params):
""" Computes a knot vector from the parameter list using averaging method.
Please refer to the Equation 9.8 on The NURBS Book (2nd Edition), pp.365 for details.
:param degree: degree
:type degree: int
:param num_points: number of data points
:type num_points: int
:param params: list of parameters, :math:`\\overline{u}_{k}`
:type params: list, tuple
:return: knot vector
:rtype: list
"""
# Start knot vector
kv = [0.0 for _ in range(degree + 1)]
# Use averaging method (Eqn 9.8) to compute internal knots in the knot vector
for i in range(num_points - degree - 1):
temp_kv = (1.0 / degree) * sum([params[j] for j in range(i + 1, i + degree + 1)])
kv.append(temp_kv)
# End knot vector
kv += [1.0 for _ in range(degree + 1)]
return kv | [
"def",
"compute_knot_vector",
"(",
"degree",
",",
"num_points",
",",
"params",
")",
":",
"# Start knot vector",
"kv",
"=",
"[",
"0.0",
"for",
"_",
"in",
"range",
"(",
"degree",
"+",
"1",
")",
"]",
"# Use averaging method (Eqn 9.8) to compute internal knots in the kn... | Computes a knot vector from the parameter list using averaging method.
Please refer to the Equation 9.8 on The NURBS Book (2nd Edition), pp.365 for details.
:param degree: degree
:type degree: int
:param num_points: number of data points
:type num_points: int
:param params: list of parameters, :math:`\\overline{u}_{k}`
:type params: list, tuple
:return: knot vector
:rtype: list | [
"Computes",
"a",
"knot",
"vector",
"from",
"the",
"parameter",
"list",
"using",
"averaging",
"method",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/fitting.py#L358-L383 | train | 225,145 |
orbingol/NURBS-Python | geomdl/fitting.py | ginterp | def ginterp(coeff_matrix, points):
""" Applies global interpolation to the set of data points to find control points.
:param coeff_matrix: coefficient matrix
:type coeff_matrix: list, tuple
:param points: data points
:type points: list, tuple
:return: control points
:rtype: list
"""
# Dimension
dim = len(points[0])
# Number of data points
num_points = len(points)
# Solve system of linear equations
matrix_l, matrix_u = linalg.lu_decomposition(coeff_matrix)
ctrlpts = [[0.0 for _ in range(dim)] for _ in range(num_points)]
for i in range(dim):
b = [pt[i] for pt in points]
y = linalg.forward_substitution(matrix_l, b)
x = linalg.backward_substitution(matrix_u, y)
for j in range(num_points):
ctrlpts[j][i] = x[j]
# Return control points
return ctrlpts | python | def ginterp(coeff_matrix, points):
""" Applies global interpolation to the set of data points to find control points.
:param coeff_matrix: coefficient matrix
:type coeff_matrix: list, tuple
:param points: data points
:type points: list, tuple
:return: control points
:rtype: list
"""
# Dimension
dim = len(points[0])
# Number of data points
num_points = len(points)
# Solve system of linear equations
matrix_l, matrix_u = linalg.lu_decomposition(coeff_matrix)
ctrlpts = [[0.0 for _ in range(dim)] for _ in range(num_points)]
for i in range(dim):
b = [pt[i] for pt in points]
y = linalg.forward_substitution(matrix_l, b)
x = linalg.backward_substitution(matrix_u, y)
for j in range(num_points):
ctrlpts[j][i] = x[j]
# Return control points
return ctrlpts | [
"def",
"ginterp",
"(",
"coeff_matrix",
",",
"points",
")",
":",
"# Dimension",
"dim",
"=",
"len",
"(",
"points",
"[",
"0",
"]",
")",
"# Number of data points",
"num_points",
"=",
"len",
"(",
"points",
")",
"# Solve system of linear equations",
"matrix_l",
",",
... | Applies global interpolation to the set of data points to find control points.
:param coeff_matrix: coefficient matrix
:type coeff_matrix: list, tuple
:param points: data points
:type points: list, tuple
:return: control points
:rtype: list | [
"Applies",
"global",
"interpolation",
"to",
"the",
"set",
"of",
"data",
"points",
"to",
"find",
"control",
"points",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/fitting.py#L509-L536 | train | 225,146 |
orbingol/NURBS-Python | geomdl/fitting.py | _build_coeff_matrix | def _build_coeff_matrix(degree, knotvector, params, points):
""" Builds the coefficient matrix for global interpolation.
This function only uses data points to build the coefficient matrix. Please refer to The NURBS Book (2nd Edition),
pp364-370 for details.
:param degree: degree
:type degree: int
:param knotvector: knot vector
:type knotvector: list, tuple
:param params: list of parameters
:type params: list, tuple
:param points: data points
:type points: list, tuple
:return: coefficient matrix
:rtype: list
"""
# Number of data points
num_points = len(points)
# Set up coefficient matrix
matrix_a = [[0.0 for _ in range(num_points)] for _ in range(num_points)]
for i in range(num_points):
span = helpers.find_span_linear(degree, knotvector, num_points, params[i])
matrix_a[i][span-degree:span+1] = helpers.basis_function(degree, knotvector, span, params[i])
# Return coefficient matrix
return matrix_a | python | def _build_coeff_matrix(degree, knotvector, params, points):
""" Builds the coefficient matrix for global interpolation.
This function only uses data points to build the coefficient matrix. Please refer to The NURBS Book (2nd Edition),
pp364-370 for details.
:param degree: degree
:type degree: int
:param knotvector: knot vector
:type knotvector: list, tuple
:param params: list of parameters
:type params: list, tuple
:param points: data points
:type points: list, tuple
:return: coefficient matrix
:rtype: list
"""
# Number of data points
num_points = len(points)
# Set up coefficient matrix
matrix_a = [[0.0 for _ in range(num_points)] for _ in range(num_points)]
for i in range(num_points):
span = helpers.find_span_linear(degree, knotvector, num_points, params[i])
matrix_a[i][span-degree:span+1] = helpers.basis_function(degree, knotvector, span, params[i])
# Return coefficient matrix
return matrix_a | [
"def",
"_build_coeff_matrix",
"(",
"degree",
",",
"knotvector",
",",
"params",
",",
"points",
")",
":",
"# Number of data points",
"num_points",
"=",
"len",
"(",
"points",
")",
"# Set up coefficient matrix",
"matrix_a",
"=",
"[",
"[",
"0.0",
"for",
"_",
"in",
... | Builds the coefficient matrix for global interpolation.
This function only uses data points to build the coefficient matrix. Please refer to The NURBS Book (2nd Edition),
pp364-370 for details.
:param degree: degree
:type degree: int
:param knotvector: knot vector
:type knotvector: list, tuple
:param params: list of parameters
:type params: list, tuple
:param points: data points
:type points: list, tuple
:return: coefficient matrix
:rtype: list | [
"Builds",
"the",
"coefficient",
"matrix",
"for",
"global",
"interpolation",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/fitting.py#L539-L566 | train | 225,147 |
orbingol/NURBS-Python | geomdl/visualization/vtk_helpers.py | create_render_window | def create_render_window(actors, callbacks, **kwargs):
""" Creates VTK render window with an interactor.
:param actors: list of VTK actors
:type actors: list, tuple
:param callbacks: callback functions for registering custom events
:type callbacks: dict
"""
# Get keyword arguments
figure_size = kwargs.get('figure_size', (800, 600))
camera_position = kwargs.get('camera_position', (0, 0, 100))
# Find camera focal point
center_points = []
for actor in actors:
center_points.append(actor.GetCenter())
camera_focal_point = linalg.vector_mean(*center_points)
# Create camera
camera = vtk.vtkCamera()
camera.SetPosition(*camera_position)
camera.SetFocalPoint(*camera_focal_point)
# Create renderer
renderer = vtk.vtkRenderer()
renderer.SetActiveCamera(camera)
renderer.SetBackground(1.0, 1.0, 1.0)
# Add actors to the scene
for actor in actors:
renderer.AddActor(actor)
# Render window
render_window = vtk.vtkRenderWindow()
render_window.AddRenderer(renderer)
render_window.SetSize(*figure_size)
# Render window interactor
window_interactor = vtk.vtkRenderWindowInteractor()
window_interactor.SetRenderWindow(render_window)
# Add event observers
for cb in callbacks:
window_interactor.AddObserver(cb, callbacks[cb][0], callbacks[cb][1]) # cb name, cb function ref, cb priority
# Render actors
render_window.Render()
# Set window name after render() is called
render_window.SetWindowName("geomdl")
# Use trackball camera
interactor_style = vtk.vtkInteractorStyleTrackballCamera()
window_interactor.SetInteractorStyle(interactor_style)
# Start interactor
window_interactor.Start()
# Return window interactor instance
return window_interactor | python | def create_render_window(actors, callbacks, **kwargs):
""" Creates VTK render window with an interactor.
:param actors: list of VTK actors
:type actors: list, tuple
:param callbacks: callback functions for registering custom events
:type callbacks: dict
"""
# Get keyword arguments
figure_size = kwargs.get('figure_size', (800, 600))
camera_position = kwargs.get('camera_position', (0, 0, 100))
# Find camera focal point
center_points = []
for actor in actors:
center_points.append(actor.GetCenter())
camera_focal_point = linalg.vector_mean(*center_points)
# Create camera
camera = vtk.vtkCamera()
camera.SetPosition(*camera_position)
camera.SetFocalPoint(*camera_focal_point)
# Create renderer
renderer = vtk.vtkRenderer()
renderer.SetActiveCamera(camera)
renderer.SetBackground(1.0, 1.0, 1.0)
# Add actors to the scene
for actor in actors:
renderer.AddActor(actor)
# Render window
render_window = vtk.vtkRenderWindow()
render_window.AddRenderer(renderer)
render_window.SetSize(*figure_size)
# Render window interactor
window_interactor = vtk.vtkRenderWindowInteractor()
window_interactor.SetRenderWindow(render_window)
# Add event observers
for cb in callbacks:
window_interactor.AddObserver(cb, callbacks[cb][0], callbacks[cb][1]) # cb name, cb function ref, cb priority
# Render actors
render_window.Render()
# Set window name after render() is called
render_window.SetWindowName("geomdl")
# Use trackball camera
interactor_style = vtk.vtkInteractorStyleTrackballCamera()
window_interactor.SetInteractorStyle(interactor_style)
# Start interactor
window_interactor.Start()
# Return window interactor instance
return window_interactor | [
"def",
"create_render_window",
"(",
"actors",
",",
"callbacks",
",",
"*",
"*",
"kwargs",
")",
":",
"# Get keyword arguments",
"figure_size",
"=",
"kwargs",
".",
"get",
"(",
"'figure_size'",
",",
"(",
"800",
",",
"600",
")",
")",
"camera_position",
"=",
"kwar... | Creates VTK render window with an interactor.
:param actors: list of VTK actors
:type actors: list, tuple
:param callbacks: callback functions for registering custom events
:type callbacks: dict | [
"Creates",
"VTK",
"render",
"window",
"with",
"an",
"interactor",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/visualization/vtk_helpers.py#L14-L73 | train | 225,148 |
orbingol/NURBS-Python | geomdl/visualization/vtk_helpers.py | create_color | def create_color(color):
""" Creates VTK-compatible RGB color from a color string.
:param color: color
:type color: str
:return: RGB color values
:rtype: list
"""
if color[0] == "#":
# Convert hex string to RGB
return [int(color[i:i + 2], 16) / 255 for i in range(1, 7, 2)]
else:
# Create a named colors instance
nc = vtk.vtkNamedColors()
return nc.GetColor3d(color) | python | def create_color(color):
""" Creates VTK-compatible RGB color from a color string.
:param color: color
:type color: str
:return: RGB color values
:rtype: list
"""
if color[0] == "#":
# Convert hex string to RGB
return [int(color[i:i + 2], 16) / 255 for i in range(1, 7, 2)]
else:
# Create a named colors instance
nc = vtk.vtkNamedColors()
return nc.GetColor3d(color) | [
"def",
"create_color",
"(",
"color",
")",
":",
"if",
"color",
"[",
"0",
"]",
"==",
"\"#\"",
":",
"# Convert hex string to RGB",
"return",
"[",
"int",
"(",
"color",
"[",
"i",
":",
"i",
"+",
"2",
"]",
",",
"16",
")",
"/",
"255",
"for",
"i",
"in",
"... | Creates VTK-compatible RGB color from a color string.
:param color: color
:type color: str
:return: RGB color values
:rtype: list | [
"Creates",
"VTK",
"-",
"compatible",
"RGB",
"color",
"from",
"a",
"color",
"string",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/visualization/vtk_helpers.py#L76-L90 | train | 225,149 |
orbingol/NURBS-Python | geomdl/visualization/vtk_helpers.py | create_actor_pts | def create_actor_pts(pts, color, **kwargs):
""" Creates a VTK actor for rendering scatter plots.
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
point_size = kwargs.get('size', 5)
point_sphere = kwargs.get('point_as_sphere', True)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Create a PolyData object and add points
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
# Run vertex glyph filter on the points array
vertex_filter = vtk.vtkVertexGlyphFilter()
vertex_filter.SetInputData(polydata)
# Map ploy data to the graphics primitives
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(vertex_filter.GetOutputPort())
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
actor.GetProperty().SetPointSize(point_size)
actor.GetProperty().SetRenderPointsAsSpheres(point_sphere)
# Return the actor
return actor | python | def create_actor_pts(pts, color, **kwargs):
""" Creates a VTK actor for rendering scatter plots.
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
point_size = kwargs.get('size', 5)
point_sphere = kwargs.get('point_as_sphere', True)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Create a PolyData object and add points
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
# Run vertex glyph filter on the points array
vertex_filter = vtk.vtkVertexGlyphFilter()
vertex_filter.SetInputData(polydata)
# Map ploy data to the graphics primitives
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(vertex_filter.GetOutputPort())
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
actor.GetProperty().SetPointSize(point_size)
actor.GetProperty().SetRenderPointsAsSpheres(point_sphere)
# Return the actor
return actor | [
"def",
"create_actor_pts",
"(",
"pts",
",",
"color",
",",
"*",
"*",
"kwargs",
")",
":",
"# Keyword arguments",
"array_name",
"=",
"kwargs",
".",
"get",
"(",
"'name'",
",",
"\"\"",
")",
"array_index",
"=",
"kwargs",
".",
"get",
"(",
"'index'",
",",
"0",
... | Creates a VTK actor for rendering scatter plots.
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor | [
"Creates",
"a",
"VTK",
"actor",
"for",
"rendering",
"scatter",
"plots",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/visualization/vtk_helpers.py#L93-L135 | train | 225,150 |
orbingol/NURBS-Python | geomdl/visualization/vtk_helpers.py | create_actor_polygon | def create_actor_polygon(pts, color, **kwargs):
""" Creates a VTK actor for rendering polygons.
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
line_width = kwargs.get('size', 1.0)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Number of points
num_points = points.GetNumberOfPoints()
# Create lines
cells = vtk.vtkCellArray()
for i in range(num_points - 1):
line = vtk.vtkLine()
line.GetPointIds().SetId(0, i)
line.GetPointIds().SetId(1, i + 1)
cells.InsertNextCell(line)
# Create a PolyData object and add points & lines
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
polydata.SetLines(cells)
# Map poly data to the graphics primitives
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputDataObject(polydata)
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
actor.GetProperty().SetLineWidth(line_width)
# Return the actor
return actor | python | def create_actor_polygon(pts, color, **kwargs):
""" Creates a VTK actor for rendering polygons.
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
line_width = kwargs.get('size', 1.0)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Number of points
num_points = points.GetNumberOfPoints()
# Create lines
cells = vtk.vtkCellArray()
for i in range(num_points - 1):
line = vtk.vtkLine()
line.GetPointIds().SetId(0, i)
line.GetPointIds().SetId(1, i + 1)
cells.InsertNextCell(line)
# Create a PolyData object and add points & lines
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
polydata.SetLines(cells)
# Map poly data to the graphics primitives
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputDataObject(polydata)
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
actor.GetProperty().SetLineWidth(line_width)
# Return the actor
return actor | [
"def",
"create_actor_polygon",
"(",
"pts",
",",
"color",
",",
"*",
"*",
"kwargs",
")",
":",
"# Keyword arguments",
"array_name",
"=",
"kwargs",
".",
"get",
"(",
"'name'",
",",
"\"\"",
")",
"array_index",
"=",
"kwargs",
".",
"get",
"(",
"'index'",
",",
"0... | Creates a VTK actor for rendering polygons.
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor | [
"Creates",
"a",
"VTK",
"actor",
"for",
"rendering",
"polygons",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/visualization/vtk_helpers.py#L138-L186 | train | 225,151 |
orbingol/NURBS-Python | geomdl/visualization/vtk_helpers.py | create_actor_mesh | def create_actor_mesh(pts, lines, color, **kwargs):
""" Creates a VTK actor for rendering quadrilateral plots.
:param pts: points
:type pts: vtkFloatArray
:param lines: point connectivity information
:type lines: vtkIntArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
line_width = kwargs.get('size', 0.5)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Create lines
cells = vtk.vtkCellArray()
for line in lines:
pline = vtk.vtkPolyLine()
pline.GetPointIds().SetNumberOfIds(5)
for i in range(len(line)):
pline.GetPointIds().SetId(i, line[i])
pline.GetPointIds().SetId(4, line[0])
cells.InsertNextCell(pline)
# Create a PolyData object and add points & lines
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
polydata.SetLines(cells)
# Map poly data to the graphics primitives
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputDataObject(polydata)
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
actor.GetProperty().SetLineWidth(line_width)
# Return the actor
return actor | python | def create_actor_mesh(pts, lines, color, **kwargs):
""" Creates a VTK actor for rendering quadrilateral plots.
:param pts: points
:type pts: vtkFloatArray
:param lines: point connectivity information
:type lines: vtkIntArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
line_width = kwargs.get('size', 0.5)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Create lines
cells = vtk.vtkCellArray()
for line in lines:
pline = vtk.vtkPolyLine()
pline.GetPointIds().SetNumberOfIds(5)
for i in range(len(line)):
pline.GetPointIds().SetId(i, line[i])
pline.GetPointIds().SetId(4, line[0])
cells.InsertNextCell(pline)
# Create a PolyData object and add points & lines
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
polydata.SetLines(cells)
# Map poly data to the graphics primitives
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputDataObject(polydata)
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
actor.GetProperty().SetLineWidth(line_width)
# Return the actor
return actor | [
"def",
"create_actor_mesh",
"(",
"pts",
",",
"lines",
",",
"color",
",",
"*",
"*",
"kwargs",
")",
":",
"# Keyword arguments",
"array_name",
"=",
"kwargs",
".",
"get",
"(",
"'name'",
",",
"\"\"",
")",
"array_index",
"=",
"kwargs",
".",
"get",
"(",
"'index... | Creates a VTK actor for rendering quadrilateral plots.
:param pts: points
:type pts: vtkFloatArray
:param lines: point connectivity information
:type lines: vtkIntArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor | [
"Creates",
"a",
"VTK",
"actor",
"for",
"rendering",
"quadrilateral",
"plots",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/visualization/vtk_helpers.py#L189-L238 | train | 225,152 |
orbingol/NURBS-Python | geomdl/visualization/vtk_helpers.py | create_actor_tri | def create_actor_tri(pts, tris, color, **kwargs):
""" Creates a VTK actor for rendering triangulated surface plots.
:param pts: points
:type pts: vtkFloatArray
:param tris: list of triangle indices
:type tris: ndarray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Create triangles
triangles = vtk.vtkCellArray()
for tri in tris:
tmp = vtk.vtkTriangle()
for i, v in enumerate(tri):
tmp.GetPointIds().SetId(i, v)
triangles.InsertNextCell(tmp)
# Create a PolyData object and add points & triangles
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
polydata.SetPolys(triangles)
# Map poly data to the graphics primitives
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputDataObject(polydata)
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
# Return the actor
return actor | python | def create_actor_tri(pts, tris, color, **kwargs):
""" Creates a VTK actor for rendering triangulated surface plots.
:param pts: points
:type pts: vtkFloatArray
:param tris: list of triangle indices
:type tris: ndarray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Create triangles
triangles = vtk.vtkCellArray()
for tri in tris:
tmp = vtk.vtkTriangle()
for i, v in enumerate(tri):
tmp.GetPointIds().SetId(i, v)
triangles.InsertNextCell(tmp)
# Create a PolyData object and add points & triangles
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
polydata.SetPolys(triangles)
# Map poly data to the graphics primitives
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputDataObject(polydata)
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
# Return the actor
return actor | [
"def",
"create_actor_tri",
"(",
"pts",
",",
"tris",
",",
"color",
",",
"*",
"*",
"kwargs",
")",
":",
"# Keyword arguments",
"array_name",
"=",
"kwargs",
".",
"get",
"(",
"'name'",
",",
"\"\"",
")",
"array_index",
"=",
"kwargs",
".",
"get",
"(",
"'index'"... | Creates a VTK actor for rendering triangulated surface plots.
:param pts: points
:type pts: vtkFloatArray
:param tris: list of triangle indices
:type tris: ndarray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor | [
"Creates",
"a",
"VTK",
"actor",
"for",
"rendering",
"triangulated",
"surface",
"plots",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/visualization/vtk_helpers.py#L241-L286 | train | 225,153 |
orbingol/NURBS-Python | geomdl/visualization/vtk_helpers.py | create_actor_hexahedron | def create_actor_hexahedron(grid, color, **kwargs):
""" Creates a VTK actor for rendering voxels using hexahedron elements.
:param grid: grid
:type grid: ndarray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
# Create hexahedron elements
points = vtk.vtkPoints()
hexarray = vtk.vtkCellArray()
for j, pt in enumerate(grid):
tmp = vtk.vtkHexahedron()
fb = pt[0]
for i, v in enumerate(fb):
points.InsertNextPoint(v)
tmp.GetPointIds().SetId(i, i + (j * 8))
ft = pt[-1]
for i, v in enumerate(ft):
points.InsertNextPoint(v)
tmp.GetPointIds().SetId(i + 4, i + 4 + (j * 8))
hexarray.InsertNextCell(tmp)
# Create an unstructured grid object and add points & hexahedron elements
ugrid = vtk.vtkUnstructuredGrid()
ugrid.SetPoints(points)
ugrid.SetCells(tmp.GetCellType(), hexarray)
# ugrid.InsertNextCell(tmp.GetCellType(), tmp.GetPointIds())
# Map unstructured grid to the graphics primitives
mapper = vtk.vtkDataSetMapper()
mapper.SetInputDataObject(ugrid)
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
# Return the actor
return actor | python | def create_actor_hexahedron(grid, color, **kwargs):
""" Creates a VTK actor for rendering voxels using hexahedron elements.
:param grid: grid
:type grid: ndarray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
# Create hexahedron elements
points = vtk.vtkPoints()
hexarray = vtk.vtkCellArray()
for j, pt in enumerate(grid):
tmp = vtk.vtkHexahedron()
fb = pt[0]
for i, v in enumerate(fb):
points.InsertNextPoint(v)
tmp.GetPointIds().SetId(i, i + (j * 8))
ft = pt[-1]
for i, v in enumerate(ft):
points.InsertNextPoint(v)
tmp.GetPointIds().SetId(i + 4, i + 4 + (j * 8))
hexarray.InsertNextCell(tmp)
# Create an unstructured grid object and add points & hexahedron elements
ugrid = vtk.vtkUnstructuredGrid()
ugrid.SetPoints(points)
ugrid.SetCells(tmp.GetCellType(), hexarray)
# ugrid.InsertNextCell(tmp.GetCellType(), tmp.GetPointIds())
# Map unstructured grid to the graphics primitives
mapper = vtk.vtkDataSetMapper()
mapper.SetInputDataObject(ugrid)
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
# Return the actor
return actor | [
"def",
"create_actor_hexahedron",
"(",
"grid",
",",
"color",
",",
"*",
"*",
"kwargs",
")",
":",
"# Keyword arguments",
"array_name",
"=",
"kwargs",
".",
"get",
"(",
"'name'",
",",
"\"\"",
")",
"array_index",
"=",
"kwargs",
".",
"get",
"(",
"'index'",
",",
... | Creates a VTK actor for rendering voxels using hexahedron elements.
:param grid: grid
:type grid: ndarray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor | [
"Creates",
"a",
"VTK",
"actor",
"for",
"rendering",
"voxels",
"using",
"hexahedron",
"elements",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/visualization/vtk_helpers.py#L289-L336 | train | 225,154 |
orbingol/NURBS-Python | geomdl/visualization/vtk_helpers.py | create_actor_delaunay | def create_actor_delaunay(pts, color, **kwargs):
""" Creates a VTK actor for rendering triangulated plots using Delaunay triangulation.
Keyword Arguments:
* ``d3d``: flag to choose between Delaunay2D (``False``) and Delaunay3D (``True``). *Default: False*
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
use_delaunay3d = kwargs.get("d3d", False)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Create a PolyData object and add points
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
# Apply Delaunay triangulation on the poly data object
triangulation = vtk.vtkDelaunay3D() if use_delaunay3d else vtk.vtkDelaunay2D()
triangulation.SetInputData(polydata)
# Map triangulated surface to the graphics primitives
mapper = vtk.vtkDataSetMapper()
mapper.SetInputConnection(triangulation.GetOutputPort())
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
# Return the actor
return actor | python | def create_actor_delaunay(pts, color, **kwargs):
""" Creates a VTK actor for rendering triangulated plots using Delaunay triangulation.
Keyword Arguments:
* ``d3d``: flag to choose between Delaunay2D (``False``) and Delaunay3D (``True``). *Default: False*
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor
"""
# Keyword arguments
array_name = kwargs.get('name', "")
array_index = kwargs.get('index', 0)
use_delaunay3d = kwargs.get("d3d", False)
# Create points
points = vtk.vtkPoints()
points.SetData(pts)
# Create a PolyData object and add points
polydata = vtk.vtkPolyData()
polydata.SetPoints(points)
# Apply Delaunay triangulation on the poly data object
triangulation = vtk.vtkDelaunay3D() if use_delaunay3d else vtk.vtkDelaunay2D()
triangulation.SetInputData(polydata)
# Map triangulated surface to the graphics primitives
mapper = vtk.vtkDataSetMapper()
mapper.SetInputConnection(triangulation.GetOutputPort())
mapper.SetArrayName(array_name)
mapper.SetArrayId(array_index)
# Create an actor and set its properties
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(*color)
# Return the actor
return actor | [
"def",
"create_actor_delaunay",
"(",
"pts",
",",
"color",
",",
"*",
"*",
"kwargs",
")",
":",
"# Keyword arguments",
"array_name",
"=",
"kwargs",
".",
"get",
"(",
"'name'",
",",
"\"\"",
")",
"array_index",
"=",
"kwargs",
".",
"get",
"(",
"'index'",
",",
"... | Creates a VTK actor for rendering triangulated plots using Delaunay triangulation.
Keyword Arguments:
* ``d3d``: flag to choose between Delaunay2D (``False``) and Delaunay3D (``True``). *Default: False*
:param pts: points
:type pts: vtkFloatArray
:param color: actor color
:type color: list
:return: a VTK actor
:rtype: vtkActor | [
"Creates",
"a",
"VTK",
"actor",
"for",
"rendering",
"triangulated",
"plots",
"using",
"Delaunay",
"triangulation",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/visualization/vtk_helpers.py#L339-L381 | train | 225,155 |
orbingol/NURBS-Python | geomdl/compatibility.py | flip_ctrlpts_u | def flip_ctrlpts_u(ctrlpts, size_u, size_v):
""" Flips a list of 1-dimensional control points from u-row order to v-row order.
**u-row order**: each row corresponds to a list of u values
**v-row order**: each row corresponds to a list of v values
:param ctrlpts: control points in u-row order
:type ctrlpts: list, tuple
:param size_u: size in u-direction
:type size_u: int
:param size_v: size in v-direction
:type size_v: int
:return: control points in v-row order
:rtype: list
"""
new_ctrlpts = []
for i in range(0, size_u):
for j in range(0, size_v):
temp = [float(c) for c in ctrlpts[i + (j * size_u)]]
new_ctrlpts.append(temp)
return new_ctrlpts | python | def flip_ctrlpts_u(ctrlpts, size_u, size_v):
""" Flips a list of 1-dimensional control points from u-row order to v-row order.
**u-row order**: each row corresponds to a list of u values
**v-row order**: each row corresponds to a list of v values
:param ctrlpts: control points in u-row order
:type ctrlpts: list, tuple
:param size_u: size in u-direction
:type size_u: int
:param size_v: size in v-direction
:type size_v: int
:return: control points in v-row order
:rtype: list
"""
new_ctrlpts = []
for i in range(0, size_u):
for j in range(0, size_v):
temp = [float(c) for c in ctrlpts[i + (j * size_u)]]
new_ctrlpts.append(temp)
return new_ctrlpts | [
"def",
"flip_ctrlpts_u",
"(",
"ctrlpts",
",",
"size_u",
",",
"size_v",
")",
":",
"new_ctrlpts",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"size_u",
")",
":",
"for",
"j",
"in",
"range",
"(",
"0",
",",
"size_v",
")",
":",
"temp",
"="... | Flips a list of 1-dimensional control points from u-row order to v-row order.
**u-row order**: each row corresponds to a list of u values
**v-row order**: each row corresponds to a list of v values
:param ctrlpts: control points in u-row order
:type ctrlpts: list, tuple
:param size_u: size in u-direction
:type size_u: int
:param size_v: size in v-direction
:type size_v: int
:return: control points in v-row order
:rtype: list | [
"Flips",
"a",
"list",
"of",
"1",
"-",
"dimensional",
"control",
"points",
"from",
"u",
"-",
"row",
"order",
"to",
"v",
"-",
"row",
"order",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/compatibility.py#L11-L33 | train | 225,156 |
orbingol/NURBS-Python | geomdl/compatibility.py | generate_ctrlptsw | def generate_ctrlptsw(ctrlpts):
""" Generates weighted control points from unweighted ones in 1-D.
This function
#. Takes in a 1-D control points list whose coordinates are organized in (x, y, z, w) format
#. converts into (x*w, y*w, z*w, w) format
#. Returns the result
:param ctrlpts: 1-D control points (P)
:type ctrlpts: list
:return: 1-D weighted control points (Pw)
:rtype: list
"""
# Multiply control points by weight
new_ctrlpts = []
for cpt in ctrlpts:
temp = [float(pt * cpt[-1]) for pt in cpt]
temp[-1] = float(cpt[-1])
new_ctrlpts.append(temp)
return new_ctrlpts | python | def generate_ctrlptsw(ctrlpts):
""" Generates weighted control points from unweighted ones in 1-D.
This function
#. Takes in a 1-D control points list whose coordinates are organized in (x, y, z, w) format
#. converts into (x*w, y*w, z*w, w) format
#. Returns the result
:param ctrlpts: 1-D control points (P)
:type ctrlpts: list
:return: 1-D weighted control points (Pw)
:rtype: list
"""
# Multiply control points by weight
new_ctrlpts = []
for cpt in ctrlpts:
temp = [float(pt * cpt[-1]) for pt in cpt]
temp[-1] = float(cpt[-1])
new_ctrlpts.append(temp)
return new_ctrlpts | [
"def",
"generate_ctrlptsw",
"(",
"ctrlpts",
")",
":",
"# Multiply control points by weight",
"new_ctrlpts",
"=",
"[",
"]",
"for",
"cpt",
"in",
"ctrlpts",
":",
"temp",
"=",
"[",
"float",
"(",
"pt",
"*",
"cpt",
"[",
"-",
"1",
"]",
")",
"for",
"pt",
"in",
... | Generates weighted control points from unweighted ones in 1-D.
This function
#. Takes in a 1-D control points list whose coordinates are organized in (x, y, z, w) format
#. converts into (x*w, y*w, z*w, w) format
#. Returns the result
:param ctrlpts: 1-D control points (P)
:type ctrlpts: list
:return: 1-D weighted control points (Pw)
:rtype: list | [
"Generates",
"weighted",
"control",
"points",
"from",
"unweighted",
"ones",
"in",
"1",
"-",
"D",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/compatibility.py#L86-L107 | train | 225,157 |
orbingol/NURBS-Python | geomdl/compatibility.py | generate_ctrlpts_weights | def generate_ctrlpts_weights(ctrlpts):
""" Generates unweighted control points from weighted ones in 1-D.
This function
#. Takes in 1-D control points list whose coordinates are organized in (x*w, y*w, z*w, w) format
#. Converts the input control points list into (x, y, z, w) format
#. Returns the result
:param ctrlpts: 1-D control points (P)
:type ctrlpts: list
:return: 1-D weighted control points (Pw)
:rtype: list
"""
# Divide control points by weight
new_ctrlpts = []
for cpt in ctrlpts:
temp = [float(pt / cpt[-1]) for pt in cpt]
temp[-1] = float(cpt[-1])
new_ctrlpts.append(temp)
return new_ctrlpts | python | def generate_ctrlpts_weights(ctrlpts):
""" Generates unweighted control points from weighted ones in 1-D.
This function
#. Takes in 1-D control points list whose coordinates are organized in (x*w, y*w, z*w, w) format
#. Converts the input control points list into (x, y, z, w) format
#. Returns the result
:param ctrlpts: 1-D control points (P)
:type ctrlpts: list
:return: 1-D weighted control points (Pw)
:rtype: list
"""
# Divide control points by weight
new_ctrlpts = []
for cpt in ctrlpts:
temp = [float(pt / cpt[-1]) for pt in cpt]
temp[-1] = float(cpt[-1])
new_ctrlpts.append(temp)
return new_ctrlpts | [
"def",
"generate_ctrlpts_weights",
"(",
"ctrlpts",
")",
":",
"# Divide control points by weight",
"new_ctrlpts",
"=",
"[",
"]",
"for",
"cpt",
"in",
"ctrlpts",
":",
"temp",
"=",
"[",
"float",
"(",
"pt",
"/",
"cpt",
"[",
"-",
"1",
"]",
")",
"for",
"pt",
"i... | Generates unweighted control points from weighted ones in 1-D.
This function
#. Takes in 1-D control points list whose coordinates are organized in (x*w, y*w, z*w, w) format
#. Converts the input control points list into (x, y, z, w) format
#. Returns the result
:param ctrlpts: 1-D control points (P)
:type ctrlpts: list
:return: 1-D weighted control points (Pw)
:rtype: list | [
"Generates",
"unweighted",
"control",
"points",
"from",
"weighted",
"ones",
"in",
"1",
"-",
"D",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/compatibility.py#L139-L160 | train | 225,158 |
orbingol/NURBS-Python | geomdl/compatibility.py | combine_ctrlpts_weights | def combine_ctrlpts_weights(ctrlpts, weights=None):
""" Multiplies control points by the weights to generate weighted control points.
This function is dimension agnostic, i.e. control points can be in any dimension but weights should be 1D.
The ``weights`` function parameter can be set to None to let the function generate a weights vector composed of
1.0 values. This feature can be used to convert B-Spline basis to NURBS basis.
:param ctrlpts: unweighted control points
:type ctrlpts: list, tuple
:param weights: weights vector; if set to None, a weights vector of 1.0s will be automatically generated
:type weights: list, tuple or None
:return: weighted control points
:rtype: list
"""
if weights is None:
weights = [1.0 for _ in range(len(ctrlpts))]
ctrlptsw = []
for pt, w in zip(ctrlpts, weights):
temp = [float(c * w) for c in pt]
temp.append(float(w))
ctrlptsw.append(temp)
return ctrlptsw | python | def combine_ctrlpts_weights(ctrlpts, weights=None):
""" Multiplies control points by the weights to generate weighted control points.
This function is dimension agnostic, i.e. control points can be in any dimension but weights should be 1D.
The ``weights`` function parameter can be set to None to let the function generate a weights vector composed of
1.0 values. This feature can be used to convert B-Spline basis to NURBS basis.
:param ctrlpts: unweighted control points
:type ctrlpts: list, tuple
:param weights: weights vector; if set to None, a weights vector of 1.0s will be automatically generated
:type weights: list, tuple or None
:return: weighted control points
:rtype: list
"""
if weights is None:
weights = [1.0 for _ in range(len(ctrlpts))]
ctrlptsw = []
for pt, w in zip(ctrlpts, weights):
temp = [float(c * w) for c in pt]
temp.append(float(w))
ctrlptsw.append(temp)
return ctrlptsw | [
"def",
"combine_ctrlpts_weights",
"(",
"ctrlpts",
",",
"weights",
"=",
"None",
")",
":",
"if",
"weights",
"is",
"None",
":",
"weights",
"=",
"[",
"1.0",
"for",
"_",
"in",
"range",
"(",
"len",
"(",
"ctrlpts",
")",
")",
"]",
"ctrlptsw",
"=",
"[",
"]",
... | Multiplies control points by the weights to generate weighted control points.
This function is dimension agnostic, i.e. control points can be in any dimension but weights should be 1D.
The ``weights`` function parameter can be set to None to let the function generate a weights vector composed of
1.0 values. This feature can be used to convert B-Spline basis to NURBS basis.
:param ctrlpts: unweighted control points
:type ctrlpts: list, tuple
:param weights: weights vector; if set to None, a weights vector of 1.0s will be automatically generated
:type weights: list, tuple or None
:return: weighted control points
:rtype: list | [
"Multiplies",
"control",
"points",
"by",
"the",
"weights",
"to",
"generate",
"weighted",
"control",
"points",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/compatibility.py#L190-L214 | train | 225,159 |
orbingol/NURBS-Python | geomdl/compatibility.py | separate_ctrlpts_weights | def separate_ctrlpts_weights(ctrlptsw):
""" Divides weighted control points by weights to generate unweighted control points and weights vector.
This function is dimension agnostic, i.e. control points can be in any dimension but the last element of the array
should indicate the weight.
:param ctrlptsw: weighted control points
:type ctrlptsw: list, tuple
:return: unweighted control points and weights vector
:rtype: list
"""
ctrlpts = []
weights = []
for ptw in ctrlptsw:
temp = [float(pw / ptw[-1]) for pw in ptw[:-1]]
ctrlpts.append(temp)
weights.append(ptw[-1])
return [ctrlpts, weights] | python | def separate_ctrlpts_weights(ctrlptsw):
""" Divides weighted control points by weights to generate unweighted control points and weights vector.
This function is dimension agnostic, i.e. control points can be in any dimension but the last element of the array
should indicate the weight.
:param ctrlptsw: weighted control points
:type ctrlptsw: list, tuple
:return: unweighted control points and weights vector
:rtype: list
"""
ctrlpts = []
weights = []
for ptw in ctrlptsw:
temp = [float(pw / ptw[-1]) for pw in ptw[:-1]]
ctrlpts.append(temp)
weights.append(ptw[-1])
return [ctrlpts, weights] | [
"def",
"separate_ctrlpts_weights",
"(",
"ctrlptsw",
")",
":",
"ctrlpts",
"=",
"[",
"]",
"weights",
"=",
"[",
"]",
"for",
"ptw",
"in",
"ctrlptsw",
":",
"temp",
"=",
"[",
"float",
"(",
"pw",
"/",
"ptw",
"[",
"-",
"1",
"]",
")",
"for",
"pw",
"in",
"... | Divides weighted control points by weights to generate unweighted control points and weights vector.
This function is dimension agnostic, i.e. control points can be in any dimension but the last element of the array
should indicate the weight.
:param ctrlptsw: weighted control points
:type ctrlptsw: list, tuple
:return: unweighted control points and weights vector
:rtype: list | [
"Divides",
"weighted",
"control",
"points",
"by",
"weights",
"to",
"generate",
"unweighted",
"control",
"points",
"and",
"weights",
"vector",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/compatibility.py#L217-L235 | train | 225,160 |
orbingol/NURBS-Python | geomdl/compatibility.py | flip_ctrlpts2d_file | def flip_ctrlpts2d_file(file_in='', file_out='ctrlpts_flip.txt'):
""" Flips u and v directions of a 2D control points file and saves flipped coordinates to a file.
:param file_in: name of the input file (to be read)
:type file_in: str
:param file_out: name of the output file (to be saved)
:type file_out: str
:raises IOError: an error occurred reading or writing the file
"""
# Read control points
ctrlpts2d, size_u, size_v = _read_ctrltps2d_file(file_in)
# Flip control points array
new_ctrlpts2d = flip_ctrlpts2d(ctrlpts2d, size_u, size_v)
# Save new control points
_save_ctrlpts2d_file(new_ctrlpts2d, size_u, size_v, file_out) | python | def flip_ctrlpts2d_file(file_in='', file_out='ctrlpts_flip.txt'):
""" Flips u and v directions of a 2D control points file and saves flipped coordinates to a file.
:param file_in: name of the input file (to be read)
:type file_in: str
:param file_out: name of the output file (to be saved)
:type file_out: str
:raises IOError: an error occurred reading or writing the file
"""
# Read control points
ctrlpts2d, size_u, size_v = _read_ctrltps2d_file(file_in)
# Flip control points array
new_ctrlpts2d = flip_ctrlpts2d(ctrlpts2d, size_u, size_v)
# Save new control points
_save_ctrlpts2d_file(new_ctrlpts2d, size_u, size_v, file_out) | [
"def",
"flip_ctrlpts2d_file",
"(",
"file_in",
"=",
"''",
",",
"file_out",
"=",
"'ctrlpts_flip.txt'",
")",
":",
"# Read control points",
"ctrlpts2d",
",",
"size_u",
",",
"size_v",
"=",
"_read_ctrltps2d_file",
"(",
"file_in",
")",
"# Flip control points array",
"new_ctr... | Flips u and v directions of a 2D control points file and saves flipped coordinates to a file.
:param file_in: name of the input file (to be read)
:type file_in: str
:param file_out: name of the output file (to be saved)
:type file_out: str
:raises IOError: an error occurred reading or writing the file | [
"Flips",
"u",
"and",
"v",
"directions",
"of",
"a",
"2D",
"control",
"points",
"file",
"and",
"saves",
"flipped",
"coordinates",
"to",
"a",
"file",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/compatibility.py#L238-L254 | train | 225,161 |
orbingol/NURBS-Python | geomdl/compatibility.py | generate_ctrlptsw2d_file | def generate_ctrlptsw2d_file(file_in='', file_out='ctrlptsw.txt'):
""" Generates weighted control points from unweighted ones in 2-D.
This function
#. Takes in a 2-D control points file whose coordinates are organized in (x, y, z, w) format
#. Converts into (x*w, y*w, z*w, w) format
#. Saves the result to a file
Therefore, the resultant file could be a direct input of the NURBS.Surface class.
:param file_in: name of the input file (to be read)
:type file_in: str
:param file_out: name of the output file (to be saved)
:type file_out: str
:raises IOError: an error occurred reading or writing the file
"""
# Read control points
ctrlpts2d, size_u, size_v = _read_ctrltps2d_file(file_in)
# Multiply control points by weight
new_ctrlpts2d = generate_ctrlptsw2d(ctrlpts2d)
# Save new control points
_save_ctrlpts2d_file(new_ctrlpts2d, size_u, size_v, file_out) | python | def generate_ctrlptsw2d_file(file_in='', file_out='ctrlptsw.txt'):
""" Generates weighted control points from unweighted ones in 2-D.
This function
#. Takes in a 2-D control points file whose coordinates are organized in (x, y, z, w) format
#. Converts into (x*w, y*w, z*w, w) format
#. Saves the result to a file
Therefore, the resultant file could be a direct input of the NURBS.Surface class.
:param file_in: name of the input file (to be read)
:type file_in: str
:param file_out: name of the output file (to be saved)
:type file_out: str
:raises IOError: an error occurred reading or writing the file
"""
# Read control points
ctrlpts2d, size_u, size_v = _read_ctrltps2d_file(file_in)
# Multiply control points by weight
new_ctrlpts2d = generate_ctrlptsw2d(ctrlpts2d)
# Save new control points
_save_ctrlpts2d_file(new_ctrlpts2d, size_u, size_v, file_out) | [
"def",
"generate_ctrlptsw2d_file",
"(",
"file_in",
"=",
"''",
",",
"file_out",
"=",
"'ctrlptsw.txt'",
")",
":",
"# Read control points",
"ctrlpts2d",
",",
"size_u",
",",
"size_v",
"=",
"_read_ctrltps2d_file",
"(",
"file_in",
")",
"# Multiply control points by weight",
... | Generates weighted control points from unweighted ones in 2-D.
This function
#. Takes in a 2-D control points file whose coordinates are organized in (x, y, z, w) format
#. Converts into (x*w, y*w, z*w, w) format
#. Saves the result to a file
Therefore, the resultant file could be a direct input of the NURBS.Surface class.
:param file_in: name of the input file (to be read)
:type file_in: str
:param file_out: name of the output file (to be saved)
:type file_out: str
:raises IOError: an error occurred reading or writing the file | [
"Generates",
"weighted",
"control",
"points",
"from",
"unweighted",
"ones",
"in",
"2",
"-",
"D",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/compatibility.py#L257-L281 | train | 225,162 |
orbingol/NURBS-Python | geomdl/visualization/VisVTK.py | VisConfig.keypress_callback | def keypress_callback(self, obj, ev):
""" VTK callback for keypress events.
Keypress events:
* ``e``: exit the application
* ``p``: pick object (hover the mouse and then press to pick)
* ``f``: fly to point (click somewhere in the window and press to fly)
* ``r``: reset the camera
* ``s`` and ``w``: switch between solid and wireframe modes
* ``b``: change background color
* ``m``: change color of the picked object
* ``d``: print debug information (of picked object, point, etc.)
* ``h``: change object visibility
* ``n``: reset object visibility
* ``arrow keys``: pan the model
Please refer to `vtkInteractorStyle <https://vtk.org/doc/nightly/html/classvtkInteractorStyle.html>`_ class
reference for more details.
:param obj: render window interactor
:type obj: vtkRenderWindowInteractor
:param ev: event name
:type ev: str
"""
key = obj.GetKeySym() # pressed key (as str)
render_window = obj.GetRenderWindow() # vtkRenderWindow
renderer = render_window.GetRenderers().GetFirstRenderer() # vtkRenderer
picker = obj.GetPicker() # vtkPropPicker
actor = picker.GetActor() # vtkActor
# Custom keypress events
if key == 'Up':
camera = renderer.GetActiveCamera() # vtkCamera
camera.Pitch(2.5)
if key == 'Down':
camera = renderer.GetActiveCamera() # vtkCamera
camera.Pitch(-2.5)
if key == 'Left':
camera = renderer.GetActiveCamera() # vtkCamera
camera.Yaw(-2.5)
if key == 'Right':
camera = renderer.GetActiveCamera() # vtkCamera
camera.Yaw(2.5)
if key == 'b':
if self._bg_id >= len(self._bg):
self._bg_id = 0
renderer.SetBackground(*self._bg[self._bg_id])
self._bg_id += 1
if key == 'm':
if actor is not None:
actor.GetProperty().SetColor(random(), random(), random())
if key == 'd':
if actor is not None:
print("Name:", actor.GetMapper().GetArrayName())
print("Index:", actor.GetMapper().GetArrayId())
print("Selected point:", picker.GetSelectionPoint()[0:2])
print("# of visible actors:", renderer.VisibleActorCount())
if key == 'h':
if actor is not None:
actor.SetVisibility(not actor.GetVisibility())
if key == 'n':
actors = renderer.GetActors() # vtkActorCollection
for actor in actors:
actor.VisibilityOn()
# Update render window
render_window.Render() | python | def keypress_callback(self, obj, ev):
""" VTK callback for keypress events.
Keypress events:
* ``e``: exit the application
* ``p``: pick object (hover the mouse and then press to pick)
* ``f``: fly to point (click somewhere in the window and press to fly)
* ``r``: reset the camera
* ``s`` and ``w``: switch between solid and wireframe modes
* ``b``: change background color
* ``m``: change color of the picked object
* ``d``: print debug information (of picked object, point, etc.)
* ``h``: change object visibility
* ``n``: reset object visibility
* ``arrow keys``: pan the model
Please refer to `vtkInteractorStyle <https://vtk.org/doc/nightly/html/classvtkInteractorStyle.html>`_ class
reference for more details.
:param obj: render window interactor
:type obj: vtkRenderWindowInteractor
:param ev: event name
:type ev: str
"""
key = obj.GetKeySym() # pressed key (as str)
render_window = obj.GetRenderWindow() # vtkRenderWindow
renderer = render_window.GetRenderers().GetFirstRenderer() # vtkRenderer
picker = obj.GetPicker() # vtkPropPicker
actor = picker.GetActor() # vtkActor
# Custom keypress events
if key == 'Up':
camera = renderer.GetActiveCamera() # vtkCamera
camera.Pitch(2.5)
if key == 'Down':
camera = renderer.GetActiveCamera() # vtkCamera
camera.Pitch(-2.5)
if key == 'Left':
camera = renderer.GetActiveCamera() # vtkCamera
camera.Yaw(-2.5)
if key == 'Right':
camera = renderer.GetActiveCamera() # vtkCamera
camera.Yaw(2.5)
if key == 'b':
if self._bg_id >= len(self._bg):
self._bg_id = 0
renderer.SetBackground(*self._bg[self._bg_id])
self._bg_id += 1
if key == 'm':
if actor is not None:
actor.GetProperty().SetColor(random(), random(), random())
if key == 'd':
if actor is not None:
print("Name:", actor.GetMapper().GetArrayName())
print("Index:", actor.GetMapper().GetArrayId())
print("Selected point:", picker.GetSelectionPoint()[0:2])
print("# of visible actors:", renderer.VisibleActorCount())
if key == 'h':
if actor is not None:
actor.SetVisibility(not actor.GetVisibility())
if key == 'n':
actors = renderer.GetActors() # vtkActorCollection
for actor in actors:
actor.VisibilityOn()
# Update render window
render_window.Render() | [
"def",
"keypress_callback",
"(",
"self",
",",
"obj",
",",
"ev",
")",
":",
"key",
"=",
"obj",
".",
"GetKeySym",
"(",
")",
"# pressed key (as str)",
"render_window",
"=",
"obj",
".",
"GetRenderWindow",
"(",
")",
"# vtkRenderWindow",
"renderer",
"=",
"render_wind... | VTK callback for keypress events.
Keypress events:
* ``e``: exit the application
* ``p``: pick object (hover the mouse and then press to pick)
* ``f``: fly to point (click somewhere in the window and press to fly)
* ``r``: reset the camera
* ``s`` and ``w``: switch between solid and wireframe modes
* ``b``: change background color
* ``m``: change color of the picked object
* ``d``: print debug information (of picked object, point, etc.)
* ``h``: change object visibility
* ``n``: reset object visibility
* ``arrow keys``: pan the model
Please refer to `vtkInteractorStyle <https://vtk.org/doc/nightly/html/classvtkInteractorStyle.html>`_ class
reference for more details.
:param obj: render window interactor
:type obj: vtkRenderWindowInteractor
:param ev: event name
:type ev: str | [
"VTK",
"callback",
"for",
"keypress",
"events",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/visualization/VisVTK.py#L46-L112 | train | 225,163 |
orbingol/NURBS-Python | geomdl/_voxelize.py | generate_voxel_grid | def generate_voxel_grid(bbox, szval, use_cubes=False):
""" Generates the voxel grid with the desired size.
:param bbox: bounding box
:type bbox: list, tuple
:param szval: size in x-, y-, z-directions
:type szval: list, tuple
:param use_cubes: use cube voxels instead of cuboid ones
:type use_cubes: bool
:return: voxel grid
:rtype: list
"""
# Input validation
if szval[0] <= 1 or szval[1] <= 1 or szval[2] <= 1:
raise GeomdlException("Size values must be bigger than 1", data=dict(sizevals=szval))
# Find step size for each direction
steps = [float(bbox[1][idx] - bbox[0][idx]) / float(szval[idx] - 1) for idx in range(0, 3)]
# It is possible to use cubes instead of cuboids
if use_cubes:
min_val = min(*steps)
steps = [min_val for _ in range(0, 3)]
# Find range in each direction
ranges = [list(linalg.frange(bbox[0][idx], bbox[1][idx], steps[idx])) for idx in range(0, 3)]
voxel_grid = []
for u in ranges[0]:
for v in ranges[1]:
for w in ranges[2]:
bbmin = [u, v, w]
bbmax = [k + l for k, l in zip(bbmin, steps)]
voxel_grid.append([bbmin, bbmax])
return voxel_grid | python | def generate_voxel_grid(bbox, szval, use_cubes=False):
""" Generates the voxel grid with the desired size.
:param bbox: bounding box
:type bbox: list, tuple
:param szval: size in x-, y-, z-directions
:type szval: list, tuple
:param use_cubes: use cube voxels instead of cuboid ones
:type use_cubes: bool
:return: voxel grid
:rtype: list
"""
# Input validation
if szval[0] <= 1 or szval[1] <= 1 or szval[2] <= 1:
raise GeomdlException("Size values must be bigger than 1", data=dict(sizevals=szval))
# Find step size for each direction
steps = [float(bbox[1][idx] - bbox[0][idx]) / float(szval[idx] - 1) for idx in range(0, 3)]
# It is possible to use cubes instead of cuboids
if use_cubes:
min_val = min(*steps)
steps = [min_val for _ in range(0, 3)]
# Find range in each direction
ranges = [list(linalg.frange(bbox[0][idx], bbox[1][idx], steps[idx])) for idx in range(0, 3)]
voxel_grid = []
for u in ranges[0]:
for v in ranges[1]:
for w in ranges[2]:
bbmin = [u, v, w]
bbmax = [k + l for k, l in zip(bbmin, steps)]
voxel_grid.append([bbmin, bbmax])
return voxel_grid | [
"def",
"generate_voxel_grid",
"(",
"bbox",
",",
"szval",
",",
"use_cubes",
"=",
"False",
")",
":",
"# Input validation",
"if",
"szval",
"[",
"0",
"]",
"<=",
"1",
"or",
"szval",
"[",
"1",
"]",
"<=",
"1",
"or",
"szval",
"[",
"2",
"]",
"<=",
"1",
":",... | Generates the voxel grid with the desired size.
:param bbox: bounding box
:type bbox: list, tuple
:param szval: size in x-, y-, z-directions
:type szval: list, tuple
:param use_cubes: use cube voxels instead of cuboid ones
:type use_cubes: bool
:return: voxel grid
:rtype: list | [
"Generates",
"the",
"voxel",
"grid",
"with",
"the",
"desired",
"size",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_voxelize.py#L49-L83 | train | 225,164 |
orbingol/NURBS-Python | geomdl/_exchange.py | process_template | def process_template(file_src):
""" Process Jinja2 template input
:param file_src: file contents
:type file_src: str
"""
def tmpl_sqrt(x):
""" Square-root of 'x' """
return math.sqrt(x)
def tmpl_cubert(x):
""" Cube-root of 'x' """
return x ** (1.0 / 3.0) if x >= 0 else -(-x) ** (1.0 / 3.0)
def tmpl_pow(x, y):
""" 'x' to the power 'y' """
return math.pow(x, y)
# Check if it is possible to import 'jinja2'
try:
import jinja2
except ImportError:
raise GeomdlException("Please install 'jinja2' package to use templated input: pip install jinja2")
# Replace jinja2 template tags for compatibility
fsrc = file_src.replace("{%", "<%").replace("%}", "%>").replace("{{", "<{").replace("}}", "}>")
# Generate Jinja2 environment
env = jinja2.Environment(
loader=jinja2.BaseLoader(),
trim_blocks=True,
block_start_string='<%', block_end_string='%>',
variable_start_string='<{', variable_end_string='}>'
).from_string(fsrc)
# Load custom functions into the Jinja2 environment
template_funcs = dict(
knot_vector=utilities.generate_knot_vector,
sqrt=tmpl_sqrt,
cubert=tmpl_cubert,
pow=tmpl_pow,
)
for k, v in template_funcs.items():
env.globals[k] = v
# Process Jinja2 template functions & variables inside the input file
return env.render() | python | def process_template(file_src):
""" Process Jinja2 template input
:param file_src: file contents
:type file_src: str
"""
def tmpl_sqrt(x):
""" Square-root of 'x' """
return math.sqrt(x)
def tmpl_cubert(x):
""" Cube-root of 'x' """
return x ** (1.0 / 3.0) if x >= 0 else -(-x) ** (1.0 / 3.0)
def tmpl_pow(x, y):
""" 'x' to the power 'y' """
return math.pow(x, y)
# Check if it is possible to import 'jinja2'
try:
import jinja2
except ImportError:
raise GeomdlException("Please install 'jinja2' package to use templated input: pip install jinja2")
# Replace jinja2 template tags for compatibility
fsrc = file_src.replace("{%", "<%").replace("%}", "%>").replace("{{", "<{").replace("}}", "}>")
# Generate Jinja2 environment
env = jinja2.Environment(
loader=jinja2.BaseLoader(),
trim_blocks=True,
block_start_string='<%', block_end_string='%>',
variable_start_string='<{', variable_end_string='}>'
).from_string(fsrc)
# Load custom functions into the Jinja2 environment
template_funcs = dict(
knot_vector=utilities.generate_knot_vector,
sqrt=tmpl_sqrt,
cubert=tmpl_cubert,
pow=tmpl_pow,
)
for k, v in template_funcs.items():
env.globals[k] = v
# Process Jinja2 template functions & variables inside the input file
return env.render() | [
"def",
"process_template",
"(",
"file_src",
")",
":",
"def",
"tmpl_sqrt",
"(",
"x",
")",
":",
"\"\"\" Square-root of 'x' \"\"\"",
"return",
"math",
".",
"sqrt",
"(",
"x",
")",
"def",
"tmpl_cubert",
"(",
"x",
")",
":",
"\"\"\" Cube-root of 'x' \"\"\"",
"return",
... | Process Jinja2 template input
:param file_src: file contents
:type file_src: str | [
"Process",
"Jinja2",
"template",
"input"
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_exchange.py#L21-L67 | train | 225,165 |
orbingol/NURBS-Python | geomdl/_exchange.py | import_surf_mesh | def import_surf_mesh(file_name):
""" Generates a NURBS surface object from a mesh file.
:param file_name: input mesh file
:type file_name: str
:return: a NURBS surface
:rtype: NURBS.Surface
"""
raw_content = read_file(file_name)
raw_content = raw_content.split("\n")
content = []
for rc in raw_content:
temp = rc.strip().split()
content.append(temp)
# 1st line defines the dimension and it must be 3
if int(content[0][0]) != 3:
raise TypeError("Input mesh '" + str(file_name) + "' must be 3-dimensional")
# Create a NURBS surface instance and fill with the data read from mesh file
surf = shortcuts.generate_surface(rational=True)
# 2nd line is the degrees
surf.degree_u = int(content[1][0])
surf.degree_v = int(content[1][1])
# 3rd line is the number of weighted control points in u and v directions
dim_u = int(content[2][0])
dim_v = int(content[2][1])
# Starting from 6th line, we have the weighted control points
ctrlpts_end = 5 + (dim_u * dim_v)
ctrlpts_mesh = content[5:ctrlpts_end]
# mesh files have the control points in u-row order format
ctrlpts = compatibility.flip_ctrlpts_u(ctrlpts_mesh, dim_u, dim_v)
# mesh files store control points in format (x, y, z, w)
ctrlptsw = compatibility.generate_ctrlptsw(ctrlpts)
# Set control points
surf.set_ctrlpts(ctrlptsw, dim_u, dim_v)
# 4th and 5th lines are knot vectors
surf.knotvector_u = [float(u) for u in content[3]]
surf.knotvector_v = [float(v) for v in content[4]]
# Return the surface instance
return surf | python | def import_surf_mesh(file_name):
""" Generates a NURBS surface object from a mesh file.
:param file_name: input mesh file
:type file_name: str
:return: a NURBS surface
:rtype: NURBS.Surface
"""
raw_content = read_file(file_name)
raw_content = raw_content.split("\n")
content = []
for rc in raw_content:
temp = rc.strip().split()
content.append(temp)
# 1st line defines the dimension and it must be 3
if int(content[0][0]) != 3:
raise TypeError("Input mesh '" + str(file_name) + "' must be 3-dimensional")
# Create a NURBS surface instance and fill with the data read from mesh file
surf = shortcuts.generate_surface(rational=True)
# 2nd line is the degrees
surf.degree_u = int(content[1][0])
surf.degree_v = int(content[1][1])
# 3rd line is the number of weighted control points in u and v directions
dim_u = int(content[2][0])
dim_v = int(content[2][1])
# Starting from 6th line, we have the weighted control points
ctrlpts_end = 5 + (dim_u * dim_v)
ctrlpts_mesh = content[5:ctrlpts_end]
# mesh files have the control points in u-row order format
ctrlpts = compatibility.flip_ctrlpts_u(ctrlpts_mesh, dim_u, dim_v)
# mesh files store control points in format (x, y, z, w)
ctrlptsw = compatibility.generate_ctrlptsw(ctrlpts)
# Set control points
surf.set_ctrlpts(ctrlptsw, dim_u, dim_v)
# 4th and 5th lines are knot vectors
surf.knotvector_u = [float(u) for u in content[3]]
surf.knotvector_v = [float(v) for v in content[4]]
# Return the surface instance
return surf | [
"def",
"import_surf_mesh",
"(",
"file_name",
")",
":",
"raw_content",
"=",
"read_file",
"(",
"file_name",
")",
"raw_content",
"=",
"raw_content",
".",
"split",
"(",
"\"\\n\"",
")",
"content",
"=",
"[",
"]",
"for",
"rc",
"in",
"raw_content",
":",
"temp",
"=... | Generates a NURBS surface object from a mesh file.
:param file_name: input mesh file
:type file_name: str
:return: a NURBS surface
:rtype: NURBS.Surface | [
"Generates",
"a",
"NURBS",
"surface",
"object",
"from",
"a",
"mesh",
"file",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_exchange.py#L102-L150 | train | 225,166 |
orbingol/NURBS-Python | geomdl/_exchange.py | import_vol_mesh | def import_vol_mesh(file_name):
""" Generates a NURBS volume object from a mesh file.
:param file_name: input mesh file
:type file_name: str
:return: a NURBS volume
:rtype: NURBS.Volume
"""
raw_content = read_file(file_name)
raw_content = raw_content.split("\n")
content = []
for rc in raw_content:
temp = rc.strip().split()
content.append(temp)
# 1st line defines the dimension and it must be 3
if int(content[0][0]) != 3:
raise TypeError("Input mesh '" + str(file_name) + "' must be 3-dimensional")
# Create a NURBS surface instance and fill with the data read from mesh file
vol = shortcuts.generate_volume(rational=True)
# 2nd line is the degrees
vol.degree_u = int(content[1][0])
vol.degree_v = int(content[1][1])
vol.degree_w = int(content[1][2])
# 3rd line is the number of weighted control points in u, v, w directions
dim_u = int(content[2][0])
dim_v = int(content[2][1])
dim_w = int(content[2][2])
# Starting from 7th line, we have the weighted control points
surf_cpts = dim_u * dim_v
ctrlpts_end = 6 + (surf_cpts * dim_w)
ctrlpts_mesh = content[6:ctrlpts_end]
# mesh files have the control points in u-row order format
ctrlpts = []
for i in range(dim_w - 1):
ctrlpts += compatibility.flip_ctrlpts_u(ctrlpts_mesh[surf_cpts * i:surf_cpts * (i + 1)], dim_u, dim_v)
# mesh files store control points in format (x, y, z, w)
ctrlptsw = compatibility.generate_ctrlptsw(ctrlpts)
# Set control points
vol.set_ctrlpts(ctrlptsw, dim_u, dim_v, dim_w)
# 4th, 5th and 6th lines are knot vectors
vol.knotvector_u = [float(u) for u in content[3]]
vol.knotvector_v = [float(v) for v in content[4]]
vol.knotvector_w = [float(w) for w in content[5]]
# Return the volume instance
return vol | python | def import_vol_mesh(file_name):
""" Generates a NURBS volume object from a mesh file.
:param file_name: input mesh file
:type file_name: str
:return: a NURBS volume
:rtype: NURBS.Volume
"""
raw_content = read_file(file_name)
raw_content = raw_content.split("\n")
content = []
for rc in raw_content:
temp = rc.strip().split()
content.append(temp)
# 1st line defines the dimension and it must be 3
if int(content[0][0]) != 3:
raise TypeError("Input mesh '" + str(file_name) + "' must be 3-dimensional")
# Create a NURBS surface instance and fill with the data read from mesh file
vol = shortcuts.generate_volume(rational=True)
# 2nd line is the degrees
vol.degree_u = int(content[1][0])
vol.degree_v = int(content[1][1])
vol.degree_w = int(content[1][2])
# 3rd line is the number of weighted control points in u, v, w directions
dim_u = int(content[2][0])
dim_v = int(content[2][1])
dim_w = int(content[2][2])
# Starting from 7th line, we have the weighted control points
surf_cpts = dim_u * dim_v
ctrlpts_end = 6 + (surf_cpts * dim_w)
ctrlpts_mesh = content[6:ctrlpts_end]
# mesh files have the control points in u-row order format
ctrlpts = []
for i in range(dim_w - 1):
ctrlpts += compatibility.flip_ctrlpts_u(ctrlpts_mesh[surf_cpts * i:surf_cpts * (i + 1)], dim_u, dim_v)
# mesh files store control points in format (x, y, z, w)
ctrlptsw = compatibility.generate_ctrlptsw(ctrlpts)
# Set control points
vol.set_ctrlpts(ctrlptsw, dim_u, dim_v, dim_w)
# 4th, 5th and 6th lines are knot vectors
vol.knotvector_u = [float(u) for u in content[3]]
vol.knotvector_v = [float(v) for v in content[4]]
vol.knotvector_w = [float(w) for w in content[5]]
# Return the volume instance
return vol | [
"def",
"import_vol_mesh",
"(",
"file_name",
")",
":",
"raw_content",
"=",
"read_file",
"(",
"file_name",
")",
"raw_content",
"=",
"raw_content",
".",
"split",
"(",
"\"\\n\"",
")",
"content",
"=",
"[",
"]",
"for",
"rc",
"in",
"raw_content",
":",
"temp",
"="... | Generates a NURBS volume object from a mesh file.
:param file_name: input mesh file
:type file_name: str
:return: a NURBS volume
:rtype: NURBS.Volume | [
"Generates",
"a",
"NURBS",
"volume",
"object",
"from",
"a",
"mesh",
"file",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_exchange.py#L153-L207 | train | 225,167 |
orbingol/NURBS-Python | geomdl/exchange.py | import_txt | def import_txt(file_name, two_dimensional=False, **kwargs):
""" Reads control points from a text file and generates a 1-dimensional list of control points.
The following code examples illustrate importing different types of text files for curves and surfaces:
.. code-block:: python
:linenos:
# Import curve control points from a text file
curve_ctrlpts = exchange.import_txt(file_name="control_points.txt")
# Import surface control points from a text file (1-dimensional file)
surf_ctrlpts = exchange.import_txt(file_name="control_points.txt")
# Import surface control points from a text file (2-dimensional file)
surf_ctrlpts, size_u, size_v = exchange.import_txt(file_name="control_points.txt", two_dimensional=True)
If argument ``jinja2=True`` is set, then the input file is processed as a `Jinja2 <http://jinja.pocoo.org/>`_
template. You can also use the following convenience template functions which correspond to the given mathematical
equations:
* ``sqrt(x)``: :math:`\\sqrt{x}`
* ``cubert(x)``: :math:`\\sqrt[3]{x}`
* ``pow(x, y)``: :math:`x^{y}`
You may set the file delimiters using the keyword arguments ``separator`` and ``col_separator``, respectively.
``separator`` is the delimiter between the coordinates of the control points. It could be comma
``1, 2, 3`` or space ``1 2 3`` or something else. ``col_separator`` is the delimiter between the control
points and is only valid when ``two_dimensional`` is ``True``. Assuming that ``separator`` is set to space, then
``col_operator`` could be semi-colon ``1 2 3; 4 5 6`` or pipe ``1 2 3| 4 5 6`` or comma ``1 2 3, 4 5 6`` or
something else.
The defaults for ``separator`` and ``col_separator`` are *comma (,)* and *semi-colon (;)*, respectively.
The following code examples illustrate the usage of the keyword arguments discussed above.
.. code-block:: python
:linenos:
# Import curve control points from a text file delimited with space
curve_ctrlpts = exchange.import_txt(file_name="control_points.txt", separator=" ")
# Import surface control points from a text file (2-dimensional file) w/ space and comma delimiters
surf_ctrlpts, size_u, size_v = exchange.import_txt(file_name="control_points.txt", two_dimensional=True,
separator=" ", col_separator=",")
Please note that this function does not check whether the user set delimiters to the same value or not.
:param file_name: file name of the text file
:type file_name: str
:param two_dimensional: type of the text file
:type two_dimensional: bool
:return: list of control points, if two_dimensional, then also returns size in u- and v-directions
:rtype: list
:raises GeomdlException: an error occurred reading the file
"""
# Read file
content = exch.read_file(file_name)
# Are we using a Jinja2 template?
j2tmpl = kwargs.get('jinja2', False)
if j2tmpl:
content = exch.process_template(content)
# File delimiters
col_sep = kwargs.get('col_separator', ";")
sep = kwargs.get('separator', ",")
return exch.import_text_data(content, sep, col_sep, two_dimensional) | python | def import_txt(file_name, two_dimensional=False, **kwargs):
""" Reads control points from a text file and generates a 1-dimensional list of control points.
The following code examples illustrate importing different types of text files for curves and surfaces:
.. code-block:: python
:linenos:
# Import curve control points from a text file
curve_ctrlpts = exchange.import_txt(file_name="control_points.txt")
# Import surface control points from a text file (1-dimensional file)
surf_ctrlpts = exchange.import_txt(file_name="control_points.txt")
# Import surface control points from a text file (2-dimensional file)
surf_ctrlpts, size_u, size_v = exchange.import_txt(file_name="control_points.txt", two_dimensional=True)
If argument ``jinja2=True`` is set, then the input file is processed as a `Jinja2 <http://jinja.pocoo.org/>`_
template. You can also use the following convenience template functions which correspond to the given mathematical
equations:
* ``sqrt(x)``: :math:`\\sqrt{x}`
* ``cubert(x)``: :math:`\\sqrt[3]{x}`
* ``pow(x, y)``: :math:`x^{y}`
You may set the file delimiters using the keyword arguments ``separator`` and ``col_separator``, respectively.
``separator`` is the delimiter between the coordinates of the control points. It could be comma
``1, 2, 3`` or space ``1 2 3`` or something else. ``col_separator`` is the delimiter between the control
points and is only valid when ``two_dimensional`` is ``True``. Assuming that ``separator`` is set to space, then
``col_operator`` could be semi-colon ``1 2 3; 4 5 6`` or pipe ``1 2 3| 4 5 6`` or comma ``1 2 3, 4 5 6`` or
something else.
The defaults for ``separator`` and ``col_separator`` are *comma (,)* and *semi-colon (;)*, respectively.
The following code examples illustrate the usage of the keyword arguments discussed above.
.. code-block:: python
:linenos:
# Import curve control points from a text file delimited with space
curve_ctrlpts = exchange.import_txt(file_name="control_points.txt", separator=" ")
# Import surface control points from a text file (2-dimensional file) w/ space and comma delimiters
surf_ctrlpts, size_u, size_v = exchange.import_txt(file_name="control_points.txt", two_dimensional=True,
separator=" ", col_separator=",")
Please note that this function does not check whether the user set delimiters to the same value or not.
:param file_name: file name of the text file
:type file_name: str
:param two_dimensional: type of the text file
:type two_dimensional: bool
:return: list of control points, if two_dimensional, then also returns size in u- and v-directions
:rtype: list
:raises GeomdlException: an error occurred reading the file
"""
# Read file
content = exch.read_file(file_name)
# Are we using a Jinja2 template?
j2tmpl = kwargs.get('jinja2', False)
if j2tmpl:
content = exch.process_template(content)
# File delimiters
col_sep = kwargs.get('col_separator', ";")
sep = kwargs.get('separator', ",")
return exch.import_text_data(content, sep, col_sep, two_dimensional) | [
"def",
"import_txt",
"(",
"file_name",
",",
"two_dimensional",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"# Read file",
"content",
"=",
"exch",
".",
"read_file",
"(",
"file_name",
")",
"# Are we using a Jinja2 template?",
"j2tmpl",
"=",
"kwargs",
".",
"... | Reads control points from a text file and generates a 1-dimensional list of control points.
The following code examples illustrate importing different types of text files for curves and surfaces:
.. code-block:: python
:linenos:
# Import curve control points from a text file
curve_ctrlpts = exchange.import_txt(file_name="control_points.txt")
# Import surface control points from a text file (1-dimensional file)
surf_ctrlpts = exchange.import_txt(file_name="control_points.txt")
# Import surface control points from a text file (2-dimensional file)
surf_ctrlpts, size_u, size_v = exchange.import_txt(file_name="control_points.txt", two_dimensional=True)
If argument ``jinja2=True`` is set, then the input file is processed as a `Jinja2 <http://jinja.pocoo.org/>`_
template. You can also use the following convenience template functions which correspond to the given mathematical
equations:
* ``sqrt(x)``: :math:`\\sqrt{x}`
* ``cubert(x)``: :math:`\\sqrt[3]{x}`
* ``pow(x, y)``: :math:`x^{y}`
You may set the file delimiters using the keyword arguments ``separator`` and ``col_separator``, respectively.
``separator`` is the delimiter between the coordinates of the control points. It could be comma
``1, 2, 3`` or space ``1 2 3`` or something else. ``col_separator`` is the delimiter between the control
points and is only valid when ``two_dimensional`` is ``True``. Assuming that ``separator`` is set to space, then
``col_operator`` could be semi-colon ``1 2 3; 4 5 6`` or pipe ``1 2 3| 4 5 6`` or comma ``1 2 3, 4 5 6`` or
something else.
The defaults for ``separator`` and ``col_separator`` are *comma (,)* and *semi-colon (;)*, respectively.
The following code examples illustrate the usage of the keyword arguments discussed above.
.. code-block:: python
:linenos:
# Import curve control points from a text file delimited with space
curve_ctrlpts = exchange.import_txt(file_name="control_points.txt", separator=" ")
# Import surface control points from a text file (2-dimensional file) w/ space and comma delimiters
surf_ctrlpts, size_u, size_v = exchange.import_txt(file_name="control_points.txt", two_dimensional=True,
separator=" ", col_separator=",")
Please note that this function does not check whether the user set delimiters to the same value or not.
:param file_name: file name of the text file
:type file_name: str
:param two_dimensional: type of the text file
:type two_dimensional: bool
:return: list of control points, if two_dimensional, then also returns size in u- and v-directions
:rtype: list
:raises GeomdlException: an error occurred reading the file | [
"Reads",
"control",
"points",
"from",
"a",
"text",
"file",
"and",
"generates",
"a",
"1",
"-",
"dimensional",
"list",
"of",
"control",
"points",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/exchange.py#L21-L89 | train | 225,168 |
orbingol/NURBS-Python | geomdl/exchange.py | export_txt | def export_txt(obj, file_name, two_dimensional=False, **kwargs):
""" Exports control points as a text file.
For curves the output is always a list of control points. For surfaces, it is possible to generate a 2-dimensional
control point output file using ``two_dimensional``.
Please see :py:func:`.exchange.import_txt()` for detailed description of the keyword arguments.
:param obj: a spline geometry object
:type obj: abstract.SplineGeometry
:param file_name: file name of the text file to be saved
:type file_name: str
:param two_dimensional: type of the text file (only works for Surface objects)
:type two_dimensional: bool
:raises GeomdlException: an error occurred writing the file
"""
# Check if the user has set any control points
if obj.ctrlpts is None or len(obj.ctrlpts) == 0:
raise exch.GeomdlException("There are no control points to save!")
# Check the usage of two_dimensional flag
if obj.pdimension == 1 and two_dimensional:
# Silently ignore two_dimensional flag
two_dimensional = False
# File delimiters
col_sep = kwargs.get('col_separator', ";")
sep = kwargs.get('separator', ",")
content = exch.export_text_data(obj, sep, col_sep, two_dimensional)
return exch.write_file(file_name, content) | python | def export_txt(obj, file_name, two_dimensional=False, **kwargs):
""" Exports control points as a text file.
For curves the output is always a list of control points. For surfaces, it is possible to generate a 2-dimensional
control point output file using ``two_dimensional``.
Please see :py:func:`.exchange.import_txt()` for detailed description of the keyword arguments.
:param obj: a spline geometry object
:type obj: abstract.SplineGeometry
:param file_name: file name of the text file to be saved
:type file_name: str
:param two_dimensional: type of the text file (only works for Surface objects)
:type two_dimensional: bool
:raises GeomdlException: an error occurred writing the file
"""
# Check if the user has set any control points
if obj.ctrlpts is None or len(obj.ctrlpts) == 0:
raise exch.GeomdlException("There are no control points to save!")
# Check the usage of two_dimensional flag
if obj.pdimension == 1 and two_dimensional:
# Silently ignore two_dimensional flag
two_dimensional = False
# File delimiters
col_sep = kwargs.get('col_separator', ";")
sep = kwargs.get('separator', ",")
content = exch.export_text_data(obj, sep, col_sep, two_dimensional)
return exch.write_file(file_name, content) | [
"def",
"export_txt",
"(",
"obj",
",",
"file_name",
",",
"two_dimensional",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"# Check if the user has set any control points",
"if",
"obj",
".",
"ctrlpts",
"is",
"None",
"or",
"len",
"(",
"obj",
".",
"ctrlpts",
... | Exports control points as a text file.
For curves the output is always a list of control points. For surfaces, it is possible to generate a 2-dimensional
control point output file using ``two_dimensional``.
Please see :py:func:`.exchange.import_txt()` for detailed description of the keyword arguments.
:param obj: a spline geometry object
:type obj: abstract.SplineGeometry
:param file_name: file name of the text file to be saved
:type file_name: str
:param two_dimensional: type of the text file (only works for Surface objects)
:type two_dimensional: bool
:raises GeomdlException: an error occurred writing the file | [
"Exports",
"control",
"points",
"as",
"a",
"text",
"file",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/exchange.py#L93-L123 | train | 225,169 |
orbingol/NURBS-Python | geomdl/exchange.py | import_csv | def import_csv(file_name, **kwargs):
""" Reads control points from a CSV file and generates a 1-dimensional list of control points.
It is possible to use a different value separator via ``separator`` keyword argument. The following code segment
illustrates the usage of ``separator`` keyword argument.
.. code-block:: python
:linenos:
# By default, import_csv uses 'comma' as the value separator
ctrlpts = exchange.import_csv("control_points.csv")
# Alternatively, it is possible to import a file containing tab-separated values
ctrlpts = exchange.import_csv("control_points.csv", separator="\\t")
The only difference of this function from :py:func:`.exchange.import_txt()` is skipping the first line of the input
file which generally contains the column headings.
:param file_name: file name of the text file
:type file_name: str
:return: list of control points
:rtype: list
:raises GeomdlException: an error occurred reading the file
"""
# File delimiters
sep = kwargs.get('separator', ",")
content = exch.read_file(file_name, skip_lines=1)
return exch.import_text_data(content, sep) | python | def import_csv(file_name, **kwargs):
""" Reads control points from a CSV file and generates a 1-dimensional list of control points.
It is possible to use a different value separator via ``separator`` keyword argument. The following code segment
illustrates the usage of ``separator`` keyword argument.
.. code-block:: python
:linenos:
# By default, import_csv uses 'comma' as the value separator
ctrlpts = exchange.import_csv("control_points.csv")
# Alternatively, it is possible to import a file containing tab-separated values
ctrlpts = exchange.import_csv("control_points.csv", separator="\\t")
The only difference of this function from :py:func:`.exchange.import_txt()` is skipping the first line of the input
file which generally contains the column headings.
:param file_name: file name of the text file
:type file_name: str
:return: list of control points
:rtype: list
:raises GeomdlException: an error occurred reading the file
"""
# File delimiters
sep = kwargs.get('separator', ",")
content = exch.read_file(file_name, skip_lines=1)
return exch.import_text_data(content, sep) | [
"def",
"import_csv",
"(",
"file_name",
",",
"*",
"*",
"kwargs",
")",
":",
"# File delimiters",
"sep",
"=",
"kwargs",
".",
"get",
"(",
"'separator'",
",",
"\",\"",
")",
"content",
"=",
"exch",
".",
"read_file",
"(",
"file_name",
",",
"skip_lines",
"=",
"1... | Reads control points from a CSV file and generates a 1-dimensional list of control points.
It is possible to use a different value separator via ``separator`` keyword argument. The following code segment
illustrates the usage of ``separator`` keyword argument.
.. code-block:: python
:linenos:
# By default, import_csv uses 'comma' as the value separator
ctrlpts = exchange.import_csv("control_points.csv")
# Alternatively, it is possible to import a file containing tab-separated values
ctrlpts = exchange.import_csv("control_points.csv", separator="\\t")
The only difference of this function from :py:func:`.exchange.import_txt()` is skipping the first line of the input
file which generally contains the column headings.
:param file_name: file name of the text file
:type file_name: str
:return: list of control points
:rtype: list
:raises GeomdlException: an error occurred reading the file | [
"Reads",
"control",
"points",
"from",
"a",
"CSV",
"file",
"and",
"generates",
"a",
"1",
"-",
"dimensional",
"list",
"of",
"control",
"points",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/exchange.py#L127-L155 | train | 225,170 |
orbingol/NURBS-Python | geomdl/exchange.py | export_csv | def export_csv(obj, file_name, point_type='evalpts', **kwargs):
""" Exports control points or evaluated points as a CSV file.
:param obj: a spline geometry object
:type obj: abstract.SplineGeometry
:param file_name: output file name
:type file_name: str
:param point_type: ``ctrlpts`` for control points or ``evalpts`` for evaluated points
:type point_type: str
:raises GeomdlException: an error occurred writing the file
"""
if not 0 < obj.pdimension < 3:
raise exch.GeomdlException("Input object should be a curve or a surface")
# Pick correct points from the object
if point_type == 'ctrlpts':
points = obj.ctrlptsw if obj.rational else obj.ctrlpts
elif point_type == 'evalpts':
points = obj.evalpts
else:
raise exch.GeomdlException("Please choose a valid point type option. Possible types: ctrlpts, evalpts")
# Prepare CSV header
dim = len(points[0])
line = "dim "
for i in range(dim-1):
line += str(i + 1) + ", dim "
line += str(dim) + "\n"
# Prepare values
for pt in points:
line += ",".join([str(p) for p in pt]) + "\n"
# Write to file
return exch.write_file(file_name, line) | python | def export_csv(obj, file_name, point_type='evalpts', **kwargs):
""" Exports control points or evaluated points as a CSV file.
:param obj: a spline geometry object
:type obj: abstract.SplineGeometry
:param file_name: output file name
:type file_name: str
:param point_type: ``ctrlpts`` for control points or ``evalpts`` for evaluated points
:type point_type: str
:raises GeomdlException: an error occurred writing the file
"""
if not 0 < obj.pdimension < 3:
raise exch.GeomdlException("Input object should be a curve or a surface")
# Pick correct points from the object
if point_type == 'ctrlpts':
points = obj.ctrlptsw if obj.rational else obj.ctrlpts
elif point_type == 'evalpts':
points = obj.evalpts
else:
raise exch.GeomdlException("Please choose a valid point type option. Possible types: ctrlpts, evalpts")
# Prepare CSV header
dim = len(points[0])
line = "dim "
for i in range(dim-1):
line += str(i + 1) + ", dim "
line += str(dim) + "\n"
# Prepare values
for pt in points:
line += ",".join([str(p) for p in pt]) + "\n"
# Write to file
return exch.write_file(file_name, line) | [
"def",
"export_csv",
"(",
"obj",
",",
"file_name",
",",
"point_type",
"=",
"'evalpts'",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"0",
"<",
"obj",
".",
"pdimension",
"<",
"3",
":",
"raise",
"exch",
".",
"GeomdlException",
"(",
"\"Input object shoul... | Exports control points or evaluated points as a CSV file.
:param obj: a spline geometry object
:type obj: abstract.SplineGeometry
:param file_name: output file name
:type file_name: str
:param point_type: ``ctrlpts`` for control points or ``evalpts`` for evaluated points
:type point_type: str
:raises GeomdlException: an error occurred writing the file | [
"Exports",
"control",
"points",
"or",
"evaluated",
"points",
"as",
"a",
"CSV",
"file",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/exchange.py#L159-L193 | train | 225,171 |
orbingol/NURBS-Python | geomdl/exchange.py | import_cfg | def import_cfg(file_name, **kwargs):
""" Imports curves and surfaces from files in libconfig format.
.. note::
Requires `libconf <https://pypi.org/project/libconf/>`_ package.
Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.
:param file_name: name of the input file
:type file_name: str
:return: a list of rational spline geometries
:rtype: list
:raises GeomdlException: an error occurred writing the file
"""
def callback(data):
return libconf.loads(data)
# Check if it is possible to import 'libconf'
try:
import libconf
except ImportError:
raise exch.GeomdlException("Please install 'libconf' package to use libconfig format: pip install libconf")
# Get keyword arguments
delta = kwargs.get('delta', -1.0)
use_template = kwargs.get('jinja2', False)
# Read file
file_src = exch.read_file(file_name)
# Import data
return exch.import_dict_str(file_src=file_src, delta=delta, callback=callback, tmpl=use_template) | python | def import_cfg(file_name, **kwargs):
""" Imports curves and surfaces from files in libconfig format.
.. note::
Requires `libconf <https://pypi.org/project/libconf/>`_ package.
Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.
:param file_name: name of the input file
:type file_name: str
:return: a list of rational spline geometries
:rtype: list
:raises GeomdlException: an error occurred writing the file
"""
def callback(data):
return libconf.loads(data)
# Check if it is possible to import 'libconf'
try:
import libconf
except ImportError:
raise exch.GeomdlException("Please install 'libconf' package to use libconfig format: pip install libconf")
# Get keyword arguments
delta = kwargs.get('delta', -1.0)
use_template = kwargs.get('jinja2', False)
# Read file
file_src = exch.read_file(file_name)
# Import data
return exch.import_dict_str(file_src=file_src, delta=delta, callback=callback, tmpl=use_template) | [
"def",
"import_cfg",
"(",
"file_name",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"callback",
"(",
"data",
")",
":",
"return",
"libconf",
".",
"loads",
"(",
"data",
")",
"# Check if it is possible to import 'libconf'",
"try",
":",
"import",
"libconf",
"except",... | Imports curves and surfaces from files in libconfig format.
.. note::
Requires `libconf <https://pypi.org/project/libconf/>`_ package.
Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.
:param file_name: name of the input file
:type file_name: str
:return: a list of rational spline geometries
:rtype: list
:raises GeomdlException: an error occurred writing the file | [
"Imports",
"curves",
"and",
"surfaces",
"from",
"files",
"in",
"libconfig",
"format",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/exchange.py#L197-L229 | train | 225,172 |
orbingol/NURBS-Python | geomdl/exchange.py | export_cfg | def export_cfg(obj, file_name):
""" Exports curves and surfaces in libconfig format.
.. note::
Requires `libconf <https://pypi.org/project/libconf/>`_ package.
Libconfig format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_
as a way to input shape data from the command line.
:param obj: input geometry
:type obj: abstract.SplineGeometry, multi.AbstractContainer
:param file_name: name of the output file
:type file_name: str
:raises GeomdlException: an error occurred writing the file
"""
def callback(data):
return libconf.dumps(data)
# Check if it is possible to import 'libconf'
try:
import libconf
except ImportError:
raise exch.GeomdlException("Please install 'libconf' package to use libconfig format: pip install libconf")
# Export data
exported_data = exch.export_dict_str(obj=obj, callback=callback)
# Write to file
return exch.write_file(file_name, exported_data) | python | def export_cfg(obj, file_name):
""" Exports curves and surfaces in libconfig format.
.. note::
Requires `libconf <https://pypi.org/project/libconf/>`_ package.
Libconfig format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_
as a way to input shape data from the command line.
:param obj: input geometry
:type obj: abstract.SplineGeometry, multi.AbstractContainer
:param file_name: name of the output file
:type file_name: str
:raises GeomdlException: an error occurred writing the file
"""
def callback(data):
return libconf.dumps(data)
# Check if it is possible to import 'libconf'
try:
import libconf
except ImportError:
raise exch.GeomdlException("Please install 'libconf' package to use libconfig format: pip install libconf")
# Export data
exported_data = exch.export_dict_str(obj=obj, callback=callback)
# Write to file
return exch.write_file(file_name, exported_data) | [
"def",
"export_cfg",
"(",
"obj",
",",
"file_name",
")",
":",
"def",
"callback",
"(",
"data",
")",
":",
"return",
"libconf",
".",
"dumps",
"(",
"data",
")",
"# Check if it is possible to import 'libconf'",
"try",
":",
"import",
"libconf",
"except",
"ImportError",... | Exports curves and surfaces in libconfig format.
.. note::
Requires `libconf <https://pypi.org/project/libconf/>`_ package.
Libconfig format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_
as a way to input shape data from the command line.
:param obj: input geometry
:type obj: abstract.SplineGeometry, multi.AbstractContainer
:param file_name: name of the output file
:type file_name: str
:raises GeomdlException: an error occurred writing the file | [
"Exports",
"curves",
"and",
"surfaces",
"in",
"libconfig",
"format",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/exchange.py#L233-L262 | train | 225,173 |
orbingol/NURBS-Python | geomdl/exchange.py | import_yaml | def import_yaml(file_name, **kwargs):
""" Imports curves and surfaces from files in YAML format.
.. note::
Requires `ruamel.yaml <https://pypi.org/project/ruamel.yaml/>`_ package.
Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.
:param file_name: name of the input file
:type file_name: str
:return: a list of rational spline geometries
:rtype: list
:raises GeomdlException: an error occurred reading the file
"""
def callback(data):
yaml = YAML()
return yaml.load(data)
# Check if it is possible to import 'ruamel.yaml'
try:
from ruamel.yaml import YAML
except ImportError:
raise exch.GeomdlException("Please install 'ruamel.yaml' package to use YAML format: pip install ruamel.yaml")
# Get keyword arguments
delta = kwargs.get('delta', -1.0)
use_template = kwargs.get('jinja2', False)
# Read file
file_src = exch.read_file(file_name)
# Import data
return exch.import_dict_str(file_src=file_src, delta=delta, callback=callback, tmpl=use_template) | python | def import_yaml(file_name, **kwargs):
""" Imports curves and surfaces from files in YAML format.
.. note::
Requires `ruamel.yaml <https://pypi.org/project/ruamel.yaml/>`_ package.
Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.
:param file_name: name of the input file
:type file_name: str
:return: a list of rational spline geometries
:rtype: list
:raises GeomdlException: an error occurred reading the file
"""
def callback(data):
yaml = YAML()
return yaml.load(data)
# Check if it is possible to import 'ruamel.yaml'
try:
from ruamel.yaml import YAML
except ImportError:
raise exch.GeomdlException("Please install 'ruamel.yaml' package to use YAML format: pip install ruamel.yaml")
# Get keyword arguments
delta = kwargs.get('delta', -1.0)
use_template = kwargs.get('jinja2', False)
# Read file
file_src = exch.read_file(file_name)
# Import data
return exch.import_dict_str(file_src=file_src, delta=delta, callback=callback, tmpl=use_template) | [
"def",
"import_yaml",
"(",
"file_name",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"callback",
"(",
"data",
")",
":",
"yaml",
"=",
"YAML",
"(",
")",
"return",
"yaml",
".",
"load",
"(",
"data",
")",
"# Check if it is possible to import 'ruamel.yaml'",
"try",
... | Imports curves and surfaces from files in YAML format.
.. note::
Requires `ruamel.yaml <https://pypi.org/project/ruamel.yaml/>`_ package.
Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.
:param file_name: name of the input file
:type file_name: str
:return: a list of rational spline geometries
:rtype: list
:raises GeomdlException: an error occurred reading the file | [
"Imports",
"curves",
"and",
"surfaces",
"from",
"files",
"in",
"YAML",
"format",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/exchange.py#L266-L299 | train | 225,174 |
orbingol/NURBS-Python | geomdl/exchange.py | export_yaml | def export_yaml(obj, file_name):
""" Exports curves and surfaces in YAML format.
.. note::
Requires `ruamel.yaml <https://pypi.org/project/ruamel.yaml/>`_ package.
YAML format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_
as a way to input shape data from the command line.
:param obj: input geometry
:type obj: abstract.SplineGeometry, multi.AbstractContainer
:param file_name: name of the output file
:type file_name: str
:raises GeomdlException: an error occurred writing the file
"""
def callback(data):
# Ref: https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string
stream = StringIO()
yaml = YAML()
yaml.dump(data, stream)
return stream.getvalue()
# Check if it is possible to import 'ruamel.yaml'
try:
from ruamel.yaml import YAML
except ImportError:
raise exch.GeomdlException("Please install 'ruamel.yaml' package to use YAML format: pip install ruamel.yaml")
# Export data
exported_data = exch.export_dict_str(obj=obj, callback=callback)
# Write to file
return exch.write_file(file_name, exported_data) | python | def export_yaml(obj, file_name):
""" Exports curves and surfaces in YAML format.
.. note::
Requires `ruamel.yaml <https://pypi.org/project/ruamel.yaml/>`_ package.
YAML format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_
as a way to input shape data from the command line.
:param obj: input geometry
:type obj: abstract.SplineGeometry, multi.AbstractContainer
:param file_name: name of the output file
:type file_name: str
:raises GeomdlException: an error occurred writing the file
"""
def callback(data):
# Ref: https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string
stream = StringIO()
yaml = YAML()
yaml.dump(data, stream)
return stream.getvalue()
# Check if it is possible to import 'ruamel.yaml'
try:
from ruamel.yaml import YAML
except ImportError:
raise exch.GeomdlException("Please install 'ruamel.yaml' package to use YAML format: pip install ruamel.yaml")
# Export data
exported_data = exch.export_dict_str(obj=obj, callback=callback)
# Write to file
return exch.write_file(file_name, exported_data) | [
"def",
"export_yaml",
"(",
"obj",
",",
"file_name",
")",
":",
"def",
"callback",
"(",
"data",
")",
":",
"# Ref: https://yaml.readthedocs.io/en/latest/example.html#output-of-dump-as-a-string",
"stream",
"=",
"StringIO",
"(",
")",
"yaml",
"=",
"YAML",
"(",
")",
"yaml"... | Exports curves and surfaces in YAML format.
.. note::
Requires `ruamel.yaml <https://pypi.org/project/ruamel.yaml/>`_ package.
YAML format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_
as a way to input shape data from the command line.
:param obj: input geometry
:type obj: abstract.SplineGeometry, multi.AbstractContainer
:param file_name: name of the output file
:type file_name: str
:raises GeomdlException: an error occurred writing the file | [
"Exports",
"curves",
"and",
"surfaces",
"in",
"YAML",
"format",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/exchange.py#L303-L336 | train | 225,175 |
orbingol/NURBS-Python | geomdl/exchange.py | import_json | def import_json(file_name, **kwargs):
""" Imports curves and surfaces from files in JSON format.
Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.
:param file_name: name of the input file
:type file_name: str
:return: a list of rational spline geometries
:rtype: list
:raises GeomdlException: an error occurred reading the file
"""
def callback(data):
return json.loads(data)
# Get keyword arguments
delta = kwargs.get('delta', -1.0)
use_template = kwargs.get('jinja2', False)
# Read file
file_src = exch.read_file(file_name)
# Import data
return exch.import_dict_str(file_src=file_src, delta=delta, callback=callback, tmpl=use_template) | python | def import_json(file_name, **kwargs):
""" Imports curves and surfaces from files in JSON format.
Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.
:param file_name: name of the input file
:type file_name: str
:return: a list of rational spline geometries
:rtype: list
:raises GeomdlException: an error occurred reading the file
"""
def callback(data):
return json.loads(data)
# Get keyword arguments
delta = kwargs.get('delta', -1.0)
use_template = kwargs.get('jinja2', False)
# Read file
file_src = exch.read_file(file_name)
# Import data
return exch.import_dict_str(file_src=file_src, delta=delta, callback=callback, tmpl=use_template) | [
"def",
"import_json",
"(",
"file_name",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"callback",
"(",
"data",
")",
":",
"return",
"json",
".",
"loads",
"(",
"data",
")",
"# Get keyword arguments",
"delta",
"=",
"kwargs",
".",
"get",
"(",
"'delta'",
",",
"... | Imports curves and surfaces from files in JSON format.
Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details.
:param file_name: name of the input file
:type file_name: str
:return: a list of rational spline geometries
:rtype: list
:raises GeomdlException: an error occurred reading the file | [
"Imports",
"curves",
"and",
"surfaces",
"from",
"files",
"in",
"JSON",
"format",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/exchange.py#L340-L362 | train | 225,176 |
orbingol/NURBS-Python | geomdl/exchange.py | export_json | def export_json(obj, file_name):
""" Exports curves and surfaces in JSON format.
JSON format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_
as a way to input shape data from the command line.
:param obj: input geometry
:type obj: abstract.SplineGeometry, multi.AbstractContainer
:param file_name: name of the output file
:type file_name: str
:raises GeomdlException: an error occurred writing the file
"""
def callback(data):
return json.dumps(data, indent=4)
# Export data
exported_data = exch.export_dict_str(obj=obj, callback=callback)
# Write to file
return exch.write_file(file_name, exported_data) | python | def export_json(obj, file_name):
""" Exports curves and surfaces in JSON format.
JSON format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_
as a way to input shape data from the command line.
:param obj: input geometry
:type obj: abstract.SplineGeometry, multi.AbstractContainer
:param file_name: name of the output file
:type file_name: str
:raises GeomdlException: an error occurred writing the file
"""
def callback(data):
return json.dumps(data, indent=4)
# Export data
exported_data = exch.export_dict_str(obj=obj, callback=callback)
# Write to file
return exch.write_file(file_name, exported_data) | [
"def",
"export_json",
"(",
"obj",
",",
"file_name",
")",
":",
"def",
"callback",
"(",
"data",
")",
":",
"return",
"json",
".",
"dumps",
"(",
"data",
",",
"indent",
"=",
"4",
")",
"# Export data",
"exported_data",
"=",
"exch",
".",
"export_dict_str",
"(",... | Exports curves and surfaces in JSON format.
JSON format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_
as a way to input shape data from the command line.
:param obj: input geometry
:type obj: abstract.SplineGeometry, multi.AbstractContainer
:param file_name: name of the output file
:type file_name: str
:raises GeomdlException: an error occurred writing the file | [
"Exports",
"curves",
"and",
"surfaces",
"in",
"JSON",
"format",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/exchange.py#L366-L385 | train | 225,177 |
orbingol/NURBS-Python | geomdl/exchange.py | import_obj | def import_obj(file_name, **kwargs):
""" Reads .obj files and generates faces.
Keyword Arguments:
* ``callback``: reference to the function that processes the faces for customized output
The structure of the callback function is shown below:
.. code-block:: python
def my_callback_function(face_list):
# "face_list" will be a list of elements.Face class instances
# The function should return a list
return list()
:param file_name: file name
:type file_name: str
:return: output of the callback function (default is a list of faces)
:rtype: list
"""
def default_callback(face_list):
return face_list
# Keyword arguments
callback_func = kwargs.get('callback', default_callback)
# Read and process the input file
content = exch.read_file(file_name)
content_arr = content.split("\n")
# Initialize variables
on_face = False
vertices = []
triangles = []
faces = []
# Index values
vert_idx = 1
tri_idx = 1
face_idx = 1
# Loop through the data
for carr in content_arr:
carr = carr.strip()
data = carr.split(" ")
data = [d.strip() for d in data]
if data[0] == "v":
if on_face:
on_face = not on_face
face = elements.Face(*triangles, id=face_idx)
faces.append(face)
face_idx += 1
vertices[:] = []
triangles[:] = []
vert_idx = 1
tri_idx = 1
vertex = elements.Vertex(*data[1:], id=vert_idx)
vertices.append(vertex)
vert_idx += 1
if data[0] == "f":
on_face = True
triangle = elements.Triangle(*[vertices[int(fidx) - 1] for fidx in data[1:]], id=tri_idx)
triangles.append(triangle)
tri_idx += 1
# Process he final face
if triangles:
face = elements.Face(*triangles, id=face_idx)
faces.append(face)
# Return the output of the callback function
return callback_func(faces) | python | def import_obj(file_name, **kwargs):
""" Reads .obj files and generates faces.
Keyword Arguments:
* ``callback``: reference to the function that processes the faces for customized output
The structure of the callback function is shown below:
.. code-block:: python
def my_callback_function(face_list):
# "face_list" will be a list of elements.Face class instances
# The function should return a list
return list()
:param file_name: file name
:type file_name: str
:return: output of the callback function (default is a list of faces)
:rtype: list
"""
def default_callback(face_list):
return face_list
# Keyword arguments
callback_func = kwargs.get('callback', default_callback)
# Read and process the input file
content = exch.read_file(file_name)
content_arr = content.split("\n")
# Initialize variables
on_face = False
vertices = []
triangles = []
faces = []
# Index values
vert_idx = 1
tri_idx = 1
face_idx = 1
# Loop through the data
for carr in content_arr:
carr = carr.strip()
data = carr.split(" ")
data = [d.strip() for d in data]
if data[0] == "v":
if on_face:
on_face = not on_face
face = elements.Face(*triangles, id=face_idx)
faces.append(face)
face_idx += 1
vertices[:] = []
triangles[:] = []
vert_idx = 1
tri_idx = 1
vertex = elements.Vertex(*data[1:], id=vert_idx)
vertices.append(vertex)
vert_idx += 1
if data[0] == "f":
on_face = True
triangle = elements.Triangle(*[vertices[int(fidx) - 1] for fidx in data[1:]], id=tri_idx)
triangles.append(triangle)
tri_idx += 1
# Process he final face
if triangles:
face = elements.Face(*triangles, id=face_idx)
faces.append(face)
# Return the output of the callback function
return callback_func(faces) | [
"def",
"import_obj",
"(",
"file_name",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"default_callback",
"(",
"face_list",
")",
":",
"return",
"face_list",
"# Keyword arguments",
"callback_func",
"=",
"kwargs",
".",
"get",
"(",
"'callback'",
",",
"default_callback",... | Reads .obj files and generates faces.
Keyword Arguments:
* ``callback``: reference to the function that processes the faces for customized output
The structure of the callback function is shown below:
.. code-block:: python
def my_callback_function(face_list):
# "face_list" will be a list of elements.Face class instances
# The function should return a list
return list()
:param file_name: file name
:type file_name: str
:return: output of the callback function (default is a list of faces)
:rtype: list | [
"Reads",
".",
"obj",
"files",
"and",
"generates",
"faces",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/exchange.py#L389-L460 | train | 225,178 |
orbingol/NURBS-Python | geomdl/multi.py | select_color | def select_color(cpcolor, evalcolor, idx=0):
""" Selects item color for plotting.
:param cpcolor: color for control points grid item
:type cpcolor: str, list, tuple
:param evalcolor: color for evaluated points grid item
:type evalcolor: str, list, tuple
:param idx: index of the current geometry object
:type idx: int
:return: a list of color values
:rtype: list
"""
# Random colors by default
color = utilities.color_generator()
# Constant color for control points grid
if isinstance(cpcolor, str):
color[0] = cpcolor
# User-defined color for control points grid
if isinstance(cpcolor, (list, tuple)):
color[0] = cpcolor[idx]
# Constant color for evaluated points grid
if isinstance(evalcolor, str):
color[1] = evalcolor
# User-defined color for evaluated points grid
if isinstance(evalcolor, (list, tuple)):
color[1] = evalcolor[idx]
return color | python | def select_color(cpcolor, evalcolor, idx=0):
""" Selects item color for plotting.
:param cpcolor: color for control points grid item
:type cpcolor: str, list, tuple
:param evalcolor: color for evaluated points grid item
:type evalcolor: str, list, tuple
:param idx: index of the current geometry object
:type idx: int
:return: a list of color values
:rtype: list
"""
# Random colors by default
color = utilities.color_generator()
# Constant color for control points grid
if isinstance(cpcolor, str):
color[0] = cpcolor
# User-defined color for control points grid
if isinstance(cpcolor, (list, tuple)):
color[0] = cpcolor[idx]
# Constant color for evaluated points grid
if isinstance(evalcolor, str):
color[1] = evalcolor
# User-defined color for evaluated points grid
if isinstance(evalcolor, (list, tuple)):
color[1] = evalcolor[idx]
return color | [
"def",
"select_color",
"(",
"cpcolor",
",",
"evalcolor",
",",
"idx",
"=",
"0",
")",
":",
"# Random colors by default",
"color",
"=",
"utilities",
".",
"color_generator",
"(",
")",
"# Constant color for control points grid",
"if",
"isinstance",
"(",
"cpcolor",
",",
... | Selects item color for plotting.
:param cpcolor: color for control points grid item
:type cpcolor: str, list, tuple
:param evalcolor: color for evaluated points grid item
:type evalcolor: str, list, tuple
:param idx: index of the current geometry object
:type idx: int
:return: a list of color values
:rtype: list | [
"Selects",
"item",
"color",
"for",
"plotting",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/multi.py#L1080-L1111 | train | 225,179 |
orbingol/NURBS-Python | geomdl/multi.py | process_tessellate | def process_tessellate(elem, update_delta, delta, **kwargs):
""" Tessellates surfaces.
.. note:: Helper function required for ``multiprocessing``
:param elem: surface
:type elem: abstract.Surface
:param update_delta: flag to control evaluation delta updates
:type update_delta: bool
:param delta: evaluation delta
:type delta: list, tuple
:return: updated surface
:rtype: abstract.Surface
"""
if update_delta:
elem.delta = delta
elem.evaluate()
elem.tessellate(**kwargs)
return elem | python | def process_tessellate(elem, update_delta, delta, **kwargs):
""" Tessellates surfaces.
.. note:: Helper function required for ``multiprocessing``
:param elem: surface
:type elem: abstract.Surface
:param update_delta: flag to control evaluation delta updates
:type update_delta: bool
:param delta: evaluation delta
:type delta: list, tuple
:return: updated surface
:rtype: abstract.Surface
"""
if update_delta:
elem.delta = delta
elem.evaluate()
elem.tessellate(**kwargs)
return elem | [
"def",
"process_tessellate",
"(",
"elem",
",",
"update_delta",
",",
"delta",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"update_delta",
":",
"elem",
".",
"delta",
"=",
"delta",
"elem",
".",
"evaluate",
"(",
")",
"elem",
".",
"tessellate",
"(",
"*",
"*",
... | Tessellates surfaces.
.. note:: Helper function required for ``multiprocessing``
:param elem: surface
:type elem: abstract.Surface
:param update_delta: flag to control evaluation delta updates
:type update_delta: bool
:param delta: evaluation delta
:type delta: list, tuple
:return: updated surface
:rtype: abstract.Surface | [
"Tessellates",
"surfaces",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/multi.py#L1114-L1132 | train | 225,180 |
orbingol/NURBS-Python | geomdl/multi.py | process_elements_surface | def process_elements_surface(elem, mconf, colorval, idx, force_tsl, update_delta, delta, reset_names):
""" Processes visualization elements for surfaces.
.. note:: Helper function required for ``multiprocessing``
:param elem: surface
:type elem: abstract.Surface
:param mconf: visualization module configuration
:type mconf: dict
:param colorval: color values
:type colorval: tuple
:param idx: index of the surface
:type idx: int
:param force_tsl: flag to force re-tessellation
:type force_tsl: bool
:param update_delta: flag to update surface delta
:type update_delta: bool
:param delta: new surface evaluation delta
:type delta: list, tuple
:param reset_names: flag to reset names
:type reset_names: bool
:return: visualization element (as a dict)
:rtype: list
"""
if idx < 0:
lock.acquire()
idx = counter.value
counter.value += 1
lock.release()
if update_delta:
elem.delta = delta
elem.evaluate()
# Reset element name
if reset_names:
elem.name = "surface"
# Fix element name
if elem.name == "surface" and idx >= 0:
elem.name = elem.name + " " + str(idx)
# Color selection
color = select_color(colorval[0], colorval[1], idx=idx)
# Initialize the return list
rl = []
# Add control points
if mconf['ctrlpts'] == 'points':
ret = dict(ptsarr=elem.ctrlpts, name=(elem.name, "(CP)"),
color=color[0], plot_type='ctrlpts', idx=idx)
rl.append(ret)
# Add control points as quads
if mconf['ctrlpts'] == 'quads':
qtsl = tessellate.QuadTessellate()
qtsl.tessellate(elem.ctrlpts, size_u=elem.ctrlpts_size_u, size_v=elem.ctrlpts_size_v)
ret = dict(ptsarr=[qtsl.vertices, qtsl.faces], name=(elem.name, "(CP)"),
color=color[0], plot_type='ctrlpts', idx=idx)
rl.append(ret)
# Add surface points
if mconf['evalpts'] == 'points':
ret = dict(ptsarr=elem.evalpts, name=(elem.name, idx), color=color[1], plot_type='evalpts', idx=idx)
rl.append(ret)
# Add surface points as quads
if mconf['evalpts'] == 'quads':
qtsl = tessellate.QuadTessellate()
qtsl.tessellate(elem.evalpts, size_u=elem.sample_size_u, size_v=elem.sample_size_v)
ret = dict(ptsarr=[qtsl.vertices, qtsl.faces],
name=elem.name, color=color[1], plot_type='evalpts', idx=idx)
rl.append(ret)
# Add surface points as vertices and triangles
if mconf['evalpts'] == 'triangles':
elem.tessellate(force=force_tsl)
ret = dict(ptsarr=[elem.tessellator.vertices, elem.tessellator.faces],
name=elem.name, color=color[1], plot_type='evalpts', idx=idx)
rl.append(ret)
# Add the trim curves
for itc, trim in enumerate(elem.trims):
ret = dict(ptsarr=elem.evaluate_list(trim.evalpts), name=("trim", itc),
color=colorval[2], plot_type='trimcurve', idx=idx)
rl.append(ret)
# Return the list
return rl | python | def process_elements_surface(elem, mconf, colorval, idx, force_tsl, update_delta, delta, reset_names):
""" Processes visualization elements for surfaces.
.. note:: Helper function required for ``multiprocessing``
:param elem: surface
:type elem: abstract.Surface
:param mconf: visualization module configuration
:type mconf: dict
:param colorval: color values
:type colorval: tuple
:param idx: index of the surface
:type idx: int
:param force_tsl: flag to force re-tessellation
:type force_tsl: bool
:param update_delta: flag to update surface delta
:type update_delta: bool
:param delta: new surface evaluation delta
:type delta: list, tuple
:param reset_names: flag to reset names
:type reset_names: bool
:return: visualization element (as a dict)
:rtype: list
"""
if idx < 0:
lock.acquire()
idx = counter.value
counter.value += 1
lock.release()
if update_delta:
elem.delta = delta
elem.evaluate()
# Reset element name
if reset_names:
elem.name = "surface"
# Fix element name
if elem.name == "surface" and idx >= 0:
elem.name = elem.name + " " + str(idx)
# Color selection
color = select_color(colorval[0], colorval[1], idx=idx)
# Initialize the return list
rl = []
# Add control points
if mconf['ctrlpts'] == 'points':
ret = dict(ptsarr=elem.ctrlpts, name=(elem.name, "(CP)"),
color=color[0], plot_type='ctrlpts', idx=idx)
rl.append(ret)
# Add control points as quads
if mconf['ctrlpts'] == 'quads':
qtsl = tessellate.QuadTessellate()
qtsl.tessellate(elem.ctrlpts, size_u=elem.ctrlpts_size_u, size_v=elem.ctrlpts_size_v)
ret = dict(ptsarr=[qtsl.vertices, qtsl.faces], name=(elem.name, "(CP)"),
color=color[0], plot_type='ctrlpts', idx=idx)
rl.append(ret)
# Add surface points
if mconf['evalpts'] == 'points':
ret = dict(ptsarr=elem.evalpts, name=(elem.name, idx), color=color[1], plot_type='evalpts', idx=idx)
rl.append(ret)
# Add surface points as quads
if mconf['evalpts'] == 'quads':
qtsl = tessellate.QuadTessellate()
qtsl.tessellate(elem.evalpts, size_u=elem.sample_size_u, size_v=elem.sample_size_v)
ret = dict(ptsarr=[qtsl.vertices, qtsl.faces],
name=elem.name, color=color[1], plot_type='evalpts', idx=idx)
rl.append(ret)
# Add surface points as vertices and triangles
if mconf['evalpts'] == 'triangles':
elem.tessellate(force=force_tsl)
ret = dict(ptsarr=[elem.tessellator.vertices, elem.tessellator.faces],
name=elem.name, color=color[1], plot_type='evalpts', idx=idx)
rl.append(ret)
# Add the trim curves
for itc, trim in enumerate(elem.trims):
ret = dict(ptsarr=elem.evaluate_list(trim.evalpts), name=("trim", itc),
color=colorval[2], plot_type='trimcurve', idx=idx)
rl.append(ret)
# Return the list
return rl | [
"def",
"process_elements_surface",
"(",
"elem",
",",
"mconf",
",",
"colorval",
",",
"idx",
",",
"force_tsl",
",",
"update_delta",
",",
"delta",
",",
"reset_names",
")",
":",
"if",
"idx",
"<",
"0",
":",
"lock",
".",
"acquire",
"(",
")",
"idx",
"=",
"cou... | Processes visualization elements for surfaces.
.. note:: Helper function required for ``multiprocessing``
:param elem: surface
:type elem: abstract.Surface
:param mconf: visualization module configuration
:type mconf: dict
:param colorval: color values
:type colorval: tuple
:param idx: index of the surface
:type idx: int
:param force_tsl: flag to force re-tessellation
:type force_tsl: bool
:param update_delta: flag to update surface delta
:type update_delta: bool
:param delta: new surface evaluation delta
:type delta: list, tuple
:param reset_names: flag to reset names
:type reset_names: bool
:return: visualization element (as a dict)
:rtype: list | [
"Processes",
"visualization",
"elements",
"for",
"surfaces",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/multi.py#L1135-L1224 | train | 225,181 |
orbingol/NURBS-Python | geomdl/helpers.py | find_span_binsearch | def find_span_binsearch(degree, knot_vector, num_ctrlpts, knot, **kwargs):
""" Finds the span of the knot over the input knot vector using binary search.
Implementation of Algorithm A2.1 from The NURBS Book by Piegl & Tiller.
The NURBS Book states that the knot span index always starts from zero, i.e. for a knot vector [0, 0, 1, 1];
if FindSpan returns 1, then the knot is between the interval [0, 1).
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points, :math:`n + 1`
:type num_ctrlpts: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: knot span
:rtype: int
"""
# Get tolerance value
tol = kwargs.get('tol', 10e-6)
# In The NURBS Book; number of knots = m + 1, number of control points = n + 1, p = degree
# All knot vectors should follow the rule: m = p + n + 1
n = num_ctrlpts - 1
if abs(knot_vector[n + 1] - knot) <= tol:
return n
# Set max and min positions of the array to be searched
low = degree
high = num_ctrlpts
# The division could return a float value which makes it impossible to use as an array index
mid = (low + high) / 2
# Direct int casting would cause numerical errors due to discarding the significand figures (digits after the dot)
# The round function could return unexpected results, so we add the floating point with some small number
# This addition would solve the issues caused by the division operation and how Python stores float numbers.
# E.g. round(13/2) = 6 (expected to see 7)
mid = int(round(mid + tol))
# Search for the span
while (knot < knot_vector[mid]) or (knot >= knot_vector[mid + 1]):
if knot < knot_vector[mid]:
high = mid
else:
low = mid
mid = int((low + high) / 2)
return mid | python | def find_span_binsearch(degree, knot_vector, num_ctrlpts, knot, **kwargs):
""" Finds the span of the knot over the input knot vector using binary search.
Implementation of Algorithm A2.1 from The NURBS Book by Piegl & Tiller.
The NURBS Book states that the knot span index always starts from zero, i.e. for a knot vector [0, 0, 1, 1];
if FindSpan returns 1, then the knot is between the interval [0, 1).
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points, :math:`n + 1`
:type num_ctrlpts: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: knot span
:rtype: int
"""
# Get tolerance value
tol = kwargs.get('tol', 10e-6)
# In The NURBS Book; number of knots = m + 1, number of control points = n + 1, p = degree
# All knot vectors should follow the rule: m = p + n + 1
n = num_ctrlpts - 1
if abs(knot_vector[n + 1] - knot) <= tol:
return n
# Set max and min positions of the array to be searched
low = degree
high = num_ctrlpts
# The division could return a float value which makes it impossible to use as an array index
mid = (low + high) / 2
# Direct int casting would cause numerical errors due to discarding the significand figures (digits after the dot)
# The round function could return unexpected results, so we add the floating point with some small number
# This addition would solve the issues caused by the division operation and how Python stores float numbers.
# E.g. round(13/2) = 6 (expected to see 7)
mid = int(round(mid + tol))
# Search for the span
while (knot < knot_vector[mid]) or (knot >= knot_vector[mid + 1]):
if knot < knot_vector[mid]:
high = mid
else:
low = mid
mid = int((low + high) / 2)
return mid | [
"def",
"find_span_binsearch",
"(",
"degree",
",",
"knot_vector",
",",
"num_ctrlpts",
",",
"knot",
",",
"*",
"*",
"kwargs",
")",
":",
"# Get tolerance value",
"tol",
"=",
"kwargs",
".",
"get",
"(",
"'tol'",
",",
"10e-6",
")",
"# In The NURBS Book; number of knots... | Finds the span of the knot over the input knot vector using binary search.
Implementation of Algorithm A2.1 from The NURBS Book by Piegl & Tiller.
The NURBS Book states that the knot span index always starts from zero, i.e. for a knot vector [0, 0, 1, 1];
if FindSpan returns 1, then the knot is between the interval [0, 1).
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points, :math:`n + 1`
:type num_ctrlpts: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: knot span
:rtype: int | [
"Finds",
"the",
"span",
"of",
"the",
"knot",
"over",
"the",
"input",
"knot",
"vector",
"using",
"binary",
"search",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/helpers.py#L20-L68 | train | 225,182 |
orbingol/NURBS-Python | geomdl/helpers.py | find_span_linear | def find_span_linear(degree, knot_vector, num_ctrlpts, knot, **kwargs):
""" Finds the span of a single knot over the knot vector using linear search.
Alternative implementation for the Algorithm A2.1 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points, :math:`n + 1`
:type num_ctrlpts: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: knot span
:rtype: int
"""
span = 0 # Knot span index starts from zero
while span < num_ctrlpts and knot_vector[span] <= knot:
span += 1
return span - 1 | python | def find_span_linear(degree, knot_vector, num_ctrlpts, knot, **kwargs):
""" Finds the span of a single knot over the knot vector using linear search.
Alternative implementation for the Algorithm A2.1 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points, :math:`n + 1`
:type num_ctrlpts: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: knot span
:rtype: int
"""
span = 0 # Knot span index starts from zero
while span < num_ctrlpts and knot_vector[span] <= knot:
span += 1
return span - 1 | [
"def",
"find_span_linear",
"(",
"degree",
",",
"knot_vector",
",",
"num_ctrlpts",
",",
"knot",
",",
"*",
"*",
"kwargs",
")",
":",
"span",
"=",
"0",
"# Knot span index starts from zero",
"while",
"span",
"<",
"num_ctrlpts",
"and",
"knot_vector",
"[",
"span",
"]... | Finds the span of a single knot over the knot vector using linear search.
Alternative implementation for the Algorithm A2.1 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points, :math:`n + 1`
:type num_ctrlpts: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: knot span
:rtype: int | [
"Finds",
"the",
"span",
"of",
"a",
"single",
"knot",
"over",
"the",
"knot",
"vector",
"using",
"linear",
"search",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/helpers.py#L71-L91 | train | 225,183 |
orbingol/NURBS-Python | geomdl/helpers.py | find_spans | def find_spans(degree, knot_vector, num_ctrlpts, knots, func=find_span_linear):
""" Finds spans of a list of knots over the knot vector.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points, :math:`n + 1`
:type num_ctrlpts: int
:param knots: list of knots or parameters
:type knots: list, tuple
:param func: function for span finding, e.g. linear or binary search
:return: list of spans
:rtype: list
"""
spans = []
for knot in knots:
spans.append(func(degree, knot_vector, num_ctrlpts, knot))
return spans | python | def find_spans(degree, knot_vector, num_ctrlpts, knots, func=find_span_linear):
""" Finds spans of a list of knots over the knot vector.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points, :math:`n + 1`
:type num_ctrlpts: int
:param knots: list of knots or parameters
:type knots: list, tuple
:param func: function for span finding, e.g. linear or binary search
:return: list of spans
:rtype: list
"""
spans = []
for knot in knots:
spans.append(func(degree, knot_vector, num_ctrlpts, knot))
return spans | [
"def",
"find_spans",
"(",
"degree",
",",
"knot_vector",
",",
"num_ctrlpts",
",",
"knots",
",",
"func",
"=",
"find_span_linear",
")",
":",
"spans",
"=",
"[",
"]",
"for",
"knot",
"in",
"knots",
":",
"spans",
".",
"append",
"(",
"func",
"(",
"degree",
","... | Finds spans of a list of knots over the knot vector.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points, :math:`n + 1`
:type num_ctrlpts: int
:param knots: list of knots or parameters
:type knots: list, tuple
:param func: function for span finding, e.g. linear or binary search
:return: list of spans
:rtype: list | [
"Finds",
"spans",
"of",
"a",
"list",
"of",
"knots",
"over",
"the",
"knot",
"vector",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/helpers.py#L94-L112 | train | 225,184 |
orbingol/NURBS-Python | geomdl/helpers.py | find_multiplicity | def find_multiplicity(knot, knot_vector, **kwargs):
""" Finds knot multiplicity over the knot vector.
Keyword Arguments:
* ``tol``: tolerance (delta) value for equality checking
:param knot: knot or parameter, :math:`u`
:type knot: float
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:return: knot multiplicity, :math:`s`
:rtype: int
"""
# Get tolerance value
tol = kwargs.get('tol', 10e-8)
mult = 0 # initial multiplicity
for kv in knot_vector:
if abs(knot - kv) <= tol:
mult += 1
return mult | python | def find_multiplicity(knot, knot_vector, **kwargs):
""" Finds knot multiplicity over the knot vector.
Keyword Arguments:
* ``tol``: tolerance (delta) value for equality checking
:param knot: knot or parameter, :math:`u`
:type knot: float
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:return: knot multiplicity, :math:`s`
:rtype: int
"""
# Get tolerance value
tol = kwargs.get('tol', 10e-8)
mult = 0 # initial multiplicity
for kv in knot_vector:
if abs(knot - kv) <= tol:
mult += 1
return mult | [
"def",
"find_multiplicity",
"(",
"knot",
",",
"knot_vector",
",",
"*",
"*",
"kwargs",
")",
":",
"# Get tolerance value",
"tol",
"=",
"kwargs",
".",
"get",
"(",
"'tol'",
",",
"10e-8",
")",
"mult",
"=",
"0",
"# initial multiplicity",
"for",
"kv",
"in",
"knot... | Finds knot multiplicity over the knot vector.
Keyword Arguments:
* ``tol``: tolerance (delta) value for equality checking
:param knot: knot or parameter, :math:`u`
:type knot: float
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:return: knot multiplicity, :math:`s`
:rtype: int | [
"Finds",
"knot",
"multiplicity",
"over",
"the",
"knot",
"vector",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/helpers.py#L115-L137 | train | 225,185 |
orbingol/NURBS-Python | geomdl/helpers.py | basis_function | def basis_function(degree, knot_vector, span, knot):
""" Computes the non-vanishing basis functions for a single parameter.
Implementation of Algorithm A2.2 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param span: knot span, :math:`i`
:type span: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: basis functions
:rtype: list
"""
left = [0.0 for _ in range(degree + 1)]
right = [0.0 for _ in range(degree + 1)]
N = [1.0 for _ in range(degree + 1)] # N[0] = 1.0 by definition
for j in range(1, degree + 1):
left[j] = knot - knot_vector[span + 1 - j]
right[j] = knot_vector[span + j] - knot
saved = 0.0
for r in range(0, j):
temp = N[r] / (right[r + 1] + left[j - r])
N[r] = saved + right[r + 1] * temp
saved = left[j - r] * temp
N[j] = saved
return N | python | def basis_function(degree, knot_vector, span, knot):
""" Computes the non-vanishing basis functions for a single parameter.
Implementation of Algorithm A2.2 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param span: knot span, :math:`i`
:type span: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: basis functions
:rtype: list
"""
left = [0.0 for _ in range(degree + 1)]
right = [0.0 for _ in range(degree + 1)]
N = [1.0 for _ in range(degree + 1)] # N[0] = 1.0 by definition
for j in range(1, degree + 1):
left[j] = knot - knot_vector[span + 1 - j]
right[j] = knot_vector[span + j] - knot
saved = 0.0
for r in range(0, j):
temp = N[r] / (right[r + 1] + left[j - r])
N[r] = saved + right[r + 1] * temp
saved = left[j - r] * temp
N[j] = saved
return N | [
"def",
"basis_function",
"(",
"degree",
",",
"knot_vector",
",",
"span",
",",
"knot",
")",
":",
"left",
"=",
"[",
"0.0",
"for",
"_",
"in",
"range",
"(",
"degree",
"+",
"1",
")",
"]",
"right",
"=",
"[",
"0.0",
"for",
"_",
"in",
"range",
"(",
"degr... | Computes the non-vanishing basis functions for a single parameter.
Implementation of Algorithm A2.2 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param span: knot span, :math:`i`
:type span: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: basis functions
:rtype: list | [
"Computes",
"the",
"non",
"-",
"vanishing",
"basis",
"functions",
"for",
"a",
"single",
"parameter",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/helpers.py#L140-L170 | train | 225,186 |
orbingol/NURBS-Python | geomdl/helpers.py | basis_functions | def basis_functions(degree, knot_vector, spans, knots):
""" Computes the non-vanishing basis functions for a list of parameters.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param spans: list of knot spans
:type spans: list, tuple
:param knots: list of knots or parameters
:type knots: list, tuple
:return: basis functions
:rtype: list
"""
basis = []
for span, knot in zip(spans, knots):
basis.append(basis_function(degree, knot_vector, span, knot))
return basis | python | def basis_functions(degree, knot_vector, spans, knots):
""" Computes the non-vanishing basis functions for a list of parameters.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param spans: list of knot spans
:type spans: list, tuple
:param knots: list of knots or parameters
:type knots: list, tuple
:return: basis functions
:rtype: list
"""
basis = []
for span, knot in zip(spans, knots):
basis.append(basis_function(degree, knot_vector, span, knot))
return basis | [
"def",
"basis_functions",
"(",
"degree",
",",
"knot_vector",
",",
"spans",
",",
"knots",
")",
":",
"basis",
"=",
"[",
"]",
"for",
"span",
",",
"knot",
"in",
"zip",
"(",
"spans",
",",
"knots",
")",
":",
"basis",
".",
"append",
"(",
"basis_function",
"... | Computes the non-vanishing basis functions for a list of parameters.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param spans: list of knot spans
:type spans: list, tuple
:param knots: list of knots or parameters
:type knots: list, tuple
:return: basis functions
:rtype: list | [
"Computes",
"the",
"non",
"-",
"vanishing",
"basis",
"functions",
"for",
"a",
"list",
"of",
"parameters",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/helpers.py#L173-L190 | train | 225,187 |
orbingol/NURBS-Python | geomdl/helpers.py | basis_function_all | def basis_function_all(degree, knot_vector, span, knot):
""" Computes all non-zero basis functions of all degrees from 0 up to the input degree for a single parameter.
A slightly modified version of Algorithm A2.2 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param span: knot span, :math:`i`
:type span: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: basis functions
:rtype: list
"""
N = [[None for _ in range(degree + 1)] for _ in range(degree + 1)]
for i in range(0, degree + 1):
bfuns = basis_function(i, knot_vector, span, knot)
for j in range(0, i + 1):
N[j][i] = bfuns[j]
return N | python | def basis_function_all(degree, knot_vector, span, knot):
""" Computes all non-zero basis functions of all degrees from 0 up to the input degree for a single parameter.
A slightly modified version of Algorithm A2.2 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param span: knot span, :math:`i`
:type span: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: basis functions
:rtype: list
"""
N = [[None for _ in range(degree + 1)] for _ in range(degree + 1)]
for i in range(0, degree + 1):
bfuns = basis_function(i, knot_vector, span, knot)
for j in range(0, i + 1):
N[j][i] = bfuns[j]
return N | [
"def",
"basis_function_all",
"(",
"degree",
",",
"knot_vector",
",",
"span",
",",
"knot",
")",
":",
"N",
"=",
"[",
"[",
"None",
"for",
"_",
"in",
"range",
"(",
"degree",
"+",
"1",
")",
"]",
"for",
"_",
"in",
"range",
"(",
"degree",
"+",
"1",
")",... | Computes all non-zero basis functions of all degrees from 0 up to the input degree for a single parameter.
A slightly modified version of Algorithm A2.2 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param span: knot span, :math:`i`
:type span: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: basis functions
:rtype: list | [
"Computes",
"all",
"non",
"-",
"zero",
"basis",
"functions",
"of",
"all",
"degrees",
"from",
"0",
"up",
"to",
"the",
"input",
"degree",
"for",
"a",
"single",
"parameter",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/helpers.py#L193-L214 | train | 225,188 |
orbingol/NURBS-Python | geomdl/helpers.py | basis_functions_ders | def basis_functions_ders(degree, knot_vector, spans, knots, order):
""" Computes derivatives of the basis functions for a list of parameters.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param spans: list of knot spans
:type spans: list, tuple
:param knots: list of knots or parameters
:type knots: list, tuple
:param order: order of the derivative
:type order: int
:return: derivatives of the basis functions
:rtype: list
"""
basis_ders = []
for span, knot in zip(spans, knots):
basis_ders.append(basis_function_ders(degree, knot_vector, span, knot, order))
return basis_ders | python | def basis_functions_ders(degree, knot_vector, spans, knots, order):
""" Computes derivatives of the basis functions for a list of parameters.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param spans: list of knot spans
:type spans: list, tuple
:param knots: list of knots or parameters
:type knots: list, tuple
:param order: order of the derivative
:type order: int
:return: derivatives of the basis functions
:rtype: list
"""
basis_ders = []
for span, knot in zip(spans, knots):
basis_ders.append(basis_function_ders(degree, knot_vector, span, knot, order))
return basis_ders | [
"def",
"basis_functions_ders",
"(",
"degree",
",",
"knot_vector",
",",
"spans",
",",
"knots",
",",
"order",
")",
":",
"basis_ders",
"=",
"[",
"]",
"for",
"span",
",",
"knot",
"in",
"zip",
"(",
"spans",
",",
"knots",
")",
":",
"basis_ders",
".",
"append... | Computes derivatives of the basis functions for a list of parameters.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector, :math:`U`
:type knot_vector: list, tuple
:param spans: list of knot spans
:type spans: list, tuple
:param knots: list of knots or parameters
:type knots: list, tuple
:param order: order of the derivative
:type order: int
:return: derivatives of the basis functions
:rtype: list | [
"Computes",
"derivatives",
"of",
"the",
"basis",
"functions",
"for",
"a",
"list",
"of",
"parameters",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/helpers.py#L307-L326 | train | 225,189 |
orbingol/NURBS-Python | geomdl/helpers.py | basis_function_one | def basis_function_one(degree, knot_vector, span, knot):
""" Computes the value of a basis function for a single parameter.
Implementation of Algorithm 2.4 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector
:type knot_vector: list, tuple
:param span: knot span, :math:`i`
:type span: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: basis function, :math:`N_{i,p}`
:rtype: float
"""
# Special case at boundaries
if (span == 0 and knot == knot_vector[0]) or \
(span == len(knot_vector) - degree - 2) and knot == knot_vector[len(knot_vector) - 1]:
return 1.0
# Knot is outside of span range
if knot < knot_vector[span] or knot >= knot_vector[span + degree + 1]:
return 0.0
N = [0.0 for _ in range(degree + span + 1)]
# Initialize the zeroth degree basis functions
for j in range(0, degree + 1):
if knot_vector[span + j] <= knot < knot_vector[span + j + 1]:
N[j] = 1.0
# Computing triangular table of basis functions
for k in range(1, degree + 1):
# Detecting zeros saves computations
saved = 0.0
if N[0] != 0.0:
saved = ((knot - knot_vector[span]) * N[0]) / (knot_vector[span + k] - knot_vector[span])
for j in range(0, degree - k + 1):
Uleft = knot_vector[span + j + 1]
Uright = knot_vector[span + j + k + 1]
# Zero detection
if N[j + 1] == 0.0:
N[j] = saved
saved = 0.0
else:
temp = N[j + 1] / (Uright - Uleft)
N[j] = saved + (Uright - knot) * temp
saved = (knot - Uleft) * temp
return N[0] | python | def basis_function_one(degree, knot_vector, span, knot):
""" Computes the value of a basis function for a single parameter.
Implementation of Algorithm 2.4 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector
:type knot_vector: list, tuple
:param span: knot span, :math:`i`
:type span: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: basis function, :math:`N_{i,p}`
:rtype: float
"""
# Special case at boundaries
if (span == 0 and knot == knot_vector[0]) or \
(span == len(knot_vector) - degree - 2) and knot == knot_vector[len(knot_vector) - 1]:
return 1.0
# Knot is outside of span range
if knot < knot_vector[span] or knot >= knot_vector[span + degree + 1]:
return 0.0
N = [0.0 for _ in range(degree + span + 1)]
# Initialize the zeroth degree basis functions
for j in range(0, degree + 1):
if knot_vector[span + j] <= knot < knot_vector[span + j + 1]:
N[j] = 1.0
# Computing triangular table of basis functions
for k in range(1, degree + 1):
# Detecting zeros saves computations
saved = 0.0
if N[0] != 0.0:
saved = ((knot - knot_vector[span]) * N[0]) / (knot_vector[span + k] - knot_vector[span])
for j in range(0, degree - k + 1):
Uleft = knot_vector[span + j + 1]
Uright = knot_vector[span + j + k + 1]
# Zero detection
if N[j + 1] == 0.0:
N[j] = saved
saved = 0.0
else:
temp = N[j + 1] / (Uright - Uleft)
N[j] = saved + (Uright - knot) * temp
saved = (knot - Uleft) * temp
return N[0] | [
"def",
"basis_function_one",
"(",
"degree",
",",
"knot_vector",
",",
"span",
",",
"knot",
")",
":",
"# Special case at boundaries",
"if",
"(",
"span",
"==",
"0",
"and",
"knot",
"==",
"knot_vector",
"[",
"0",
"]",
")",
"or",
"(",
"span",
"==",
"len",
"(",... | Computes the value of a basis function for a single parameter.
Implementation of Algorithm 2.4 from The NURBS Book by Piegl & Tiller.
:param degree: degree, :math:`p`
:type degree: int
:param knot_vector: knot vector
:type knot_vector: list, tuple
:param span: knot span, :math:`i`
:type span: int
:param knot: knot or parameter, :math:`u`
:type knot: float
:return: basis function, :math:`N_{i,p}`
:rtype: float | [
"Computes",
"the",
"value",
"of",
"a",
"basis",
"function",
"for",
"a",
"single",
"parameter",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/helpers.py#L329-L381 | train | 225,190 |
orbingol/NURBS-Python | geomdl/visualization/VisMPL.py | VisConfig.set_axes_equal | def set_axes_equal(ax):
""" Sets equal aspect ratio across the three axes of a 3D plot.
Contributed by Xuefeng Zhao.
:param ax: a Matplotlib axis, e.g., as output from plt.gca().
"""
bounds = [ax.get_xlim3d(), ax.get_ylim3d(), ax.get_zlim3d()]
ranges = [abs(bound[1] - bound[0]) for bound in bounds]
centers = [np.mean(bound) for bound in bounds]
radius = 0.5 * max(ranges)
lower_limits = centers - radius
upper_limits = centers + radius
ax.set_xlim3d([lower_limits[0], upper_limits[0]])
ax.set_ylim3d([lower_limits[1], upper_limits[1]])
ax.set_zlim3d([lower_limits[2], upper_limits[2]]) | python | def set_axes_equal(ax):
""" Sets equal aspect ratio across the three axes of a 3D plot.
Contributed by Xuefeng Zhao.
:param ax: a Matplotlib axis, e.g., as output from plt.gca().
"""
bounds = [ax.get_xlim3d(), ax.get_ylim3d(), ax.get_zlim3d()]
ranges = [abs(bound[1] - bound[0]) for bound in bounds]
centers = [np.mean(bound) for bound in bounds]
radius = 0.5 * max(ranges)
lower_limits = centers - radius
upper_limits = centers + radius
ax.set_xlim3d([lower_limits[0], upper_limits[0]])
ax.set_ylim3d([lower_limits[1], upper_limits[1]])
ax.set_zlim3d([lower_limits[2], upper_limits[2]]) | [
"def",
"set_axes_equal",
"(",
"ax",
")",
":",
"bounds",
"=",
"[",
"ax",
".",
"get_xlim3d",
"(",
")",
",",
"ax",
".",
"get_ylim3d",
"(",
")",
",",
"ax",
".",
"get_zlim3d",
"(",
")",
"]",
"ranges",
"=",
"[",
"abs",
"(",
"bound",
"[",
"1",
"]",
"-... | Sets equal aspect ratio across the three axes of a 3D plot.
Contributed by Xuefeng Zhao.
:param ax: a Matplotlib axis, e.g., as output from plt.gca(). | [
"Sets",
"equal",
"aspect",
"ratio",
"across",
"the",
"three",
"axes",
"of",
"a",
"3D",
"plot",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/visualization/VisMPL.py#L88-L103 | train | 225,191 |
orbingol/NURBS-Python | geomdl/visualization/VisMPL.py | VisSurface.animate | def animate(self, **kwargs):
""" Animates the surface.
This function only animates the triangulated surface. There will be no other elements, such as control points
grid or bounding box.
Keyword arguments:
* ``colormap``: applies colormap to the surface
Colormaps are a visualization feature of Matplotlib. They can be used for several types of surface plots via
the following import statement: ``from matplotlib import cm``
The following link displays the list of Matplolib colormaps and some examples on colormaps:
https://matplotlib.org/tutorials/colors/colormaps.html
"""
# Calling parent render function
super(VisSurface, self).render(**kwargs)
# Colormaps
surf_cmaps = kwargs.get('colormap', None)
# Initialize variables
tri_idxs = []
vert_coords = []
trisurf_params = []
frames = []
frames_tris = []
num_vertices = 0
# Start plotting of the surface and the control points grid
fig = plt.figure(figsize=self.vconf.figure_size, dpi=self.vconf.figure_dpi)
ax = Axes3D(fig)
# Start plotting
surf_count = 0
for plot in self._plots:
# Plot evaluated points
if plot['type'] == 'evalpts' and self.vconf.display_evalpts:
# Use internal triangulation algorithm instead of Qhull (MPL default)
verts = plot['ptsarr'][0]
tris = plot['ptsarr'][1]
# Extract zero-indexed vertex number list
tri_idxs += [[ti + num_vertices for ti in tri.data] for tri in tris]
# Extract vertex coordinates
vert_coords += [vert.data for vert in verts]
# Update number of vertices
num_vertices = len(vert_coords)
# Determine the color or the colormap of the triangulated plot
params = {}
if surf_cmaps:
try:
params['cmap'] = surf_cmaps[surf_count]
surf_count += 1
except IndexError:
params['color'] = plot['color']
else:
params['color'] = plot['color']
trisurf_params += [params for _ in range(len(tris))]
# Pre-processing for the animation
pts = np.array(vert_coords, dtype=self.vconf.dtype)
# Create the frames (Artists)
for tidx, pidx in zip(tri_idxs, trisurf_params):
frames_tris.append(tidx)
# Create MPL Triangulation object
triangulation = mpltri.Triangulation(pts[:, 0], pts[:, 1], triangles=frames_tris)
# Use custom Triangulation object and the choice of color/colormap to plot the surface
p3df = ax.plot_trisurf(triangulation, pts[:, 2], alpha=self.vconf.alpha, **pidx)
# Add to frames list
frames.append([p3df])
# Create MPL ArtistAnimation
ani = animation.ArtistAnimation(fig, frames, interval=100, blit=True, repeat_delay=1000)
# Remove axes
if not self.vconf.display_axes:
plt.axis('off')
# Set axes equal
if self.vconf.axes_equal:
self.vconf.set_axes_equal(ax)
# Axis labels
if self.vconf.display_labels:
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
# Process keyword arguments
fig_filename = kwargs.get('fig_save_as', None)
fig_display = kwargs.get('display_plot', True)
# Display the plot
if fig_display:
plt.show()
else:
fig_filename = self.vconf.figure_image_filename if fig_filename is None else fig_filename
# Save the figure
self.vconf.save_figure_as(fig, fig_filename)
# Return the figure object
return fig | python | def animate(self, **kwargs):
""" Animates the surface.
This function only animates the triangulated surface. There will be no other elements, such as control points
grid or bounding box.
Keyword arguments:
* ``colormap``: applies colormap to the surface
Colormaps are a visualization feature of Matplotlib. They can be used for several types of surface plots via
the following import statement: ``from matplotlib import cm``
The following link displays the list of Matplolib colormaps and some examples on colormaps:
https://matplotlib.org/tutorials/colors/colormaps.html
"""
# Calling parent render function
super(VisSurface, self).render(**kwargs)
# Colormaps
surf_cmaps = kwargs.get('colormap', None)
# Initialize variables
tri_idxs = []
vert_coords = []
trisurf_params = []
frames = []
frames_tris = []
num_vertices = 0
# Start plotting of the surface and the control points grid
fig = plt.figure(figsize=self.vconf.figure_size, dpi=self.vconf.figure_dpi)
ax = Axes3D(fig)
# Start plotting
surf_count = 0
for plot in self._plots:
# Plot evaluated points
if plot['type'] == 'evalpts' and self.vconf.display_evalpts:
# Use internal triangulation algorithm instead of Qhull (MPL default)
verts = plot['ptsarr'][0]
tris = plot['ptsarr'][1]
# Extract zero-indexed vertex number list
tri_idxs += [[ti + num_vertices for ti in tri.data] for tri in tris]
# Extract vertex coordinates
vert_coords += [vert.data for vert in verts]
# Update number of vertices
num_vertices = len(vert_coords)
# Determine the color or the colormap of the triangulated plot
params = {}
if surf_cmaps:
try:
params['cmap'] = surf_cmaps[surf_count]
surf_count += 1
except IndexError:
params['color'] = plot['color']
else:
params['color'] = plot['color']
trisurf_params += [params for _ in range(len(tris))]
# Pre-processing for the animation
pts = np.array(vert_coords, dtype=self.vconf.dtype)
# Create the frames (Artists)
for tidx, pidx in zip(tri_idxs, trisurf_params):
frames_tris.append(tidx)
# Create MPL Triangulation object
triangulation = mpltri.Triangulation(pts[:, 0], pts[:, 1], triangles=frames_tris)
# Use custom Triangulation object and the choice of color/colormap to plot the surface
p3df = ax.plot_trisurf(triangulation, pts[:, 2], alpha=self.vconf.alpha, **pidx)
# Add to frames list
frames.append([p3df])
# Create MPL ArtistAnimation
ani = animation.ArtistAnimation(fig, frames, interval=100, blit=True, repeat_delay=1000)
# Remove axes
if not self.vconf.display_axes:
plt.axis('off')
# Set axes equal
if self.vconf.axes_equal:
self.vconf.set_axes_equal(ax)
# Axis labels
if self.vconf.display_labels:
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
# Process keyword arguments
fig_filename = kwargs.get('fig_save_as', None)
fig_display = kwargs.get('display_plot', True)
# Display the plot
if fig_display:
plt.show()
else:
fig_filename = self.vconf.figure_image_filename if fig_filename is None else fig_filename
# Save the figure
self.vconf.save_figure_as(fig, fig_filename)
# Return the figure object
return fig | [
"def",
"animate",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"# Calling parent render function",
"super",
"(",
"VisSurface",
",",
"self",
")",
".",
"render",
"(",
"*",
"*",
"kwargs",
")",
"# Colormaps",
"surf_cmaps",
"=",
"kwargs",
".",
"get",
"(",
... | Animates the surface.
This function only animates the triangulated surface. There will be no other elements, such as control points
grid or bounding box.
Keyword arguments:
* ``colormap``: applies colormap to the surface
Colormaps are a visualization feature of Matplotlib. They can be used for several types of surface plots via
the following import statement: ``from matplotlib import cm``
The following link displays the list of Matplolib colormaps and some examples on colormaps:
https://matplotlib.org/tutorials/colors/colormaps.html | [
"Animates",
"the",
"surface",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/visualization/VisMPL.py#L298-L402 | train | 225,192 |
orbingol/NURBS-Python | geomdl/_operations.py | tangent_curve_single_list | def tangent_curve_single_list(obj, param_list, normalize):
""" Evaluates the curve tangent vectors at the given list of parameter values.
:param obj: input curve
:type obj: abstract.Curve
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
ret_vector = []
for param in param_list:
temp = tangent_curve_single(obj, param, normalize)
ret_vector.append(temp)
return tuple(ret_vector) | python | def tangent_curve_single_list(obj, param_list, normalize):
""" Evaluates the curve tangent vectors at the given list of parameter values.
:param obj: input curve
:type obj: abstract.Curve
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
ret_vector = []
for param in param_list:
temp = tangent_curve_single(obj, param, normalize)
ret_vector.append(temp)
return tuple(ret_vector) | [
"def",
"tangent_curve_single_list",
"(",
"obj",
",",
"param_list",
",",
"normalize",
")",
":",
"ret_vector",
"=",
"[",
"]",
"for",
"param",
"in",
"param_list",
":",
"temp",
"=",
"tangent_curve_single",
"(",
"obj",
",",
"param",
",",
"normalize",
")",
"ret_ve... | Evaluates the curve tangent vectors at the given list of parameter values.
:param obj: input curve
:type obj: abstract.Curve
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple | [
"Evaluates",
"the",
"curve",
"tangent",
"vectors",
"at",
"the",
"given",
"list",
"of",
"parameter",
"values",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_operations.py#L41-L57 | train | 225,193 |
orbingol/NURBS-Python | geomdl/_operations.py | normal_curve_single | def normal_curve_single(obj, u, normalize):
""" Evaluates the curve normal vector at the input parameter, u.
Curve normal is calculated from the 2nd derivative of the curve at the input parameter, u.
The output returns a list containing the starting point (i.e. origin) of the vector and the vector itself.
:param obj: input curve
:type obj: abstract.Curve
:param u: parameter
:type u: float
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
# 2nd derivative of the curve gives the normal
ders = obj.derivatives(u, 2)
point = ders[0]
vector = linalg.vector_normalize(ders[2]) if normalize else ders[2]
return tuple(point), tuple(vector) | python | def normal_curve_single(obj, u, normalize):
""" Evaluates the curve normal vector at the input parameter, u.
Curve normal is calculated from the 2nd derivative of the curve at the input parameter, u.
The output returns a list containing the starting point (i.e. origin) of the vector and the vector itself.
:param obj: input curve
:type obj: abstract.Curve
:param u: parameter
:type u: float
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
# 2nd derivative of the curve gives the normal
ders = obj.derivatives(u, 2)
point = ders[0]
vector = linalg.vector_normalize(ders[2]) if normalize else ders[2]
return tuple(point), tuple(vector) | [
"def",
"normal_curve_single",
"(",
"obj",
",",
"u",
",",
"normalize",
")",
":",
"# 2nd derivative of the curve gives the normal",
"ders",
"=",
"obj",
".",
"derivatives",
"(",
"u",
",",
"2",
")",
"point",
"=",
"ders",
"[",
"0",
"]",
"vector",
"=",
"linalg",
... | Evaluates the curve normal vector at the input parameter, u.
Curve normal is calculated from the 2nd derivative of the curve at the input parameter, u.
The output returns a list containing the starting point (i.e. origin) of the vector and the vector itself.
:param obj: input curve
:type obj: abstract.Curve
:param u: parameter
:type u: float
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple | [
"Evaluates",
"the",
"curve",
"normal",
"vector",
"at",
"the",
"input",
"parameter",
"u",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_operations.py#L60-L81 | train | 225,194 |
orbingol/NURBS-Python | geomdl/_operations.py | normal_curve_single_list | def normal_curve_single_list(obj, param_list, normalize):
""" Evaluates the curve normal vectors at the given list of parameter values.
:param obj: input curve
:type obj: abstract.Curve
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
ret_vector = []
for param in param_list:
temp = normal_curve_single(obj, param, normalize)
ret_vector.append(temp)
return tuple(ret_vector) | python | def normal_curve_single_list(obj, param_list, normalize):
""" Evaluates the curve normal vectors at the given list of parameter values.
:param obj: input curve
:type obj: abstract.Curve
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
ret_vector = []
for param in param_list:
temp = normal_curve_single(obj, param, normalize)
ret_vector.append(temp)
return tuple(ret_vector) | [
"def",
"normal_curve_single_list",
"(",
"obj",
",",
"param_list",
",",
"normalize",
")",
":",
"ret_vector",
"=",
"[",
"]",
"for",
"param",
"in",
"param_list",
":",
"temp",
"=",
"normal_curve_single",
"(",
"obj",
",",
"param",
",",
"normalize",
")",
"ret_vect... | Evaluates the curve normal vectors at the given list of parameter values.
:param obj: input curve
:type obj: abstract.Curve
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple | [
"Evaluates",
"the",
"curve",
"normal",
"vectors",
"at",
"the",
"given",
"list",
"of",
"parameter",
"values",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_operations.py#L84-L100 | train | 225,195 |
orbingol/NURBS-Python | geomdl/_operations.py | binormal_curve_single | def binormal_curve_single(obj, u, normalize):
""" Evaluates the curve binormal vector at the given u parameter.
Curve binormal is the cross product of the normal and the tangent vectors.
The output returns a list containing the starting point (i.e. origin) of the vector and the vector itself.
:param obj: input curve
:type obj: abstract.Curve
:param u: parameter
:type u: float
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
# Cross product of tangent and normal vectors gives binormal vector
tan_vector = tangent_curve_single(obj, u, normalize)
norm_vector = normal_curve_single(obj, u, normalize)
point = tan_vector[0]
vector = linalg.vector_cross(tan_vector[1], norm_vector[1])
vector = linalg.vector_normalize(vector) if normalize else vector
return tuple(point), tuple(vector) | python | def binormal_curve_single(obj, u, normalize):
""" Evaluates the curve binormal vector at the given u parameter.
Curve binormal is the cross product of the normal and the tangent vectors.
The output returns a list containing the starting point (i.e. origin) of the vector and the vector itself.
:param obj: input curve
:type obj: abstract.Curve
:param u: parameter
:type u: float
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
# Cross product of tangent and normal vectors gives binormal vector
tan_vector = tangent_curve_single(obj, u, normalize)
norm_vector = normal_curve_single(obj, u, normalize)
point = tan_vector[0]
vector = linalg.vector_cross(tan_vector[1], norm_vector[1])
vector = linalg.vector_normalize(vector) if normalize else vector
return tuple(point), tuple(vector) | [
"def",
"binormal_curve_single",
"(",
"obj",
",",
"u",
",",
"normalize",
")",
":",
"# Cross product of tangent and normal vectors gives binormal vector",
"tan_vector",
"=",
"tangent_curve_single",
"(",
"obj",
",",
"u",
",",
"normalize",
")",
"norm_vector",
"=",
"normal_c... | Evaluates the curve binormal vector at the given u parameter.
Curve binormal is the cross product of the normal and the tangent vectors.
The output returns a list containing the starting point (i.e. origin) of the vector and the vector itself.
:param obj: input curve
:type obj: abstract.Curve
:param u: parameter
:type u: float
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple | [
"Evaluates",
"the",
"curve",
"binormal",
"vector",
"at",
"the",
"given",
"u",
"parameter",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_operations.py#L103-L126 | train | 225,196 |
orbingol/NURBS-Python | geomdl/_operations.py | binormal_curve_single_list | def binormal_curve_single_list(obj, param_list, normalize):
""" Evaluates the curve binormal vectors at the given list of parameter values.
:param obj: input curve
:type obj: abstract.Curve
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
ret_vector = []
for param in param_list:
temp = binormal_curve_single(obj, param, normalize)
ret_vector.append(temp)
return tuple(ret_vector) | python | def binormal_curve_single_list(obj, param_list, normalize):
""" Evaluates the curve binormal vectors at the given list of parameter values.
:param obj: input curve
:type obj: abstract.Curve
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
ret_vector = []
for param in param_list:
temp = binormal_curve_single(obj, param, normalize)
ret_vector.append(temp)
return tuple(ret_vector) | [
"def",
"binormal_curve_single_list",
"(",
"obj",
",",
"param_list",
",",
"normalize",
")",
":",
"ret_vector",
"=",
"[",
"]",
"for",
"param",
"in",
"param_list",
":",
"temp",
"=",
"binormal_curve_single",
"(",
"obj",
",",
"param",
",",
"normalize",
")",
"ret_... | Evaluates the curve binormal vectors at the given list of parameter values.
:param obj: input curve
:type obj: abstract.Curve
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple | [
"Evaluates",
"the",
"curve",
"binormal",
"vectors",
"at",
"the",
"given",
"list",
"of",
"parameter",
"values",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_operations.py#L129-L145 | train | 225,197 |
orbingol/NURBS-Python | geomdl/_operations.py | tangent_surface_single_list | def tangent_surface_single_list(obj, param_list, normalize):
""" Evaluates the surface tangent vectors at the given list of parameter values.
:param obj: input surface
:type obj: abstract.Surface
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
ret_vector = []
for param in param_list:
temp = tangent_surface_single(obj, param, normalize)
ret_vector.append(temp)
return tuple(ret_vector) | python | def tangent_surface_single_list(obj, param_list, normalize):
""" Evaluates the surface tangent vectors at the given list of parameter values.
:param obj: input surface
:type obj: abstract.Surface
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
ret_vector = []
for param in param_list:
temp = tangent_surface_single(obj, param, normalize)
ret_vector.append(temp)
return tuple(ret_vector) | [
"def",
"tangent_surface_single_list",
"(",
"obj",
",",
"param_list",
",",
"normalize",
")",
":",
"ret_vector",
"=",
"[",
"]",
"for",
"param",
"in",
"param_list",
":",
"temp",
"=",
"tangent_surface_single",
"(",
"obj",
",",
"param",
",",
"normalize",
")",
"re... | Evaluates the surface tangent vectors at the given list of parameter values.
:param obj: input surface
:type obj: abstract.Surface
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple | [
"Evaluates",
"the",
"surface",
"tangent",
"vectors",
"at",
"the",
"given",
"list",
"of",
"parameter",
"values",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_operations.py#L172-L188 | train | 225,198 |
orbingol/NURBS-Python | geomdl/_operations.py | normal_surface_single_list | def normal_surface_single_list(obj, param_list, normalize):
""" Evaluates the surface normal vectors at the given list of parameter values.
:param obj: input surface
:type obj: abstract.Surface
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
ret_vector = []
for param in param_list:
temp = normal_surface_single(obj, param, normalize)
ret_vector.append(temp)
return tuple(ret_vector) | python | def normal_surface_single_list(obj, param_list, normalize):
""" Evaluates the surface normal vectors at the given list of parameter values.
:param obj: input surface
:type obj: abstract.Surface
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple
"""
ret_vector = []
for param in param_list:
temp = normal_surface_single(obj, param, normalize)
ret_vector.append(temp)
return tuple(ret_vector) | [
"def",
"normal_surface_single_list",
"(",
"obj",
",",
"param_list",
",",
"normalize",
")",
":",
"ret_vector",
"=",
"[",
"]",
"for",
"param",
"in",
"param_list",
":",
"temp",
"=",
"normal_surface_single",
"(",
"obj",
",",
"param",
",",
"normalize",
")",
"ret_... | Evaluates the surface normal vectors at the given list of parameter values.
:param obj: input surface
:type obj: abstract.Surface
:param param_list: parameter list
:type param_list: list or tuple
:param normalize: if True, the returned vector is converted to a unit vector
:type normalize: bool
:return: a list containing "point" and "vector" pairs
:rtype: tuple | [
"Evaluates",
"the",
"surface",
"normal",
"vectors",
"at",
"the",
"given",
"list",
"of",
"parameter",
"values",
"."
] | b1c6a8b51cf143ff58761438e93ba6baef470627 | https://github.com/orbingol/NURBS-Python/blob/b1c6a8b51cf143ff58761438e93ba6baef470627/geomdl/_operations.py#L215-L231 | train | 225,199 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.