text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def index():
    """Return information about each type of resource and how it can be accessed.

    Responds with JSON (a map of endpoint name -> link/meta URLs) or renders
    the HTML index, depending on the acceptable response type.
    """
    classes = []
    with app.app_context():
        # Several references may point at the same class; deduplicate.
        classes = set(current_app.class_references.values())
    if _get_acceptable_response_type() == JSON:
        meta_data = {}
        for cls in classes:
            meta_data[cls.endpoint()] = {
                'link': '/' + cls.endpoint(),
                'meta': '/' + cls.endpoint() + '/meta'
            }
        return jsonify(meta_data)
    else:
        return render_template('index.html', classes=classes)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_meta(collection):
    """Return the meta-description of a given resource.

    :param collection: the collection to get meta-info for
    """
    resource_class = endpoint_class(collection)
    return jsonify(resource_class.meta())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def abort(self):
    """Return an HTML Response representation of the exception."""
    body = render_template('error.html', error=self.code, message=self.message)
    return make_response(body, self.code)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate_endpoint_classes(db, generate_pks=False):
    """Register a model class generated for each reflected database table.

    :param db: Flask-SQLAlchemy database handle
    :param bool generate_pks: synthesize a primary key for tables lacking one
    """
    # Table names already registered must not be generated again.
    seen_classes = {
        cls.__tablename__ for cls in current_app.class_references.values()
    }
    with app.app_context():
        db.metadata.reflect(bind=db.engine)
        for name, table in db.metadata.tables.items():
            # Idiomatic membership test (was `not name in seen_classes`).
            if name not in seen_classes:
                seen_classes.add(name)
                if not table.primary_key and generate_pks:
                    cls = add_pk_if_required(db, table, name)
                else:
                    cls = type(
                        str(name),
                        (sandman_model, db.Model),
                        {'__tablename__': name})
                register(cls)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def add_pk_if_required(db, table, name):
    """Return a class deriving from our Model class as well as the SQLAlchemy model.

    :param db: Flask-SQLAlchemy database handle
    :param `sqlalchemy.schema.Table` table: table to create primary key for
    :param str name: name of the table
    """
    db.metadata.reflect(bind=db.engine)
    cls_dict = {'__tablename__': name}
    if not table.primary_key:
        # No PK declared: mark every column as part of the primary key so
        # SQLAlchemy can map the table.
        for column in table.columns:
            column.primary_key = True
        # Re-register the table definition with the modified columns.
        Table(name, db.metadata, *table.columns, extend_existing=True)
        cls_dict['__table__'] = table
        db.metadata.create_all(bind=db.engine)
    return type(str(name), (sandman_model, db.Model), cls_dict)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def prepare_relationships(db, known_tables):
    """Enrich the registered Models with SQLAlchemy ``relationships``
    so that related tables are correctly picked up by the admin.

    :param db: Flask-SQLAlchemy database handle
    :param dict known_tables: maps table name -> registered model class
    """
    inspector = reflection.Inspector.from_engine(db.engine)
    for cls in set(known_tables.values()):
        for foreign_key in inspector.get_foreign_keys(cls.__tablename__):
            if foreign_key['referred_table'] in known_tables:
                other = known_tables[foreign_key['referred_table']]
                constrained_column = foreign_key['constrained_columns']
                # Link each distinct pair of classes only once, and never
                # a class to itself.
                if other not in cls.__related_tables__ and cls not in (
                        other.__related_tables__) and other != cls:
                    cls.__related_tables__.add(other)
                    # Add a SQLAlchemy relationship as an attribute
                    # on the class
                    setattr(cls, other.__table__.name, relationship(
                        other.__name__, backref=db.backref(
                            cls.__name__.lower()),
                        foreign_keys=str(cls.__name__) + '.' +
                        ''.join(constrained_column)))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def register_classes_for_admin(db_session, show_pks=True, name='admin'):
    """Register classes for the Admin view that ultimately creates the admin interface.

    :param db_session: handle to database session
    :param bool show_pks: show primary key columns in the admin?
    :param str name: name under which the Admin instance is registered
    """
    with app.app_context():
        admin_view = Admin(current_app, name=name)
        # Only register classes that opted in via `use_admin`.
        for cls in set(
                cls for cls in current_app.class_references.values() if
                cls.use_admin):
            column_list = [column.name for column in
                           cls.__table__.columns.values()]
            if hasattr(cls, '__view__'):
                # allow ability for model classes to specify model views
                admin_view_class = type(
                    'AdminView',
                    (cls.__view__,),
                    {'form_columns': column_list})
            elif show_pks:
                # the default of Flask-SQLAlchemy is to not show primary
                # keys, which obviously isn't acceptable in some cases
                admin_view_class = type(
                    'AdminView',
                    (AdminModelViewWithPK,),
                    {'form_columns': column_list})
            else:
                admin_view_class = ModelView
            admin_view.add_view(admin_view_class(cls, db_session))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_classes(self, xml_document, src_path):
    """Given a path and parsed xml_document, provide class nodes with the relevant lines.

    First, we look to see if xml_document contains a source node providing
    paths to search for.  If we don't have that, we check each node's
    filename attribute against the absolute path.  Finally, if we found no
    nodes, we check the filename attribute for the relative path.
    """
    # Remove git_root from src_path for searching the correct filename
    # If cwd is `/home/user/work/diff-cover/diff_cover`
    # and src_path is `diff_cover/violations_reporter.py`
    # search for `violations_reporter.py`
    src_rel_path = self._to_unix_path(GitPathTool.relative_path(src_path))
    # If cwd is `/home/user/work/diff-cover/diff_cover`
    # and src_path is `other_package/some_file.py`
    # search for `/home/user/work/diff-cover/other_package/some_file.py`
    src_abs_path = self._to_unix_path(GitPathTool.absolute_path(src_path))
    # cobertura sometimes provides the sources for the measurements
    # within it. If we have that we ought to use it
    sources = xml_document.findall('sources/source')
    sources = [source.text for source in sources if source.text]
    classes = [class_tree
               for class_tree in xml_document.findall(".//class")
               or []]
    # Three matching strategies, in priority order: source-prefixed
    # absolute path, bare absolute path, then relative path.
    classes = (
        [clazz for clazz in classes if
         src_abs_path in [
             self._to_unix_path(
                 os.path.join(
                     source.strip(),
                     clazz.get('filename')
                 )
             ) for source in sources]]
        or
        [clazz for clazz in classes if
         self._to_unix_path(clazz.get('filename')) == src_abs_path]
        or
        [clazz for clazz in classes if
         self._to_unix_path(clazz.get('filename')) == src_rel_path]
    )
    return classes
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _cache_file(self, src_path):
    """Load the data from `self._xml_roots` for `src_path`, if it hasn't been already.

    Populates ``self._info_cache[src_path]`` with ``(violations, measured)``:
    ``violations`` is the set of uncovered-line Violations present in
    *every* XML report, ``measured`` the set of line numbers measured in
    *any* report.
    """
    # If we have not yet loaded this source file
    if src_path not in self._info_cache:
        # We only want to keep violations that show up in each xml source.
        # Thus, each time, we take the intersection. However, to do this
        # we must treat the first time as a special case and just add all
        # the violations from the first xml report.
        violations = None
        # A line is measured if it is measured in any of the reports, so
        # we take set union each time and can just start with the empty set
        measured = set()
        # Loop through the files that contain the xml roots
        for xml_document in self._xml_roots:
            if xml_document.findall('.[@clover]'):
                # see etc/schema/clover.xsd at https://bitbucket.org/atlassian/clover/src
                line_nodes = self._get_src_path_line_nodes_clover(xml_document, src_path)
                _number = 'num'
                _hits = 'count'
            elif xml_document.findall('.[@name]'):
                # https://github.com/jacoco/jacoco/blob/master/org.jacoco.report/src/org/jacoco/report/xml/report.dtd
                line_nodes = self._get_src_path_line_nodes_jacoco(xml_document, src_path)
                _number = 'nr'
                _hits = 'ci'
            else:
                # https://github.com/cobertura/web/blob/master/htdocs/xml/coverage-04.dtd
                line_nodes = self._get_src_path_line_nodes_cobertura(xml_document, src_path)
                _number = 'number'
                _hits = 'hits'
            if line_nodes is None:
                continue
            # First case, need to define violations initially
            if violations is None:
                violations = set(
                    Violation(int(line.get(_number)), None)
                    for line in line_nodes
                    if int(line.get(_hits, 0)) == 0)
            # If we already have a violations set,
            # take the intersection of the new
            # violations set and its old self
            else:
                violations = violations & set(
                    Violation(int(line.get(_number)), None)
                    for line in line_nodes
                    if int(line.get(_hits, 0)) == 0
                )
            # Measured is the union of itself and the new measured
            measured = measured | set(
                int(line.get(_number)) for line in line_nodes
            )
        # If we don't have any information about the source file,
        # don't report any violations
        if violations is None:
            violations = set()
        self._info_cache[src_path] = (violations, measured)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _process_dupe_code_violation(self, lines, current_line, message):
    """The duplicate code violation is a multi line error. This pulls out
    all the relevant files.

    :param lines: the report output split into lines
    :param int current_line: index of the violation's first line in ``lines``
    :param str message: violation message whose first regex group gives
        the number of involved files
    :returns: list of ('<module>.py', line_number) tuples
    """
    src_paths = []
    message_match = self.dupe_code_violation_regex.match(message)
    if message_match:
        # group(1) says how many of the following lines name involved files.
        for _ in range(int(message_match.group(1))):
            current_line += 1
            match = self.multi_line_violation_regex.match(
                lines[current_line]
            )
            src_path, l_number = match.groups()
            src_paths.append(('%s.py' % src_path, l_number))
    return src_paths
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_cwd(cls, cwd):
    """Set the cwd that is used to manipulate paths.

    Falls back to the process's cwd when *cwd* is falsy and decodes a
    bytes cwd to text.  Also refreshes the cached git root.
    """
    if not cwd:
        try:
            # Python 2: getcwdu() returns a unicode cwd.
            cwd = os.getcwdu()
        except AttributeError:
            # Python 3: getcwd() already returns str.
            cwd = os.getcwd()
    if isinstance(cwd, six.binary_type):
        cwd = cwd.decode(sys.getdefaultencoding())
    cls._cwd = cwd
    cls._root = cls._git_root()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def relative_path(cls, git_diff_path):
    """Return ``git_diff_path`` (git-root relative) rewritten relative to cwd.

    If cwd is `/home/user/work/diff-cover/diff_cover` and the path is
    `diff_cover/violations_reporter.py`, returns `violations_reporter.py`.
    """
    # Where the cwd sits inside the git checkout, e.g. `diff_cover`.
    cwd_in_repo = os.path.relpath(cls._cwd, cls._root)
    # Strip that prefix so the result is relative to the cwd itself.
    return os.path.relpath(git_diff_path, cwd_in_repo)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _is_path_excluded(self, path):
    """Check if a path is excluded.

    :param str path: Path to check against the exclude patterns.
    :returns: True if there are exclude patterns and the path matches,
        otherwise False.
    """
    patterns = self._exclude
    if not patterns:
        return False
    # Try the basename first, then the absolute path.
    if self._fnmatch(os.path.basename(path), patterns):
        return True
    return self._fnmatch(os.path.abspath(path), patterns)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def src_paths_changed(self):
    """See base class docstring."""
    # The diff dict's keys are the changed file paths; return them
    # sorted case-insensitively.
    changed = self._git_diff()
    return sorted(changed, key=str.lower)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_included_diff_results(self):
    """Return a list of diff stages to be included in the diff results."""
    tool = self._git_diff_tool
    # Committed changes are always included; staged/unstaged only
    # unless explicitly ignored.
    stages = [tool.diff_committed(self._compare_branch)]
    if not self._ignore_staged:
        stages.append(tool.diff_staged())
    if not self._ignore_unstaged:
        stages.append(tool.diff_unstaged())
    return stages
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _git_diff(self):
    """Run `git diff` and return a dict mapping changed file paths to
    lists of line numbers.

    Guarantees that each line number within a file is unique (no repeats)
    and in ascending order.  Returns a cached result if called multiple
    times.  Raises a GitDiffError if `git diff` has an error.
    """
    # If we do not have a cached result, execute `git diff`
    if self._diff_dict is None:
        result_dict = dict()
        for diff_str in self._get_included_diff_results():
            # Parse the output of the diff string
            diff_dict = self._parse_diff_str(diff_str)
            for src_path in diff_dict.keys():
                if self._is_path_excluded(src_path):
                    continue
                # If no _supported_extensions provided, or extension present: process
                extension = os.path.splitext(src_path)[1][1:].lower()
                # 'not self._supported_extensions' tests for both None and empty list []
                if not self._supported_extensions or extension in self._supported_extensions:
                    added_lines, deleted_lines = diff_dict[src_path]
                    # Set gives O(1) membership in the filter below.
                    deleted = set(deleted_lines)
                    # Remove any lines from the dict that have been deleted
                    # Include any lines that have been added
                    result_dict[src_path] = [
                        line for line in result_dict.get(src_path, [])
                        if line not in deleted
                    ] + added_lines
        # Eliminate repeats and order line numbers
        for (src_path, lines) in result_dict.items():
            result_dict[src_path] = self._unique_ordered_lines(lines)
        # Store the resulting dict
        self._diff_dict = result_dict
    # Return the diff cache
    return self._diff_dict
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_source_sections(self, diff_str):
    """Given the output of `git diff`, return a dictionary with keys that
    are source file paths.

    Each value is a list of lines from the `git diff` output related to
    the source file (hunk lines onward).

    :raises GitDiffError: if `diff_str` is in an invalid format.
    """
    # Create a dict to map source files to lines in the diff output
    source_dict = dict()
    # Keep track of the current source file
    src_path = None
    # Signal that we've found a hunk (after starting a source file)
    found_hunk = False
    # Parse the diff string into sections by source file
    for line in diff_str.split('\n'):
        # If the line starts with "diff --git"
        # or "diff --cc" (in the case of a merge conflict)
        # then it is the start of a new source file
        if line.startswith('diff --git') or line.startswith('diff --cc'):
            # Retrieve the name of the source file
            src_path = self._parse_source_line(line)
            # Create an entry for the source file, if we don't
            # already have one.
            if src_path not in source_dict:
                source_dict[src_path] = []
            # Signal that we're waiting for a hunk for this source file
            found_hunk = False
        # Every other line is stored in the dictionary for this source file
        # once we find a hunk section
        else:
            # Only add lines if we're in a hunk section
            # (ignore index and files changed lines)
            if found_hunk or line.startswith('@@'):
                # Remember that we found a hunk
                found_hunk = True
                if src_path is not None:
                    source_dict[src_path].append(line)
                else:
                    # We tolerate other information before we have
                    # a source file defined, unless it's a hunk line
                    if line.startswith("@@"):
                        msg = "Hunk has no source file: '{}'".format(line)
                        raise GitDiffError(msg)
    return source_dict
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_source_line(self, line):
    """Given a source line in `git diff` output, return the path to the
    source file.

    :raises GitDiffError: if the header format is unrecognized or the
        path cannot be extracted.
    """
    if '--git' in line:
        regex = self.SRC_FILE_RE
    elif '--cc' in line:
        # Merge-conflict style header uses a different pattern.
        regex = self.MERGE_CONFLICT_RE
    else:
        msg = "Do not recognize format of source in line '{}'".format(line)
        raise GitDiffError(msg)
    # Parse for the source file path
    groups = regex.findall(line)
    if len(groups) == 1:
        return groups[0]
    else:
        msg = "Could not parse source path in line '{}'".format(line)
        raise GitDiffError(msg)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_hunk_line(self, line):
    """Given a hunk line in `git diff` output, return the line number at
    the start of the hunk.

    A hunk is a segment of code that contains changes.  The format of the
    hunk line is::

        @@ -k,l +n,m @@ TEXT

    where `k,l` represent the start line and length before the changes
    and `n,m` represent the start line and length after the changes.
    `git diff` will sometimes put a code excerpt from within the hunk in
    the `TEXT` section of the line.

    :raises GitDiffError: if the hunk cannot be parsed.
    """
    # Split the line at the @@ terminators (start and end of the line)
    components = line.split('@@')
    # The first component should be an empty string, because
    # the line starts with '@@'. The second component should
    # be the hunk information, and any additional components
    # are excerpts from the code.
    if len(components) >= 2:
        hunk_info = components[1]
        groups = self.HUNK_LINE_RE.findall(hunk_info)
        if len(groups) == 1:
            try:
                return int(groups[0])
            except ValueError:
                msg = "Could not parse '{}' as a line number".format(groups[0])
                raise GitDiffError(msg)
        else:
            msg = "Could not find start of hunk in line '{}'".format(line)
            raise GitDiffError(msg)
    else:
        msg = "Could not parse hunk in line '{}'".format(line)
        raise GitDiffError(msg)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _unique_ordered_lines(line_numbers):
    """Given a list of line numbers, return a list in which each line
    number is included once and the lines are ordered sequentially.
    """
    if not line_numbers:
        return []
    # A set removes duplicates; sorted() restores sequential order.
    return sorted(set(line_numbers))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def src_paths(self):
    """Return a list of source files in the diff for which we have
    coverage information.
    """
    covered = set()
    for path, summary in self._diff_violations().items():
        # Only files with at least one measured line count.
        if summary.measured_lines:
            covered.add(path)
    return covered
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def percent_covered(self, src_path):
    """Return a float percent of lines covered for the source in
    `src_path`.  If we have no coverage information for `src_path`,
    returns None.
    """
    summary = self._diff_violations().get(src_path)
    if summary is None:
        return None
    measured = len(summary.measured_lines)
    if measured == 0:
        # No measured lines: avoid dividing by zero.
        return None
    uncovered = len(summary.lines)
    return 100 - float(uncovered) / measured * 100
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def total_num_lines(self):
    """Return the total number of lines in the diff for which we have
    coverage info.
    """
    summaries = self._diff_violations().values()
    return sum(len(summary.measured_lines) for summary in summaries)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def total_num_violations(self):
    """Return the total number of lines in the diff that are in
    violation.
    """
    total = 0
    for summary in self._diff_violations().values():
        total += len(summary.lines)
    return total
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate_report(self, output_file):
    """Render the report template and write it to ``output_file``.

    ``output_file`` must be a file handle that accepts bytes.
    """
    if self.TEMPLATE_NAME is None:
        return
    template = TEMPLATE_ENV.get_template(self.TEMPLATE_NAME)
    rendered = template.render(self._context())
    # Jinja renders text; the output handle wants bytes.
    if isinstance(rendered, six.string_types):
        rendered = rendered.encode('utf-8')
    output_file.write(rendered)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def generate_css(self, output_file):
    """Generate an external style sheet file.

    ``output_file`` must be a file handle that accepts bytes.
    """
    if self.CSS_TEMPLATE_NAME is None:
        return
    template = TEMPLATE_ENV.get_template(self.CSS_TEMPLATE_NAME)
    rendered = template.render(self._context())
    # Jinja renders text; the output handle wants bytes.
    if isinstance(rendered, six.string_types):
        rendered = rendered.encode('utf-8')
    output_file.write(rendered)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _context(self):
    """Return the context to pass to the template.

    The context is a dict of the form::

        {
            'css_url': CSS_URL,
            'report_name': REPORT_NAME,
            'diff_name': DIFF_NAME,
            'src_stats': {SRC_PATH: {
                'percent_covered': PERCENT_COVERED,
                ...
            }},
            'total_num_lines': TOTAL_NUM_LINES,
            'total_num_violations': TOTAL_NUM_VIOLATIONS,
            'total_percent_covered': TOTAL_PERCENT_COVERED,
            'snippet_style': SNIPPET_STYLE or None
        }
    """
    # Calculate the information to pass to the template
    src_stats = {
        src: self._src_path_stats(src) for src in self.src_paths()
    }
    # Include snippet style info if we're displaying
    # source code snippets
    if self.INCLUDE_SNIPPETS:
        snippet_style = Snippet.style_defs()
    else:
        snippet_style = None
    return {
        'css_url': self.css_url,
        'report_name': self.coverage_report_name(),
        'diff_name': self.diff_report_name(),
        'src_stats': src_stats,
        'total_num_lines': self.total_num_lines(),
        'total_num_violations': self.total_num_violations(),
        'total_percent_covered': self.total_percent_covered(),
        'snippet_style': snippet_style
    }
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def combine_adjacent_lines(line_numbers):
    """Given a sorted collection of line numbers, turn them to strings
    and combine adjacent values.

    [1, 2, 5, 6, 100] -> ["1-2", "5-6", "100"]

    :param line_numbers: sorted list of ints; the input list is NOT
        modified (the previous implementation appended a sentinel to the
        caller's list in place).
    """
    combine_template = "{0}-{1}"
    combined_list = []
    # Work on a copy with a terminating `None` sentinel so the final run
    # is flushed without mutating the caller's list.
    numbers = list(line_numbers) + [None]
    start = numbers[0]
    end = None
    for line_number in numbers[1:]:
        # If the current number is adjacent to the previous number
        if (end if end else start) + 1 == line_number:
            end = line_number
        else:
            if end:
                combined_list.append(combine_template.format(start, end))
            else:
                combined_list.append(str(start))
            start = line_number
            end = None
    return combined_list
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _src_path_stats(self, src_path):
    """Return a dict of statistics for the source file at `src_path`.

    Keys: ``percent_covered``, ``violation_lines`` (adjacent lines
    combined into "start-end" strings), ``violations``,
    ``snippets_html``.
    """
    # Find violation lines
    violation_lines = self.violation_lines(src_path)
    violations = sorted(self._diff_violations()[src_path].violations)
    # Load source snippets (if the report will display them)
    # If we cannot load the file, then fail gracefully
    if self.INCLUDE_SNIPPETS:
        try:
            snippets = Snippet.load_snippets_html(src_path, violation_lines)
        except IOError:
            snippets = []
    else:
        snippets = []
    return {
        'percent_covered': self.percent_covered(src_path),
        'violation_lines': TemplateReportGenerator.combine_adjacent_lines(violation_lines),
        'violations': violations,
        'snippets_html': snippets
    }
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run_command_for_code(command):
    """Run *command* and return its exit code.

    Output on stdout/stderr is discarded.
    """
    # DEVNULL avoids buffering the unused output in memory, and the
    # context manager guarantees the process handles are cleaned up.
    with subprocess.Popen(
        command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
    ) as process:
        process.communicate()
    return process.returncode
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def style_defs(cls):
    """Return the CSS style definitions required by the formatted
    snippet.
    """
    formatter = HtmlFormatter()
    # Violation lines are highlighted with our configured color.
    formatter.style.highlight_color = cls.VIOLATION_COLOR
    return formatter.get_style_defs()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def html(self):
    """Return an HTML representation of the snippet."""
    formatter = HtmlFormatter(
        cssclass=self.DIV_CSS_CLASS,
        linenos=True,
        # Line numbering starts at the snippet's first source line.
        linenostart=self._start_line,
        # Highlighted lines must be given relative to the snippet start.
        hl_lines=self._shift_lines(
            self._violation_lines,
            self._start_line
        ),
        lineanchors=self._src_filename
    )
    return pygments.format(self.src_tokens(), formatter)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_snippets_html(cls, src_path, violation_lines):
    """Load snippets from the file at `src_path` and format them as HTML.

    See `load_snippets()` for details.
    """
    return [snippet.html()
            for snippet in cls.load_snippets(src_path, violation_lines)]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _group_tokens(cls, token_stream, range_list):
    """Group tokens into snippet ranges.

    `token_stream` is a generator that produces `(token_type, value)`
    tuples; `range_list` is a list of `(start, end)` tuples representing
    the (inclusive) range of line numbers for each snippet.  Assumes
    `range_list` is in ascending order by start value.

    Returns a dict mapping each range tuple to its list of tokens.
    The algorithm is slightly complicated because a single token can
    contain multiple line breaks.
    """
    # Create a map from ranges (start/end tuples) to tokens
    token_map = {rng: [] for rng in range_list}
    # Keep track of the current line number; we will
    # increment this as we encounter newlines in token values
    line_num = 1
    for ttype, val in token_stream:
        # If there are newlines in this token,
        # we need to split it up and check whether
        # each line within the token is within one
        # of our ranges.
        if '\n' in val:
            val_lines = val.split('\n')
            # Check if the tokens match each range
            for (start, end), filtered_tokens in six.iteritems(token_map):
                # Filter out lines that are not in this range
                include_vals = [
                    val_lines[i] for i in
                    range(0, len(val_lines))
                    if i + line_num in range(start, end + 1)
                ]
                # If we found any lines, store the tokens
                if len(include_vals) > 0:
                    token = (ttype, '\n'.join(include_vals))
                    filtered_tokens.append(token)
            # Increment the line number
            # by the number of lines we found
            line_num += len(val_lines) - 1
        # No newline in this token
        # If we're in the line range, add it
        else:
            # Check if the tokens match each range
            for (start, end), filtered_tokens in six.iteritems(token_map):
                # If we got a match, store the token
                if line_num in range(start, end + 1):
                    filtered_tokens.append((ttype, val))
                # Otherwise, ignore the token
    return token_map
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def violations(self, src_path):
    """Return a list of Violations recorded in `src_path`.

    Results come either from pre-supplied reports or from running the
    driver tool on the file; parsed output is cached in
    ``self.violations_dict``.

    :raises EnvironmentError: if the driver tool is not installed
    """
    # Files whose extension the driver does not support have no violations.
    if not any(src_path.endswith(ext) for ext in self.driver.supported_extensions):
        return []
    if src_path not in self.violations_dict:
        if self.reports:
            self.violations_dict = self.driver.parse_reports(self.reports)
        else:
            if self.driver_tool_installed is None:
                # Probe for the tool only once and memoize the answer.
                self.driver_tool_installed = self.driver.installed()
            if not self.driver_tool_installed:
                raise EnvironmentError("{} is not installed".format(self.driver.name))
            # Deep-copy so repeated calls don't accumulate arguments
            # on the driver's command template.
            command = copy.deepcopy(self.driver.command)
            if self.options:
                command.append(self.options)
            if os.path.exists(src_path):
                command.append(src_path.encode(sys.getfilesystemencoding()))
            output, _ = execute(command, self.driver.exit_codes)
            self.violations_dict.update(self.driver.parse_reports([output]))
    return self.violations_dict[src_path]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def configure(self, transport, auth, address, port):
    """Record paramiko connection parameters on this object.

    :type auth: :py:class`margaritashotgun.auth.AuthMethods`
    :param auth: authentication object (the username is taken from it)
    :type address: str
    :param address: remote server ip or hostname
    :type port: int
    :param port: remote server port
    """
    self.transport, self.address, self.port = transport, address, port
    self.username = auth.username
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def start(self, local_port, remote_address, remote_port):
    """Start ssh tunnel.

    :type local_port: int
    :param local_port: local tunnel endpoint ip binding
    :type remote_address: str
    :param remote_address: remote tunnel endpoint ip binding
    :type remote_port: int
    :param remote_port: remote tunnel endpoint port binding
    """
    self.local_port = local_port
    self.remote_address = remote_address
    self.remote_port = remote_port
    logger.debug(("Starting ssh tunnel {0}:{1}:{2} for "
                  "{3}@{4}".format(local_port, remote_address, remote_port,
                                   self.username, self.address)))
    # The Forward thread owns the actual port-forwarding loop.
    self.forward = Forward(local_port,
                           remote_address,
                           remote_port,
                           self.transport)
    self.forward.start()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cleanup(self):
    """Cleanup resources used during execution.

    Stops the forward thread (waiting for it to exit) and closes the
    transport, guarding each step so partial setup is handled.
    """
    if self.local_port is not None:
        logger.debug(("Stopping ssh tunnel {0}:{1}:{2} for "
                      "{3}@{4}".format(self.local_port,
                                       self.remote_address,
                                       self.remote_port,
                                       self.username,
                                       self.address)))
    if self.forward is not None:
        self.forward.stop()
        # Wait for the forwarding thread to terminate.
        self.forward.join()
    if self.transport is not None:
        self.transport.close()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def set_stream_logger(name='margaritashotgun', level=logging.INFO, format_string=None):
    """Add a stream handler for the provided name and level to the
    logging module.

    :type name: string
    :param name: Log name
    :type level: int
    :param level: Logging level
    :type format_string: str
    :param format_string: Log message format

    NOTE(review): each call adds a new handler to both loggers, so
    calling this twice duplicates log lines — confirm callers invoke it
    only once.
    """
    if format_string is None:
        format_string = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    time_format = "%Y-%m-%dT%H:%M:%S"
    logger = logging.getLogger(name)
    logger.setLevel(level)
    handler = logging.StreamHandler()
    handler.setLevel(level)
    formatter = logging.Formatter(format_string, time_format)
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    # Quiet paramiko's own logging down to CRITICAL only.
    paramiko_log_level = logging.CRITICAL
    paramiko_log = logging.getLogger('paramiko')
    paramiko_log.setLevel(paramiko_log_level)
    paramiko_handler = logging.StreamHandler()
    paramiko_handler.setLevel(paramiko_log_level)
    paramiko_handler.setFormatter(formatter)
    paramiko_log.addHandler(paramiko_handler)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self, username, password, key, address, port, jump_host):
    """ Connect ssh tunnel and shell executor to remote host :type username: str :param username: username for authentication :type password: str :param password: password for authentication, may be used to unlock rsa key :type key: str :param key: path to rsa key for authentication :type address: str :param address: address for remote host :type port: int :param port: ssh port for remote host """ |
    # default to the standard ssh port when none was supplied
    if port is None:
        self.remote_port = 22
    else:
        self.remote_port = int(port)
    auth = Auth(username=username, password=password, key=key)
    if jump_host is not None:
        # separate credentials for the intermediate (bastion) host
        jump_auth = Auth(username=jump_host['username'],
                         password=jump_host['password'],
                         key=jump_host['key'])
        # NOTE: mutates the caller's jump_host dict to fill in the port
        if jump_host['port'] is None:
            jump_host['port'] = 22
    else:
        jump_auth = None
    self.shell.connect(auth, address, self.remote_port, jump_host, jump_auth)
    # reuse the shell's transport for the forward tunnel
    transport = self.shell.transport()
    self.tunnel.configure(transport, auth, address, self.remote_port)
    self.remote_addr = address |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def start_tunnel(self, local_port, remote_address, remote_port):
    """Open the ssh forward tunnel and remember the local bind port.

    :param local_port: local port binding for the tunnel
    :param remote_address: remote tunnel endpoint bind address
    :param remote_port: remote tunnel endpoint bind port
    """
    self.tunnel.start(local_port, remote_address, remote_port)
    self.tunnel_port = local_port
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def mem_size(self):
    """Return the amount of memory on the remote host, in bytes."""
    streams = self.shell.execute(self.commands.mem_size.value)
    size_text = self.shell.decode(streams['stdout'])
    # decode stderr as well so the channel is fully drained (value unused)
    self.shell.decode(streams['stderr'])
    return int(size_text)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def kernel_version(self):
    """Return the kernel release string of the remote host."""
    streams = self.shell.execute(self.commands.kernel_version.value)
    version = self.shell.decode(streams['stdout'])
    # drain stderr too; its content is not needed
    self.shell.decode(streams['stderr'])
    return version
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def wait_for_lime(self, listen_port, listen_address="0.0.0.0", max_tries=20, wait=1):
    """Poll until LiME is listening or *max_tries* is exhausted.

    :param listen_port: port LiME is listening for connections on
    :param listen_address: address LiME is listening for connections on
    :param max_tries: maximum number of checks that LiME has loaded
    :param wait: time to wait between checks
    :returns: True when LiME is loaded, False otherwise
    """
    pattern = self.commands.lime_pattern.value.format(listen_address,
                                                      listen_port)
    loaded = False
    attempts = 0
    while attempts < max_tries and not loaded:
        loaded = self.check_for_lime(pattern)
        attempts += 1
        time.sleep(wait)
    return loaded
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_for_lime(self, pattern):
    """Return True when a listener matching *pattern* is present on the host.

    :param pattern: local address:port string to look for in the
                    remote connection listing
    """
    result = self.shell.execute(self.commands.lime_check.value)
    listing = self.shell.decode(result['stdout'])
    for local_addr, remote_addr in self.net_parser.parse(listing):
        if local_addr == pattern:
            return True
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def upload_module(self, local_path=None, remote_path="/tmp/lime.ko"):
    """Push the LiME kernel module to the remote host.

    :param local_path: local path to the lime kernel module
    :param remote_path: destination path for the module on the remote host
    """
    if local_path is None:
        # NOTE(review): 'FileNotFoundFoundError' looks like a doubled-word
        # typo for a project exception -- confirm against the exceptions module
        raise FileNotFoundFoundError(local_path)
    self.shell.upload_file(local_path, remote_path)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_lime(self, remote_path, listen_port, dump_format='lime'):
    """Insert the LiME kernel module on the remote host without blocking.

    :param remote_path: path to the LiME module on the remote filesystem
    :param listen_port: port LiME uses to listen for remote connections
    :param dump_format: LiME memory dump file format
    """
    command = self.commands.load_lime.value.format(remote_path,
                                                   listen_port,
                                                   dump_format)
    self.shell.execute_async(command)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cleanup(self):
    """Unload LiME when possible, then release tunnel and shell resources."""
    try:
        self.unload_lime()
    except AttributeError:
        # unload_lime may not exist if setup never completed; best effort
        pass
    self.tunnel.cleanup()
    self.shell.cleanup()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect(self, auth, address, port, jump_host, jump_auth):
    """ Creates an ssh session to a remote host :type auth: :py:class:`margaritashotgun.auth.AuthMethods` :param auth: Authentication object :type address: str :param address: remote server address :type port: int :param port: remote server port """ |
    try:
        self.target_address = address
        sock = None
        if jump_host is not None:
            # connect to the bastion first, then open a direct-tcpip channel
            # through its transport to reach the real target
            self.jump_host_ssh = paramiko.SSHClient()
            self.jump_host_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            self.connect_with_auth(self.jump_host_ssh, jump_auth,
                                   jump_host['addr'], jump_host['port'], sock)
            transport = self.jump_host_ssh.get_transport()
            dest_addr = (address, port)
            jump_addr = (jump_host['addr'], jump_host['port'])
            channel = transport.open_channel('direct-tcpip', dest_addr,
                                             jump_addr)
            # the channel stands in for the socket of the target connection
            self.connect_with_auth(self.ssh, auth, address, port, channel)
        else:
            self.connect_with_auth(self.ssh, auth, address, port, sock)
    except (AuthenticationException, SSHException,
            ChannelException, SocketError) as ex:
        raise SSHConnectionError("{0}:{1}".format(address, port), ex) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect_with_password(self, ssh, username, password, address, port, sock, timeout=20):
    """Open an ssh session authenticated with a username and password.

    :param ssh: paramiko SSHClient to connect
    :param username: username used for ssh authentication
    :param password: password used for ssh authentication
    :param address: remote server address
    :param port: remote server port
    :param sock: optional socket/channel to tunnel the connection through
    :param timeout: connection timeout in seconds
    """
    ssh.connect(hostname=address,
                port=port,
                username=username,
                password=password,
                sock=sock,
                timeout=timeout)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def connect_with_key(self, ssh, username, key, address, port, sock, timeout=20):
    """Open an ssh session authenticated with a username and rsa key.

    :param ssh: paramiko SSHClient to connect
    :param username: username used for ssh authentication
    :param key: paramiko rsa key used for ssh authentication
    :param address: remote server address
    :param port: remote server port
    :param sock: optional socket/channel to tunnel the connection through
    :param timeout: connection timeout in seconds
    """
    ssh.connect(username=username,
                pkey=key,
                hostname=address,
                port=port,
                sock=sock,
                timeout=timeout)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def execute(self, command):
    """ Executes command on remote hosts :type command: str :param command: command to be run on remote host """ |
    try:
        # a live transport indicates the session is still connected
        if self.ssh.get_transport() is not None:
            logger.debug('{0}: executing "{1}"'.format(self.target_address,
                                                       command))
            stdin, stdout, stderr = self.ssh.exec_command(command)
            # expose the three paramiko streams by name
            return dict(zip(['stdin', 'stdout', 'stderr'],
                            [stdin, stdout, stderr]))
        else:
            raise SSHConnectionError(self.target_address,
                                     "ssh transport is closed")
    except (AuthenticationException, SSHException,
            ChannelException, SocketError) as ex:
        logger.critical(("{0} execution failed on {1} with exception:"
                         "{2}".format(command, self.target_address,
                                      ex)))
        raise SSHCommandError(self.target_address, command, ex) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def execute_async(self, command, callback=None):
    """ Executes command on remote hosts without blocking :type command: str :param command: command to be run on remote host :type callback: function :param callback: function to call when execution completes """ |
    try:
        logger.debug(('{0}: execute async "{1}"'
                      'with callback {2}'.format(self.target_address,
                                                 command,
                                                 callback)))
        # run self.execute in the thread pool so the caller is not blocked
        future = self.executor.submit(self.execute, command)
        if callback is not None:
            future.add_done_callback(callback)
        return future
    except (AuthenticationException, SSHException,
            ChannelException, SocketError) as ex:
        logger.critical(("{0} execution failed on {1} with exception:"
                         "{2}".format(command, self.target_address,
                                      ex)))
        raise SSHCommandError(self.target_address, command, ex) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def decode(self, stream, encoding='utf-8'):
    """Read a paramiko stream and return its contents as a stripped string.

    :param stream: paramiko stream to convert
    :param encoding: stream encoding
    """
    text = stream.read().decode(encoding).strip("\n")
    if text != "":
        logger.debug(('{0}: decoded "{1}" with encoding '
                      '{2}'.format(self.target_address, text, encoding)))
    return text
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def upload_file(self, local_path, remote_path):
    """Upload a local file to the remote host over sftp.

    :param local_path: path of the local file to upload
    :param remote_path: destination path of the upload on the remote host
    """
    logger.debug("{0}: uploading {1} to {0}:{2}".format(self.target_address,
                                                        local_path,
                                                        remote_path))
    sftp = None
    try:
        sftp = paramiko.SFTPClient.from_transport(self.transport())
        sftp.put(local_path, remote_path)
    except SSHException as ex:
        logger.warn(("{0}: LiME module upload failed with exception:"
                     "{1}".format(self.target_address, ex)))
    finally:
        # bug fix: previously the sftp session leaked when put() raised
        if sftp is not None:
            sftp.close()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cleanup(self):
    """Release resources used during shell execution."""
    # cancel any queued async commands before shutting the pool down
    for future in self.futures:
        future.cancel()
    # bug fix: Executor.shutdown takes a boolean 'wait' flag, not a timeout;
    # the previous wait=10 only worked because 10 is truthy
    self.executor.shutdown(wait=True)
    if self.ssh.get_transport() is not None:
        self.ssh.close()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_args(self, args):
    """ Parse arguments and return an arguments object :type args: list :param args: list of arguments """ |
    parser = argparse.ArgumentParser(
        description='Remote memory aquisition wrapper for LiME')
    # exactly one of --config / --server / --version must be given
    root = parser.add_mutually_exclusive_group(required=True)
    root.add_argument('-c', '--config', help='path to config.yml')
    root.add_argument('--server',
                      help='hostname or ip of target server')
    root.add_argument('--version', action='version',
                      version="%(prog)s {ver}".format(ver=__version__))
    # optional per-host and repository settings
    opts = parser.add_argument_group()
    opts.add_argument('--port', help='ssh port on remote server')
    opts.add_argument('--username',
                      help='username for ssh connection to target server')
    opts.add_argument('--module',
                      help='path to kernel lime kernel module')
    opts.add_argument('--password',
                      help='password for user or encrypted keyfile')
    opts.add_argument('--key',
                      help='path to rsa key for ssh connection')
    opts.add_argument('--jump-server',
                      help='hostname or ip of jump server')
    opts.add_argument('--jump-port',
                      help='ssh port on jump server')
    opts.add_argument('--jump-username',
                      help='username for ssh connection to jump server')
    opts.add_argument('--jump-password',
                      help='password for jump-user or encrypted keyfile')
    opts.add_argument('--jump-key',
                      help='path to rsa key for ssh connection to jump server')
    opts.add_argument('--filename',
                      help='memory dump filename')
    opts.add_argument('--repository', action='store_true',
                      help='enable automatic kernel module downloads')
    opts.add_argument('--repository-url',
                      help='kernel module repository url')
    opts.add_argument('--repository-manifest',
                      help='specify alternate repository manifest')
    opts.add_argument('--gpg-no-verify', dest='gpg_verify',
                      action='store_false',
                      help='skip lime module gpg signature check')
    opts.add_argument('--workers', default=1,
                      help=('number of workers to run in parallel,'
                            'default: auto acceptable values are'
                            '(INTEGER | "auto")'))
    opts.add_argument('--verbose', action='store_true',
                      help='log debug messages')
    opts.set_defaults(repository_manifest='primary')
    opts.set_defaults(gpg_verify=True)
    # output goes to at most one of an s3 bucket or a local directory
    output = parser.add_mutually_exclusive_group(required=False)
    output.add_argument('--bucket',
                        help='memory dump output bucket')
    output.add_argument('--output-dir',
                        help='memory dump output directory')
    log = parser.add_argument_group()
    log.add_argument('--log-dir',
                     help='log directory')
    log.add_argument('--log-prefix',
                     help='log file prefix')
    return parser.parse_args(args) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def configure(self, arguments=None, config=None):
    """ Merge command line arguments, config files, and default configs :type arguments: argparse.Namespace :params arguments: Arguments produced by Cli.parse_args :type config: dict :params config: configuration dict to merge and validate """ |
    # NOTE(review): if both arguments and config are None, working_config is
    # never assigned and the code below raises UnboundLocalError -- confirm
    # callers always supply at least one of them
    if arguments is not None:
        args_config = self.configure_args(arguments)
        base_config = copy.deepcopy(default_config)
        working_config = self.merge_config(base_config, args_config)
    if config is not None:
        self.validate_config(config)
        base_config = copy.deepcopy(default_config)
        working_config = self.merge_config(base_config, config)
    # override configuration with environment variables
    repo = self.get_env_default('LIME_REPOSITORY', 'disabled')
    repo_url = self.get_env_default('LIME_REPOSITORY_URL',
                                    working_config['repository']['url'])
    if repo.lower() == 'enabled':
        working_config['repository']['enabled'] = True
    working_config['repository']['url'] = repo_url
    return working_config |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_env_default(self, variable, default):
    """Return the value of *variable* from the environment, or *default*
    when it is not set."""
    return os.environ.get(variable, default)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def configure_args(self, arguments):
    """ Create configuration has from command line arguments :type arguments: :py:class:`argparse.Namespace` :params arguments: arguments produced by :py:meth:`Cli.parse_args()` """ |
    # validate user supplied paths before building the config
    module, key, config_path = self.check_file_paths(arguments.module,
                                                     arguments.key,
                                                     arguments.config)
    log_dir = self.check_directory_paths(arguments.log_dir)
    if arguments.repository_url is None:
        url = default_config['repository']['url']
    else:
        url = arguments.repository_url
    args_config = dict(aws=dict(bucket=arguments.bucket),
                       logging=dict(dir=arguments.log_dir,
                                    prefix=arguments.log_prefix),
                       workers=arguments.workers,
                       repository=dict(enabled=arguments.repository,
                                       url=url,
                                       manifest=arguments.repository_manifest,
                                       gpg_verify=arguments.gpg_verify))
    if arguments.server is not None:
        # build the single-host config from command line flags
        jump_host = None
        if arguments.jump_server is not None:
            if arguments.jump_port is not None:
                jump_port = int(arguments.jump_port)
            else:
                jump_port = None
            jump_host = dict(zip(jump_host_allowed_keys,
                                 [arguments.jump_server,
                                  jump_port,
                                  arguments.jump_username,
                                  arguments.jump_password,
                                  arguments.jump_key]))
        if arguments.port is not None:
            port = int(arguments.port)
        else:
            port = None
        host = dict(zip(host_allowed_keys,
                        [arguments.server, port, arguments.username,
                         arguments.password, module, key,
                         arguments.filename, jump_host]))
        args_config['hosts'] = []
        args_config['hosts'].append(host)
    if config_path is not None:
        # a config file overrides settings assembled from the command line
        try:
            config = self.load_config(config_path)
            self.validate_config(config)
            args_config.update(config)
        except YAMLError as ex:
            logger.warn('Invalid yaml Format: {0}'.format(ex))
            raise
        except InvalidConfigurationError as ex:
            logger.warn(ex)
            raise
    return args_config |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_file_paths(self, *args):
    """Verify that every non-None argument refers to an existing file.

    Returns the arguments unchanged; raises OSError on a missing file.
    """
    for candidate in args:
        if candidate is None:
            continue
        try:
            self.check_file_path(candidate)
        except OSError as err:
            logger.warn(err)
            raise
    return args
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_file_path(self, path):
    """Raise OSError unless a file exists at *path*.

    :param path: path to check on the local filesystem
    """
    if not os.path.exists(path):
        raise OSError("File Not Found {}".format(path))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_directory_paths(self, *args):
    """Verify that every non-None argument refers to an existing directory.

    Returns the arguments unchanged; raises OSError on a missing directory.
    """
    for candidate in args:
        if candidate is None:
            continue
        try:
            self.check_directory_path(candidate)
        except OSError as err:
            logger.warn(err)
            raise
    return args
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_directory_path(self, path):
    """Raise OSError unless a directory exists at *path*.

    :param path: path to the directory to check
    """
    if not os.path.isdir(path):
        raise OSError("Directory Does Not Exist {}".format(path))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def validate_config(self, config):
    """Validate that all configuration dict keys are supported.

    :param config: configuration dictionary
    :raises InvalidConfigurationError: on an unknown key, a malformed
        hosts section, or an s3 bucket combined with host filenames
    """
    # the 'hosts' section is mandatory; all other sections are optional
    try:
        hosts = config['hosts']
    except KeyError:
        raise InvalidConfigurationError('hosts', "",
                                        reason=('hosts configuration '
                                                'section is required'))
    for key in config.keys():
        if key not in default_allowed_keys:
            raise InvalidConfigurationError(key, config[key])
    bucket = False
    # optional aws configuration
    try:
        for key in config['aws'].keys():
            if key == 'bucket' and config['aws'][key] is not None:
                bucket = True
            if key not in aws_allowed_keys:
                raise InvalidConfigurationError(key, config['aws'][key])
    except KeyError:
        pass
    # optional logging configuration
    try:
        for key in config['logging'].keys():
            if key not in logging_allowed_keys:
                raise InvalidConfigurationError(key, config['logging'][key])
    except KeyError:
        pass
    # optional repository configuration
    try:
        for key in config['repository'].keys():
            if key not in repository_allowed_keys:
                raise InvalidConfigurationError(key, config['repository'][key])
    except KeyError:
        pass
    # required host configuration
    if type(hosts) is not list:
        raise InvalidConfigurationError('hosts', hosts,
                                        reason="hosts must be a list")
    filename = False
    for host in hosts:
        for key in host.keys():
            if key == 'filename' and host['filename'] is not None:
                filename = True
            if key == 'jump_host' and host['jump_host'] is not None:
                for jump_key in host['jump_host'].keys():
                    if jump_key not in jump_host_allowed_keys:
                        raise InvalidConfigurationError(key, host['jump_host'])
            if key not in host_allowed_keys:
                raise InvalidConfigurationError(key, host[key])
    # a global bucket and per-host filenames cannot be combined
    if bucket and filename:
        # bug fix: the concatenated message parts previously ran together
        # without spaces ("...isincompatible...filenameconfiguration...")
        raise InvalidConfigurationError('bucket', config['aws']['bucket'],
                                        reason=('bucket configuration is '
                                                'incompatible with filename '
                                                'configuration in hosts'))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def load_key(self, key_path, password):
    """Load an RSA private key, retrying with *password* when the key file
    is encrypted.

    :param key_path: path to the rsa key
    :param password: password to try if the rsa key is encrypted
    """
    try:
        return paramiko.RSAKey.from_private_key_file(key_path)
    except PasswordRequiredException:
        return paramiko.RSAKey.from_private_key_file(key_path,
                                                     password=password)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run(self):
    """ Captures remote hosts memory """ |
    logger = logging.getLogger(__name__)
    try:
        # Check repository GPG settings before starting workers
        # Handling this here prevents subprocesses from needing stdin access
        repo_conf = self.config['repository']
        repo = None
        if repo_conf['enabled'] and repo_conf['gpg_verify']:
            try:
                repo = Repository(repo_conf['url'],
                                  repo_conf['gpg_verify'])
                repo.init_gpg()
            except Exception as ex:
                # Do not prompt to install gpg keys unless running interactively
                if repo is not None and self.library is False:
                    if isinstance(ex, RepositoryUntrustedSigningKeyError):
                        installed = repo.prompt_for_install()
                        if installed is False:
                            logger.critical(("repository signature not "
                                             "installed, install the "
                                             "signature manually or use "
                                             "the --gpg-no-verify flag "
                                             "to bypass this check"))
                            quit(1)
                else:
                    logger.critical(ex)
                    quit(1)
        # fan the per-host capture work out across worker processes
        conf = self.map_config()
        workers = Workers(conf, self.config['workers'], name=self.name, library=self.library)
        description = 'memory capture action'
        results = workers.spawn(description)
        self.statistics(results)
        if self.library is True:
            # library callers get a result dict instead of process exit
            return dict([('total', self.total),
                         ('completed', self.completed_addresses),
                         ('failed', self.failed_addresses)])
        else:
            logger.info(("{0} hosts processed. completed: {1} "
                         "failed {2}".format(self.total, self.completed,
                                             self.failed)))
            logger.info("completed_hosts: {0}".format(self.completed_addresses))
            logger.info("failed_hosts: {0}".format(self.failed_addresses))
            quit()
    except KeyboardInterrupt:
        # user interrupt: terminate workers, then exit or propagate
        workers.cleanup(terminate=True)
        if self.library:
            raise
        else:
            quit(1)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def capture(self, tunnel_addr, tunnel_port, filename=None, bucket=None, destination=None):
    """Dump remote memory to the requested output destination.

    Dispatches to the local-file or s3 writer based on *destination*
    and returns that writer's result.

    :param tunnel_addr: ssh tunnel hostname or ip
    :param tunnel_port: ssh tunnel port
    :param filename: memory dump output filename
    :param bucket: output s3 bucket
    :param destination: OutputDestinations member
    """
    if filename is None:
        raise MemoryCaptureAttributeMissingError('filename')
    if destination == OutputDestinations.local:
        logger.info("{0}: dumping to file://{1}".format(self.remote_addr,
                                                        filename))
        return self.to_file(filename, tunnel_addr, tunnel_port)
    if destination == OutputDestinations.s3:
        if bucket is None:
            raise MemoryCaptureAttributeMissingError('bucket')
        logger.info(("{0}: dumping memory to s3://{1}/"
                     "{2}".format(self.remote_addr, bucket, filename)))
        return self.to_s3(bucket, filename, tunnel_addr, tunnel_port)
    raise MemoryCaptureOutputMissingError(self.remote_addr)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def to_file(self, filename, tunnel_addr, tunnel_port):
    """ Writes memory dump to a local file :type filename: str :param filename: memory dump output filename :type tunnel_addr: str :param tunnel_port: ssh tunnel hostname or ip :type tunnel_port: int :param tunnel_port: ssh tunnel port """ |
    if self.progressbar:
        self.bar = ProgressBar(widgets=self.widgets,
                               maxval=self.max_size).start()
        self.bar.start()
    with open(filename, 'wb') as self.outfile:
        # stream the dump from LiME through the local tunnel endpoint
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((tunnel_addr, tunnel_port))
        self.sock.settimeout(self.sock_timeout)
        bytes_since_update = 0
        while True:
            try:
                data = self.sock.recv(self.recv_size)
                data_length = len(data)
                # an empty read means LiME closed the connection: dump done
                if not data:
                    break
                self.outfile.write(data)
                self.transfered = self.transfered + data_length
                bytes_since_update += data_length
                data = None
                data_length = 0
                # throttle progress reporting to every update_threshold bytes
                if bytes_since_update > self.update_threshold:
                    self.update_progress()
                    bytes_since_update = 0
            except (socket.timeout, socket.error) as ex:
                if isinstance(ex, socket.timeout):
                    # a receive timeout is treated as end of stream
                    break
                elif isinstance(ex, socket.error):
                    if ex.errno == errno.EINTR:
                        # interrupted system call: retry the recv
                        pass
                    else:
                        self.cleanup()
                        raise
                else:
                    self.cleanup()
                    raise
    self.cleanup()
    logger.info('{0}: capture complete: {1}'.format(self.remote_addr,
                                                    filename))
    return True |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def update_progress(self, complete=False):
    """ Logs capture progress :type complete: bool :params complete: toggle to finish ncurses progress bar """ |
    if self.progressbar:
        try:
            self.bar.update(self.transfered)
        except Exception as e:
            # transfered can exceed maxval when the dump is larger than
            # the reported memory size; log and carry on
            logger.debug("{0}: {1}, {2} exceeds memsize {3}".format(
                         self.remote_addr,
                         e,
                         self.transfered,
                         self.max_size))
        if complete:
            self.bar.update(self.max_size)
            self.bar.finish()
    else:
        percent = int(100 * float(self.transfered) / float(self.max_size))
        # print a message at 10%, 20%, etc...
        if percent % 10 == 0:
            if self.progress != percent:
                logger.info("{0}: capture {1}% complete".format(
                            self.remote_addr, percent))
                self.progress = percent |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def cleanup(self):
    """Close the capture socket and output file, finalizing the progress bar."""
    for resource in (self.sock, self.outfile):
        if resource is not None:
            resource.close()
    if self.bar is not None:
        self.update_progress(complete=True)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def init_gpg(self):
    """When gpg verification is enabled, initialize gnupg and check that
    the repository signing key is trusted."""
    if not self.gpg_verify:
        return
    logger.debug("gpg verification enabled, initializing gpg")
    self.gpg = gnupg.GPG(gnupghome=os.path.expanduser('~/.gnupg'))
    self.key_path, self.key_info = self.get_signing_key()
    logger.debug("{0} {1}".format(self.key_path, self.key_info))
    self.check_signing_key()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_signing_key(self):
    """Download the repository signing key and its metadata.

    Key metadata is used instead of gpg scan_keys, which is unavailable
    in some GPG versions (fixes #17).

    :returns: tuple of (local key path, parsed key metadata dict)
    :raises RepositoryMissingSigningKeyError: when the key is absent
    :raises RepositoryMissingKeyMetadataError: when the metadata is absent
    """
    tmp_key_path = "/tmp/{0}".format(self.repo_signing_key)
    repo_key_path = "{0}/{1}".format(self.url, self.repo_signing_key)
    repo_metadata_path = "{0}/{1}".format(self.url, self.key_metadata)
    req_key = requests.get(repo_key_path)
    req_metadata = requests.get(repo_metadata_path)
    # fetch the key to disk
    # bug fix: status codes are compared with '==' rather than 'is', which
    # only worked by accident of CPython small-int caching
    if req_key.status_code == 200:
        logger.debug(("found repository signing key at "
                      "{0}".format(repo_key_path)))
        self.raw_key = req_key.content
        with open(tmp_key_path, 'wb') as f:
            f.write(self.raw_key)
    else:
        raise RepositoryMissingSigningKeyError(repo_key_path)
    # fetch the fingerprint from the metadata
    if req_metadata.status_code == 200:
        logger.debug(("found key metadata at "
                      "{0}".format(repo_metadata_path)))
        key_info = json.loads(req_metadata.content.decode('utf-8'))
    else:
        # bug fix: the exception was previously referenced but never raised,
        # which left key_info unbound and produced an UnboundLocalError;
        # a stray debug print of the metadata body was also removed
        raise RepositoryMissingKeyMetadataError
    return (tmp_key_path, key_info)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_signing_key(self):
    """Check that the repo signing key is trusted by the user's gpg keyring.

    :raises RepositoryUntrustedSigningKeyError: when the key fingerprint
        is not present in the keyring
    """
    # bug fix: previously a non-matching key appearing after the matching
    # one reset the trusted flag, and an empty keyring left it unbound
    trusted = False
    for key in self.gpg.list_keys():
        if key['fingerprint'] == self.key_info['fingerprint']:
            trusted = True
            logger.debug(("repo signing key trusted in user keyring, "
                          "fingerprint {0}".format(key['fingerprint'])))
            break
    if trusted is False:
        repo_key_url = "{0}/{1}".format(self.url, self.repo_signing_key)
        raise RepositoryUntrustedSigningKeyError(repo_key_url,
                                                 self.key_info['fingerprint'])
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def prompt_for_install(self):
    """Interactively ask the user to trust and import the repo signing key.

    :returns: True when the key was installed, False otherwise
    """
    print(self.key_info)
    repo_key_url = "{0}/{1}".format(self.url, self.repo_signing_key)
    print(("warning: Repository key untrusted \n"
           "Importing GPG key 0x{0}:\n"
           " Userid: \"{1}\"\n"
           " From : {2}".format(self.key_info['fingerprint'],
                                self.key_info['uids'][0],
                                repo_key_url)))
    if prompt(u'Is this ok: [y/N] ') == 'y':
        self.install_key(self.raw_key)
        return True
    return False
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def install_key(self, key_data):
    """Import the repository signing key into the user's gpg keyring.

    :param key_data: raw key material to import
    """
    logger.info(("importing repository signing key {0} "
                 "{1}".format(self.key_info['fingerprint'],
                              self.key_info['uids'][0])))
    outcome = self.gpg.import_keys(key_data)
    logger.debug("import results: {0}".format(outcome.results))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fetch(self, kernel_version, manifest_type):
    """Locate and download the kernel module matching *kernel_version*.

    :param kernel_version: kernel version to search the repository for
    :param manifest_type: kernel module manifest to search in
    :returns: local path of the downloaded module
    """
    metadata = self.get_metadata()
    logger.debug("parsed metadata: {0}".format(metadata))
    manifest = self.get_manifest(metadata['manifests'][manifest_type])
    try:
        module = manifest[kernel_version]
    except KeyError:
        raise KernelModuleNotFoundError(kernel_version, self.url)
    logger.debug("found module {0}".format(module))
    return self.fetch_module(module)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_metadata(self):
    """Download, optionally verify, and parse the repository repomd.xml.

    :raises RepositoryError: if the metadata cannot be fetched
    :return: parsed metadata dict (see parse_metadata)
    """
    # NOTE(review): metadata_path does not rstrip('/') on self.url while the
    # .sig path does -- confirm whether trailing-slash URLs are expected here.
    metadata_path = "{}/{}/{}".format(self.url,
                                      self.metadata_dir,
                                      self.metadata_file)
    metadata_sig_path = "{}/{}/{}.sig".format(self.url.rstrip('/'),
                                              self.metadata_dir,
                                              self.metadata_file)
    # load metadata
    req = requests.get(metadata_path)
    # BUG FIX: `status_code is 200` was an identity comparison relying on
    # CPython small-int interning; use equality.
    if req.status_code == 200:
        raw_metadata = req.content
    else:
        raise RepositoryError(metadata_path, ("status code not 200: "
                                              "{}".format(req.status_code)))
    if self.gpg_verify:
        self.verify_data_signature(metadata_sig_path, metadata_path,
                                   raw_metadata)
    return self.parse_metadata(raw_metadata)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_metadata(self, metadata_xml):
    """Parse a repomd.xml document into a metadata dict.

    :type metadata_xml: str
    :param metadata_xml: raw XML content of repomd.xml
    :raises RepositoryError: if the document cannot be parsed
    :return: dict with 'revision' and a per-type 'manifests' mapping
    """
    try:
        metadata = dict()
        mdata = xmltodict.parse(metadata_xml)['metadata']
        metadata['revision'] = mdata['revision']
        metadata['manifests'] = dict()
        # repomd.xml may contain one or many <data> manifest entries;
        # xmltodict returns a dict for one and a list for several
        if type(mdata['data']) is list:
            manifests = mdata['data']
        else:
            manifests = [mdata['data']]
        for manifest in manifests:
            manifest_dict = dict()
            manifest_dict['type'] = manifest['@type']
            manifest_dict['checksum'] = manifest['checksum']
            manifest_dict['open_checksum'] = manifest['open_checksum']
            manifest_dict['location'] = manifest['location']['@href']
            manifest_dict['timestamp'] = datetime.fromtimestamp(
                int(manifest['timestamp']))
            manifest_dict['size'] = int(manifest['size'])
            manifest_dict['open_size'] = int(manifest['open_size'])
            metadata['manifests'][manifest['@type']] = manifest_dict
    except Exception as e:
        # BUG FIX: the original format string "{0}/{1}" consumed only two of
        # the three arguments, silently dropping self.metadata_file from the
        # error message.
        raise RepositoryError("{0}/{1}/{2}".format(self.url,
                                                   self.metadata_dir,
                                                   self.metadata_file), e)
    return metadata
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def get_manifest(self, metadata):
    """Download, verify and parse the manifest referenced by *metadata*.

    :type metadata: dict
    :param metadata: one manifest entry from the parsed repomd.xml
    :raises RepositoryError: if the manifest cannot be fetched
    :return: parsed manifest dict keyed by module version
    """
    manifest_path = "{0}/{1}".format(self.url, metadata['location'])
    req = requests.get(manifest_path, stream=True)
    # BUG FIX: `is 200` identity comparison replaced with equality, and a
    # failed download now raises instead of silently returning None.
    if req.status_code != 200:
        raise RepositoryError(manifest_path, ("status code not 200: "
                                              "{}".format(req.status_code)))
    gz_manifest = req.raw.read()
    # verify the compressed payload first, then the decompressed content
    self.verify_checksum(gz_manifest, metadata['checksum'],
                         metadata['location'])
    manifest = self.unzip_manifest(gz_manifest)
    # BUG FIX: rstrip('.gz') strips the *character set* {'.','g','z'}, not
    # the suffix; remove the suffix explicitly for the uncompressed name.
    location = metadata['location']
    if location.endswith('.gz'):
        open_location = location[:-len('.gz')]
    else:
        open_location = location
    self.verify_checksum(manifest, metadata['open_checksum'], open_location)
    return self.parse_manifest(manifest)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unzip_manifest(self, raw_manifest):
    """Decompress a gzip-compressed manifest.

    :type raw_manifest: bytes
    :param raw_manifest: gzip-compressed manifest file content
    :return: decompressed manifest bytes
    """
    with gzip.GzipFile(fileobj=BytesIO(raw_manifest)) as archive:
        return archive.read()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse_manifest(self, manifest_xml):
    """Parse a manifest XML document into a dict keyed by module version.

    :type manifest_xml: str
    :param manifest_xml: raw XML content of the manifest file
    :return: dict mapping kernel version -> module metadata dict
    """
    manifest = dict()
    mdata = xmltodict.parse(manifest_xml)['modules']['module']
    # BUG FIX: a manifest with a single <module> is parsed by xmltodict as a
    # dict, not a list, which broke the iteration below; normalize it the
    # same way parse_metadata handles its single-entry case. The original
    # `except Exception as e: raise` wrapper was a no-op and is removed.
    if not isinstance(mdata, list):
        mdata = [mdata]
    for module in mdata:
        mod = dict()
        mod['type'] = module['@type']
        mod['name'] = module['name']
        mod['arch'] = module['arch']
        mod['checksum'] = module['checksum']
        mod['version'] = module['version']
        mod['packager'] = module['packager']
        mod['location'] = module['location']['@href']
        mod['signature'] = module['signature']['@href']
        mod['platform'] = module['platform']
        manifest[mod['version']] = mod
    return manifest
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def fetch_module(self, module):
    """Download a kernel module and verify its checksum/signature.

    :type module: dict
    :param module: module metadata (location, version, checksum, ...)
    :raises RepositoryError: if the module cannot be downloaded
    :return: local filename of the downloaded module
    """
    tm = int(time.time())
    datestamp = datetime.utcfromtimestamp(tm).isoformat()
    filename = "lime-{0}-{1}.ko".format(datestamp, module['version'])
    url = "{0}/{1}".format(self.url, module['location'])
    logger.info("downloading {0} as {1}".format(url, filename))
    req = requests.get(url, stream=True)
    # BUG FIX: no status check meant an HTTP error body was written to disk
    # as the module; fail loudly, consistent with get_metadata/get_manifest.
    if req.status_code != 200:
        raise RepositoryError(url, ("status code not 200: "
                                    "{}".format(req.status_code)))
    with open(filename, 'wb') as f:
        f.write(req.raw.read())
    self.verify_module(filename, module, self.gpg_verify)
    return filename
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def verify_module(self, filename, module, verify_signature):
    """Verify a downloaded kernel module's checksum and, optionally, signature.

    :type filename: str
    :param filename: path of the downloaded kernel module
    :type module: dict
    :param module: kernel module metadata
    :type verify_signature: bool
    :param verify_signature: enable/disable GPG signature verification
    """
    with open(filename, 'rb') as f:
        module_data = f.read()
    self.verify_checksum(module_data, module['checksum'],
                         module['location'])
    # BUG FIX: the verify_signature parameter was accepted but ignored; the
    # body consulted self.gpg_verify instead. Honor the argument (the only
    # visible caller passes self.gpg_verify, so behavior is unchanged there).
    if verify_signature:
        signature_url = "{0}/{1}".format(self.url, module['signature'])
        file_url = "{0}/{1}".format(self.url, module['location'])
        self.verify_file_signature(signature_url, file_url, filename)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def verify_checksum(self, data, checksum, filename):
    """Compare the sha256 digest of *data* against an expected checksum.

    :type data: bytes
    :param data: content to hash
    :type checksum: str
    :param checksum: expected hex-encoded sha256 digest
    :type filename: str
    :param filename: original filename, used only in log/error messages
    :raises RepositoryError: on checksum mismatch
    """
    calculated_checksum = hashlib.sha256(data).hexdigest()
    logger.debug("calculated checksum {0} for {1}".format(calculated_checksum,
                                                          filename))
    if calculated_checksum == checksum:
        return
    raise RepositoryError("{0}/{1}".format(self.url, filename),
                          ("checksum verification failed, expected "
                           "{0} got {1}".format(checksum,
                                                calculated_checksum)))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def verify_data_signature(self, signature_url, data_url, data):
    """Verify in-memory data against a detached signature fetched remotely.

    :type signature_url: str
    :param signature_url: remote path of the detached signature for data_url
    :type data_url: str
    :param data_url: URL the data was fetched from (used in error reporting)
    :type data: str
    :param data: content of the remote file at data_url
    :raises RepositoryMissingSignatureError: if the signature is unavailable
    :raises RepositorySignatureError: if verification fails
    """
    req = requests.get(signature_url)
    # BUG FIX: `is 200` identity comparison replaced with equality.
    if req.status_code == 200:
        tm = int(time.time())
        datestamp = datetime.utcfromtimestamp(tm).isoformat()
        sigfile = "repo-{0}-tmp.sig".format(datestamp)
        logger.debug("writing {0} to {1}".format(signature_url, sigfile))
        with open(sigfile, 'wb') as f:
            f.write(req.content)
    else:
        raise RepositoryMissingSignatureError(signature_url)
    verified = self.gpg.verify_data(sigfile, data)
    # best-effort cleanup of the temporary signature file
    try:
        os.remove(sigfile)
    except OSError:
        pass
    if verified.valid is True:
        logger.debug("verified {0} against {1}".format(data_url,
                                                       signature_url))
    else:
        raise RepositorySignatureError(data_url, signature_url)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def verify_file_signature(self, signature_url, file_url, filename):
    """Verify a local file against its remote detached signature.

    :type signature_url: str
    :param signature_url: remote path of the detached signature for file_url
    :type file_url: str
    :param file_url: URL the local file was fetched from (error reporting)
    :type filename: str
    :param filename: path of the local file downloaded from file_url
    :raises RepositoryMissingSignatureError: if the signature is unavailable
    :raises RepositorySignatureError: if verification fails
    """
    req = requests.get(signature_url, stream=True)
    # BUG FIX: `is 200` identity comparison replaced with equality.
    if req.status_code == 200:
        sigfile = req.raw
    else:
        raise RepositoryMissingSignatureError(signature_url)
    verified = self.gpg.verify_file(sigfile, filename)
    if verified.valid is True:
        logger.debug("verified {0} against {1}".format(filename, signature_url))
    else:
        raise RepositorySignatureError(file_url, signature_url)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unpack_rsp(cls, rsp_pb):
    """Unpack the init-connect response protobuf into a result dict.

    Returns (RET_OK, "", res) on success; (RET_ERROR, msg, None) otherwise.
    """
    if rsp_pb.retType != RET_OK:
        return RET_ERROR, rsp_pb.retMsg, None
    if not rsp_pb.HasField('s2c'):
        return RET_ERROR, "rsp_pb error", None
    s2c = rsp_pb.s2c
    res = {
        'server_version': s2c.serverVer,
        'login_user_id': s2c.loginUserID,
        'conn_id': s2c.connID,
        'conn_key': s2c.connAESKey,
        'keep_alive_interval': s2c.keepAliveInterval,
    }
    return RET_OK, "", res
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unpack_unsubscribe_rsp(cls, rsp_pb):
    """Unpack the unsubscribe response; only the return code matters."""
    if rsp_pb.retType == RET_OK:
        return RET_OK, "", None
    return RET_ERROR, rsp_pb.retMsg, None
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def dict2pb(cls, adict, strict=False):
    """ Takes a class representing the ProtoBuf Message and fills it with data from the dict. """
    obj = cls()
    # Pre-flight validation pass over REQUIRED fields.
    # NOTE(review): `if not field.has_default_value: continue` skips required
    # fields that have *no* default -- which are exactly the ones that cannot
    # be left unset. This condition looks inverted; confirm against callers.
    for field in obj.DESCRIPTOR.fields:
        if not field.label == field.LABEL_REQUIRED:
            continue
        if not field.has_default_value:
            continue
        if not field.name in adict:
            raise ConvertException('Field "%s" missing from descriptor dictionary.'
                                   % field.name)
    field_names = set([field.name for field in obj.DESCRIPTOR.fields])
    if strict:
        # Strict mode: reject any dict key that is not a field of the message.
        for key in adict.keys():
            if key not in field_names:
                raise ConvertException(
                    'Key "%s" can not be mapped to field in %s class.'
                    % (key, type(obj)))
    # Population pass: copy every dict entry into the matching message field.
    for field in obj.DESCRIPTOR.fields:
        if not field.name in adict:
            continue
        msg_type = field.message_type
        if field.label == FD.LABEL_REPEATED:
            if field.type == FD.TYPE_MESSAGE:
                # repeated message field: recursively convert each sub-dict
                for sub_dict in adict[field.name]:
                    item = getattr(obj, field.name).add()
                    item.CopyFrom(dict2pb(msg_type._concrete_class, sub_dict))
            else:
                # repeated scalar field; list() forces evaluation because
                # map() is lazy on python3 (original note: python3 map用法变更)
                list(map(getattr(obj, field.name).append, adict[field.name]))
        else:
            if field.type == FD.TYPE_MESSAGE:
                # singular nested message: recurse and copy into place
                value = dict2pb(msg_type._concrete_class, adict[field.name])
                getattr(obj, field.name).CopyFrom(value)
            elif field.type in [FD.TYPE_UINT64, FD.TYPE_INT64, FD.TYPE_SINT64]:
                # 64-bit integers may arrive as strings; coerce explicitly
                setattr(obj, field.name, int(adict[field.name]))
            else:
                setattr(obj, field.name, adict[field.name])
    return obj
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def pb2dict(obj):
    """ Takes a ProtoBuf Message obj and convertes it to a dict. """
    adict = {}
    # An uninitialized message (missing required fields) yields None, not {}.
    if not obj.IsInitialized():
        return None
    for field in obj.DESCRIPTOR.fields:
        # NOTE(review): this truthiness test skips any field whose value is
        # falsy (0, "", False, empty repeated) as well as unset fields, so
        # such values never appear in the output dict -- confirm intended.
        if not getattr(obj, field.name):
            continue
        if not field.label == FD.LABEL_REPEATED:
            if not field.type == FD.TYPE_MESSAGE:
                # singular scalar: copy the value through unchanged
                adict[field.name] = getattr(obj, field.name)
            else:
                # singular nested message: recurse; dropped if conversion
                # returns a falsy result (None or empty dict)
                value = pb2dict(getattr(obj, field.name))
                if value:
                    adict[field.name] = value
        else:
            if field.type == FD.TYPE_MESSAGE:
                # repeated message field: convert each element recursively
                adict[field.name] = \
                    [pb2dict(v) for v in getattr(obj, field.name)]
            else:
                # repeated scalar field: materialize as a plain list
                adict[field.name] = [v for v in getattr(obj, field.name)]
    return adict
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def json2pb(cls, json, strict=False):
    """Fill the ProtoBuf Message class *cls* with data parsed from a JSON string."""
    parsed = simplejson.loads(json)
    return dict2pb(cls, parsed, strict)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _split_stock_code(self, code):
    """Split a composite stock code like 'HK.00700' into (market, code).

    str.partition is used instead of str.split because the code part may
    itself contain dots (e.g. 'US..DJI', where the dot is part of the code).
    """
    stock_str = str(code)
    head, sep, tail = stock_str.partition(".")
    # valid only when a dot exists, something follows it, and the prefix
    # names a known market
    if sep and tail and head in MKT_MAP:
        return RET_OK, (head, tail)
    error_str = ERROR_STR_PREFIX + "format of %s is wrong. (US.AAPL, HK.00700, SZ.000001)" % stock_str
    return RET_ERROR, error_str
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def position_list_query(self, code='', pl_ratio_min=None, pl_ratio_max=None, trd_env=TrdEnv.REAL, acc_id=0, acc_index=0):
    """Query the position list of an account.

    Returns (RET_OK, DataFrame) on success, otherwise (RET_ERROR, err_msg).
    """
    # validate trading environment, account and stock code up front
    ret, msg = self._check_trd_env(trd_env)
    if ret != RET_OK:
        return ret, msg
    ret, msg, acc_id = self._check_acc_id_and_acc_index(trd_env, acc_id, acc_index)
    if ret != RET_OK:
        return ret, msg
    ret, msg, stock_code = self._check_stock_code(code)
    if ret != RET_OK:
        return ret, msg
    query_processor = self._get_sync_query_processor(
        PositionListQuery.pack_req, PositionListQuery.unpack_rsp)
    request_args = dict(
        code=str(stock_code),
        pl_ratio_min=pl_ratio_min,
        pl_ratio_max=pl_ratio_max,
        trd_mkt=self.__trd_mkt,
        trd_env=trd_env,
        acc_id=acc_id,
        conn_id=self.get_sync_conn_id(),
    )
    ret_code, msg, position_list = query_processor(**request_args)
    if ret_code != RET_OK:
        return RET_ERROR, msg
    columns = [
        "code", "stock_name", "qty", "can_sell_qty", "cost_price",
        "cost_price_valid", "market_val", "nominal_price", "pl_ratio",
        "pl_ratio_valid", "pl_val", "pl_val_valid", "today_buy_qty",
        "today_buy_val", "today_pl_val", "today_sell_qty", "today_sell_val",
        "position_side"
    ]
    return RET_OK, pd.DataFrame(position_list, columns=columns)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def deal_list_query(self, code="", trd_env=TrdEnv.REAL, acc_id=0, acc_index=0):
    """Query the deal (execution) list of an account.

    Returns (RET_OK, DataFrame) on success, otherwise (RET_ERROR, err_msg).
    """
    # validate trading environment, account and stock code up front
    ret, msg = self._check_trd_env(trd_env)
    if ret != RET_OK:
        return ret, msg
    ret, msg, acc_id = self._check_acc_id_and_acc_index(trd_env, acc_id, acc_index)
    if ret != RET_OK:
        return ret, msg
    ret, msg, stock_code = self._check_stock_code(code)
    if ret != RET_OK:
        return ret, msg
    query_processor = self._get_sync_query_processor(
        DealListQuery.pack_req, DealListQuery.unpack_rsp)
    request_args = dict(
        code=stock_code,
        trd_mkt=self.__trd_mkt,
        trd_env=trd_env,
        acc_id=acc_id,
        conn_id=self.get_sync_conn_id(),
    )
    ret_code, msg, deal_list = query_processor(**request_args)
    if ret_code != RET_OK:
        return RET_ERROR, msg
    columns = [
        "code", "stock_name", "deal_id", "order_id", "qty", "price",
        "trd_side", "create_time", "counter_broker_id", "counter_broker_name"
    ]
    return RET_OK, pd.DataFrame(deal_list, columns=columns)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_sock_ok(self, timeout_select):
    """Check whether the underlying socket is usable.

    :param timeout_select: select() timeout in seconds, passed through to
        the low-level check
    :return: whatever self._is_socket_ok returns
    """
    # Idiom: the lock's context manager replaces the manual
    # acquire/try/finally/release and still guarantees release on error.
    with self._socket_lock:
        return self._is_socket_ok(timeout_select)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_date_str_format(s, default_time="00:00:00"):
    """Validate a date string, appending *default_time* when no time part is given.

    Returns (RET_OK, datetime) on success or (RET_ERROR, error message).
    """
    # a string without ':' carries no time component; supply the default
    candidate = s if ":" in s else '{} {}'.format(s, default_time)
    try:
        return RET_OK, datetime.strptime(candidate, "%Y-%m-%d %H:%M:%S")
    except ValueError:
        return RET_ERROR, ERROR_STR_PREFIX + "wrong time or time format"
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def normalize_date_format(date_str, default_time="00:00:00"):
    """Normalize a date string to the canonical '%Y-%m-%d %H:%M:%S' form."""
    ret_code, parsed = check_date_str_format(date_str, default_time)
    if ret_code == RET_OK:
        return RET_OK, parsed.strftime("%Y-%m-%d %H:%M:%S")
    # on failure `parsed` carries the error message
    return ret_code, parsed
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def extract_pls_rsp(rsp_str):
    """Parse a PLS JSON response string.

    :return: (RET_OK, "", rsp_dict) on success, or (RET_ERROR, err_str, None)
        on a JSON or protocol-level failure
    """
    try:
        rsp = json.loads(rsp_str)
    except ValueError as err:
        # Idiom: bind the exception directly instead of re-fetching it via
        # sys.exc_info(); keep the stderr traceback for diagnostics.
        traceback.print_exc()
        err_str = ERROR_STR_PREFIX + str(err)
        return RET_ERROR, err_str, None
    # retType == 1 signals success at the protocol level
    error_code = int(rsp['retType'])
    if error_code != 1:
        error_str = ERROR_STR_PREFIX + rsp['retMsg']
        return RET_ERROR, error_str, None
    return RET_OK, "", rsp
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def split_stock_str(stock_str_param):
    """Split a 'MARKET.CODE' string into (market_code, code).

    str.partition is used instead of str.split because the code part may
    itself contain dots (e.g. 'US..DJI', where the dot is part of the code).
    """
    stock_str = str(stock_str_param)
    head, sep, tail = stock_str.partition(".")
    # valid only when a dot exists, something follows it, and the prefix
    # names a known market
    if sep and tail and head in MKT_MAP:
        return RET_OK, (MKT_MAP[head], tail)
    error_str = ERROR_STR_PREFIX + "format of %s is wrong. (US.AAPL, HK.00700, SZ.000001)" % stock_str
    return RET_ERROR, error_str
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.