INSTRUCTION
stringlengths
1
46.3k
RESPONSE
stringlengths
75
80.2k
For each edge, link it to the vertex types it connects to each other.
def _link_vertex_and_edge_types(self): """For each edge, link it to the vertex types it connects to each other.""" for edge_class_name in self._edge_class_names: edge_element = self._elements[edge_class_name] if (EDGE_SOURCE_PROPERTY_NAME not in edge_element.properties or ...
Return True if the Filter block references no non-local fields, and False otherwise.
def _is_local_filter(filter_block): """Return True if the Filter block references no non-local fields, and False otherwise.""" # We need the "result" value of this function to be mutated within the "visitor_fn". # Since we support both Python 2 and Python 3, we can't use the "nonlocal" keyword here: # h...
Classify query locations into three groups: preferred, eligible, ineligible. - Ineligible locations are ones that cannot be the starting point of query execution. These include locations within recursions, locations that are the target of an optional traversal, and locations with an associated "where:"...
def _classify_query_locations(match_query): """Classify query locations into three groups: preferred, eligible, ineligible. - Ineligible locations are ones that cannot be the starting point of query execution. These include locations within recursions, locations that are the target of an optional t...
Return the GraphQL type bound at the given step, or None if no bound is given.
def _calculate_type_bound_at_step(match_step): """Return the GraphQL type bound at the given step, or None if no bound is given.""" current_type_bounds = [] if isinstance(match_step.root_block, QueryRoot): # The QueryRoot start class is a type bound. current_type_bounds.extend(match_step.ro...
Ensure that the two bounds either are an exact match, or one of them is None.
def _assert_type_bounds_are_not_conflicting(current_type_bound, previous_type_bound, location, match_query): """Ensure that the two bounds either are an exact match, or one of them is None.""" if all((current_type_bound is not None, previous_type_bound is ...
Return a MATCH query where only preferred locations are valid as query start locations.
def _expose_only_preferred_locations(match_query, location_types, coerced_locations, preferred_locations, eligible_locations): """Return a MATCH query where only preferred locations are valid as query start locations.""" preferred_location_types = dict() eligible_locatio...
Return a MATCH query where all eligible locations are valid as query start locations.
def _expose_all_eligible_locations(match_query, location_types, eligible_locations): """Return a MATCH query where all eligible locations are valid as query start locations.""" eligible_location_types = dict() new_match_traversals = [] for current_traversal in match_query.match_traversals: new_...
Ensure that OrientDB only considers desirable query start points in query planning.
def expose_ideal_query_execution_start_points(compound_match_query, location_types, coerced_locations): """Ensure that OrientDB only considers desirable query start points in query planning.""" new_queries = [] for match_query in compound_match_query.match_quer...
Return an Expression that is the `&&` of all the expressions in the given list.
def _expression_list_to_conjunction(expression_list): """Return an Expression that is the `&&` of all the expressions in the given list.""" if not isinstance(expression_list, list): raise AssertionError(u'Expected list. Received {}: ' u'{}'.format(type(expression_list).__nam...
Return a generator for expressions that are connected by `&&`s in the given expression.
def _extract_conjuction_elements_from_expression(expression): """Return a generator for expressions that are connected by `&&`s in the given expression.""" if isinstance(expression, BinaryComposition) and expression.operator == u'&&': for element in _extract_conjuction_elements_from_expression(expressio...
Construct a mapping from local fields to specified operators, and corresponding expressions. Args: expression_list: list of expressions to analyze Returns: local_field_to_expressions: dict mapping local field names to "operator -> list of BinaryComposition" dictionaries, ...
def _construct_field_operator_expression_dict(expression_list): """Construct a mapping from local fields to specified operators, and corresponding expressions. Args: expression_list: list of expressions to analyze Returns: local_field_to_expressions: dict mapping local field na...
Return a new expression, with any eligible comparisons lowered to `between` clauses.
def _lower_expressions_to_between(base_expression): """Return a new expression, with any eligible comparisons lowered to `between` clauses.""" expression_list = list(_extract_conjuction_elements_from_expression(base_expression)) if len(expression_list) == 0: raise AssertionError(u'Received empty exp...
Return a new MatchQuery, with all eligible comparison filters lowered to between clauses.
def lower_comparisons_to_between(match_query): """Return a new MatchQuery, with all eligible comparison filters lowered to between clauses.""" new_match_traversals = [] for current_match_traversal in match_query.match_traversals: new_traversal = [] for step in current_match_traversal: ...
Ensure that all arguments expected by the query were actually provided.
def _ensure_arguments_are_provided(expected_types, arguments): """Ensure that all arguments expected by the query were actually provided.""" # This function only checks that the arguments were specified, # and does not check types. Type checking is done as part of the actual formatting step. expected_ar...
Insert the arguments into the compiled GraphQL query to form a complete query. Args: compilation_result: a CompilationResult object derived from the GraphQL compiler arguments: dict, mapping argument name to its value, for every parameter the query expects. Returns: string, a query in ...
def insert_arguments_into_query(compilation_result, arguments): """Insert the arguments into the compiled GraphQL query to form a complete query. Args: compilation_result: a CompilationResult object derived from the GraphQL compiler arguments: dict, mapping argument name to its value, for every...
Ensure that the QueryRoot block is valid.
def validate(self): """Ensure that the QueryRoot block is valid.""" if not (isinstance(self.start_class, set) and all(isinstance(x, six.string_types) for x in self.start_class)): raise TypeError(u'Expected set of string start_class, got: {} {}'.format( type(se...
Return a unicode object with the Gremlin representation of this block.
def to_gremlin(self): """Return a unicode object with the Gremlin representation of this block.""" self.validate() if len(self.start_class) == 1: # The official Gremlin documentation claims that this approach # is generally faster than the one below, since it makes using ...
Ensure that the CoerceType block is valid.
def validate(self): """Ensure that the CoerceType block is valid.""" if not (isinstance(self.target_class, set) and all(isinstance(x, six.string_types) for x in self.target_class)): raise TypeError(u'Expected set of string target_class, got: {} {}'.format( typ...
Ensure that the ConstructResult block is valid.
def validate(self): """Ensure that the ConstructResult block is valid.""" if not isinstance(self.fields, dict): raise TypeError(u'Expected dict fields, got: {} {}'.format( type(self.fields).__name__, self.fields)) for key, value in six.iteritems(self.fields): ...
Create an updated version (if needed) of the ConstructResult via the visitor pattern.
def visit_and_update_expressions(self, visitor_fn): """Create an updated version (if needed) of the ConstructResult via the visitor pattern.""" new_fields = {} for key, value in six.iteritems(self.fields): new_value = value.visit_and_update(visitor_fn) if new_value is no...
Return a unicode object with the Gremlin representation of this block.
def to_gremlin(self): """Return a unicode object with the Gremlin representation of this block.""" self.validate() template = ( u'transform{{' u'it, m -> new com.orientechnologies.orient.core.record.impl.ODocument([ {} ])' u'}}') field_representation...
Ensure that the Filter block is valid.
def validate(self): """Ensure that the Filter block is valid.""" if not isinstance(self.predicate, Expression): raise TypeError(u'Expected Expression predicate, got: {} {}'.format( type(self.predicate).__name__, self.predicate))
Create an updated version (if needed) of the Filter via the visitor pattern.
def visit_and_update_expressions(self, visitor_fn): """Create an updated version (if needed) of the Filter via the visitor pattern.""" new_predicate = self.predicate.visit_and_update(visitor_fn) if new_predicate is not self.predicate: return Filter(new_predicate) else: ...
Return a unicode object with the Gremlin representation of this block.
def to_gremlin(self): """Return a unicode object with the Gremlin representation of this block.""" self.validate() mark_name, _ = self.location.get_location_name() return u'as({})'.format(safe_quoted_string(mark_name))
Ensure that the Traverse block is valid.
def validate(self): """Ensure that the Traverse block is valid.""" if not isinstance(self.direction, six.string_types): raise TypeError(u'Expected string direction, got: {} {}'.format( type(self.direction).__name__, self.direction)) validate_edge_direction(self.direc...
Return a unicode object with the Gremlin representation of this block.
def to_gremlin(self): """Return a unicode object with the Gremlin representation of this block.""" self.validate() if self.optional: # Optional edges have to be handled differently than non-optionals, since the compiler # provides the guarantee that properties read from a...
Ensure that the Traverse block is valid.
def validate(self): """Ensure that the Traverse block is valid.""" validate_edge_direction(self.direction) validate_safe_string(self.edge_name) if not isinstance(self.within_optional_scope, bool): raise TypeError(u'Expected bool within_optional_scope, got: {} ' ...
Return a unicode object with the Gremlin representation of this block.
def to_gremlin(self): """Return a unicode object with the Gremlin representation of this block.""" self.validate() template = 'copySplit({recurse}).exhaustMerge' recurse_base = '_()' recurse_traversal = '.{direction}(\'{edge_name}\')'.format( direction=self.direction,...
Ensure that the Backtrack block is valid.
def validate(self): """Ensure that the Backtrack block is valid.""" validate_marked_location(self.location) if not isinstance(self.optional, bool): raise TypeError(u'Expected bool optional, got: {} {}'.format( type(self.optional).__name__, self.optional))
Return a unicode object with the Gremlin representation of this BasicBlock.
def to_gremlin(self): """Return a unicode object with the Gremlin representation of this BasicBlock.""" self.validate() if self.optional: operation = u'optional' else: operation = u'back' mark_name, _ = self.location.get_location_name() return u'...
Ensure the Fold block is valid.
def validate(self): """Ensure the Fold block is valid.""" if not isinstance(self.fold_scope_location, FoldScopeLocation): raise TypeError(u'Expected a FoldScopeLocation for fold_scope_location, got: {} ' u'{}'.format(type(self.fold_scope_location), self.fold_scope...
Lower the IR blocks into a form that can be represented by a SQL query. Args: ir_blocks: list of IR blocks to lower into SQL-compatible form query_metadata_table: QueryMetadataTable object containing all metadata collected during query processing, including location me...
def lower_ir(ir_blocks, query_metadata_table, type_equivalence_hints=None): """Lower the IR blocks into a form that can be represented by a SQL query. Args: ir_blocks: list of IR blocks to lower into SQL-compatible form query_metadata_table: QueryMetadataTable object containing all metadata col...
Validate that all IR blocks and ConstructResult fields passed to the backend are supported. Args: ir_blocks: List[BasicBlock], IR blocks to validate. query_metadata_table: QueryMetadataTable, object containing all metadata collected during query processing, including l...
def _validate_all_blocks_supported(ir_blocks, query_metadata_table): """Validate that all IR blocks and ConstructResult fields passed to the backend are supported. Args: ir_blocks: List[BasicBlock], IR blocks to validate. query_metadata_table: QueryMetadataTable, object containing all metadata ...
Return the ConstructResult block from a list of IR blocks.
def _get_construct_result(ir_blocks): """Return the ConstructResult block from a list of IR blocks.""" last_block = ir_blocks[-1] if not isinstance(last_block, blocks.ConstructResult): raise AssertionError( u'The last IR block {} for IR blocks {} was unexpectedly not ' u'a Co...
Create a map from each query path to a LocationInfo at that path. Args: query_metadata_table: QueryMetadataTable, object containing all metadata collected during query processing, including location metadata (e.g. which locations are folded or opt...
def _map_query_path_to_location_info(query_metadata_table): """Create a map from each query path to a LocationInfo at that path. Args: query_metadata_table: QueryMetadataTable, object containing all metadata collected during query processing, including location metadata (e...
Return True if LocationInfo objects are equivalent for the SQL backend, False otherwise. LocationInfo objects are considered equal for the SQL backend iff the optional scopes depth, recursive scopes depth, types and parent query paths are equal. Args: left: LocationInfo, left location info object ...
def _location_infos_equal(left, right): """Return True if LocationInfo objects are equivalent for the SQL backend, False otherwise. LocationInfo objects are considered equal for the SQL backend iff the optional scopes depth, recursive scopes depth, types and parent query paths are equal. Args: ...
Assign the output fields of a ConstructResult block to their respective query_path.
def _map_query_path_to_outputs(construct_result, query_path_to_location_info): """Assign the output fields of a ConstructResult block to their respective query_path.""" query_path_to_output_fields = {} for output_name, field in six.iteritems(construct_result.fields): field_name = field.location.fiel...
Associate each IR block with its corresponding location, by index.
def _map_block_index_to_location(ir_blocks): """Associate each IR block with its corresponding location, by index.""" block_index_to_location = {} # MarkLocation blocks occur after the blocks related to that location. # The core approach here is to buffer blocks until their MarkLocation is encountered ...
Raise exception if any unary transformation block encountered.
def lower_unary_transformations(ir_blocks): """Raise exception if any unary transformation block encountered.""" def visitor_fn(expression): """Raise error if current expression is a UnaryTransformation.""" if not isinstance(expression, expressions.UnaryTransformation): return expres...
Raise exception if an unsupported metafield is encountered in any LocalField expression.
def lower_unsupported_metafield_expressions(ir_blocks): """Raise exception if an unsupported metafield is encountered in any LocalField expression.""" def visitor_fn(expression): """Visitor function raising exception for any unsupported metafield.""" if not isinstance(expression, expressions.Loc...
Compile the GraphQL input using the schema into a MATCH query and associated metadata. Args: schema: GraphQL schema object describing the schema of the graph to be queried graphql_query: the GraphQL query to compile to MATCH, as a string parameters: dict, mapping argument name to its value,...
def graphql_to_match(schema, graphql_query, parameters, type_equivalence_hints=None): """Compile the GraphQL input using the schema into a MATCH query and associated metadata. Args: schema: GraphQL schema object describing the schema of the graph to be queried graphql_query: the GraphQL query t...
Compile the GraphQL input using the schema into a SQL query and associated metadata. Args: schema: GraphQL schema object describing the schema of the graph to be queried graphql_query: the GraphQL query to compile to SQL, as a string parameters: dict, mapping argument name to its value, for...
def graphql_to_sql(schema, graphql_query, parameters, compiler_metadata, type_equivalence_hints=None): """Compile the GraphQL input using the schema into a SQL query and associated metadata. Args: schema: GraphQL schema object describing the schema of the graph to be queried ...
Compile the GraphQL input using the schema into a Gremlin query and associated metadata. Args: schema: GraphQL schema object describing the schema of the graph to be queried graphql_query: the GraphQL query to compile to Gremlin, as a string parameters: dict, mapping argument name to its va...
def graphql_to_gremlin(schema, graphql_query, parameters, type_equivalence_hints=None): """Compile the GraphQL input using the schema into a Gremlin query and associated metadata. Args: schema: GraphQL schema object describing the schema of the graph to be queried graphql_query: the GraphQL que...
Construct a GraphQL schema from an OrientDB schema. Args: schema_data: list of dicts describing the classes in the OrientDB schema. The following format is the way the data is structured in OrientDB 2. See the README.md file for an example of how to query this data...
def get_graphql_schema_from_orientdb_schema_data(schema_data, class_to_field_type_overrides=None, hidden_classes=None): """Construct a GraphQL schema from an OrientDB schema. Args: schema_data: list of dicts describing the classes in the OrientDB schema....
Return a MATCH query string from a list of IR blocks.
def emit_code_from_ir(ir_blocks, compiler_metadata): """Return a MATCH query string from a list of IR blocks.""" gremlin_steps = ( block.to_gremlin() for block in ir_blocks ) # OutputSource blocks translate to empty steps. # Discard such empty steps so we don't end up with an incorr...
Start the built in webserver, bound to the host and port you'd like. Default host is `127.0.0.1` and port 8080. :param host: The host you want to bind the build in webserver to :param port: The port number you want the webserver to run on :param debug: Set to `True` to enable debug leve...
def start(self, host='127.0.0.1', port=None, debug=False, **kwargs): """ Start the built in webserver, bound to the host and port you'd like. Default host is `127.0.0.1` and port 8080. :param host: The host you want to bind the build in webserver to :param port: The port number ...
Logs in the user via given login and password.
def login(request): ''' Logs in the user via given login and password. ''' serializer_class = registration_settings.LOGIN_SERIALIZER_CLASS serializer = serializer_class(data=request.data) serializer.is_valid(raise_exception=True) user = serializer.get_authenticated_user() if not user: ...
Logs out the user. returns an error if the user is not authenticated.
def logout(request): ''' Logs out the user. returns an error if the user is not authenticated. ''' user = request.user serializer = LogoutSerializer(data=request.data) serializer.is_valid(raise_exception=True) data = serializer.validated_data if should_authenticate_session(): ...
Same as Django's standard shortcut, but make sure to also raise 404 if the filter_kwargs don't match the required types. This function was copied from rest_framework.generics because of issue #36.
def get_object_or_404(queryset, *filter_args, **filter_kwargs): """ Same as Django's standard shortcut, but make sure to also raise 404 if the filter_kwargs don't match the required types. This function was copied from rest_framework.generics because of issue #36. """ try: return _get_o...
Get or set user profile.
def profile(request): ''' Get or set user profile. ''' serializer_class = registration_settings.PROFILE_SERIALIZER_CLASS if request.method in ['POST', 'PUT', 'PATCH']: partial = request.method == 'PATCH' serializer = serializer_class( instance=request.user, da...
Register new user.
def register(request): ''' Register new user. ''' serializer_class = registration_settings.REGISTER_SERIALIZER_CLASS serializer = serializer_class(data=request.data) serializer.is_valid(raise_exception=True) kwargs = {} if registration_settings.REGISTER_VERIFICATION_ENABLED: ve...
Verify registration via signature.
def verify_registration(request): """ Verify registration via signature. """ user = process_verify_registration_data(request.data) extra_data = None if registration_settings.REGISTER_VERIFICATION_AUTO_LOGIN: extra_data = perform_login(request, user) return get_ok_response('User verif...
Return list of this package requirements via local filepath.
def get_requirements(requirements_filepath): ''' Return list of this package requirements via local filepath. ''' requirements = [] with open(os.path.join(ROOT_DIR, requirements_filepath), 'rt') as f: for line in f: if line.startswith('#'): continue li...
Change the user password.
def change_password(request): ''' Change the user password. ''' serializer = ChangePasswordSerializer(data=request.data, context={'request': request}) serializer.is_valid(raise_exception=True) user = request.user user.set_password(serializer.validat...
>>> from tests import doctest_utils >>> convert_html_to_text = registration_settings.VERIFICATION_EMAIL_HTML_TO_TEXT_CONVERTER # noqa: E501 >>> parse_template_config({}) # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... ImproperlyConfigured >>> parse_template_config({ ...
def parse_template_config(template_config_data): """ >>> from tests import doctest_utils >>> convert_html_to_text = registration_settings.VERIFICATION_EMAIL_HTML_TO_TEXT_CONVERTER # noqa: E501 >>> parse_template_config({}) # doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ...
Send email with reset password link.
def send_reset_password_link(request): ''' Send email with reset password link. ''' if not registration_settings.RESET_PASSWORD_VERIFICATION_ENABLED: raise Http404() serializer = SendResetPasswordLinkSerializer(data=request.data) serializer.is_valid(raise_exception=True) login = seri...
r""" >>> convert_html_to_text( ... ''' ... <html><body> ... Look &amp; click ... <a href="https://example.com">here</a> ... </body></html>''', preserve_urls=True) 'Look & click here (https://example.com)' >>> convert_html_to_text( ... ''' ... <...
def convert_html_to_text(value, preserve_urls=False): r""" >>> convert_html_to_text( ... ''' ... <html><body> ... Look &amp; click ... <a href="https://example.com">here</a> ... </body></html>''', preserve_urls=True) 'Look & click here (https://example.com)' ...
Register new email.
def register_email(request): ''' Register new email. ''' user = request.user serializer = RegisterEmailSerializer(data=request.data) serializer.is_valid(raise_exception=True) email = serializer.validated_data['email'] template_config = ( registration_settings.REGISTER_EMAIL_VE...
This is sample for implement BOT in LINE group Invite your BOT to group, then BOT will auto accept your invitation Command availabe : > hi > /author
def RECEIVE_MESSAGE(op): ''' This is sample for implement BOT in LINE group Invite your BOT to group, then BOT will auto accept your invitation Command availabe : > hi > /author ''' msg = op.message text = msg.text msg_id = msg.id receiver = msg.to ...
Gets a {'x','y'}, a number of ticks and ticks labels, and returns the necessary axis options for the given configuration.
def _get_ticks(data, xy, ticks, ticklabels): """Gets a {'x','y'}, a number of ticks and ticks labels, and returns the necessary axis options for the given configuration. """ axis_options = [] pgfplots_ticks = [] pgfplots_ticklabels = [] is_label_required = False for tick, ticklabel in zi...
Find out if the object is in fact a color bar.
def _is_colorbar_heuristic(obj): """Find out if the object is in fact a color bar. """ # TODO come up with something more accurate here # Might help: # TODO Are the colorbars exactly the l.collections.PolyCollection's? try: aspect = float(obj.get_aspect()) except ValueError: ...
Converts a color map as given in matplotlib to a color map as represented in PGFPlots.
def _mpl_cmap2pgf_cmap(cmap, data): """Converts a color map as given in matplotlib to a color map as represented in PGFPlots. """ if isinstance(cmap, mpl.colors.LinearSegmentedColormap): return _handle_linear_segmented_color_map(cmap, data) assert isinstance( cmap, mpl.colors.Listed...
Scales the array X such that it contains only integers.
def _scale_to_int(X, max_val=None): """ Scales the array X such that it contains only integers. """ if max_val is None: X = X / _gcd_array(X) else: X = X / max(1 / max_val, _gcd_array(X)) return [int(entry) for entry in X]
Return the largest real value h such that all elements in x are integer multiples of h.
def _gcd_array(X): """ Return the largest real value h such that all elements in x are integer multiples of h. """ greatest_common_divisor = 0.0 for x in X: greatest_common_divisor = _gcd(greatest_common_divisor, x) return greatest_common_divisor
Given two data points [X,Y], linearly interpolate those at x.
def _linear_interpolation(x, X, Y): """Given two data points [X,Y], linearly interpolate those at x. """ return (Y[1] * (x - X[0]) + Y[0] * (X[1] - x)) / (X[1] - X[0])
A rather poor way of telling whether an axis has a colorbar associated: Check the next axis environment, and see if it is de facto a color bar; if yes, return the color bar object.
def _find_associated_colorbar(obj): """A rather poor way of telling whether an axis has a colorbar associated: Check the next axis environment, and see if it is de facto a color bar; if yes, return the color bar object. """ for child in obj.get_children(): try: cbar = child.color...
Adds legend code.
def draw_legend(data, obj): """Adds legend code. """ texts = [] children_alignment = [] for text in obj.texts: texts.append("{}".format(text.get_text())) children_alignment.append("{}".format(text.get_horizontalalignment())) # Get the location. # http://matplotlib.org/api/le...
Returns an available filename. :param file_kind: Name under which numbering is recorded, such as 'img' or 'table'. :type file_kind: str :param ext: Filename extension. :type ext: str :returns: (filename, rel_filepath) where filename is a path in the filesystem ...
def new_filename(data, file_kind, ext): """Returns an available filename. :param file_kind: Name under which numbering is recorded, such as 'img' or 'table'. :type file_kind: str :param ext: Filename extension. :type ext: str :returns: (filename, rel_filepath) where file...
Adds code for drawing an ordinary path in PGFPlots (TikZ).
def draw_path(data, path, draw_options=None, simplify=None): """Adds code for drawing an ordinary path in PGFPlots (TikZ). """ # For some reasons, matplotlib sometimes adds void paths which consist of # only one point and have 0 fill opacity. To not let those clutter the # output TeX file, bail out ...
Returns PGFPlots code for a number of patch objects.
def draw_pathcollection(data, obj): """Returns PGFPlots code for a number of patch objects. """ content = [] # gather data assert obj.get_offsets() is not None labels = ["x" + 21 * " ", "y" + 21 * " "] dd = obj.get_offsets() draw_options = ["only marks"] table_options = [] if o...
Get the draw options for a given (patch) object.
def get_draw_options(data, obj, ec, fc, style, width): """Get the draw options for a given (patch) object. """ draw_options = [] if ec is not None: data, col, ec_rgba = color.mpl_color2xcolor(data, ec) if ec_rgba[3] != 0.0: # Don't draw if it's invisible anyways. ...
Translates a line style of matplotlib to the corresponding style in PGFPlots.
def mpl_linestyle2pgfplots_linestyle(line_style, line=None): """Translates a line style of matplotlib to the corresponding style in PGFPlots. """ # linestyle is a string or dash tuple. Legal string values are # solid|dashed|dashdot|dotted. The dash tuple is (offset, onoffseq) where onoffseq # i...
Returns the PGFPlots code for an graphics environment holding a rendering of the object.
def draw_quadmesh(data, obj): """Returns the PGFPlots code for an graphics environment holding a rendering of the object. """ content = [] # Generate file name for current object filename, rel_filepath = files.new_filename(data, "img", ".png") # Get the dpi for rendering and store the o...
Translates a matplotlib color specification into a proper LaTeX xcolor.
def mpl_color2xcolor(data, matplotlib_color): """Translates a matplotlib color specification into a proper LaTeX xcolor. """ # Convert it to RGBA. my_col = numpy.array(mpl.colors.ColorConverter().to_rgba(matplotlib_color)) # If the alpha channel is exactly 0, then the color is really 'none' # r...
Return the PGFPlots code for patches.
def draw_patch(data, obj): """Return the PGFPlots code for patches. """ # Gather the draw options. data, draw_options = mypath.get_draw_options( data, obj, obj.get_edgecolor(), obj.get_facecolor(), obj.get_linestyle(), obj.get_linewidth(), ) if is...
Returns PGFPlots code for a number of patch objects.
def draw_patchcollection(data, obj): """Returns PGFPlots code for a number of patch objects. """ content = [] # Gather the draw options. try: ec = obj.get_edgecolor()[0] except IndexError: ec = None try: fc = obj.get_facecolor()[0] except IndexError: fc =...
Return the PGFPlots code for rectangles.
def _draw_rectangle(data, obj, draw_options): """Return the PGFPlots code for rectangles. """ # Objects with labels are plot objects (from bar charts, etc). Even those without # labels explicitly set have a label of "_nolegend_". Everything else should be # skipped because they likely correspong t...
Return the PGFPlots code for ellipses.
def _draw_ellipse(data, obj, draw_options): """Return the PGFPlots code for ellipses. """ if isinstance(obj, mpl.patches.Circle): # circle specialization return _draw_circle(data, obj, draw_options) x, y = obj.center ff = data["float format"] if obj.angle != 0: fmt = "ro...
Return the PGFPlots code for circles.
def _draw_circle(data, obj, draw_options): """Return the PGFPlots code for circles. """ x, y = obj.center ff = data["float format"] cont = ("\\draw[{}] (axis cs:" + ff + "," + ff + ") circle (" + ff + ");\n").format( ",".join(draw_options), x, y, obj.get_radius() ) return data, cont
Returns the PGFPlots code for an image environment.
def draw_image(data, obj): """Returns the PGFPlots code for an image environment. """ content = [] filename, rel_filepath = files.new_filename(data, "img", ".png") # store the image as in a file img_array = obj.get_array() dims = img_array.shape if len(dims) == 2: # the values are gi...
Check if line is in legend.
def get_legend_text(obj): """Check if line is in legend. """ leg = obj.axes.get_legend() if leg is None: return None keys = [l.get_label() for l in leg.legendHandles if l is not None] values = [l.get_text() for l in leg.texts] label = obj.get_label() d = dict(zip(keys, values))...
The coordinates might not be in data coordinates, but could be sometimes in axes coordinates. For example, the matplotlib command axes.axvline(2) will have the y coordinates set to 0 and 1, not to the limits. Therefore, a two-stage transform has to be applied: 1. first transforming to display co...
def transform_to_data_coordinates(obj, xdata, ydata): """The coordinates might not be in data coordinates, but could be sometimes in axes coordinates. For example, the matplotlib command axes.axvline(2) will have the y coordinates set to 0 and 1, not to the limits. Therefore, a two-stage transform...
Main function. Here, the recursion into the image starts and the contents are picked up. The actual file gets written in this routine. :param figure: either a Figure object or 'gcf' (default). :param figurewidth: If not ``None``, this will be used as figure width within the TikZ/PG...
def get_tikz_code( figure="gcf", filepath=None, figurewidth=None, figureheight=None, textsize=10.0, tex_relative_path_to_data=None, externalize_tables=False, override_externals=False, strict=False, wrap=True, add_axis_environment=True, extra_axis_parameters=None, extr...
Same as `get_tikz_code()`, but actually saves the code to a file. :param filepath: The file to which the TikZ output will be written. :type filepath: str :param encoding: Sets the text encoding of the output file, e.g. 'utf-8'. For supported values: see ``codecs`` module. :returns...
def save(filepath, *args, encoding=None, **kwargs): """Same as `get_tikz_code()`, but actually saves the code to a file. :param filepath: The file to which the TikZ output will be written. :type filepath: str :param encoding: Sets the text encoding of the output file, e.g. 'utf-8'. ...
Returns the list of custom color definitions for the TikZ file.
def _get_color_definitions(data): """Returns the list of custom color definitions for the TikZ file. """ definitions = [] fmt = "\\definecolor{{{}}}{{rgb}}{{" + ",".join(3 * [data["float format"]]) + "}}" for name, rgb in data["custom colors"].items(): definitions.append(fmt.format(name, rgb...
Prints message to screen indicating the use of PGFPlots and its libraries.
def _print_pgfplot_libs_message(data): """Prints message to screen indicating the use of PGFPlots and its libraries.""" pgfplotslibs = ",".join(list(data["pgfplots libs"])) tikzlibs = ",".join(list(data["tikz libs"])) print(70 * "=") print("Please add the following lines to your LaTeX preamble:...
Iterates over all children of the current object, gathers the contents contributing to the resulting PGFPlots file, and returns those.
def _recurse(data, obj): """Iterates over all children of the current object, gathers the contents contributing to the resulting PGFPlots file, and returns those. """ content = _ContentManager() for child in obj.get_children(): # Some patches are Spines, too; skip those entirely. # S...
Extends with a list and a z-order
def extend(self, content, zorder):
    """Append a list of content entries to the layer at the given z-order.

    :param content: list of strings to add to the z-order layer.
    :param zorder: layer key; the layer is created lazily on first use.
    """
    self._content.setdefault(zorder, []).extend(content)
Returns the PGFPlots code for a Line2D environment.
def draw_line2d(data, obj): """Returns the PGFPlots code for an Line2D environment. """ content = [] addplot_options = [] # If line is of length 0, do nothing. Otherwise, an empty \addplot table will be # created, which will be interpreted as an external data source in either the file # ''...
Returns Pgfplots code for a number of patch objects.
def draw_linecollection(data, obj): """Returns Pgfplots code for a number of patch objects. """ content = [] edgecolors = obj.get_edgecolors() linestyles = obj.get_linestyles() linewidths = obj.get_linewidths() paths = obj.get_paths() for i, path in enumerate(paths): color = ed...
Translates a marker style of matplotlib to the corresponding style in PGFPlots.
def _mpl_marker2pgfp_marker(data, mpl_marker, marker_face_color): """Translates a marker style of matplotlib to the corresponding style in PGFPlots. """ # try default list try: pgfplots_marker = _MP_MARKER2PGF_MARKER[mpl_marker] except KeyError: pass else: if (marker_...
Paints text on the graph.
def draw_text(data, obj): """Paints text on the graph. """ content = [] properties = [] style = [] if isinstance(obj, mpl.text.Annotation): _annotation(obj, data, content) # 1: coordinates # 2: properties (shapes, rotation, etc) # 3: text style # 4: the text # ...
Converts matplotlib positioning to pgf node positioning. Not quite accurate, but the results are more or less equivalent.
def _transform_positioning(ha, va): """Converts matplotlib positioning to pgf node positioning. Not quite accurate but the results are equivalent more or less.""" if ha == "center" and va == "center": return None ha_mpl_to_tikz = {"right": "east", "left": "west", "center": ""} va_mpl_to_tik...
Import a JSON file or file-like object into a `rows.Table`. If a file-like object is provided it MUST be open in text (non-binary) mode on Python 3 and could be open in both binary or text mode on Python 2.
def import_from_json(filename_or_fobj, encoding="utf-8", *args, **kwargs): """Import a JSON file or file-like object into a `rows.Table`. If a file-like object is provided it MUST be open in text (non-binary) mode on Python 3 and could be open in both binary or text mode on Python 2. """ source = ...
Export a `rows.Table` to a JSON file or file-like object. If a file-like object is provided it MUST be open in binary mode (like in `open('myfile.json', mode='wb')`).
def export_to_json( table, filename_or_fobj=None, encoding="utf-8", indent=None, *args, **kwargs ): """Export a `rows.Table` to a JSON file or file-like object. If a file-like object is provided it MUST be open in binary mode (like in `open('myfile.json', mode='wb')`). """ # TODO: will work onl...
Return the plugin name based on the URI
def plugin_name_by_uri(uri): "Return the plugin name based on the URI" # TODO: parse URIs like 'sqlite://' also parsed = urlparse(uri) basename = os.path.basename(parsed.path) if not basename.strip(): raise RuntimeError("Could not identify file format.") plugin_name = basename.split("...
Return the file extension used by this plugin
def extension_by_source(source, mime_type):
    """Return the file extension used by this plugin.

    Falls back to the last segment of the MIME type (e.g. ``"html"`` for
    ``"text/html"``) when the source has no plugin name; returns ``None``
    when neither is available.
    """
    # TODO: should get this information from the plugin
    if source.plugin_name:
        return source.plugin_name
    if mime_type:
        return mime_type.rsplit("/", 1)[-1]
    return None
Return the plugin name based on the MIME type
def plugin_name_by_mime_type(mime_type, mime_name, file_extension):
    """Return the plugin name registered for the given MIME type, or ``None``.

    The MIME type is first normalized (using the MIME name and the file
    extension as fallback hints) before being looked up in the registry.
    """
    normalized = normalize_mime_type(mime_type, mime_name, file_extension)
    return MIME_TYPE_TO_PLUGIN_NAME.get(normalized)
Return a `rows.Source` with information for a given URI If URI starts with "http" or "https" the file will be downloaded. This function should only be used if the URI already exists because it's going to download/open the file to detect its encoding and MIME type.
def detect_source(uri, verify_ssl, progress, timeout=5): """Return a `rows.Source` with information for a given URI If URI starts with "http" or "https" the file will be downloaded. This function should only be used if the URI already exists because it's going to download/open the file to detect its e...
Import data described in a `rows.Source` into a `rows.Table`
def import_from_source(source, default_encoding, *args, **kwargs): "Import data described in a `rows.Source` into a `rows.Table`" # TODO: test open_compressed plugin_name = source.plugin_name kwargs["encoding"] = ( kwargs.get("encoding", None) or source.encoding or default_encoding ) t...