ngram
listlengths
0
67.8k
[ "<gh_stars>1-10 # Generated by Django 4.0.4 on 2022-05-07 09:51 from django.db import migrations", "from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('logger', '0028_remove_httprequest_api_version_and_more'), ] operations", "Django 4.0.4 on 2022-05-07 09:51 from django.db import migrations class Migration(migrations.Migration): dependencies =", "import migrations class Migration(migrations.Migration): dependencies = [ ('logger', '0028_remove_httprequest_api_version_and_more'), ] operations = [", "Migration(migrations.Migration): dependencies = [ ('logger', '0028_remove_httprequest_api_version_and_more'), ] operations = [ migrations.RenameModel( old_name='HTTPRequest', new_name='Payload',", "= [ ('logger', '0028_remove_httprequest_api_version_and_more'), ] operations = [ migrations.RenameModel( old_name='HTTPRequest', new_name='Payload', ), ]", "class Migration(migrations.Migration): dependencies = [ ('logger', '0028_remove_httprequest_api_version_and_more'), ] operations = [ migrations.RenameModel( old_name='HTTPRequest',", "dependencies = [ ('logger', '0028_remove_httprequest_api_version_and_more'), ] operations = [ migrations.RenameModel( old_name='HTTPRequest', new_name='Payload', ),", "by Django 4.0.4 on 2022-05-07 09:51 from django.db import migrations class Migration(migrations.Migration): dependencies", "migrations class Migration(migrations.Migration): dependencies = [ ('logger', '0028_remove_httprequest_api_version_and_more'), ] operations = [ migrations.RenameModel(", "4.0.4 on 2022-05-07 09:51 from django.db import migrations class Migration(migrations.Migration): dependencies = [", "on 2022-05-07 09:51 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('logger',", "Generated by Django 4.0.4 on 2022-05-07 09:51 from django.db import migrations class Migration(migrations.Migration):", "2022-05-07 09:51 from django.db import migrations class Migration(migrations.Migration): 
dependencies = [ ('logger', '0028_remove_httprequest_api_version_and_more'),", "# Generated by Django 4.0.4 on 2022-05-07 09:51 from django.db import migrations class", "09:51 from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('logger', '0028_remove_httprequest_api_version_and_more'), ]", "django.db import migrations class Migration(migrations.Migration): dependencies = [ ('logger', '0028_remove_httprequest_api_version_and_more'), ] operations =" ]
[ "tf import tfcoreml import argparse parser = argparse.ArgumentParser() parser.add_argument('input_pb') parser.add_argument('output_mlmodel') args = parser.parse_args()", "import argparse parser = argparse.ArgumentParser() parser.add_argument('input_pb') parser.add_argument('output_mlmodel') args = parser.parse_args() model = tfcoreml.convert(tf_model_path=args.input_pb,", "= argparse.ArgumentParser() parser.add_argument('input_pb') parser.add_argument('output_mlmodel') args = parser.parse_args() model = tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'], input_name_shape_dict={", "import tfcoreml import argparse parser = argparse.ArgumentParser() parser.add_argument('input_pb') parser.add_argument('output_mlmodel') args = parser.parse_args() model", "args = parser.parse_args() model = tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'], input_name_shape_dict={ 'flatten_input:0': [1, 28, 28,", "argparse.ArgumentParser() parser.add_argument('input_pb') parser.add_argument('output_mlmodel') args = parser.parse_args() model = tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'], input_name_shape_dict={ 'flatten_input:0':", "tensorflow as tf import tfcoreml import argparse parser = argparse.ArgumentParser() parser.add_argument('input_pb') parser.add_argument('output_mlmodel') args", "= tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'], input_name_shape_dict={ 'flatten_input:0': [1, 28, 28, 1]}, image_input_names=['flatten_input:0']) spec =", "parser.add_argument('output_mlmodel') args = parser.parse_args() model = tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'], input_name_shape_dict={ 'flatten_input:0': [1, 28,", 
"parser.add_argument('input_pb') parser.add_argument('output_mlmodel') args = parser.parse_args() model = tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'], input_name_shape_dict={ 'flatten_input:0': [1,", "model = tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'], input_name_shape_dict={ 'flatten_input:0': [1, 28, 28, 1]}, image_input_names=['flatten_input:0']) spec", "argparse parser = argparse.ArgumentParser() parser.add_argument('input_pb') parser.add_argument('output_mlmodel') args = parser.parse_args() model = tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel,", "tfcoreml import argparse parser = argparse.ArgumentParser() parser.add_argument('input_pb') parser.add_argument('output_mlmodel') args = parser.parse_args() model =", "as tf import tfcoreml import argparse parser = argparse.ArgumentParser() parser.add_argument('input_pb') parser.add_argument('output_mlmodel') args =", "= parser.parse_args() model = tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'], input_name_shape_dict={ 'flatten_input:0': [1, 28, 28, 1]},", "parser.parse_args() model = tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'], input_name_shape_dict={ 'flatten_input:0': [1, 28, 28, 1]}, image_input_names=['flatten_input:0'])", "parser = argparse.ArgumentParser() parser.add_argument('input_pb') parser.add_argument('output_mlmodel') args = parser.parse_args() model = tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'],", "import tensorflow as tf import tfcoreml import argparse parser = argparse.ArgumentParser() parser.add_argument('input_pb') parser.add_argument('output_mlmodel')", 
"tfcoreml.convert(tf_model_path=args.input_pb, mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'], input_name_shape_dict={ 'flatten_input:0': [1, 28, 28, 1]}, image_input_names=['flatten_input:0']) spec = model.get_spec()", "<filename>nn/mnist_tf_coreml/pb_to_coreml.py import tensorflow as tf import tfcoreml import argparse parser = argparse.ArgumentParser() parser.add_argument('input_pb')", "mlmodel_path=args.output_mlmodel, output_feature_names=['dense_1/Softmax:0'], input_name_shape_dict={ 'flatten_input:0': [1, 28, 28, 1]}, image_input_names=['flatten_input:0']) spec = model.get_spec() print(spec.description.output)" ]
[ "makes. This congests the # output unnecessarily. def mkpath(self, name, mode=0o777): distutils.dir_util.mkpath( name,", "Assume the library list is valid -- 'check_library_list()' is # called from 'finalize_options()',", "is not None: for macro in self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def check_library_list(self, libraries): if", "None lib_names = [] for lib in self.libraries: lib_names.append(lib.name) return lib_names def get_source_files(self):", "libraries to build self.libraries = None # Compilation options for all libraries self.include_dirs", "user_options = [ ('build-clib=', 'b', \"directory to build C/C++ libraries to\"), ('build-temp=', 't',", "First, compile the source code to object files in the library # directory.", ") # Now \"link\" the object files together into a static library. #", "is not None: # 'define' option is a list of (name,value) tuples for", "and build-temp default # to build-temp as defined by the \"build\" command. This", "I am not able to use the distutils compiler in a threaded scenario", "if not isinstance(lib, Library): raise distutils.errors.DistutilsSetupError( \"contents of 'libraries' needs to be instances", "'Library' not \" + str(type(lib)) ) # lib.validate() def get_library_names(self): # Assume the", "a class # call Library. This class is what will hold all of", "to put temporary build by-products\"), ('debug', 'g', \"compile with debugging information\"), ('force', 'f',", "This might be confusing: both build-clib and build-temp default # to build-temp as", "lib in libraries: if not isinstance(lib, Library): raise distutils.errors.DistutilsSetupError( \"contents of 'libraries' needs", "C/C++ libraries to\"), ('build-temp=', 't', \"directory to put temporary build by-products\"), ('debug', 'g',", "a method \"build\" that ghets called. if this method is overridden # it", "default # to build-temp as defined by the \"build\" command. 
This is because", "self.check_library_list(self.libraries) if self.include_dirs is None: self.include_dirs = self.distribution.include_dirs or [] if isinstance(self.include_dirs, str):", "can be used # Instead of using a tuple and a dict to", "libraries: if not isinstance(lib, Library): raise distutils.errors.DistutilsSetupError( \"contents of 'libraries' needs to be", "temporary build directory.) include_dirs = lib.include_dirs objects = self.compiler.compile( lib.sources, output_dir=self.build_temp, macros=lib.macros, include_dirs=include_dirs,", "if self.include_dirs is None: self.include_dirs = self.distribution.include_dirs or [] if isinstance(self.include_dirs, str): self.include_dirs", "to object files in the library # directory. (This should probably change to", "so it should be! if not self.libraries: return None lib_names = [] for", "self.include_dirs = self.include_dirs.split(os.pathsep) def run(self): if not self.libraries: return # we are leaving", "I created a wrapper class around the # Library which institutes a multi", "of funky. from distutils.ccompiler import new_compiler self.compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force )", "Unix at least, this isn't really linking -- it just # builds an", "'%s' library\", lib.name) try: lib.build(self) except NotImplementedError: # First, compile the source code", "valid -- 'check_library_list()' is # called from 'finalize_options()', so it should be! if", "the various build components needed # for a build. Now. There is a", "compile the source code to object files in the library # directory. (This", "if not self.libraries: return None lib_names = [] for lib in self.libraries: lib_names.append(lib.name)", "('force', 'f', \"forcibly build everything (ignore file timestamps)\"), ] boolean_options = ['debug', 'force']", "# called from 'finalize_options()', so it should be! 
if not self.libraries: return None", "replace the compilers spawn and mkpath with the onces that we have written", "is a list of (name,value) tuples for (name, value) in self.define: self.compiler.define_macro(name, value)", "leaving this here so if wanted the built in compiler for distutils can", "distutils. I am not able to use the distutils compiler in a threaded", "<filename>build_framework/build_clib.py # -*- coding: utf-8 -*- import distutils import distutils.errors import distutils.core import", "return lib_names def get_source_files(self): self.check_library_list(self.libraries) filenames = [] for lib in self.libraries: filenames.extend(lib.sources)", "in self.libraries: lib_names.append(lib.name) return lib_names def get_source_files(self): self.check_library_list(self.libraries) filenames = [] for lib", "files together into a static library. # (On Unix at least, this isn't", "if not isinstance(libraries, (list, tuple)): raise distutils.errors.DistutilsSetupError( \"'libraries' options need to be either", "build components needed # for a build. Now. 
There is a method \"build\"", "tuple)): raise distutils.errors.DistutilsSetupError( \"'libraries' options need to be either a list or a", "wrapper class around the # Library which institutes a multi threaded compiling process.", "distutils.core import distutils.command.build_clib import distutils.log from distutils.sysconfig import customize_compiler import distutils.dir_util import os", "-*- coding: utf-8 -*- import distutils import distutils.errors import distutils.core import distutils.command.build_clib import", "to build C/C++ libraries to\"), ('build-temp=', 't', \"directory to put temporary build by-products\"),", "self.libraries: return None lib_names = [] for lib in self.libraries: lib_names.append(lib.name) return lib_names", "['debug', 'force'] help_options = [ ('help-compiler', None, \"list available compilers\", distutils.command.build_clib.show_compilers), ] def", "lib.build(self) except NotImplementedError: # First, compile the source code to object files in", "of 'libraries' needs to be instances of 'Library' not \" + str(type(lib)) )", "libraries self.include_dirs = None self.define = None self.undef = None self.debug = None", "'debug'), ('force', 'force') ) if not os.path.exists(self.build_clib): os.makedirs(self.build_clib) if not os.path.exists(self.build_temp): os.makedirs(self.build_temp) self.libraries", "mode, dry_run=self.compiler.dry_run, verbose=0 ) def initialize_options(self): self.build_clib = None self.build_temp = None #", "for a build. Now. There is a method \"build\" that ghets called. if", "that ghets called. if this method is overridden # it is what gets", "debug=self.debug ) # Now \"link\" the object files together into a static library.", "compiler in a threaded scenario # because it was not designed to be", "is not None: self.compiler.set_include_dirs(self.include_dirs) if self.define is not None: # 'define' option is", "dict to provide compiler options I decided to make a class # call", "inject the verbose option. 
# the compilers version does not allow for setting", "Python # extensions -- but I want to keep my options open. self.set_undefined_options(", "import distutils.core import distutils.command.build_clib import distutils.log from distutils.sysconfig import customize_compiler import distutils.dir_util import", "lib_names def get_source_files(self): self.check_library_list(self.libraries) filenames = [] for lib in self.libraries: filenames.extend(lib.sources) return", ") def initialize_options(self): self.build_clib = None self.build_temp = None # List of libraries", "coding: utf-8 -*- import distutils import distutils.errors import distutils.core import distutils.command.build_clib import distutils.log", "is None: self.include_dirs = self.distribution.include_dirs or [] if isinstance(self.include_dirs, str): self.include_dirs = self.include_dirs.split(os.pathsep)", "# the compilers version does not allow for setting of a verbose level", "call Library. This class is what will hold all of the various build", "in a temporary build directory.) include_dirs = lib.include_dirs objects = self.compiler.compile( lib.sources, output_dir=self.build_temp,", "file timestamps)\"), ] boolean_options = ['debug', 'force'] help_options = [ ('help-compiler', None, \"list", "to make a class # call Library. This class is what will hold", "object files together into a static library. # (On Unix at least, this", "longer use the built in # compiler with distutils. I am not able", "in a threaded scenario # because it was not designed to be thread", "import os from . import spawn_process from .library.library_base import Library class build_clib(distutils.core.Command): user_options", "compiler options I decided to make a class # call Library. 
This class", "to be either a list or a tuple.\") for lib in libraries: if", "self.libraries = None # Compilation options for all libraries self.include_dirs = None self.define", "be instances of 'Library' not \" + str(type(lib)) ) # lib.validate() def get_library_names(self):", "= [ ('build-clib=', 'b', \"directory to build C/C++ libraries to\"), ('build-temp=', 't', \"directory", "defaults to a verbose level of 1 which # which prints out each", "self.libraries: lib_names.append(lib.name) return lib_names def get_source_files(self): self.check_library_list(self.libraries) filenames = [] for lib in", "str(type(lib)) ) # lib.validate() def get_library_names(self): # Assume the library list is valid", "which prints out each and every directory it makes. This congests the #", "in compiler for distutils can be used # Instead of using a tuple", "value) if self.undef is not None: for macro in self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def", "None self.build_temp = None # List of libraries to build self.libraries = None", "the point of view of building Python # extensions -- but I want", "class around the # Library which institutes a multi threaded compiling process. 
we", "get_source_files(self): self.check_library_list(self.libraries) filenames = [] for lib in self.libraries: filenames.extend(lib.sources) return filenames def", "or [] if isinstance(self.include_dirs, str): self.include_dirs = self.include_dirs.split(os.pathsep) def run(self): if not self.libraries:", "= None # Compilation options for all libraries self.include_dirs = None self.define =", "filenames.extend(lib.sources) return filenames def build_libraries(self, libraries): for lib in libraries: distutils.log.info(\"building '%s' library\",", "filenames = [] for lib in self.libraries: filenames.extend(lib.sources) return filenames def build_libraries(self, libraries):", "if self.undef is not None: for macro in self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def check_library_list(self,", "with distutils. I am not able to use the distutils compiler in a", "os.makedirs(self.build_clib) if not os.path.exists(self.build_temp): os.makedirs(self.build_temp) self.libraries = self.distribution.libraries self.check_library_list(self.libraries) if self.include_dirs is None:", "\"directory to build C/C++ libraries to\"), ('build-temp=', 't', \"directory to put temporary build", "into a static library. # (On Unix at least, this isn't really linking", "prints out each and every directory it makes. This congests the # output", "**kwargs) # we override the compilers mkpath so we can inject the verbose", "os.path.exists(self.build_temp): os.makedirs(self.build_temp) self.libraries = self.distribution.libraries self.check_library_list(self.libraries) if self.include_dirs is None: self.include_dirs = self.distribution.include_dirs", "setting of a verbose level # and distutils.dir_util.mkpath defaults to a verbose level", "override the compilers mkpath so we can inject the verbose option. 
# the", "from .library.library_base import Library class build_clib(distutils.core.Command): user_options = [ ('build-clib=', 'b', \"directory to", "verbose option. # the compilers version does not allow for setting of a", "a build. Now. There is a method \"build\" that ghets called. if this", "'check_library_list()' is # called from 'finalize_options()', so it should be! if not self.libraries:", "lib.name) try: lib.build(self) except NotImplementedError: # First, compile the source code to object", "self.undef = None self.debug = None self.force = 0 self.compiler = None def", "dry_run=self.compiler.dry_run, verbose=0 ) def initialize_options(self): self.build_clib = None self.build_temp = None # List", "for setting of a verbose level # and distutils.dir_util.mkpath defaults to a verbose", "a tuple.\") for lib in libraries: if not isinstance(lib, Library): raise distutils.errors.DistutilsSetupError( \"contents", "use the built in # compiler with distutils. I am not able to", "self.check_library_list(self.libraries) filenames = [] for lib in self.libraries: filenames.extend(lib.sources) return filenames def build_libraries(self,", "import distutils.log from distutils.sysconfig import customize_compiler import distutils.dir_util import os from . import", "libraries): if not isinstance(libraries, (list, tuple)): raise distutils.errors.DistutilsSetupError( \"'libraries' options need to be", "to putting object # files in a temporary build directory.) 
include_dirs = lib.include_dirs", "to a verbose level of 1 which # which prints out each and", "for distutils can be used # Instead of using a tuple and a", "of 'Library' not \" + str(type(lib)) ) # lib.validate() def get_library_names(self): # Assume", "self.spawn self.compiler.mkpath = self.mkpath customize_compiler(self.compiler) if self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs) if self.define", "os.path.exists(self.build_clib): os.makedirs(self.build_clib) if not os.path.exists(self.build_temp): os.makedirs(self.build_temp) self.libraries = self.distribution.libraries self.check_library_list(self.libraries) if self.include_dirs is", "every directory it makes. This congests the # output unnecessarily. def mkpath(self, name,", "keep my options open. self.set_undefined_options( 'build', ('build_temp', 'build_clib'), ('build_temp', 'build_temp'), ('compiler', 'compiler'), ('debug',", "is # called from 'finalize_options()', so it should be! if not self.libraries: return", "to provide compiler options I decided to make a class # call Library.", "multi threaded compiling process. we no longer use the built in # compiler", "needed # for a build. Now. There is a method \"build\" that ghets", "this here so if wanted the built in compiler for distutils can be", "it just # builds an archive. Whatever.) self.compiler.create_static_lib( objects, lib.name, output_dir=self.build_clib, debug=self.debug )", "distutils.dir_util.mkpath defaults to a verbose level of 1 which # which prints out", "compilers\", distutils.command.build_clib.show_compilers), ] def spawn(self, *args, **kwargs): spawn_process.spawn(*args, **kwargs) # we override the", "self.define: self.compiler.define_macro(name, value) if self.undef is not None: for macro in self.undef: self.compiler.undefine_macro(macro)", "# Assume the library list is valid -- 'check_library_list()' is # called from", "least, this isn't really linking -- it just # builds an archive. 
Whatever.)", "# because it was not designed to be thread safe and things get", "files in the library # directory. (This should probably change to putting object", "are really just temporary build # by-products, at least from the point of", "lib.sources, output_dir=self.build_temp, macros=lib.macros, include_dirs=include_dirs, debug=self.debug ) # Now \"link\" the object files together", "the verbose option. # the compilers version does not allow for setting of", "at least, this isn't really linking -- it just # builds an archive.", "we are leaving this here so if wanted the built in compiler for", "the distutils compiler in a threaded scenario # because it was not designed", "safe and things get all kinds of funky. from distutils.ccompiler import new_compiler self.compiler", "\"build\" that ghets called. if this method is overridden # it is what", "os.makedirs(self.build_temp) self.libraries = self.distribution.libraries self.check_library_list(self.libraries) if self.include_dirs is None: self.include_dirs = self.distribution.include_dirs or", "# extensions -- but I want to keep my options open. self.set_undefined_options( 'build',", "components needed # for a build. Now. There is a method \"build\" that", "library # directory. (This should probably change to putting object # files in", "compiler with distutils. 
I am not able to use the distutils compiler in", "if not os.path.exists(self.build_temp): os.makedirs(self.build_temp) self.libraries = self.distribution.libraries self.check_library_list(self.libraries) if self.include_dirs is None: self.include_dirs", "('build_temp', 'build_temp'), ('compiler', 'compiler'), ('debug', 'debug'), ('force', 'force') ) if not os.path.exists(self.build_clib): os.makedirs(self.build_clib)", "build by-products\"), ('debug', 'g', \"compile with debugging information\"), ('force', 'f', \"forcibly build everything", "'compiler'), ('debug', 'debug'), ('force', 'force') ) if not os.path.exists(self.build_clib): os.makedirs(self.build_clib) if not os.path.exists(self.build_temp):", "options I decided to make a class # call Library. This class is", "def get_source_files(self): self.check_library_list(self.libraries) filenames = [] for lib in self.libraries: filenames.extend(lib.sources) return filenames", "thread safe and things get all kinds of funky. from distutils.ccompiler import new_compiler", "congests the # output unnecessarily. def mkpath(self, name, mode=0o777): distutils.dir_util.mkpath( name, mode, dry_run=self.compiler.dry_run,", "except NotImplementedError: # First, compile the source code to object files in the", "of a verbose level # and distutils.dir_util.mkpath defaults to a verbose level of", "List of libraries to build self.libraries = None # Compilation options for all", "the source code to object files in the library # directory. (This should", "not self.libraries: return None lib_names = [] for lib in self.libraries: lib_names.append(lib.name) return", "change to putting object # files in a temporary build directory.) include_dirs =", "= ['debug', 'force'] help_options = [ ('help-compiler', None, \"list available compilers\", distutils.command.build_clib.show_compilers), ]", "we can inject the verbose option. 
# the compilers version does not allow", "was not designed to be thread safe and things get all kinds of", "list of (name,value) tuples for (name, value) in self.define: self.compiler.define_macro(name, value) if self.undef", "# 'define' option is a list of (name,value) tuples for (name, value) in", "object files in the library # directory. (This should probably change to putting", "lib.validate() def get_library_names(self): # Assume the library list is valid -- 'check_library_list()' is", "threaded compiling process. we no longer use the built in # compiler with", "output_dir=self.build_temp, macros=lib.macros, include_dirs=include_dirs, debug=self.debug ) # Now \"link\" the object files together into", "libraries: distutils.log.info(\"building '%s' library\", lib.name) try: lib.build(self) except NotImplementedError: # First, compile the", "list or a tuple.\") for lib in libraries: if not isinstance(lib, Library): raise", "NotImplementedError: # First, compile the source code to object files in the library", "institutes a multi threaded compiling process. we no longer use the built in", "check_library_list(self, libraries): if not isinstance(libraries, (list, tuple)): raise distutils.errors.DistutilsSetupError( \"'libraries' options need to", "from the point of view of building Python # extensions -- but I", "self.compiler = None def finalize_options(self): # This might be confusing: both build-clib and", "be thread safe and things get all kinds of funky. from distutils.ccompiler import", "'finalize_options()', so it should be! if not self.libraries: return None lib_names = []", "None self.undef = None self.debug = None self.force = 0 self.compiler = None", "if isinstance(self.include_dirs, str): self.include_dirs = self.include_dirs.split(os.pathsep) def run(self): if not self.libraries: return #", "ghets called. 
if this method is overridden # it is what gets used", "\"'libraries' options need to be either a list or a tuple.\") for lib", "for lib in self.libraries: filenames.extend(lib.sources) return filenames def build_libraries(self, libraries): for lib in", "name, mode=0o777): distutils.dir_util.mkpath( name, mode, dry_run=self.compiler.dry_run, verbose=0 ) def initialize_options(self): self.build_clib = None", "can inject the verbose option. # the compilers version does not allow for", "self.build_libraries(self.libraries) def check_library_list(self, libraries): if not isinstance(libraries, (list, tuple)): raise distutils.errors.DistutilsSetupError( \"'libraries' options", "either a list or a tuple.\") for lib in libraries: if not isinstance(lib,", "probably change to putting object # files in a temporary build directory.) include_dirs", "spawn(self, *args, **kwargs): spawn_process.spawn(*args, **kwargs) # we override the compilers mkpath so we", "a wrapper class around the # Library which institutes a multi threaded compiling", "allow for setting of a verbose level # and distutils.dir_util.mkpath defaults to a", "as defined by the \"build\" command. 
This is because # I think that", "lib in self.libraries: filenames.extend(lib.sources) return filenames def build_libraries(self, libraries): for lib in libraries:", "distutils.command.build_clib import distutils.log from distutils.sysconfig import customize_compiler import distutils.dir_util import os from .", "filenames def build_libraries(self, libraries): for lib in libraries: distutils.log.info(\"building '%s' library\", lib.name) try:", "None def finalize_options(self): # This might be confusing: both build-clib and build-temp default", "boolean_options = ['debug', 'force'] help_options = [ ('help-compiler', None, \"list available compilers\", distutils.command.build_clib.show_compilers),", "the built in compiler for distutils can be used # Instead of using", "= self.spawn self.compiler.mkpath = self.mkpath customize_compiler(self.compiler) if self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs) if", "Now. There is a method \"build\" that ghets called. if this method is", "if this method is overridden # it is what gets used instread of", "compilers version does not allow for setting of a verbose level # and", "in the library # directory. (This should probably change to putting object #", "= None self.define = None self.undef = None self.debug = None self.force =", "\"directory to put temporary build by-products\"), ('debug', 'g', \"compile with debugging information\"), ('force',", "temporary build by-products\"), ('debug', 'g', \"compile with debugging information\"), ('force', 'f', \"forcibly build", "the \"build\" command. This is because # I think that C libraries are", "for lib in libraries: distutils.log.info(\"building '%s' library\", lib.name) try: lib.build(self) except NotImplementedError: #", "import customize_compiler import distutils.dir_util import os from . import spawn_process from .library.library_base import", "I decided to make a class # call Library. 
This class is what", "('help-compiler', None, \"list available compilers\", distutils.command.build_clib.show_compilers), ] def spawn(self, *args, **kwargs): spawn_process.spawn(*args, **kwargs)", "because it was not designed to be thread safe and things get all", "*args, **kwargs): spawn_process.spawn(*args, **kwargs) # we override the compilers mkpath so we can", "= None self.build_temp = None # List of libraries to build self.libraries =", "('debug', 'debug'), ('force', 'force') ) if not os.path.exists(self.build_clib): os.makedirs(self.build_clib) if not os.path.exists(self.build_temp): os.makedirs(self.build_temp)", "= [] for lib in self.libraries: filenames.extend(lib.sources) return filenames def build_libraries(self, libraries): for", "None # Compilation options for all libraries self.include_dirs = None self.define = None", "# compiler with distutils. I am not able to use the distutils compiler", "internal compiler. I created a wrapper class around the # Library which institutes", "really linking -- it just # builds an archive. Whatever.) 
self.compiler.create_static_lib( objects, lib.name,", "# lib.validate() def get_library_names(self): # Assume the library list is valid -- 'check_library_list()'", "mkpath with the onces that we have written self.compiler.spawn = self.spawn self.compiler.mkpath =", "(name, value) in self.define: self.compiler.define_macro(name, value) if self.undef is not None: for macro", "# and distutils.dir_util.mkpath defaults to a verbose level of 1 which # which", "to use the distutils compiler in a threaded scenario # because it was", "isinstance(self.include_dirs, str): self.include_dirs = self.include_dirs.split(os.pathsep) def run(self): if not self.libraries: return # we", "lib.include_dirs objects = self.compiler.compile( lib.sources, output_dir=self.build_temp, macros=lib.macros, include_dirs=include_dirs, debug=self.debug ) # Now \"link\"", "option is a list of (name,value) tuples for (name, value) in self.define: self.compiler.define_macro(name,", "**kwargs): spawn_process.spawn(*args, **kwargs) # we override the compilers mkpath so we can inject", "of the various build components needed # for a build. Now. There is", "all of the various build components needed # for a build. Now. There", "self.include_dirs = None self.define = None self.undef = None self.debug = None self.force", "not isinstance(lib, Library): raise distutils.errors.DistutilsSetupError( \"contents of 'libraries' needs to be instances of", "lib in self.libraries: lib_names.append(lib.name) return lib_names def get_source_files(self): self.check_library_list(self.libraries) filenames = [] for", "in libraries: if not isinstance(lib, Library): raise distutils.errors.DistutilsSetupError( \"contents of 'libraries' needs to", "# Now \"link\" the object files together into a static library. # (On", "distutils can be used # Instead of using a tuple and a dict", "There is a method \"build\" that ghets called. 
if this method is overridden", "wanted the built in compiler for distutils can be used # Instead of", "gets used instread of the internal compiler. I created a wrapper class around", "= [] for lib in self.libraries: lib_names.append(lib.name) return lib_names def get_source_files(self): self.check_library_list(self.libraries) filenames", "here so if wanted the built in compiler for distutils can be used", "self.compiler.compile( lib.sources, output_dir=self.build_temp, macros=lib.macros, include_dirs=include_dirs, debug=self.debug ) # Now \"link\" the object files", "if not os.path.exists(self.build_clib): os.makedirs(self.build_clib) if not os.path.exists(self.build_temp): os.makedirs(self.build_temp) self.libraries = self.distribution.libraries self.check_library_list(self.libraries) if", "isinstance(lib, Library): raise distutils.errors.DistutilsSetupError( \"contents of 'libraries' needs to be instances of 'Library'", "it should be! if not self.libraries: return None lib_names = [] for lib", "what gets used instread of the internal compiler. I created a wrapper class", "= 0 self.compiler = None def finalize_options(self): # This might be confusing: both", "distutils.log.info(\"building '%s' library\", lib.name) try: lib.build(self) except NotImplementedError: # First, compile the source", "compiling process. we no longer use the built in # compiler with distutils.", "# call Library. This class is what will hold all of the various", "\"forcibly build everything (ignore file timestamps)\"), ] boolean_options = ['debug', 'force'] help_options =", "build-temp default # to build-temp as defined by the \"build\" command. This is", "-- but I want to keep my options open. 
self.set_undefined_options( 'build', ('build_temp', 'build_clib'),", "import distutils.command.build_clib import distutils.log from distutils.sysconfig import customize_compiler import distutils.dir_util import os from", "all libraries self.include_dirs = None self.define = None self.undef = None self.debug =", "all kinds of funky. from distutils.ccompiler import new_compiler self.compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run,", "def check_library_list(self, libraries): if not isinstance(libraries, (list, tuple)): raise distutils.errors.DistutilsSetupError( \"'libraries' options need", "timestamps)\"), ] boolean_options = ['debug', 'force'] help_options = [ ('help-compiler', None, \"list available", "self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs) if self.define is not None: # 'define' option", "to be instances of 'Library' not \" + str(type(lib)) ) # lib.validate() def", "instances of 'Library' not \" + str(type(lib)) ) # lib.validate() def get_library_names(self): #", "value) in self.define: self.compiler.define_macro(name, value) if self.undef is not None: for macro in", "# First, compile the source code to object files in the library #", "are leaving this here so if wanted the built in compiler for distutils", "return None lib_names = [] for lib in self.libraries: lib_names.append(lib.name) return lib_names def", "build # by-products, at least from the point of view of building Python", "various build components needed # for a build. Now. There is a method", "if self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs) if self.define is not None: # 'define'", "Library. This class is what will hold all of the various build components", "build. Now. There is a method \"build\" that ghets called. if this method", "to be thread safe and things get all kinds of funky. from distutils.ccompiler", "want to keep my options open. 
self.set_undefined_options( 'build', ('build_temp', 'build_clib'), ('build_temp', 'build_temp'), ('compiler',", "This congests the # output unnecessarily. def mkpath(self, name, mode=0o777): distutils.dir_util.mkpath( name, mode,", "# Instead of using a tuple and a dict to provide compiler options", "# we are leaving this here so if wanted the built in compiler", "help_options = [ ('help-compiler', None, \"list available compilers\", distutils.command.build_clib.show_compilers), ] def spawn(self, *args,", "C libraries are really just temporary build # by-products, at least from the", "'define' option is a list of (name,value) tuples for (name, value) in self.define:", "spawn and mkpath with the onces that we have written self.compiler.spawn = self.spawn", "a temporary build directory.) include_dirs = lib.include_dirs objects = self.compiler.compile( lib.sources, output_dir=self.build_temp, macros=lib.macros,", "verbose level # and distutils.dir_util.mkpath defaults to a verbose level of 1 which", "view of building Python # extensions -- but I want to keep my", ") # replace the compilers spawn and mkpath with the onces that we", "build C/C++ libraries to\"), ('build-temp=', 't', \"directory to put temporary build by-products\"), ('debug',", "temporary build # by-products, at least from the point of view of building", "the library list is valid -- 'check_library_list()' is # called from 'finalize_options()', so", "is overridden # it is what gets used instread of the internal compiler.", "for lib in libraries: if not isinstance(lib, Library): raise distutils.errors.DistutilsSetupError( \"contents of 'libraries'", "to keep my options open. self.set_undefined_options( 'build', ('build_temp', 'build_clib'), ('build_temp', 'build_temp'), ('compiler', 'compiler'),", "compiler=self.compiler, dry_run=self.dry_run, force=self.force ) # replace the compilers spawn and mkpath with the", "make a class # call Library. 
This class is what will hold all", "is a method \"build\" that ghets called. if this method is overridden #", "source code to object files in the library # directory. (This should probably", "spawn_process.spawn(*args, **kwargs) # we override the compilers mkpath so we can inject the", "static library. # (On Unix at least, this isn't really linking -- it", "class build_clib(distutils.core.Command): user_options = [ ('build-clib=', 'b', \"directory to build C/C++ libraries to\"),", "the compilers version does not allow for setting of a verbose level #", "def finalize_options(self): # This might be confusing: both build-clib and build-temp default #", "of the internal compiler. I created a wrapper class around the # Library", "be confusing: both build-clib and build-temp default # to build-temp as defined by", "distutils.errors.DistutilsSetupError( \"contents of 'libraries' needs to be instances of 'Library' not \" +", "compilers mkpath so we can inject the verbose option. # the compilers version", "customize_compiler import distutils.dir_util import os from . import spawn_process from .library.library_base import Library", "not allow for setting of a verbose level # and distutils.dir_util.mkpath defaults to", "of view of building Python # extensions -- but I want to keep", "and things get all kinds of funky. from distutils.ccompiler import new_compiler self.compiler =", "self.force = 0 self.compiler = None def finalize_options(self): # This might be confusing:", "everything (ignore file timestamps)\"), ] boolean_options = ['debug', 'force'] help_options = [ ('help-compiler',", "and every directory it makes. This congests the # output unnecessarily. def mkpath(self,", "mkpath so we can inject the verbose option. 
# the compilers version does", "'build_clib'), ('build_temp', 'build_temp'), ('compiler', 'compiler'), ('debug', 'debug'), ('force', 'force') ) if not os.path.exists(self.build_clib):", "-- 'check_library_list()' is # called from 'finalize_options()', so it should be! if not", "self.libraries: return # we are leaving this here so if wanted the built", "because # I think that C libraries are really just temporary build #", ") # lib.validate() def get_library_names(self): # Assume the library list is valid --", "threaded scenario # because it was not designed to be thread safe and", "self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def check_library_list(self, libraries): if not isinstance(libraries, (list, tuple)): raise distutils.errors.DistutilsSetupError(", "self.set_undefined_options( 'build', ('build_temp', 'build_clib'), ('build_temp', 'build_temp'), ('compiler', 'compiler'), ('debug', 'debug'), ('force', 'force') )", "object # files in a temporary build directory.) include_dirs = lib.include_dirs objects =", "self.compiler.mkpath = self.mkpath customize_compiler(self.compiler) if self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs) if self.define is", "run(self): if not self.libraries: return # we are leaving this here so if", "options need to be either a list or a tuple.\") for lib in", "my options open. self.set_undefined_options( 'build', ('build_temp', 'build_clib'), ('build_temp', 'build_temp'), ('compiler', 'compiler'), ('debug', 'debug'),", "directory. (This should probably change to putting object # files in a temporary", "self.include_dirs = self.distribution.include_dirs or [] if isinstance(self.include_dirs, str): self.include_dirs = self.include_dirs.split(os.pathsep) def run(self):", "class # call Library. 
This class is what will hold all of the", "not self.libraries: return # we are leaving this here so if wanted the", "This is because # I think that C libraries are really just temporary", "build everything (ignore file timestamps)\"), ] boolean_options = ['debug', 'force'] help_options = [", "of 1 which # which prints out each and every directory it makes.", "(name,value) tuples for (name, value) in self.define: self.compiler.define_macro(name, value) if self.undef is not", "list is valid -- 'check_library_list()' is # called from 'finalize_options()', so it should", "(This should probably change to putting object # files in a temporary build", "unnecessarily. def mkpath(self, name, mode=0o777): distutils.dir_util.mkpath( name, mode, dry_run=self.compiler.dry_run, verbose=0 ) def initialize_options(self):", "# Compilation options for all libraries self.include_dirs = None self.define = None self.undef", "the internal compiler. I created a wrapper class around the # Library which", "import spawn_process from .library.library_base import Library class build_clib(distutils.core.Command): user_options = [ ('build-clib=', 'b',", "by-products\"), ('debug', 'g', \"compile with debugging information\"), ('force', 'f', \"forcibly build everything (ignore", "I want to keep my options open. self.set_undefined_options( 'build', ('build_temp', 'build_clib'), ('build_temp', 'build_temp'),", "in self.define: self.compiler.define_macro(name, value) if self.undef is not None: for macro in self.undef:", "to build self.libraries = None # Compilation options for all libraries self.include_dirs =", "so if wanted the built in compiler for distutils can be used #", "able to use the distutils compiler in a threaded scenario # because it", "# directory. (This should probably change to putting object # files in a", "I think that C libraries are really just temporary build # by-products, at", "might be confusing: both build-clib and build-temp default # to build-temp as defined", "process. 
we no longer use the built in # compiler with distutils. I", "for macro in self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def check_library_list(self, libraries): if not isinstance(libraries, (list,", "self.define is not None: # 'define' option is a list of (name,value) tuples", "be used # Instead of using a tuple and a dict to provide", "\"build\" command. This is because # I think that C libraries are really", "library. # (On Unix at least, this isn't really linking -- it just", "overridden # it is what gets used instread of the internal compiler. I", "not None: self.compiler.set_include_dirs(self.include_dirs) if self.define is not None: # 'define' option is a", "compilers spawn and mkpath with the onces that we have written self.compiler.spawn =", "the library # directory. (This should probably change to putting object # files", "def get_library_names(self): # Assume the library list is valid -- 'check_library_list()' is #", "(ignore file timestamps)\"), ] boolean_options = ['debug', 'force'] help_options = [ ('help-compiler', None,", "include_dirs=include_dirs, debug=self.debug ) # Now \"link\" the object files together into a static", "class is what will hold all of the various build components needed #", "def initialize_options(self): self.build_clib = None self.build_temp = None # List of libraries to", "def run(self): if not self.libraries: return # we are leaving this here so", "from distutils.sysconfig import customize_compiler import distutils.dir_util import os from . 
import spawn_process from", "created a wrapper class around the # Library which institutes a multi threaded", "with the onces that we have written self.compiler.spawn = self.spawn self.compiler.mkpath = self.mkpath", "= lib.include_dirs objects = self.compiler.compile( lib.sources, output_dir=self.build_temp, macros=lib.macros, include_dirs=include_dirs, debug=self.debug ) # Now", "'force'] help_options = [ ('help-compiler', None, \"list available compilers\", distutils.command.build_clib.show_compilers), ] def spawn(self,", "called from 'finalize_options()', so it should be! if not self.libraries: return None lib_names", "[] for lib in self.libraries: lib_names.append(lib.name) return lib_names def get_source_files(self): self.check_library_list(self.libraries) filenames =", "spawn_process from .library.library_base import Library class build_clib(distutils.core.Command): user_options = [ ('build-clib=', 'b', \"directory", ". import spawn_process from .library.library_base import Library class build_clib(distutils.core.Command): user_options = [ ('build-clib=',", "[] for lib in self.libraries: filenames.extend(lib.sources) return filenames def build_libraries(self, libraries): for lib", "it was not designed to be thread safe and things get all kinds", "in # compiler with distutils. I am not able to use the distutils", "that C libraries are really just temporary build # by-products, at least from", "mkpath(self, name, mode=0o777): distutils.dir_util.mkpath( name, mode, dry_run=self.compiler.dry_run, verbose=0 ) def initialize_options(self): self.build_clib =", "= [ ('help-compiler', None, \"list available compilers\", distutils.command.build_clib.show_compilers), ] def spawn(self, *args, **kwargs):", "built in compiler for distutils can be used # Instead of using a", "is because # I think that C libraries are really just temporary build", "files in a temporary build directory.) 
include_dirs = lib.include_dirs objects = self.compiler.compile( lib.sources,", "a threaded scenario # because it was not designed to be thread safe", "tuple.\") for lib in libraries: if not isinstance(lib, Library): raise distutils.errors.DistutilsSetupError( \"contents of", "\"compile with debugging information\"), ('force', 'f', \"forcibly build everything (ignore file timestamps)\"), ]", "self.compiler.spawn = self.spawn self.compiler.mkpath = self.mkpath customize_compiler(self.compiler) if self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs)", "finalize_options(self): # This might be confusing: both build-clib and build-temp default # to", "needs to be instances of 'Library' not \" + str(type(lib)) ) # lib.validate()", "funky. from distutils.ccompiler import new_compiler self.compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force ) #", "directory.) include_dirs = lib.include_dirs objects = self.compiler.compile( lib.sources, output_dir=self.build_temp, macros=lib.macros, include_dirs=include_dirs, debug=self.debug )", "import distutils.errors import distutils.core import distutils.command.build_clib import distutils.log from distutils.sysconfig import customize_compiler import", "build directory.) 
include_dirs = lib.include_dirs objects = self.compiler.compile( lib.sources, output_dir=self.build_temp, macros=lib.macros, include_dirs=include_dirs, debug=self.debug", "# -*- coding: utf-8 -*- import distutils import distutils.errors import distutils.core import distutils.command.build_clib", "import distutils import distutils.errors import distutils.core import distutils.command.build_clib import distutils.log from distutils.sysconfig import", "new_compiler self.compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force ) # replace the compilers spawn", "available compilers\", distutils.command.build_clib.show_compilers), ] def spawn(self, *args, **kwargs): spawn_process.spawn(*args, **kwargs) # we override", "not designed to be thread safe and things get all kinds of funky.", "self.compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force ) # replace the compilers spawn and", "(On Unix at least, this isn't really linking -- it just # builds", "-*- import distutils import distutils.errors import distutils.core import distutils.command.build_clib import distutils.log from distutils.sysconfig", "[ ('build-clib=', 'b', \"directory to build C/C++ libraries to\"), ('build-temp=', 't', \"directory to", "decided to make a class # call Library. This class is what will", "the # Library which institutes a multi threaded compiling process. we no longer", "what will hold all of the various build components needed # for a", "and distutils.dir_util.mkpath defaults to a verbose level of 1 which # which prints", "('build-clib=', 'b', \"directory to build C/C++ libraries to\"), ('build-temp=', 't', \"directory to put", "# which prints out each and every directory it makes. 
This congests the", "least from the point of view of building Python # extensions -- but", "at least from the point of view of building Python # extensions --", "'build', ('build_temp', 'build_clib'), ('build_temp', 'build_temp'), ('compiler', 'compiler'), ('debug', 'debug'), ('force', 'force') ) if", "not os.path.exists(self.build_temp): os.makedirs(self.build_temp) self.libraries = self.distribution.libraries self.check_library_list(self.libraries) if self.include_dirs is None: self.include_dirs =", "None self.define = None self.undef = None self.debug = None self.force = 0", "will hold all of the various build components needed # for a build.", "a list or a tuple.\") for lib in libraries: if not isinstance(lib, Library):", "onces that we have written self.compiler.spawn = self.spawn self.compiler.mkpath = self.mkpath customize_compiler(self.compiler) if", "macros=lib.macros, include_dirs=include_dirs, debug=self.debug ) # Now \"link\" the object files together into a", "information\"), ('force', 'f', \"forcibly build everything (ignore file timestamps)\"), ] boolean_options = ['debug',", "it makes. This congests the # output unnecessarily. def mkpath(self, name, mode=0o777): distutils.dir_util.mkpath(", "from 'finalize_options()', so it should be! if not self.libraries: return None lib_names =", "distutils.sysconfig import customize_compiler import distutils.dir_util import os from . import spawn_process from .library.library_base", "= self.compiler.compile( lib.sources, output_dir=self.build_temp, macros=lib.macros, include_dirs=include_dirs, debug=self.debug ) # Now \"link\" the object", "need to be either a list or a tuple.\") for lib in libraries:", "not isinstance(libraries, (list, tuple)): raise distutils.errors.DistutilsSetupError( \"'libraries' options need to be either a", "= self.distribution.libraries self.check_library_list(self.libraries) if self.include_dirs is None: self.include_dirs = self.distribution.include_dirs or [] if", "be! 
if not self.libraries: return None lib_names = [] for lib in self.libraries:", "building Python # extensions -- but I want to keep my options open.", "[] if isinstance(self.include_dirs, str): self.include_dirs = self.include_dirs.split(os.pathsep) def run(self): if not self.libraries: return", "kinds of funky. from distutils.ccompiler import new_compiler self.compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force", "distutils.errors import distutils.core import distutils.command.build_clib import distutils.log from distutils.sysconfig import customize_compiler import distutils.dir_util", "if not self.libraries: return # we are leaving this here so if wanted", "objects = self.compiler.compile( lib.sources, output_dir=self.build_temp, macros=lib.macros, include_dirs=include_dirs, debug=self.debug ) # Now \"link\" the", "that we have written self.compiler.spawn = self.spawn self.compiler.mkpath = self.mkpath customize_compiler(self.compiler) if self.include_dirs", "to\"), ('build-temp=', 't', \"directory to put temporary build by-products\"), ('debug', 'g', \"compile with", "# it is what gets used instread of the internal compiler. 
I created", "mode=0o777): distutils.dir_util.mkpath( name, mode, dry_run=self.compiler.dry_run, verbose=0 ) def initialize_options(self): self.build_clib = None self.build_temp", "a verbose level of 1 which # which prints out each and every", "self.compiler.set_include_dirs(self.include_dirs) if self.define is not None: # 'define' option is a list of", "not None: for macro in self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def check_library_list(self, libraries): if not", "using a tuple and a dict to provide compiler options I decided to", "None, \"list available compilers\", distutils.command.build_clib.show_compilers), ] def spawn(self, *args, **kwargs): spawn_process.spawn(*args, **kwargs) #", "None: self.compiler.set_include_dirs(self.include_dirs) if self.define is not None: # 'define' option is a list", "os from . import spawn_process from .library.library_base import Library class build_clib(distutils.core.Command): user_options =", "build self.libraries = None # Compilation options for all libraries self.include_dirs = None", "] def spawn(self, *args, **kwargs): spawn_process.spawn(*args, **kwargs) # we override the compilers mkpath", "or a tuple.\") for lib in libraries: if not isinstance(lib, Library): raise distutils.errors.DistutilsSetupError(", "'build_temp'), ('compiler', 'compiler'), ('debug', 'debug'), ('force', 'force') ) if not os.path.exists(self.build_clib): os.makedirs(self.build_clib) if", "= None self.force = 0 self.compiler = None def finalize_options(self): # This might", "this method is overridden # it is what gets used instread of the", "which institutes a multi threaded compiling process. 
we no longer use the built", "be either a list or a tuple.\") for lib in libraries: if not", "just temporary build # by-products, at least from the point of view of", "have written self.compiler.spawn = self.spawn self.compiler.mkpath = self.mkpath customize_compiler(self.compiler) if self.include_dirs is not", "put temporary build by-products\"), ('debug', 'g', \"compile with debugging information\"), ('force', 'f', \"forcibly", "distutils.ccompiler import new_compiler self.compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force ) # replace the", "code to object files in the library # directory. (This should probably change", "return filenames def build_libraries(self, libraries): for lib in libraries: distutils.log.info(\"building '%s' library\", lib.name)", "a list of (name,value) tuples for (name, value) in self.define: self.compiler.define_macro(name, value) if", "built in # compiler with distutils. I am not able to use the", "really just temporary build # by-products, at least from the point of view", "we have written self.compiler.spawn = self.spawn self.compiler.mkpath = self.mkpath customize_compiler(self.compiler) if self.include_dirs is", "self.mkpath customize_compiler(self.compiler) if self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs) if self.define is not None:", "from distutils.ccompiler import new_compiler self.compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force ) # replace", "'force') ) if not os.path.exists(self.build_clib): os.makedirs(self.build_clib) if not os.path.exists(self.build_temp): os.makedirs(self.build_temp) self.libraries = self.distribution.libraries", "options for all libraries self.include_dirs = None self.define = None self.undef = None", "str): self.include_dirs = self.include_dirs.split(os.pathsep) def run(self): if not self.libraries: return # we are", "lib in libraries: distutils.log.info(\"building '%s' library\", lib.name) 
try: lib.build(self) except NotImplementedError: # First,", "directory it makes. This congests the # output unnecessarily. def mkpath(self, name, mode=0o777):", "putting object # files in a temporary build directory.) include_dirs = lib.include_dirs objects", "('build_temp', 'build_clib'), ('build_temp', 'build_temp'), ('compiler', 'compiler'), ('debug', 'debug'), ('force', 'force') ) if not", "out each and every directory it makes. This congests the # output unnecessarily.", "def spawn(self, *args, **kwargs): spawn_process.spawn(*args, **kwargs) # we override the compilers mkpath so", "in self.libraries: filenames.extend(lib.sources) return filenames def build_libraries(self, libraries): for lib in libraries: distutils.log.info(\"building", "macro in self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def check_library_list(self, libraries): if not isinstance(libraries, (list, tuple)):", "Library): raise distutils.errors.DistutilsSetupError( \"contents of 'libraries' needs to be instances of 'Library' not", "# files in a temporary build directory.) include_dirs = lib.include_dirs objects = self.compiler.compile(", "# for a build. Now. There is a method \"build\" that ghets called.", "# output unnecessarily. 
def mkpath(self, name, mode=0o777): distutils.dir_util.mkpath( name, mode, dry_run=self.compiler.dry_run, verbose=0 )", "self.distribution.libraries self.check_library_list(self.libraries) if self.include_dirs is None: self.include_dirs = self.distribution.include_dirs or [] if isinstance(self.include_dirs,", "utf-8 -*- import distutils import distutils.errors import distutils.core import distutils.command.build_clib import distutils.log from", "= new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force ) # replace the compilers spawn and mkpath", "not able to use the distutils compiler in a threaded scenario # because", "verbose level of 1 which # which prints out each and every directory", "include_dirs = lib.include_dirs objects = self.compiler.compile( lib.sources, output_dir=self.build_temp, macros=lib.macros, include_dirs=include_dirs, debug=self.debug ) #", "with debugging information\"), ('force', 'f', \"forcibly build everything (ignore file timestamps)\"), ] boolean_options", "options open. self.set_undefined_options( 'build', ('build_temp', 'build_clib'), ('build_temp', 'build_temp'), ('compiler', 'compiler'), ('debug', 'debug'), ('force',", "None: for macro in self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def check_library_list(self, libraries): if not isinstance(libraries,", "isinstance(libraries, (list, tuple)): raise distutils.errors.DistutilsSetupError( \"'libraries' options need to be either a list", "# by-products, at least from the point of view of building Python #", "of using a tuple and a dict to provide compiler options I decided", "None # List of libraries to build self.libraries = None # Compilation options", "= None self.undef = None self.debug = None self.force = 0 self.compiler =", "defined by the \"build\" command. 
This is because # I think that C", "of building Python # extensions -- but I want to keep my options", "is valid -- 'check_library_list()' is # called from 'finalize_options()', so it should be!", "if self.define is not None: # 'define' option is a list of (name,value)", "None: self.include_dirs = self.distribution.include_dirs or [] if isinstance(self.include_dirs, str): self.include_dirs = self.include_dirs.split(os.pathsep) def", "for all libraries self.include_dirs = None self.define = None self.undef = None self.debug", "build_clib(distutils.core.Command): user_options = [ ('build-clib=', 'b', \"directory to build C/C++ libraries to\"), ('build-temp=',", "(list, tuple)): raise distutils.errors.DistutilsSetupError( \"'libraries' options need to be either a list or", "the compilers mkpath so we can inject the verbose option. # the compilers", "library\", lib.name) try: lib.build(self) except NotImplementedError: # First, compile the source code to", "self.include_dirs.split(os.pathsep) def run(self): if not self.libraries: return # we are leaving this here", "lib_names.append(lib.name) return lib_names def get_source_files(self): self.check_library_list(self.libraries) filenames = [] for lib in self.libraries:", "build-clib and build-temp default # to build-temp as defined by the \"build\" command.", "confusing: both build-clib and build-temp default # to build-temp as defined by the", "isn't really linking -- it just # builds an archive. Whatever.) 
self.compiler.create_static_lib( objects,", "self.undef is not None: for macro in self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def check_library_list(self, libraries):", "Instead of using a tuple and a dict to provide compiler options I", "customize_compiler(self.compiler) if self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs) if self.define is not None: #", "distutils.log from distutils.sysconfig import customize_compiler import distutils.dir_util import os from . import spawn_process", "Compilation options for all libraries self.include_dirs = None self.define = None self.undef =", "build_libraries(self, libraries): for lib in libraries: distutils.log.info(\"building '%s' library\", lib.name) try: lib.build(self) except", "distutils.dir_util import os from . import spawn_process from .library.library_base import Library class build_clib(distutils.core.Command):", "provide compiler options I decided to make a class # call Library. This", "instread of the internal compiler. I created a wrapper class around the #", "a multi threaded compiling process. we no longer use the built in #", "not None: # 'define' option is a list of (name,value) tuples for (name,", "= None self.debug = None self.force = 0 self.compiler = None def finalize_options(self):", "initialize_options(self): self.build_clib = None self.build_temp = None # List of libraries to build", "output unnecessarily. def mkpath(self, name, mode=0o777): distutils.dir_util.mkpath( name, mode, dry_run=self.compiler.dry_run, verbose=0 ) def", "def build_libraries(self, libraries): for lib in libraries: distutils.log.info(\"building '%s' library\", lib.name) try: lib.build(self)", "= self.mkpath customize_compiler(self.compiler) if self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs) if self.define is not", "the # output unnecessarily. 
def mkpath(self, name, mode=0o777): distutils.dir_util.mkpath( name, mode, dry_run=self.compiler.dry_run, verbose=0", "use the distutils compiler in a threaded scenario # because it was not", "not os.path.exists(self.build_clib): os.makedirs(self.build_clib) if not os.path.exists(self.build_temp): os.makedirs(self.build_temp) self.libraries = self.distribution.libraries self.check_library_list(self.libraries) if self.include_dirs", "around the # Library which institutes a multi threaded compiling process. we no", "self.define = None self.undef = None self.debug = None self.force = 0 self.compiler", "this isn't really linking -- it just # builds an archive. Whatever.) self.compiler.create_static_lib(", "for lib in self.libraries: lib_names.append(lib.name) return lib_names def get_source_files(self): self.check_library_list(self.libraries) filenames = []", "each and every directory it makes. This congests the # output unnecessarily. def", "a static library. # (On Unix at least, this isn't really linking --", "together into a static library. # (On Unix at least, this isn't really", "libraries to\"), ('build-temp=', 't', \"directory to put temporary build by-products\"), ('debug', 'g', \"compile", "designed to be thread safe and things get all kinds of funky. from", "for (name, value) in self.define: self.compiler.define_macro(name, value) if self.undef is not None: for", "method is overridden # it is what gets used instread of the internal", "raise distutils.errors.DistutilsSetupError( \"contents of 'libraries' needs to be instances of 'Library' not \"", "# I think that C libraries are really just temporary build # by-products,", "Library class build_clib(distutils.core.Command): user_options = [ ('build-clib=', 'b', \"directory to build C/C++ libraries", "in self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def check_library_list(self, libraries): if not isinstance(libraries, (list, tuple)): raise", "things get all kinds of funky. 
from distutils.ccompiler import new_compiler self.compiler = new_compiler(", "distutils.errors.DistutilsSetupError( \"'libraries' options need to be either a list or a tuple.\") for", ".library.library_base import Library class build_clib(distutils.core.Command): user_options = [ ('build-clib=', 'b', \"directory to build", "so we can inject the verbose option. # the compilers version does not", ") if not os.path.exists(self.build_clib): os.makedirs(self.build_clib) if not os.path.exists(self.build_temp): os.makedirs(self.build_temp) self.libraries = self.distribution.libraries self.check_library_list(self.libraries)", "think that C libraries are really just temporary build # by-products, at least", "called. if this method is overridden # it is what gets used instread", "to build-temp as defined by the \"build\" command. This is because # I", "('compiler', 'compiler'), ('debug', 'debug'), ('force', 'force') ) if not os.path.exists(self.build_clib): os.makedirs(self.build_clib) if not", "libraries): for lib in libraries: distutils.log.info(\"building '%s' library\", lib.name) try: lib.build(self) except NotImplementedError:", "import new_compiler self.compiler = new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force ) # replace the compilers", "tuples for (name, value) in self.define: self.compiler.define_macro(name, value) if self.undef is not None:", "= None # List of libraries to build self.libraries = None # Compilation", "point of view of building Python # extensions -- but I want to", "'t', \"directory to put temporary build by-products\"), ('debug', 'g', \"compile with debugging information\"),", "self.build_clib = None self.build_temp = None # List of libraries to build self.libraries", "is what gets used instread of the internal compiler. 
I created a wrapper", "'b', \"directory to build C/C++ libraries to\"), ('build-temp=', 't', \"directory to put temporary", "lib_names = [] for lib in self.libraries: lib_names.append(lib.name) return lib_names def get_source_files(self): self.check_library_list(self.libraries)", "no longer use the built in # compiler with distutils. I am not", "from . import spawn_process from .library.library_base import Library class build_clib(distutils.core.Command): user_options = [", "a dict to provide compiler options I decided to make a class #", "build-temp as defined by the \"build\" command. This is because # I think", "the onces that we have written self.compiler.spawn = self.spawn self.compiler.mkpath = self.mkpath customize_compiler(self.compiler)", "# we override the compilers mkpath so we can inject the verbose option.", "should probably change to putting object # files in a temporary build directory.)", "get all kinds of funky. from distutils.ccompiler import new_compiler self.compiler = new_compiler( compiler=self.compiler,", "\" + str(type(lib)) ) # lib.validate() def get_library_names(self): # Assume the library list", "= None def finalize_options(self): # This might be confusing: both build-clib and build-temp", "# (On Unix at least, this isn't really linking -- it just #", "new_compiler( compiler=self.compiler, dry_run=self.dry_run, force=self.force ) # replace the compilers spawn and mkpath with", "('force', 'force') ) if not os.path.exists(self.build_clib): os.makedirs(self.build_clib) if not os.path.exists(self.build_temp): os.makedirs(self.build_temp) self.libraries =", "self.distribution.include_dirs or [] if isinstance(self.include_dirs, str): self.include_dirs = self.include_dirs.split(os.pathsep) def run(self): if not", "This class is what will hold all of the various build components needed", "the object files together into a static library. # (On Unix at least,", "# replace the compilers spawn and mkpath with the onces that we have", "option. 
# the compilers version does not allow for setting of a verbose", "dry_run=self.dry_run, force=self.force ) # replace the compilers spawn and mkpath with the onces", "of libraries to build self.libraries = None # Compilation options for all libraries", "self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def check_library_list(self, libraries): if not isinstance(libraries, (list, tuple)): raise distutils.errors.DistutilsSetupError( \"'libraries'", "# Library which institutes a multi threaded compiling process. we no longer use", "by-products, at least from the point of view of building Python # extensions", "('debug', 'g', \"compile with debugging information\"), ('force', 'f', \"forcibly build everything (ignore file", "linking -- it just # builds an archive. Whatever.) self.compiler.create_static_lib( objects, lib.name, output_dir=self.build_clib,", "distutils.dir_util.mkpath( name, mode, dry_run=self.compiler.dry_run, verbose=0 ) def initialize_options(self): self.build_clib = None self.build_temp =", "# This might be confusing: both build-clib and build-temp default # to build-temp", "open. self.set_undefined_options( 'build', ('build_temp', 'build_clib'), ('build_temp', 'build_temp'), ('compiler', 'compiler'), ('debug', 'debug'), ('force', 'force')", "if wanted the built in compiler for distutils can be used # Instead", "library list is valid -- 'check_library_list()' is # called from 'finalize_options()', so it", "self.build_temp = None # List of libraries to build self.libraries = None #", "\"link\" the object files together into a static library. # (On Unix at", "the compilers spawn and mkpath with the onces that we have written self.compiler.spawn", "method \"build\" that ghets called. 
if this method is overridden # it is", "used # Instead of using a tuple and a dict to provide compiler", "is what will hold all of the various build components needed # for", "None: # 'define' option is a list of (name,value) tuples for (name, value)", "'f', \"forcibly build everything (ignore file timestamps)\"), ] boolean_options = ['debug', 'force'] help_options", "version does not allow for setting of a verbose level # and distutils.dir_util.mkpath", "it is what gets used instread of the internal compiler. I created a", "command. This is because # I think that C libraries are really just", "distutils compiler in a threaded scenario # because it was not designed to", "[ ('help-compiler', None, \"list available compilers\", distutils.command.build_clib.show_compilers), ] def spawn(self, *args, **kwargs): spawn_process.spawn(*args,", "0 self.compiler = None def finalize_options(self): # This might be confusing: both build-clib", "self.compiler.define_macro(name, value) if self.undef is not None: for macro in self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries)", "] boolean_options = ['debug', 'force'] help_options = [ ('help-compiler', None, \"list available compilers\",", "self.debug = None self.force = 0 self.compiler = None def finalize_options(self): # This", "we override the compilers mkpath so we can inject the verbose option. #", "return # we are leaving this here so if wanted the built in", "force=self.force ) # replace the compilers spawn and mkpath with the onces that", "in libraries: distutils.log.info(\"building '%s' library\", lib.name) try: lib.build(self) except NotImplementedError: # First, compile", "libraries are really just temporary build # by-products, at least from the point", "which # which prints out each and every directory it makes. 
This congests", "a tuple and a dict to provide compiler options I decided to make", "and mkpath with the onces that we have written self.compiler.spawn = self.spawn self.compiler.mkpath", "+ str(type(lib)) ) # lib.validate() def get_library_names(self): # Assume the library list is", "1 which # which prints out each and every directory it makes. This", "scenario # because it was not designed to be thread safe and things", "level # and distutils.dir_util.mkpath defaults to a verbose level of 1 which #", "extensions -- but I want to keep my options open. self.set_undefined_options( 'build', ('build_temp',", "compiler. I created a wrapper class around the # Library which institutes a", "raise distutils.errors.DistutilsSetupError( \"'libraries' options need to be either a list or a tuple.\")", "\"contents of 'libraries' needs to be instances of 'Library' not \" + str(type(lib))", "import Library class build_clib(distutils.core.Command): user_options = [ ('build-clib=', 'b', \"directory to build C/C++", "-- it just # builds an archive. Whatever.) self.compiler.create_static_lib( objects, lib.name, output_dir=self.build_clib, debug=self.debug", "name, mode, dry_run=self.compiler.dry_run, verbose=0 ) def initialize_options(self): self.build_clib = None self.build_temp = None", "Library which institutes a multi threaded compiling process. we no longer use the", "written self.compiler.spawn = self.spawn self.compiler.mkpath = self.mkpath customize_compiler(self.compiler) if self.include_dirs is not None:", "of (name,value) tuples for (name, value) in self.define: self.compiler.define_macro(name, value) if self.undef is", "tuple and a dict to provide compiler options I decided to make a", "verbose=0 ) def initialize_options(self): self.build_clib = None self.build_temp = None # List of", "should be! 
if not self.libraries: return None lib_names = [] for lib in", "does not allow for setting of a verbose level # and distutils.dir_util.mkpath defaults", "self.include_dirs is None: self.include_dirs = self.distribution.include_dirs or [] if isinstance(self.include_dirs, str): self.include_dirs =", "hold all of the various build components needed # for a build. Now.", "debugging information\"), ('force', 'f', \"forcibly build everything (ignore file timestamps)\"), ] boolean_options =", "def mkpath(self, name, mode=0o777): distutils.dir_util.mkpath( name, mode, dry_run=self.compiler.dry_run, verbose=0 ) def initialize_options(self): self.build_clib", "but I want to keep my options open. self.set_undefined_options( 'build', ('build_temp', 'build_clib'), ('build_temp',", "None self.force = 0 self.compiler = None def finalize_options(self): # This might be", "\"list available compilers\", distutils.command.build_clib.show_compilers), ] def spawn(self, *args, **kwargs): spawn_process.spawn(*args, **kwargs) # we", "'libraries' needs to be instances of 'Library' not \" + str(type(lib)) ) #", "not \" + str(type(lib)) ) # lib.validate() def get_library_names(self): # Assume the library", "# List of libraries to build self.libraries = None # Compilation options for", "self.libraries: filenames.extend(lib.sources) return filenames def build_libraries(self, libraries): for lib in libraries: distutils.log.info(\"building '%s'", "Now \"link\" the object files together into a static library. # (On Unix", "a verbose level # and distutils.dir_util.mkpath defaults to a verbose level of 1", "compiler for distutils can be used # Instead of using a tuple and", "('build-temp=', 't', \"directory to put temporary build by-products\"), ('debug', 'g', \"compile with debugging", "we no longer use the built in # compiler with distutils. 
I am", "'g', \"compile with debugging information\"), ('force', 'f', \"forcibly build everything (ignore file timestamps)\"),", "import distutils.dir_util import os from . import spawn_process from .library.library_base import Library class", "the built in # compiler with distutils. I am not able to use", "self.libraries = self.distribution.libraries self.check_library_list(self.libraries) if self.include_dirs is None: self.include_dirs = self.distribution.include_dirs or []", "None self.debug = None self.force = 0 self.compiler = None def finalize_options(self): #", "used instread of the internal compiler. I created a wrapper class around the", "by the \"build\" command. This is because # I think that C libraries", "level of 1 which # which prints out each and every directory it", "and a dict to provide compiler options I decided to make a class", "distutils.command.build_clib.show_compilers), ] def spawn(self, *args, **kwargs): spawn_process.spawn(*args, **kwargs) # we override the compilers", "= self.distribution.include_dirs or [] if isinstance(self.include_dirs, str): self.include_dirs = self.include_dirs.split(os.pathsep) def run(self): if", "am not able to use the distutils compiler in a threaded scenario #", "= self.include_dirs.split(os.pathsep) def run(self): if not self.libraries: return # we are leaving this", "distutils import distutils.errors import distutils.core import distutils.command.build_clib import distutils.log from distutils.sysconfig import customize_compiler", "try: lib.build(self) except NotImplementedError: # First, compile the source code to object files", "get_library_names(self): # Assume the library list is valid -- 'check_library_list()' is # called", "both build-clib and build-temp default # to build-temp as defined by the \"build\"", "# to build-temp as defined by the \"build\" command. This is because #" ]
[ "'Extra maxima:': state = 'extra-scores' continue elif line.startswith('Total'): break key, value = proc_line(line)", "open(fname, 'rb') as fobj: config = toml.load(fobj) for field in self.required_fields: if not", "line.startswith('Total'): break key, value = proc_line(line) e_scores[key] = float(value) return o_scores, e_scores def", "field in config: raise ConfigError(f'{fname} should have \"{field}\" field') return config @property def", "== '': continue if state == 'searching': if line == 'Ordinary maxima:': state", "value = proc_line(line) o_scores[key] = float(value) elif state == 'extra-scores': if line.startswith('Total'): break", "be {fname} in current directory') with open(fname, 'rb') as fobj: config = toml.load(fobj)", "= 'gdconfig.toml' required_fields = ('year',) default_log = 'marking_log.md' def __init__(self): self._params = None", "lines = [f'* {k}: {v}' for k, v in o_scores.items()] if e_scores: lines.append('')", "def params(self): if self._params is None: self._params = self._read_config() return self._params def _read_config(self):", "marks_fname(self): return f'marks_{self.year}.csv' @property def nb_template(self): template = self.get('notebooks', {}).get('template') if template is", "if e_scores: lines.append('') lines += [f'* {k}: {v}' for k, v in e_scores.items()]", "self._params = None def __getitem__(self, key): return self.params[key] def __contains__(self, key): return key", "def year(self): return self.params['year'] @property def student_fname(self): return f'students_{self.year}.csv' @property def marks_fname(self): return", "fobj.read() lines = contents.splitlines() state = 'searching' o_scores = OrderedDict() e_scores = OrderedDict()", "print(CONFIG['year']) def get_scores(fileish): if hasattr(fileish, 'read'): contents = fileish.read() else: with open(fileish, 'rt')", "if state == 'searching': if line == 'Ordinary maxima:': state = 'ordinary-scores' elif", "return f'marks_{self.year}.csv' @property def nb_template(self): 
template = self.get('notebooks', {}).get('template') if template is None:", "def score_lines(self): return get_score_lines(*self.scores) CONFIG = Config() def print_year(): print(CONFIG['year']) def get_scores(fileish): if", "state = 'extra-scores' continue elif line.startswith('Total'): break key, value = proc_line(line) o_scores[key] =", "*args, **kwargs): return self.params.get(key, *args, **kwargs) @property def params(self): if self._params is None:", "f'Should be {fname} in current directory') with open(fname, 'rb') as fobj: config =", "float(value) elif state == 'extra-scores': if line.startswith('Total'): break key, value = proc_line(line) e_scores[key]", "element') return [v.strip() for v in line[1:].split(':')] def get_score_lines(o_scores, e_scores): lines = [f'*", "fileish.read() else: with open(fileish, 'rt') as fobj: contents = fobj.read() lines = contents.splitlines()", "does not exist') return fn @property def year(self): return self.params['year'] @property def student_fname(self):", "**kwargs) @property def params(self): if self._params is None: self._params = self._read_config() return self._params", "float(value) return o_scores, e_scores def proc_line(line): if not line.startswith('*'): raise ValueError('Invalid list element')", "not field in config: raise ConfigError(f'{fname} should have \"{field}\" field') return config @property", "for field in self.required_fields: if not field in config: raise ConfigError(f'{fname} should have", "return pd.read_csv(self.student_fname) @property def scores(self): return get_scores(self.marking_log) @property def score_lines(self): return get_score_lines(*self.scores) CONFIG", "= proc_line(line) o_scores[key] = float(value) elif state == 'extra-scores': if line.startswith('Total'): break key,", "= ('year',) default_log = 'marking_log.md' def __init__(self): self._params = None def __getitem__(self, key):", "def __contains__(self, key): return key in self.params def get(self, key, *args, **kwargs): return", 
"fobj: contents = fobj.read() lines = contents.splitlines() state = 'searching' o_scores = OrderedDict()", "line.startswith('*'): raise ValueError('Invalid list element') return [v.strip() for v in line[1:].split(':')] def get_score_lines(o_scores,", "@property def params(self): if self._params is None: self._params = self._read_config() return self._params def", "__getitem__(self, key): return self.params[key] def __contains__(self, key): return key in self.params def get(self,", "= line.strip() if line == '': continue if state == 'searching': if line", "if not exists(fn): raise ConfigError(f'Log {fn} does not exist') return fn @property def", "= proc_line(line) e_scores[key] = float(value) return o_scores, e_scores def proc_line(line): if not line.startswith('*'):", "state = 'ordinary-scores' elif state == 'ordinary-scores': if line == 'Extra maxima:': state", "self.params.get(key, *args, **kwargs) @property def params(self): if self._params is None: self._params = self._read_config()", "[v.strip() for v in line[1:].split(':')] def get_score_lines(o_scores, e_scores): lines = [f'* {k}: {v}'", "= fileish.read() else: with open(fileish, 'rt') as fobj: contents = fobj.read() lines =", "self.params def get(self, key, *args, **kwargs): return self.params.get(key, *args, **kwargs) @property def params(self):", "f'marks_{self.year}.csv' @property def nb_template(self): template = self.get('notebooks', {}).get('template') if template is None: return", "@property def marks_fname(self): return f'marks_{self.year}.csv' @property def nb_template(self): template = self.get('notebooks', {}).get('template') if", "pd.read_csv(self.student_fname) @property def scores(self): return get_scores(self.marking_log) @property def score_lines(self): return get_score_lines(*self.scores) CONFIG =", "'extra-scores': if line.startswith('Total'): break key, value = proc_line(line) e_scores[key] = float(value) return o_scores,", "raise ConfigError(f'{fname} should have \"{field}\" field') 
return config @property def marking_log(self): fn =", "self.config_fname if not exists(fname): raise ConfigError( f'Should be {fname} in current directory') with", "value = proc_line(line) e_scores[key] = float(value) return o_scores, e_scores def proc_line(line): if not", "exists(self.student_fname): raise ConfigError('Run gdo-mkstable here') return pd.read_csv(self.student_fname) @property def scores(self): return get_scores(self.marking_log) @property", "CONFIG = Config() def print_year(): print(CONFIG['year']) def get_scores(fileish): if hasattr(fileish, 'read'): contents =", "pytoml as toml import pandas as pd class ConfigError(RuntimeError): pass class Config: config_fname", "return [v.strip() for v in line[1:].split(':')] def get_score_lines(o_scores, e_scores): lines = [f'* {k}:", "return f'students_{self.year}.csv' @property def marks_fname(self): return f'marks_{self.year}.csv' @property def nb_template(self): template = self.get('notebooks',", "student_fname(self): return f'students_{self.year}.csv' @property def marks_fname(self): return f'marks_{self.year}.csv' @property def nb_template(self): template =", "required_fields = ('year',) default_log = 'marking_log.md' def __init__(self): self._params = None def __getitem__(self,", "'searching' o_scores = OrderedDict() e_scores = OrderedDict() for i, line in enumerate(lines): line", "for k, v in o_scores.items()] if e_scores: lines.append('') lines += [f'* {k}: {v}'", "def get_score_lines(o_scores, e_scores): lines = [f'* {k}: {v}' for k, v in o_scores.items()]", "with open(fname, 'rb') as fobj: config = toml.load(fobj) for field in self.required_fields: if", "raise ConfigError( f'Should be {fname} in current directory') with open(fname, 'rb') as fobj:", "as pd class ConfigError(RuntimeError): pass class Config: config_fname = 'gdconfig.toml' required_fields = ('year',)", "def __getitem__(self, key): return self.params[key] def __contains__(self, key): return key in self.params def", "exists, join as pjoin 
from collections import OrderedDict import pytoml as toml import", "get_score_lines(*self.scores) CONFIG = Config() def print_year(): print(CONFIG['year']) def get_scores(fileish): if hasattr(fileish, 'read'): contents", "@property def student_fname(self): return f'students_{self.year}.csv' @property def marks_fname(self): return f'marks_{self.year}.csv' @property def nb_template(self):", "= self._read_config() return self._params def _read_config(self): fname = self.config_fname if not exists(fname): raise", "return pjoin(*template.split('/')) def get_students(self): if not exists(self.student_fname): raise ConfigError('Run gdo-mkstable here') return pd.read_csv(self.student_fname)", "line in enumerate(lines): line = line.strip() if line == '': continue if state", "line.strip() if line == '': continue if state == 'searching': if line ==", "key, value = proc_line(line) e_scores[key] = float(value) return o_scores, e_scores def proc_line(line): if", "'': continue if state == 'searching': if line == 'Ordinary maxima:': state =", "self.default_log) if not exists(fn): raise ConfigError(f'Log {fn} does not exist') return fn @property", "== 'Extra maxima:': state = 'extra-scores' continue elif line.startswith('Total'): break key, value =", "proc_line(line): if not line.startswith('*'): raise ValueError('Invalid list element') return [v.strip() for v in", "<filename>gradools/mconfig.py \"\"\" Tools for grading \"\"\" from os.path import exists, join as pjoin", "return o_scores, e_scores def proc_line(line): if not line.startswith('*'): raise ValueError('Invalid list element') return", "maxima:': state = 'extra-scores' continue elif line.startswith('Total'): break key, value = proc_line(line) o_scores[key]", "def nb_template(self): template = self.get('notebooks', {}).get('template') if template is None: return None return", "from collections import OrderedDict import pytoml as toml import pandas as pd class", "{v}' for k, v in o_scores.items()] if e_scores: 
lines.append('') lines += [f'* {k}:", "self._params def _read_config(self): fname = self.config_fname if not exists(fname): raise ConfigError( f'Should be", "default_log = 'marking_log.md' def __init__(self): self._params = None def __getitem__(self, key): return self.params[key]", "in enumerate(lines): line = line.strip() if line == '': continue if state ==", "import pandas as pd class ConfigError(RuntimeError): pass class Config: config_fname = 'gdconfig.toml' required_fields", "for i, line in enumerate(lines): line = line.strip() if line == '': continue", "= self.get('log', self.default_log) if not exists(fn): raise ConfigError(f'Log {fn} does not exist') return", "is None: return None return pjoin(*template.split('/')) def get_students(self): if not exists(self.student_fname): raise ConfigError('Run", "fobj: config = toml.load(fobj) for field in self.required_fields: if not field in config:", "key): return self.params[key] def __contains__(self, key): return key in self.params def get(self, key,", "'extra-scores' continue elif line.startswith('Total'): break key, value = proc_line(line) o_scores[key] = float(value) elif", "config_fname = 'gdconfig.toml' required_fields = ('year',) default_log = 'marking_log.md' def __init__(self): self._params =", "return fn @property def year(self): return self.params['year'] @property def student_fname(self): return f'students_{self.year}.csv' @property", "in line[1:].split(':')] def get_score_lines(o_scores, e_scores): lines = [f'* {k}: {v}' for k, v", "def scores(self): return get_scores(self.marking_log) @property def score_lines(self): return get_score_lines(*self.scores) CONFIG = Config() def", "gdo-mkstable here') return pd.read_csv(self.student_fname) @property def scores(self): return get_scores(self.marking_log) @property def score_lines(self): return", "as toml import pandas as pd class ConfigError(RuntimeError): pass class Config: config_fname =", "ConfigError('Run gdo-mkstable here') return 
pd.read_csv(self.student_fname) @property def scores(self): return get_scores(self.marking_log) @property def score_lines(self):", "e_scores = OrderedDict() for i, line in enumerate(lines): line = line.strip() if line", "@property def score_lines(self): return get_score_lines(*self.scores) CONFIG = Config() def print_year(): print(CONFIG['year']) def get_scores(fileish):", "OrderedDict import pytoml as toml import pandas as pd class ConfigError(RuntimeError): pass class", "break key, value = proc_line(line) e_scores[key] = float(value) return o_scores, e_scores def proc_line(line):", "return self.params[key] def __contains__(self, key): return key in self.params def get(self, key, *args,", "in current directory') with open(fname, 'rb') as fobj: config = toml.load(fobj) for field", "e_scores[key] = float(value) return o_scores, e_scores def proc_line(line): if not line.startswith('*'): raise ValueError('Invalid", "in self.params def get(self, key, *args, **kwargs): return self.params.get(key, *args, **kwargs) @property def", "if not line.startswith('*'): raise ValueError('Invalid list element') return [v.strip() for v in line[1:].split(':')]", "if line == '': continue if state == 'searching': if line == 'Ordinary", "= self.config_fname if not exists(fname): raise ConfigError( f'Should be {fname} in current directory')", "get_students(self): if not exists(self.student_fname): raise ConfigError('Run gdo-mkstable here') return pd.read_csv(self.student_fname) @property def scores(self):", "{}).get('template') if template is None: return None return pjoin(*template.split('/')) def get_students(self): if not", "contents = fileish.read() else: with open(fileish, 'rt') as fobj: contents = fobj.read() lines", "[f'* {k}: {v}' for k, v in o_scores.items()] if e_scores: lines.append('') lines +=", "marking_log(self): fn = self.get('log', self.default_log) if not exists(fn): raise ConfigError(f'Log {fn} does not", "v in line[1:].split(':')] def get_score_lines(o_scores, 
e_scores): lines = [f'* {k}: {v}' for k,", "'rt') as fobj: contents = fobj.read() lines = contents.splitlines() state = 'searching' o_scores", "Config: config_fname = 'gdconfig.toml' required_fields = ('year',) default_log = 'marking_log.md' def __init__(self): self._params", "contents.splitlines() state = 'searching' o_scores = OrderedDict() e_scores = OrderedDict() for i, line", "OrderedDict() e_scores = OrderedDict() for i, line in enumerate(lines): line = line.strip() if", "key, value = proc_line(line) o_scores[key] = float(value) elif state == 'extra-scores': if line.startswith('Total'):", "os.path import exists, join as pjoin from collections import OrderedDict import pytoml as", "if template is None: return None return pjoin(*template.split('/')) def get_students(self): if not exists(self.student_fname):", "return self._params def _read_config(self): fname = self.config_fname if not exists(fname): raise ConfigError( f'Should", "if line.startswith('Total'): break key, value = proc_line(line) e_scores[key] = float(value) return o_scores, e_scores", "{fn} does not exist') return fn @property def year(self): return self.params['year'] @property def", "def marking_log(self): fn = self.get('log', self.default_log) if not exists(fn): raise ConfigError(f'Log {fn} does", "open(fileish, 'rt') as fobj: contents = fobj.read() lines = contents.splitlines() state = 'searching'", "if line == 'Extra maxima:': state = 'extra-scores' continue elif line.startswith('Total'): break key,", "field') return config @property def marking_log(self): fn = self.get('log', self.default_log) if not exists(fn):", "'Ordinary maxima:': state = 'ordinary-scores' elif state == 'ordinary-scores': if line == 'Extra", "fn = self.get('log', self.default_log) if not exists(fn): raise ConfigError(f'Log {fn} does not exist')", "elif line.startswith('Total'): break key, value = proc_line(line) o_scores[key] = float(value) elif state ==", "with open(fileish, 'rt') as fobj: contents = fobj.read() lines 
= contents.splitlines() state =", "e_scores): lines = [f'* {k}: {v}' for k, v in o_scores.items()] if e_scores:", "not exists(fname): raise ConfigError( f'Should be {fname} in current directory') with open(fname, 'rb')", "@property def marking_log(self): fn = self.get('log', self.default_log) if not exists(fn): raise ConfigError(f'Log {fn}", "== 'Ordinary maxima:': state = 'ordinary-scores' elif state == 'ordinary-scores': if line ==", "'rb') as fobj: config = toml.load(fobj) for field in self.required_fields: if not field", "None: self._params = self._read_config() return self._params def _read_config(self): fname = self.config_fname if not", "= toml.load(fobj) for field in self.required_fields: if not field in config: raise ConfigError(f'{fname}", "= 'extra-scores' continue elif line.startswith('Total'): break key, value = proc_line(line) o_scores[key] = float(value)", "from os.path import exists, join as pjoin from collections import OrderedDict import pytoml", "lines = contents.splitlines() state = 'searching' o_scores = OrderedDict() e_scores = OrderedDict() for", "k, v in o_scores.items()] if e_scores: lines.append('') lines += [f'* {k}: {v}' for", "e_scores: lines.append('') lines += [f'* {k}: {v}' for k, v in e_scores.items()] return", "self.params[key] def __contains__(self, key): return key in self.params def get(self, key, *args, **kwargs):", "if not exists(fname): raise ConfigError( f'Should be {fname} in current directory') with open(fname,", "list element') return [v.strip() for v in line[1:].split(':')] def get_score_lines(o_scores, e_scores): lines =", "ConfigError( f'Should be {fname} in current directory') with open(fname, 'rb') as fobj: config", "return None return pjoin(*template.split('/')) def get_students(self): if not exists(self.student_fname): raise ConfigError('Run gdo-mkstable here')", "def student_fname(self): return f'students_{self.year}.csv' @property def marks_fname(self): return f'marks_{self.year}.csv' @property def 
nb_template(self): template", "('year',) default_log = 'marking_log.md' def __init__(self): self._params = None def __getitem__(self, key): return", "self.required_fields: if not field in config: raise ConfigError(f'{fname} should have \"{field}\" field') return", "raise ValueError('Invalid list element') return [v.strip() for v in line[1:].split(':')] def get_score_lines(o_scores, e_scores):", "@property def nb_template(self): template = self.get('notebooks', {}).get('template') if template is None: return None", "pd class ConfigError(RuntimeError): pass class Config: config_fname = 'gdconfig.toml' required_fields = ('year',) default_log", "year(self): return self.params['year'] @property def student_fname(self): return f'students_{self.year}.csv' @property def marks_fname(self): return f'marks_{self.year}.csv'", "break key, value = proc_line(line) o_scores[key] = float(value) elif state == 'extra-scores': if", "toml import pandas as pd class ConfigError(RuntimeError): pass class Config: config_fname = 'gdconfig.toml'", "not exists(fn): raise ConfigError(f'Log {fn} does not exist') return fn @property def year(self):", "ValueError('Invalid list element') return [v.strip() for v in line[1:].split(':')] def get_score_lines(o_scores, e_scores): lines", "= float(value) elif state == 'extra-scores': if line.startswith('Total'): break key, value = proc_line(line)", "**kwargs): return self.params.get(key, *args, **kwargs) @property def params(self): if self._params is None: self._params", "self._params = self._read_config() return self._params def _read_config(self): fname = self.config_fname if not exists(fname):", "pass class Config: config_fname = 'gdconfig.toml' required_fields = ('year',) default_log = 'marking_log.md' def", "= 'searching' o_scores = OrderedDict() e_scores = OrderedDict() for i, line in enumerate(lines):", "elif state == 'ordinary-scores': if line == 'Extra maxima:': state = 'extra-scores' continue", "have \"{field}\" field') return config @property 
def marking_log(self): fn = self.get('log', self.default_log) if", "key in self.params def get(self, key, *args, **kwargs): return self.params.get(key, *args, **kwargs) @property", "continue if state == 'searching': if line == 'Ordinary maxima:': state = 'ordinary-scores'", "if hasattr(fileish, 'read'): contents = fileish.read() else: with open(fileish, 'rt') as fobj: contents", "as fobj: config = toml.load(fobj) for field in self.required_fields: if not field in", "= OrderedDict() for i, line in enumerate(lines): line = line.strip() if line ==", "def marks_fname(self): return f'marks_{self.year}.csv' @property def nb_template(self): template = self.get('notebooks', {}).get('template') if template", "= fobj.read() lines = contents.splitlines() state = 'searching' o_scores = OrderedDict() e_scores =", "line == 'Ordinary maxima:': state = 'ordinary-scores' elif state == 'ordinary-scores': if line", "get_scores(self.marking_log) @property def score_lines(self): return get_score_lines(*self.scores) CONFIG = Config() def print_year(): print(CONFIG['year']) def", "class ConfigError(RuntimeError): pass class Config: config_fname = 'gdconfig.toml' required_fields = ('year',) default_log =", "in self.required_fields: if not field in config: raise ConfigError(f'{fname} should have \"{field}\" field')", "@property def year(self): return self.params['year'] @property def student_fname(self): return f'students_{self.year}.csv' @property def marks_fname(self):", "grading \"\"\" from os.path import exists, join as pjoin from collections import OrderedDict", "ConfigError(f'{fname} should have \"{field}\" field') return config @property def marking_log(self): fn = self.get('log',", "elif state == 'extra-scores': if line.startswith('Total'): break key, value = proc_line(line) e_scores[key] =", "o_scores, e_scores def proc_line(line): if not line.startswith('*'): raise ValueError('Invalid list element') return [v.strip()", "== 'extra-scores': if line.startswith('Total'): break key, 
value = proc_line(line) e_scores[key] = float(value) return", "= 'ordinary-scores' elif state == 'ordinary-scores': if line == 'Extra maxima:': state =", "state == 'extra-scores': if line.startswith('Total'): break key, value = proc_line(line) e_scores[key] = float(value)", "field in self.required_fields: if not field in config: raise ConfigError(f'{fname} should have \"{field}\"", "f'students_{self.year}.csv' @property def marks_fname(self): return f'marks_{self.year}.csv' @property def nb_template(self): template = self.get('notebooks', {}).get('template')", "\"{field}\" field') return config @property def marking_log(self): fn = self.get('log', self.default_log) if not", "exists(fn): raise ConfigError(f'Log {fn} does not exist') return fn @property def year(self): return", "scores(self): return get_scores(self.marking_log) @property def score_lines(self): return get_score_lines(*self.scores) CONFIG = Config() def print_year():", "line == '': continue if state == 'searching': if line == 'Ordinary maxima:':", "'marking_log.md' def __init__(self): self._params = None def __getitem__(self, key): return self.params[key] def __contains__(self,", "not exists(self.student_fname): raise ConfigError('Run gdo-mkstable here') return pd.read_csv(self.student_fname) @property def scores(self): return get_scores(self.marking_log)", "pjoin from collections import OrderedDict import pytoml as toml import pandas as pd", "v in o_scores.items()] if e_scores: lines.append('') lines += [f'* {k}: {v}' for k,", "_read_config(self): fname = self.config_fname if not exists(fname): raise ConfigError( f'Should be {fname} in", "def get_scores(fileish): if hasattr(fileish, 'read'): contents = fileish.read() else: with open(fileish, 'rt') as", "\"\"\" from os.path import exists, join as pjoin from collections import OrderedDict import", "fname = self.config_fname if not exists(fname): raise ConfigError( f'Should be {fname} in current", "self._params is None: self._params = self._read_config() 
return self._params def _read_config(self): fname = self.config_fname", "def get(self, key, *args, **kwargs): return self.params.get(key, *args, **kwargs) @property def params(self): if", "import exists, join as pjoin from collections import OrderedDict import pytoml as toml", "get_scores(fileish): if hasattr(fileish, 'read'): contents = fileish.read() else: with open(fileish, 'rt') as fobj:", "o_scores[key] = float(value) elif state == 'extra-scores': if line.startswith('Total'): break key, value =", "in config: raise ConfigError(f'{fname} should have \"{field}\" field') return config @property def marking_log(self):", "'read'): contents = fileish.read() else: with open(fileish, 'rt') as fobj: contents = fobj.read()", "__init__(self): self._params = None def __getitem__(self, key): return self.params[key] def __contains__(self, key): return", "else: with open(fileish, 'rt') as fobj: contents = fobj.read() lines = contents.splitlines() state", "return key in self.params def get(self, key, *args, **kwargs): return self.params.get(key, *args, **kwargs)", "= None def __getitem__(self, key): return self.params[key] def __contains__(self, key): return key in", "OrderedDict() for i, line in enumerate(lines): line = line.strip() if line == '':", "def proc_line(line): if not line.startswith('*'): raise ValueError('Invalid list element') return [v.strip() for v", "is None: self._params = self._read_config() return self._params def _read_config(self): fname = self.config_fname if", "proc_line(line) e_scores[key] = float(value) return o_scores, e_scores def proc_line(line): if not line.startswith('*'): raise", "state = 'searching' o_scores = OrderedDict() e_scores = OrderedDict() for i, line in", "'ordinary-scores': if line == 'Extra maxima:': state = 'extra-scores' continue elif line.startswith('Total'): break", "if not field in config: raise ConfigError(f'{fname} should have \"{field}\" field') return config", "enumerate(lines): line = line.strip() if line == '': continue 
if state == 'searching':", "exist') return fn @property def year(self): return self.params['year'] @property def student_fname(self): return f'students_{self.year}.csv'", "template = self.get('notebooks', {}).get('template') if template is None: return None return pjoin(*template.split('/')) def", "= OrderedDict() e_scores = OrderedDict() for i, line in enumerate(lines): line = line.strip()", "if self._params is None: self._params = self._read_config() return self._params def _read_config(self): fname =", "raise ConfigError('Run gdo-mkstable here') return pd.read_csv(self.student_fname) @property def scores(self): return get_scores(self.marking_log) @property def", "here') return pd.read_csv(self.student_fname) @property def scores(self): return get_scores(self.marking_log) @property def score_lines(self): return get_score_lines(*self.scores)", "state == 'searching': if line == 'Ordinary maxima:': state = 'ordinary-scores' elif state", "contents = fobj.read() lines = contents.splitlines() state = 'searching' o_scores = OrderedDict() e_scores", "hasattr(fileish, 'read'): contents = fileish.read() else: with open(fileish, 'rt') as fobj: contents =", "in o_scores.items()] if e_scores: lines.append('') lines += [f'* {k}: {v}' for k, v", "print_year(): print(CONFIG['year']) def get_scores(fileish): if hasattr(fileish, 'read'): contents = fileish.read() else: with open(fileish,", "config @property def marking_log(self): fn = self.get('log', self.default_log) if not exists(fn): raise ConfigError(f'Log", "config: raise ConfigError(f'{fname} should have \"{field}\" field') return config @property def marking_log(self): fn", "None return pjoin(*template.split('/')) def get_students(self): if not exists(self.student_fname): raise ConfigError('Run gdo-mkstable here') return", "self.get('notebooks', {}).get('template') if template is None: return None return pjoin(*template.split('/')) def get_students(self): if", "if line == 'Ordinary maxima:': state = 'ordinary-scores' elif 
state == 'ordinary-scores': if", "return self.params['year'] @property def student_fname(self): return f'students_{self.year}.csv' @property def marks_fname(self): return f'marks_{self.year}.csv' @property", "= contents.splitlines() state = 'searching' o_scores = OrderedDict() e_scores = OrderedDict() for i,", "get(self, key, *args, **kwargs): return self.params.get(key, *args, **kwargs) @property def params(self): if self._params", "ConfigError(RuntimeError): pass class Config: config_fname = 'gdconfig.toml' required_fields = ('year',) default_log = 'marking_log.md'", "def _read_config(self): fname = self.config_fname if not exists(fname): raise ConfigError( f'Should be {fname}", "self.get('log', self.default_log) if not exists(fn): raise ConfigError(f'Log {fn} does not exist') return fn", "should have \"{field}\" field') return config @property def marking_log(self): fn = self.get('log', self.default_log)", "None def __getitem__(self, key): return self.params[key] def __contains__(self, key): return key in self.params", "e_scores def proc_line(line): if not line.startswith('*'): raise ValueError('Invalid list element') return [v.strip() for", "get_score_lines(o_scores, e_scores): lines = [f'* {k}: {v}' for k, v in o_scores.items()] if", "proc_line(line) o_scores[key] = float(value) elif state == 'extra-scores': if line.startswith('Total'): break key, value", "self._read_config() return self._params def _read_config(self): fname = self.config_fname if not exists(fname): raise ConfigError(", "{fname} in current directory') with open(fname, 'rb') as fobj: config = toml.load(fobj) for", "pjoin(*template.split('/')) def get_students(self): if not exists(self.student_fname): raise ConfigError('Run gdo-mkstable here') return pd.read_csv(self.student_fname) @property", "return get_score_lines(*self.scores) CONFIG = Config() def print_year(): print(CONFIG['year']) def get_scores(fileish): if hasattr(fileish, 'read'):", "== 'searching': if line == 'Ordinary maxima:': state 
= 'ordinary-scores' elif state ==", "state == 'ordinary-scores': if line == 'Extra maxima:': state = 'extra-scores' continue elif", "template is None: return None return pjoin(*template.split('/')) def get_students(self): if not exists(self.student_fname): raise", "params(self): if self._params is None: self._params = self._read_config() return self._params def _read_config(self): fname", "*args, **kwargs) @property def params(self): if self._params is None: self._params = self._read_config() return", "i, line in enumerate(lines): line = line.strip() if line == '': continue if", "pandas as pd class ConfigError(RuntimeError): pass class Config: config_fname = 'gdconfig.toml' required_fields =", "for grading \"\"\" from os.path import exists, join as pjoin from collections import", "def get_students(self): if not exists(self.student_fname): raise ConfigError('Run gdo-mkstable here') return pd.read_csv(self.student_fname) @property def", "for v in line[1:].split(':')] def get_score_lines(o_scores, e_scores): lines = [f'* {k}: {v}' for", "import OrderedDict import pytoml as toml import pandas as pd class ConfigError(RuntimeError): pass", "= self.get('notebooks', {}).get('template') if template is None: return None return pjoin(*template.split('/')) def get_students(self):", "if not exists(self.student_fname): raise ConfigError('Run gdo-mkstable here') return pd.read_csv(self.student_fname) @property def scores(self): return", "return self.params.get(key, *args, **kwargs) @property def params(self): if self._params is None: self._params =", "lines.append('') lines += [f'* {k}: {v}' for k, v in e_scores.items()] return '\\n'.join(lines)", "= 'marking_log.md' def __init__(self): self._params = None def __getitem__(self, key): return self.params[key] def", "line[1:].split(':')] def get_score_lines(o_scores, e_scores): lines = [f'* {k}: {v}' for k, v in", "None: return None return pjoin(*template.split('/')) def get_students(self): if not exists(self.student_fname): raise 
ConfigError('Run gdo-mkstable", "def print_year(): print(CONFIG['year']) def get_scores(fileish): if hasattr(fileish, 'read'): contents = fileish.read() else: with", "as pjoin from collections import OrderedDict import pytoml as toml import pandas as", "'searching': if line == 'Ordinary maxima:': state = 'ordinary-scores' elif state == 'ordinary-scores':", "Config() def print_year(): print(CONFIG['year']) def get_scores(fileish): if hasattr(fileish, 'read'): contents = fileish.read() else:", "o_scores.items()] if e_scores: lines.append('') lines += [f'* {k}: {v}' for k, v in", "current directory') with open(fname, 'rb') as fobj: config = toml.load(fobj) for field in", "toml.load(fobj) for field in self.required_fields: if not field in config: raise ConfigError(f'{fname} should", "not exist') return fn @property def year(self): return self.params['year'] @property def student_fname(self): return", "\"\"\" Tools for grading \"\"\" from os.path import exists, join as pjoin from", "import pytoml as toml import pandas as pd class ConfigError(RuntimeError): pass class Config:", "join as pjoin from collections import OrderedDict import pytoml as toml import pandas", "Tools for grading \"\"\" from os.path import exists, join as pjoin from collections", "as fobj: contents = fobj.read() lines = contents.splitlines() state = 'searching' o_scores =", "__contains__(self, key): return key in self.params def get(self, key, *args, **kwargs): return self.params.get(key,", "fn @property def year(self): return self.params['year'] @property def student_fname(self): return f'students_{self.year}.csv' @property def", "{k}: {v}' for k, v in o_scores.items()] if e_scores: lines.append('') lines += [f'*", "== 'ordinary-scores': if line == 'Extra maxima:': state = 'extra-scores' continue elif line.startswith('Total'):", "return get_scores(self.marking_log) @property def score_lines(self): return get_score_lines(*self.scores) CONFIG = Config() def print_year(): print(CONFIG['year'])", 
"continue elif line.startswith('Total'): break key, value = proc_line(line) o_scores[key] = float(value) elif state", "@property def scores(self): return get_scores(self.marking_log) @property def score_lines(self): return get_score_lines(*self.scores) CONFIG = Config()", "'gdconfig.toml' required_fields = ('year',) default_log = 'marking_log.md' def __init__(self): self._params = None def", "config = toml.load(fobj) for field in self.required_fields: if not field in config: raise", "'ordinary-scores' elif state == 'ordinary-scores': if line == 'Extra maxima:': state = 'extra-scores'", "= Config() def print_year(): print(CONFIG['year']) def get_scores(fileish): if hasattr(fileish, 'read'): contents = fileish.read()", "raise ConfigError(f'Log {fn} does not exist') return fn @property def year(self): return self.params['year']", "class Config: config_fname = 'gdconfig.toml' required_fields = ('year',) default_log = 'marking_log.md' def __init__(self):", "return config @property def marking_log(self): fn = self.get('log', self.default_log) if not exists(fn): raise", "nb_template(self): template = self.get('notebooks', {}).get('template') if template is None: return None return pjoin(*template.split('/'))", "maxima:': state = 'ordinary-scores' elif state == 'ordinary-scores': if line == 'Extra maxima:':", "score_lines(self): return get_score_lines(*self.scores) CONFIG = Config() def print_year(): print(CONFIG['year']) def get_scores(fileish): if hasattr(fileish,", "key): return key in self.params def get(self, key, *args, **kwargs): return self.params.get(key, *args,", "key, *args, **kwargs): return self.params.get(key, *args, **kwargs) @property def params(self): if self._params is", "ConfigError(f'Log {fn} does not exist') return fn @property def year(self): return self.params['year'] @property", "exists(fname): raise ConfigError( f'Should be {fname} in current directory') with open(fname, 'rb') as", "def __init__(self): self._params = None def __getitem__(self, 
key): return self.params[key] def __contains__(self, key):", "= [f'* {k}: {v}' for k, v in o_scores.items()] if e_scores: lines.append('') lines", "line == 'Extra maxima:': state = 'extra-scores' continue elif line.startswith('Total'): break key, value", "self.params['year'] @property def student_fname(self): return f'students_{self.year}.csv' @property def marks_fname(self): return f'marks_{self.year}.csv' @property def", "+= [f'* {k}: {v}' for k, v in e_scores.items()] return '\\n'.join(lines) + '\\n'", "not line.startswith('*'): raise ValueError('Invalid list element') return [v.strip() for v in line[1:].split(':')] def", "= float(value) return o_scores, e_scores def proc_line(line): if not line.startswith('*'): raise ValueError('Invalid list", "directory') with open(fname, 'rb') as fobj: config = toml.load(fobj) for field in self.required_fields:", "line.startswith('Total'): break key, value = proc_line(line) o_scores[key] = float(value) elif state == 'extra-scores':", "line = line.strip() if line == '': continue if state == 'searching': if", "lines += [f'* {k}: {v}' for k, v in e_scores.items()] return '\\n'.join(lines) +", "collections import OrderedDict import pytoml as toml import pandas as pd class ConfigError(RuntimeError):", "o_scores = OrderedDict() e_scores = OrderedDict() for i, line in enumerate(lines): line =" ]
[ "importance_type: str = \"gain\", define_metric: bool = True, ): if wandb.run is None:", "& Biases Artifacts log_feature_importance: (boolean) if True log a feature importance bar plot", "after_training(self, model): \"\"\"Run after training is finished.\"\"\" # Log the booster model as", "`wandb.summary` when `define_metric=True` (default). Example: ```python bst_params = dict( objective ='reg:squarederror', colsample_bytree =", "automatically integrates XGBoost with wandb. Arguments: log_model: (boolean) if True save and upload", "log the booster model configuration to Weights & Biases - log evaluation metrics", "= 5, alpha = 10, n_estimators = 10, tree_method = 'hist' ) xg_reg", "columns=[\"Feature\", \"Importance\"]) wandb.log( { \"Feature Importance\": wandb.plot.bar( table, \"Feature\", \"Importance\", title=\"Feature Importance\" )", "importance plot when `log_feature_importance=True` (default). - Capture the best eval metric in `wandb.summary`", "import xgboost as xgb from typing import cast from pathlib import Path MINIMIZE_METRICS", "eval metric in `wandb.summary` when `define_metric=True` (default). 
Example: ```python bst_params = dict( objective", "metric in evals_log.items(): for metric_name, log in metric.items(): if self.define_metric: self._define_metric(data, metric_name) wandb.log({f\"{data}-{metric_name}\":", "if wandb.run is None: raise wandb.Error(\"You must call wandb.init() before WandbCallback()\") self.log_model =", "return model def after_training(self, model): \"\"\"Run after training is finished.\"\"\" # Log the", ") def _define_metric(self, data, metric_name): if \"loss\" in str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name)", "self._log_feature_importance(model) # Log the best score and best iteration if model.attr(\"best_score\") is not", "y_train, eval_set=[(X_test, y_test)], callbacks=[WandbCallback()]) ) ``` \"\"\" def __init__( self, log_model: bool =", "# Log the booster model as artifacts if self.log_model: self._log_model_as_artifact(model) # Plot feature", "if self.log_feature_importance: self._log_feature_importance(model) # Log the best score and best iteration if model.attr(\"best_score\")", "\"Importance\"]) wandb.log( { \"Feature Importance\": wandb.plot.bar( table, \"Feature\", \"Importance\", title=\"Feature Importance\" ) }", "for tree model. weight for linear model. define_metric: (boolean) if True (default) capture", "score and best iteration if model.attr(\"best_score\") is not None: wandb.log( { \"best_score\": float(cast(str,", "of WandbCallback. Please try the new logger for more features. \"\"\" warnings.warn( \"wandb_callback", "(default). 
Example: ```python bst_params = dict( objective ='reg:squarederror', colsample_bytree = 0.3, learning_rate =", "save and upload the model to Weights & Biases Artifacts log_feature_importance: (boolean) if", "\"\"\" xgboost init \"\"\" import os import json import wandb import warnings import", "metric collected by XGBoost (if you provide training data to eval_set) - log", "wandb.init() before WandbCallback()\") self.log_model = log_model self.log_feature_importance = log_feature_importance self.importance_type = importance_type self.define_metric", "= True, ): if wandb.run is None: raise wandb.Error(\"You must call wandb.init() before", "upload the model to Weights & Biases Artifacts log_feature_importance: (boolean) if True log", "log_model self.log_feature_importance = log_feature_importance self.importance_type = importance_type self.define_metric = define_metric def before_training(self, model):", "log in metric.items(): if self.define_metric: self._define_metric(data, metric_name) wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) else: wandb.log({f\"{data}-{metric_name}\": log[-1]},", "in env.evaluation_result_list: wandb.log({k: v}, commit=False) wandb.log({}) return callback class WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback` automatically integrates", "for linear model. define_metric: (boolean) if True (default) capture model performance at the", "Passing `WandbCallback` to XGBoost will: - log the booster model configuration to Weights", "\"\"\" warnings.warn( \"wandb_callback will be deprecated in favor of WandbCallback. Please use WandbCallback", "\"best_score\": float(cast(str, model.attr(\"best_score\"))), \"best_iteration\": int(cast(str, model.attr(\"best_iteration\"))), } ) return model def after_iteration(self, model,", "in `wandb.summary` when `define_metric=True` (default). 
Example: ```python bst_params = dict( objective ='reg:squarederror', colsample_bytree", ") xg_reg = xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train, y_train, eval_set=[(X_test, y_test)], callbacks=[WandbCallback()]) ) ``` \"\"\" def", "deprecated in favor of WandbCallback. Please use WandbCallback for more features.\", UserWarning, stacklevel=2,", "model): fi = model.get_score(importance_type=self.importance_type) fi_data = [[k, fi[k]] for k in fi] table", "model. define_metric: (boolean) if True (default) capture model performance at the best step,", "return model def after_iteration(self, model, epoch, evals_log): \"\"\"Run after each iteration. Return True", "log evaluation metrics collected by XGBoost, such as rmse, accuracy etc to Weights", "booster model configuration to Weights & Biases - log evaluation metrics collected by", "training metric collected by XGBoost (if you provide training data to eval_set) -", "commit=False) else: wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) wandb.log({\"epoch\": epoch}) self.define_metric = False return False def", "fi[k]] for k in fi] table = wandb.Table(data=fi_data, columns=[\"Feature\", \"Importance\"]) wandb.log( { \"Feature", "step, of training in your `wandb.summary`. Passing `WandbCallback` to XGBoost will: - log", "MAXIMIZE_METRICS = [\"auc\", \"aucpr\", \"ndcg\", \"map\", \"ndcg@n\", \"map@n\"] def wandb_callback(): \"\"\" Old style", "def after_training(self, model): \"\"\"Run after training is finished.\"\"\" # Log the booster model", "collected by XGBoost (if you provide training data to eval_set) - log the", "booster model as artifacts if self.log_model: self._log_model_as_artifact(model) # Plot feature importance if self.log_feature_importance:", "after_iteration(self, model, epoch, evals_log): \"\"\"Run after each iteration. Return True when training should", "\"wandb_callback will be deprecated in favor of WandbCallback. 
Please use WandbCallback for more", "if True (default) capture model performance at the best step, instead of the", "plot importance_type: (str) one of {weight, gain, cover, total_gain, total_cover} for tree model.", "config = model.save_config() wandb.config.update(json.loads(config)) return model def after_training(self, model): \"\"\"Run after training is", "model to to Weights & Biases Artifacts (when `log_model = True`) - log", "callback that will be deprecated in favor of WandbCallback. Please try the new", "model): \"\"\"Run before training is finished\"\"\" # Update W&B config config = model.save_config()", "by XGBoost, such as rmse, accuracy etc to Weights & Biases - log", "xgboost as xgb from typing import cast from pathlib import Path MINIMIZE_METRICS =", "log the best score and the best iteration - save and upload your", "__init__( self, log_model: bool = False, log_feature_importance: bool = True, importance_type: str =", "\"aucpr\", \"ndcg\", \"map\", \"ndcg@n\", \"map@n\"] def wandb_callback(): \"\"\" Old style callback that will", "log_model: (boolean) if True save and upload the model to Weights & Biases", "True (default) capture model performance at the best step, instead of the last", "True`) - log feature importance plot when `log_feature_importance=True` (default). - Capture the best", "instead of the last step, of training in your `wandb.summary`. Passing `WandbCallback` to", "metric.items(): if self.define_metric: self._define_metric(data, metric_name) wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) else: wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) wandb.log({\"epoch\":", "logger for more features. 
\"\"\" warnings.warn( \"wandb_callback will be deprecated in favor of", "evaluation metrics collected by XGBoost, such as rmse, accuracy etc to Weights &", "to Weights & Biases Artifacts (when `log_model = True`) - log feature importance", "Biases - log evaluation metrics collected by XGBoost, such as rmse, accuracy etc", "the best step, instead of the last step, of training in your `wandb.summary`.", "self.define_metric: self._define_metric(data, metric_name) wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) else: wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) wandb.log({\"epoch\": epoch}) self.define_metric", "metric in `wandb.summary` when `define_metric=True` (default). Example: ```python bst_params = dict( objective ='reg:squarederror',", "as xgb from typing import cast from pathlib import Path MINIMIZE_METRICS = [", "configuration to Weights & Biases - log evaluation metrics collected by XGBoost, such", "XGBoost will: - log the booster model configuration to Weights & Biases -", "before training is finished\"\"\" # Update W&B config config = model.save_config() wandb.config.update(json.loads(config)) return", "favor of WandbCallback. Please try the new logger for more features. \"\"\" warnings.warn(", "the last step, of training in your `wandb.summary`. Passing `WandbCallback` to XGBoost will:", "MINIMIZE_METRICS = [ \"rmse\", \"rmsle\", \"mae\", \"mape\", \"mphe\", \"logloss\", \"error\", \"error@t\", \"merror\", ]", "{weight, gain, cover, total_gain, total_cover} for tree model. weight for linear model. define_metric:", "import wandb import warnings import xgboost as xgb from typing import cast from", "and upload the model to Weights & Biases Artifacts log_feature_importance: (boolean) if True", "training in your `wandb.summary`. 
Passing `WandbCallback` to XGBoost will: - log the booster", "provide training data to eval_set) - log the best score and the best", "_log_model_as_artifact(self, model): model_name = f\"{wandb.run.id}_model.json\" model_path = Path(wandb.run.dir) / model_name model.save_model(str(model_path)) model_artifact =", "eval_set) - log the best score and the best iteration - save and", "\"merror\", ] MAXIMIZE_METRICS = [\"auc\", \"aucpr\", \"ndcg\", \"map\", \"ndcg@n\", \"map@n\"] def wandb_callback(): \"\"\"", "import json import wandb import warnings import xgboost as xgb from typing import", "`log_feature_importance=True` (default). - Capture the best eval metric in `wandb.summary` when `define_metric=True` (default).", "new logger for more features. \"\"\" warnings.warn( \"wandb_callback will be deprecated in favor", "_define_metric(self, data, metric_name): if \"loss\" in str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MINIMIZE_METRICS:", "[ \"rmse\", \"rmsle\", \"mae\", \"mape\", \"mphe\", \"logloss\", \"error\", \"error@t\", \"merror\", ] MAXIMIZE_METRICS =", "(str) one of {weight, gain, cover, total_gain, total_cover} for tree model. weight for", "Old style callback that will be deprecated in favor of WandbCallback. 
Please try", "wandb.log( { \"best_score\": float(cast(str, model.attr(\"best_score\"))), \"best_iteration\": int(cast(str, model.attr(\"best_iteration\"))), } ) return model def", "[\"auc\", \"aucpr\", \"ndcg\", \"map\", \"ndcg@n\", \"map@n\"] def wandb_callback(): \"\"\" Old style callback that", "your trained model to to Weights & Biases Artifacts (when `log_model = True`)", "self.log_model: self._log_model_as_artifact(model) # Plot feature importance if self.log_feature_importance: self._log_feature_importance(model) # Log the best", "= [\"auc\", \"aucpr\", \"ndcg\", \"map\", \"ndcg@n\", \"map@n\"] def wandb_callback(): \"\"\" Old style callback", "init \"\"\" import os import json import wandb import warnings import xgboost as", "model def after_iteration(self, model, epoch, evals_log): \"\"\"Run after each iteration. Return True when", "wandb.Artifact(name=model_name, type=\"model\") model_artifact.add_file(model_path) wandb.log_artifact(model_artifact) def _log_feature_importance(self, model): fi = model.get_score(importance_type=self.importance_type) fi_data = [[k,", "Artifacts log_feature_importance: (boolean) if True log a feature importance bar plot importance_type: (str)", "callbacks=[WandbCallback()]) ) ``` \"\"\" def __init__( self, log_model: bool = False, log_feature_importance: bool", "is finished.\"\"\" # Log the booster model as artifacts if self.log_model: self._log_model_as_artifact(model) #", "model.save_model(str(model_path)) model_artifact = wandb.Artifact(name=model_name, type=\"model\") model_artifact.add_file(model_path) wandb.log_artifact(model_artifact) def _log_feature_importance(self, model): fi = model.get_score(importance_type=self.importance_type)", "callback(env): for k, v in env.evaluation_result_list: wandb.log({k: v}, commit=False) wandb.log({}) return callback class", "and upload your trained model to to Weights & Biases Artifacts (when `log_model", "bool = False, log_feature_importance: bool = True, importance_type: str = 
\"gain\", define_metric: bool", "pathlib import Path MINIMIZE_METRICS = [ \"rmse\", \"rmsle\", \"mae\", \"mape\", \"mphe\", \"logloss\", \"error\",", "metrics collected by XGBoost, such as rmse, accuracy etc to Weights & Biases", "Path(wandb.run.dir) / model_name model.save_model(str(model_path)) model_artifact = wandb.Artifact(name=model_name, type=\"model\") model_artifact.add_file(model_path) wandb.log_artifact(model_artifact) def _log_feature_importance(self, model):", "elif str.lower(metric_name) in MINIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MAXIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"max\") else:", "\"error@t\", \"merror\", ] MAXIMIZE_METRICS = [\"auc\", \"aucpr\", \"ndcg\", \"map\", \"ndcg@n\", \"map@n\"] def wandb_callback():", "eval_set=[(X_test, y_test)], callbacks=[WandbCallback()]) ) ``` \"\"\" def __init__( self, log_model: bool = False,", "wandb. Arguments: log_model: (boolean) if True save and upload the model to Weights", "evals_log): \"\"\"Run after each iteration. 
Return True when training should stop.\"\"\" # Log", "if self.define_metric: self._define_metric(data, metric_name) wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) else: wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) wandb.log({\"epoch\": epoch})", "config config = model.save_config() wandb.config.update(json.loads(config)) return model def after_training(self, model): \"\"\"Run after training", "bool = True, ): if wandb.run is None: raise wandb.Error(\"You must call wandb.init()", "define_metric: (boolean) if True (default) capture model performance at the best step, instead", "(boolean) if True save and upload the model to Weights & Biases Artifacts", "training should stop.\"\"\" # Log metrics for data, metric in evals_log.items(): for metric_name,", "model_name = f\"{wandb.run.id}_model.json\" model_path = Path(wandb.run.dir) / model_name model.save_model(str(model_path)) model_artifact = wandb.Artifact(name=model_name, type=\"model\")", "n_estimators = 10, tree_method = 'hist' ) xg_reg = xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train, y_train, eval_set=[(X_test,", "= 0.1, max_depth = 5, alpha = 10, n_estimators = 10, tree_method =", "gain, cover, total_gain, total_cover} for tree model. weight for linear model. define_metric: (boolean)", "Plot feature importance if self.log_feature_importance: self._log_feature_importance(model) # Log the best score and best", "linear model. 
define_metric: (boolean) if True (default) capture model performance at the best", "and best iteration if model.attr(\"best_score\") is not None: wandb.log( { \"best_score\": float(cast(str, model.attr(\"best_score\"))),", "0.3, learning_rate = 0.1, max_depth = 5, alpha = 10, n_estimators = 10,", "\"Feature\", \"Importance\", title=\"Feature Importance\" ) } ) def _define_metric(self, data, metric_name): if \"loss\"", "{ \"best_score\": float(cast(str, model.attr(\"best_score\"))), \"best_iteration\": int(cast(str, model.attr(\"best_iteration\"))), } ) return model def after_iteration(self,", "10, tree_method = 'hist' ) xg_reg = xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train, y_train, eval_set=[(X_test, y_test)], callbacks=[WandbCallback()])", "\"Importance\", title=\"Feature Importance\" ) } ) def _define_metric(self, data, metric_name): if \"loss\" in", "of WandbCallback. Please use WandbCallback for more features.\", UserWarning, stacklevel=2, ) def callback(env):", "self.log_feature_importance: self._log_feature_importance(model) # Log the best score and best iteration if model.attr(\"best_score\") is", "wandb.config.update(json.loads(config)) return model def after_training(self, model): \"\"\"Run after training is finished.\"\"\" # Log", "self, log_model: bool = False, log_feature_importance: bool = True, importance_type: str = \"gain\",", "model_artifact.add_file(model_path) wandb.log_artifact(model_artifact) def _log_feature_importance(self, model): fi = model.get_score(importance_type=self.importance_type) fi_data = [[k, fi[k]] for", "Weights & Biases Artifacts (when `log_model = True`) - log feature importance plot", "trained model to to Weights & Biases Artifacts (when `log_model = True`) -", "for k in fi] table = wandb.Table(data=fi_data, columns=[\"Feature\", \"Importance\"]) wandb.log( { \"Feature Importance\":", "def wandb_callback(): \"\"\" Old style callback that will be deprecated in favor of", "\"gain\", define_metric: bool = True, ): if 
wandb.run is None: raise wandb.Error(\"You must", "model configuration to Weights & Biases - log evaluation metrics collected by XGBoost,", "int(cast(str, model.attr(\"best_iteration\"))), } ) return model def after_iteration(self, model, epoch, evals_log): \"\"\"Run after", "best step, instead of the last step, of training in your `wandb.summary`. Passing", "model, epoch, evals_log): \"\"\"Run after each iteration. Return True when training should stop.\"\"\"", "self.log_model = log_model self.log_feature_importance = log_feature_importance self.importance_type = importance_type self.define_metric = define_metric def", "for k, v in env.evaluation_result_list: wandb.log({k: v}, commit=False) wandb.log({}) return callback class WandbCallback(xgb.callback.TrainingCallback):", "wandb.log({\"epoch\": epoch}) self.define_metric = False return False def _log_model_as_artifact(self, model): model_name = f\"{wandb.run.id}_model.json\"", "= dict( objective ='reg:squarederror', colsample_bytree = 0.3, learning_rate = 0.1, max_depth = 5,", "<gh_stars>0 # -*- coding: utf-8 -*- \"\"\" xgboost init \"\"\" import os import", "with wandb. Arguments: log_model: (boolean) if True save and upload the model to", "dict( objective ='reg:squarederror', colsample_bytree = 0.3, learning_rate = 0.1, max_depth = 5, alpha", "total_cover} for tree model. weight for linear model. 
define_metric: (boolean) if True (default)", "= wandb.Table(data=fi_data, columns=[\"Feature\", \"Importance\"]) wandb.log( { \"Feature Importance\": wandb.plot.bar( table, \"Feature\", \"Importance\", title=\"Feature", "utf-8 -*- \"\"\" xgboost init \"\"\" import os import json import wandb import", "log training metric collected by XGBoost (if you provide training data to eval_set)", "= define_metric def before_training(self, model): \"\"\"Run before training is finished\"\"\" # Update W&B", "y_test)], callbacks=[WandbCallback()]) ) ``` \"\"\" def __init__( self, log_model: bool = False, log_feature_importance:", "model.attr(\"best_score\") is not None: wandb.log( { \"best_score\": float(cast(str, model.attr(\"best_score\"))), \"best_iteration\": int(cast(str, model.attr(\"best_iteration\"))), }", "self.log_feature_importance = log_feature_importance self.importance_type = importance_type self.define_metric = define_metric def before_training(self, model): \"\"\"Run", "Log metrics for data, metric in evals_log.items(): for metric_name, log in metric.items(): if", "wandb.plot.bar( table, \"Feature\", \"Importance\", title=\"Feature Importance\" ) } ) def _define_metric(self, data, metric_name):", "Biases - log training metric collected by XGBoost (if you provide training data", "\"loss\" in str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MINIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name)", "wandb.run is None: raise wandb.Error(\"You must call wandb.init() before WandbCallback()\") self.log_model = log_model", "at the best step, instead of the last step, of training in your", "for more features. 
\"\"\" warnings.warn( \"wandb_callback will be deprecated in favor of WandbCallback.", "warnings import xgboost as xgb from typing import cast from pathlib import Path", "\"ndcg@n\", \"map@n\"] def wandb_callback(): \"\"\" Old style callback that will be deprecated in", "UserWarning, stacklevel=2, ) def callback(env): for k, v in env.evaluation_result_list: wandb.log({k: v}, commit=False)", "summary=\"min\") elif str.lower(metric_name) in MINIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MAXIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"max\")", "self.importance_type = importance_type self.define_metric = define_metric def before_training(self, model): \"\"\"Run before training is", "from pathlib import Path MINIMIZE_METRICS = [ \"rmse\", \"rmsle\", \"mae\", \"mape\", \"mphe\", \"logloss\",", "Weights & Biases Artifacts log_feature_importance: (boolean) if True log a feature importance bar", "def _define_metric(self, data, metric_name): if \"loss\" in str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in", "call wandb.init() before WandbCallback()\") self.log_model = log_model self.log_feature_importance = log_feature_importance self.importance_type = importance_type", "metric_name) wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) else: wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) wandb.log({\"epoch\": epoch}) self.define_metric = False", "in favor of WandbCallback. Please use WandbCallback for more features.\", UserWarning, stacklevel=2, )", "Update W&B config config = model.save_config() wandb.config.update(json.loads(config)) return model def after_training(self, model): \"\"\"Run", "WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback` automatically integrates XGBoost with wandb. 
Arguments: log_model: (boolean) if True save", "capture model performance at the best step, instead of the last step, of", "feature importance bar plot importance_type: (str) one of {weight, gain, cover, total_gain, total_cover}", "json import wandb import warnings import xgboost as xgb from typing import cast", "v in env.evaluation_result_list: wandb.log({k: v}, commit=False) wandb.log({}) return callback class WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback` automatically", "data, metric in evals_log.items(): for metric_name, log in metric.items(): if self.define_metric: self._define_metric(data, metric_name)", "= [ \"rmse\", \"rmsle\", \"mae\", \"mape\", \"mphe\", \"logloss\", \"error\", \"error@t\", \"merror\", ] MAXIMIZE_METRICS", "metric_name, log in metric.items(): if self.define_metric: self._define_metric(data, metric_name) wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) else: wandb.log({f\"{data}-{metric_name}\":", "commit=False) wandb.log({\"epoch\": epoch}) self.define_metric = False return False def _log_model_as_artifact(self, model): model_name =", "& Biases Artifacts (when `log_model = True`) - log feature importance plot when", "iteration. Return True when training should stop.\"\"\" # Log metrics for data, metric", "title=\"Feature Importance\" ) } ) def _define_metric(self, data, metric_name): if \"loss\" in str.lower(metric_name):", "xg_reg = xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train, y_train, eval_set=[(X_test, y_test)], callbacks=[WandbCallback()]) ) ``` \"\"\" def __init__(", "Arguments: log_model: (boolean) if True save and upload the model to Weights &", "= False return False def _log_model_as_artifact(self, model): model_name = f\"{wandb.run.id}_model.json\" model_path = Path(wandb.run.dir)", "(if you provide training data to eval_set) - log the best score and", "`wandb.summary`. 
Passing `WandbCallback` to XGBoost will: - log the booster model configuration to", "tree_method = 'hist' ) xg_reg = xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train, y_train, eval_set=[(X_test, y_test)], callbacks=[WandbCallback()]) )", "data to eval_set) - log the best score and the best iteration -", "= log_feature_importance self.importance_type = importance_type self.define_metric = define_metric def before_training(self, model): \"\"\"Run before", ") } ) def _define_metric(self, data, metric_name): if \"loss\" in str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\")", "os import json import wandb import warnings import xgboost as xgb from typing", "for more features.\", UserWarning, stacklevel=2, ) def callback(env): for k, v in env.evaluation_result_list:", "\"map@n\"] def wandb_callback(): \"\"\" Old style callback that will be deprecated in favor", "- log evaluation metrics collected by XGBoost, such as rmse, accuracy etc to", "log[-1]}, commit=False) wandb.log({\"epoch\": epoch}) self.define_metric = False return False def _log_model_as_artifact(self, model): model_name", "the model to Weights & Biases Artifacts log_feature_importance: (boolean) if True log a", "bool = True, importance_type: str = \"gain\", define_metric: bool = True, ): if", "self.define_metric = define_metric def before_training(self, model): \"\"\"Run before training is finished\"\"\" # Update", "learning_rate = 0.1, max_depth = 5, alpha = 10, n_estimators = 10, tree_method", "] MAXIMIZE_METRICS = [\"auc\", \"aucpr\", \"ndcg\", \"map\", \"ndcg@n\", \"map@n\"] def wandb_callback(): \"\"\" Old", "wandb.Error(\"You must call wandb.init() before WandbCallback()\") self.log_model = log_model self.log_feature_importance = log_feature_importance self.importance_type", "deprecated in favor of WandbCallback. 
Please try the new logger for more features.", "wandb_callback(): \"\"\" Old style callback that will be deprecated in favor of WandbCallback.", "last step, of training in your `wandb.summary`. Passing `WandbCallback` to XGBoost will: -", "\"\"\"Run before training is finished\"\"\" # Update W&B config config = model.save_config() wandb.config.update(json.loads(config))", "define_metric def before_training(self, model): \"\"\"Run before training is finished\"\"\" # Update W&B config", "the best eval metric in `wandb.summary` when `define_metric=True` (default). Example: ```python bst_params =", "wandb.log_artifact(model_artifact) def _log_feature_importance(self, model): fi = model.get_score(importance_type=self.importance_type) fi_data = [[k, fi[k]] for k", "coding: utf-8 -*- \"\"\" xgboost init \"\"\" import os import json import wandb", "try the new logger for more features. \"\"\" warnings.warn( \"wandb_callback will be deprecated", "\"error\", \"error@t\", \"merror\", ] MAXIMIZE_METRICS = [\"auc\", \"aucpr\", \"ndcg\", \"map\", \"ndcg@n\", \"map@n\"] def", "more features. \"\"\" warnings.warn( \"wandb_callback will be deprecated in favor of WandbCallback. 
Please", "if model.attr(\"best_score\") is not None: wandb.log( { \"best_score\": float(cast(str, model.attr(\"best_score\"))), \"best_iteration\": int(cast(str, model.attr(\"best_iteration\"))),", "0.1, max_depth = 5, alpha = 10, n_estimators = 10, tree_method = 'hist'", "log_feature_importance: (boolean) if True log a feature importance bar plot importance_type: (str) one", "= 10, tree_method = 'hist' ) xg_reg = xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train, y_train, eval_set=[(X_test, y_test)],", "such as rmse, accuracy etc to Weights & Biases - log training metric", "# Update W&B config config = model.save_config() wandb.config.update(json.loads(config)) return model def after_training(self, model):", "finished.\"\"\" # Log the booster model as artifacts if self.log_model: self._log_model_as_artifact(model) # Plot", "total_gain, total_cover} for tree model. weight for linear model. define_metric: (boolean) if True", "more features.\", UserWarning, stacklevel=2, ) def callback(env): for k, v in env.evaluation_result_list: wandb.log({k:", "def __init__( self, log_model: bool = False, log_feature_importance: bool = True, importance_type: str", "epoch}) self.define_metric = False return False def _log_model_as_artifact(self, model): model_name = f\"{wandb.run.id}_model.json\" model_path", "= 'hist' ) xg_reg = xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train, y_train, eval_set=[(X_test, y_test)], callbacks=[WandbCallback()]) ) ```", "be deprecated in favor of WandbCallback. 
Please try the new logger for more", "None: raise wandb.Error(\"You must call wandb.init() before WandbCallback()\") self.log_model = log_model self.log_feature_importance =", "log_feature_importance self.importance_type = importance_type self.define_metric = define_metric def before_training(self, model): \"\"\"Run before training", "- log the best score and the best iteration - save and upload", "True save and upload the model to Weights & Biases Artifacts log_feature_importance: (boolean)", "iteration if model.attr(\"best_score\") is not None: wandb.log( { \"best_score\": float(cast(str, model.attr(\"best_score\"))), \"best_iteration\": int(cast(str,", "collected by XGBoost, such as rmse, accuracy etc to Weights & Biases -", "wandb.log({}) return callback class WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback` automatically integrates XGBoost with wandb. Arguments: log_model:", "for metric_name, log in metric.items(): if self.define_metric: self._define_metric(data, metric_name) wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) else:", "env.evaluation_result_list: wandb.log({k: v}, commit=False) wandb.log({}) return callback class WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback` automatically integrates XGBoost", "wandb.Table(data=fi_data, columns=[\"Feature\", \"Importance\"]) wandb.log( { \"Feature Importance\": wandb.plot.bar( table, \"Feature\", \"Importance\", title=\"Feature Importance\"", "when `log_feature_importance=True` (default). 
- Capture the best eval metric in `wandb.summary` when `define_metric=True`", "log a feature importance bar plot importance_type: (str) one of {weight, gain, cover,", "= \"gain\", define_metric: bool = True, ): if wandb.run is None: raise wandb.Error(\"You", "k, v in env.evaluation_result_list: wandb.log({k: v}, commit=False) wandb.log({}) return callback class WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback`", "self._log_model_as_artifact(model) # Plot feature importance if self.log_feature_importance: self._log_feature_importance(model) # Log the best score", "score and the best iteration - save and upload your trained model to", "best score and best iteration if model.attr(\"best_score\") is not None: wandb.log( { \"best_score\":", "= Path(wandb.run.dir) / model_name model.save_model(str(model_path)) model_artifact = wandb.Artifact(name=model_name, type=\"model\") model_artifact.add_file(model_path) wandb.log_artifact(model_artifact) def _log_feature_importance(self,", ") ``` \"\"\" def __init__( self, log_model: bool = False, log_feature_importance: bool =", "if self.log_model: self._log_model_as_artifact(model) # Plot feature importance if self.log_feature_importance: self._log_feature_importance(model) # Log the", "False def _log_model_as_artifact(self, model): model_name = f\"{wandb.run.id}_model.json\" model_path = Path(wandb.run.dir) / model_name model.save_model(str(model_path))", "- log training metric collected by XGBoost (if you provide training data to", "a feature importance bar plot importance_type: (str) one of {weight, gain, cover, total_gain,", "Biases Artifacts log_feature_importance: (boolean) if True log a feature importance bar plot importance_type:", "def after_iteration(self, model, epoch, evals_log): \"\"\"Run after each iteration. 
Return True when training", "upload your trained model to to Weights & Biases Artifacts (when `log_model =", "to Weights & Biases - log training metric collected by XGBoost (if you", "WandbCallback. Please use WandbCallback for more features.\", UserWarning, stacklevel=2, ) def callback(env): for", "warnings.warn( \"wandb_callback will be deprecated in favor of WandbCallback. Please use WandbCallback for", "save and upload your trained model to to Weights & Biases Artifacts (when", "Please use WandbCallback for more features.\", UserWarning, stacklevel=2, ) def callback(env): for k,", "`WandbCallback` to XGBoost will: - log the booster model configuration to Weights &", "as rmse, accuracy etc to Weights & Biases - log training metric collected", "accuracy etc to Weights & Biases - log training metric collected by XGBoost", "of the last step, of training in your `wandb.summary`. Passing `WandbCallback` to XGBoost", "def _log_feature_importance(self, model): fi = model.get_score(importance_type=self.importance_type) fi_data = [[k, fi[k]] for k in", "raise wandb.Error(\"You must call wandb.init() before WandbCallback()\") self.log_model = log_model self.log_feature_importance = log_feature_importance", "to Weights & Biases - log evaluation metrics collected by XGBoost, such as", "- Capture the best eval metric in `wandb.summary` when `define_metric=True` (default). 
Example: ```python", "for data, metric in evals_log.items(): for metric_name, log in metric.items(): if self.define_metric: self._define_metric(data,", "wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) wandb.log({\"epoch\": epoch}) self.define_metric = False return False def _log_model_as_artifact(self, model):", "str = \"gain\", define_metric: bool = True, ): if wandb.run is None: raise", "Artifacts (when `log_model = True`) - log feature importance plot when `log_feature_importance=True` (default).", "# Log the best score and best iteration if model.attr(\"best_score\") is not None:", "wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MINIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MAXIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\",", "model.attr(\"best_score\"))), \"best_iteration\": int(cast(str, model.attr(\"best_iteration\"))), } ) return model def after_iteration(self, model, epoch, evals_log):", "when training should stop.\"\"\" # Log metrics for data, metric in evals_log.items(): for", "False, log_feature_importance: bool = True, importance_type: str = \"gain\", define_metric: bool = True,", "= model.save_config() wandb.config.update(json.loads(config)) return model def after_training(self, model): \"\"\"Run after training is finished.\"\"\"", "{ \"Feature Importance\": wandb.plot.bar( table, \"Feature\", \"Importance\", title=\"Feature Importance\" ) } ) def", "model): model_name = f\"{wandb.run.id}_model.json\" model_path = Path(wandb.run.dir) / model_name model.save_model(str(model_path)) model_artifact = wandb.Artifact(name=model_name,", "model): \"\"\"Run after training is finished.\"\"\" # Log the booster model as artifacts", "model to Weights & Biases Artifacts log_feature_importance: (boolean) if True log a feature", "= f\"{wandb.run.id}_model.json\" model_path = Path(wandb.run.dir) / model_name 
model.save_model(str(model_path)) model_artifact = wandb.Artifact(name=model_name, type=\"model\") model_artifact.add_file(model_path)", "use WandbCallback for more features.\", UserWarning, stacklevel=2, ) def callback(env): for k, v", "the best iteration - save and upload your trained model to to Weights", "log_model: bool = False, log_feature_importance: bool = True, importance_type: str = \"gain\", define_metric:", "\"\"\" Old style callback that will be deprecated in favor of WandbCallback. Please", "stop.\"\"\" # Log metrics for data, metric in evals_log.items(): for metric_name, log in", "import cast from pathlib import Path MINIMIZE_METRICS = [ \"rmse\", \"rmsle\", \"mae\", \"mape\",", "```python bst_params = dict( objective ='reg:squarederror', colsample_bytree = 0.3, learning_rate = 0.1, max_depth", "log[-1]}, commit=False) else: wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) wandb.log({\"epoch\": epoch}) self.define_metric = False return False", "best eval metric in `wandb.summary` when `define_metric=True` (default). Example: ```python bst_params = dict(", "iteration - save and upload your trained model to to Weights & Biases", "- log feature importance plot when `log_feature_importance=True` (default). - Capture the best eval", "True when training should stop.\"\"\" # Log metrics for data, metric in evals_log.items():", "if True save and upload the model to Weights & Biases Artifacts log_feature_importance:", "-*- coding: utf-8 -*- \"\"\" xgboost init \"\"\" import os import json import", "model.save_config() wandb.config.update(json.loads(config)) return model def after_training(self, model): \"\"\"Run after training is finished.\"\"\" #", "after each iteration. Return True when training should stop.\"\"\" # Log metrics for", "Please try the new logger for more features. 
\"\"\" warnings.warn( \"wandb_callback will be", "in evals_log.items(): for metric_name, log in metric.items(): if self.define_metric: self._define_metric(data, metric_name) wandb.log({f\"{data}-{metric_name}\": log[-1]},", "model_path = Path(wandb.run.dir) / model_name model.save_model(str(model_path)) model_artifact = wandb.Artifact(name=model_name, type=\"model\") model_artifact.add_file(model_path) wandb.log_artifact(model_artifact) def", "in str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MINIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in", "xgboost init \"\"\" import os import json import wandb import warnings import xgboost", "model_artifact = wandb.Artifact(name=model_name, type=\"model\") model_artifact.add_file(model_path) wandb.log_artifact(model_artifact) def _log_feature_importance(self, model): fi = model.get_score(importance_type=self.importance_type) fi_data", "cover, total_gain, total_cover} for tree model. weight for linear model. 
define_metric: (boolean) if", "you provide training data to eval_set) - log the best score and the", "v}, commit=False) wandb.log({}) return callback class WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback` automatically integrates XGBoost with wandb.", "should stop.\"\"\" # Log metrics for data, metric in evals_log.items(): for metric_name, log", "str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MINIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MAXIMIZE_METRICS:", "objective ='reg:squarederror', colsample_bytree = 0.3, learning_rate = 0.1, max_depth = 5, alpha =", "W&B config config = model.save_config() wandb.config.update(json.loads(config)) return model def after_training(self, model): \"\"\"Run after", "importance if self.log_feature_importance: self._log_feature_importance(model) # Log the best score and best iteration if", "float(cast(str, model.attr(\"best_score\"))), \"best_iteration\": int(cast(str, model.attr(\"best_iteration\"))), } ) return model def after_iteration(self, model, epoch,", "model.attr(\"best_iteration\"))), } ) return model def after_iteration(self, model, epoch, evals_log): \"\"\"Run after each", "each iteration. Return True when training should stop.\"\"\" # Log metrics for data,", "): if wandb.run is None: raise wandb.Error(\"You must call wandb.init() before WandbCallback()\") self.log_model", "(default). - Capture the best eval metric in `wandb.summary` when `define_metric=True` (default). Example:", "\"best_iteration\": int(cast(str, model.attr(\"best_iteration\"))), } ) return model def after_iteration(self, model, epoch, evals_log): \"\"\"Run", "`define_metric=True` (default). 
Example: ```python bst_params = dict( objective ='reg:squarederror', colsample_bytree = 0.3, learning_rate", "[[k, fi[k]] for k in fi] table = wandb.Table(data=fi_data, columns=[\"Feature\", \"Importance\"]) wandb.log( {", "def _log_model_as_artifact(self, model): model_name = f\"{wandb.run.id}_model.json\" model_path = Path(wandb.run.dir) / model_name model.save_model(str(model_path)) model_artifact", "is finished\"\"\" # Update W&B config config = model.save_config() wandb.config.update(json.loads(config)) return model def", "# -*- coding: utf-8 -*- \"\"\" xgboost init \"\"\" import os import json", "& Biases - log evaluation metrics collected by XGBoost, such as rmse, accuracy", "rmse, accuracy etc to Weights & Biases - log training metric collected by", "define_metric: bool = True, ): if wandb.run is None: raise wandb.Error(\"You must call", "(default) capture model performance at the best step, instead of the last step,", "feature importance if self.log_feature_importance: self._log_feature_importance(model) # Log the best score and best iteration", "plot when `log_feature_importance=True` (default). 
- Capture the best eval metric in `wandb.summary` when", "model performance at the best step, instead of the last step, of training", "model_name model.save_model(str(model_path)) model_artifact = wandb.Artifact(name=model_name, type=\"model\") model_artifact.add_file(model_path) wandb.log_artifact(model_artifact) def _log_feature_importance(self, model): fi =", "stacklevel=2, ) def callback(env): for k, v in env.evaluation_result_list: wandb.log({k: v}, commit=False) wandb.log({})", "True, importance_type: str = \"gain\", define_metric: bool = True, ): if wandb.run is", "\"mae\", \"mape\", \"mphe\", \"logloss\", \"error\", \"error@t\", \"merror\", ] MAXIMIZE_METRICS = [\"auc\", \"aucpr\", \"ndcg\",", "fi] table = wandb.Table(data=fi_data, columns=[\"Feature\", \"Importance\"]) wandb.log( { \"Feature Importance\": wandb.plot.bar( table, \"Feature\",", "must call wandb.init() before WandbCallback()\") self.log_model = log_model self.log_feature_importance = log_feature_importance self.importance_type =", "True, ): if wandb.run is None: raise wandb.Error(\"You must call wandb.init() before WandbCallback()\")", "self.define_metric = False return False def _log_model_as_artifact(self, model): model_name = f\"{wandb.run.id}_model.json\" model_path =", "= True, importance_type: str = \"gain\", define_metric: bool = True, ): if wandb.run", "= importance_type self.define_metric = define_metric def before_training(self, model): \"\"\"Run before training is finished\"\"\"", "importance bar plot importance_type: (str) one of {weight, gain, cover, total_gain, total_cover} for", "that will be deprecated in favor of WandbCallback. Please try the new logger", "after training is finished.\"\"\" # Log the booster model as artifacts if self.log_model:", "model.get_score(importance_type=self.importance_type) fi_data = [[k, fi[k]] for k in fi] table = wandb.Table(data=fi_data, columns=[\"Feature\",", "will be deprecated in favor of WandbCallback. 
Please use WandbCallback for more features.\",", "by XGBoost (if you provide training data to eval_set) - log the best", "= [[k, fi[k]] for k in fi] table = wandb.Table(data=fi_data, columns=[\"Feature\", \"Importance\"]) wandb.log(", "def callback(env): for k, v in env.evaluation_result_list: wandb.log({k: v}, commit=False) wandb.log({}) return callback", "\"map\", \"ndcg@n\", \"map@n\"] def wandb_callback(): \"\"\" Old style callback that will be deprecated", "/ model_name model.save_model(str(model_path)) model_artifact = wandb.Artifact(name=model_name, type=\"model\") model_artifact.add_file(model_path) wandb.log_artifact(model_artifact) def _log_feature_importance(self, model): fi", "in your `wandb.summary`. Passing `WandbCallback` to XGBoost will: - log the booster model", "is not None: wandb.log( { \"best_score\": float(cast(str, model.attr(\"best_score\"))), \"best_iteration\": int(cast(str, model.attr(\"best_iteration\"))), } )", "} ) def _define_metric(self, data, metric_name): if \"loss\" in str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif", "log_feature_importance: bool = True, importance_type: str = \"gain\", define_metric: bool = True, ):", "to to Weights & Biases Artifacts (when `log_model = True`) - log feature", "\"\"\"Run after each iteration. Return True when training should stop.\"\"\" # Log metrics", "step, instead of the last step, of training in your `wandb.summary`. 
Passing `WandbCallback`", "self._define_metric(data, metric_name) wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) else: wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) wandb.log({\"epoch\": epoch}) self.define_metric =", "fi_data = [[k, fi[k]] for k in fi] table = wandb.Table(data=fi_data, columns=[\"Feature\", \"Importance\"])", "best score and the best iteration - save and upload your trained model", "Biases Artifacts (when `log_model = True`) - log feature importance plot when `log_feature_importance=True`", "artifacts if self.log_model: self._log_model_as_artifact(model) # Plot feature importance if self.log_feature_importance: self._log_feature_importance(model) # Log", "(boolean) if True log a feature importance bar plot importance_type: (str) one of", "log feature importance plot when `log_feature_importance=True` (default). - Capture the best eval metric", "bst_params = dict( objective ='reg:squarederror', colsample_bytree = 0.3, learning_rate = 0.1, max_depth =", "table = wandb.Table(data=fi_data, columns=[\"Feature\", \"Importance\"]) wandb.log( { \"Feature Importance\": wandb.plot.bar( table, \"Feature\", \"Importance\",", "wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) else: wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) wandb.log({\"epoch\": epoch}) self.define_metric = False return", "- save and upload your trained model to to Weights & Biases Artifacts", "xg_reg.fit(X_train, y_train, eval_set=[(X_test, y_test)], callbacks=[WandbCallback()]) ) ``` \"\"\" def __init__( self, log_model: bool", "to Weights & Biases Artifacts log_feature_importance: (boolean) if True log a feature importance", "best iteration - save and upload your trained model to to Weights &", "from typing import cast from pathlib import Path MINIMIZE_METRICS = [ \"rmse\", \"rmsle\",", "wandb.log({k: v}, commit=False) wandb.log({}) return callback class WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback` 
automatically integrates XGBoost with", "of {weight, gain, cover, total_gain, total_cover} for tree model. weight for linear model.", "Return True when training should stop.\"\"\" # Log metrics for data, metric in", "k in fi] table = wandb.Table(data=fi_data, columns=[\"Feature\", \"Importance\"]) wandb.log( { \"Feature Importance\": wandb.plot.bar(", "None: wandb.log( { \"best_score\": float(cast(str, model.attr(\"best_score\"))), \"best_iteration\": int(cast(str, model.attr(\"best_iteration\"))), } ) return model", "the booster model as artifacts if self.log_model: self._log_model_as_artifact(model) # Plot feature importance if", "the best score and best iteration if model.attr(\"best_score\") is not None: wandb.log( {", "when `define_metric=True` (default). Example: ```python bst_params = dict( objective ='reg:squarederror', colsample_bytree = 0.3,", "import warnings import xgboost as xgb from typing import cast from pathlib import", "training data to eval_set) - log the best score and the best iteration", "Example: ```python bst_params = dict( objective ='reg:squarederror', colsample_bytree = 0.3, learning_rate = 0.1,", "\"\"\"`WandbCallback` automatically integrates XGBoost with wandb. Arguments: log_model: (boolean) if True save and", "else: wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) wandb.log({\"epoch\": epoch}) self.define_metric = False return False def _log_model_as_artifact(self,", "best iteration if model.attr(\"best_score\") is not None: wandb.log( { \"best_score\": float(cast(str, model.attr(\"best_score\"))), \"best_iteration\":", "class WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback` automatically integrates XGBoost with wandb. 
Arguments: log_model: (boolean) if True", "Weights & Biases - log training metric collected by XGBoost (if you provide", "type=\"model\") model_artifact.add_file(model_path) wandb.log_artifact(model_artifact) def _log_feature_importance(self, model): fi = model.get_score(importance_type=self.importance_type) fi_data = [[k, fi[k]]", "favor of WandbCallback. Please use WandbCallback for more features.\", UserWarning, stacklevel=2, ) def", "= 0.3, learning_rate = 0.1, max_depth = 5, alpha = 10, n_estimators =", "`log_model = True`) - log feature importance plot when `log_feature_importance=True` (default). - Capture", "style callback that will be deprecated in favor of WandbCallback. Please try the", "WandbCallback. Please try the new logger for more features. \"\"\" warnings.warn( \"wandb_callback will", "= False, log_feature_importance: bool = True, importance_type: str = \"gain\", define_metric: bool =", "= 10, n_estimators = 10, tree_method = 'hist' ) xg_reg = xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train,", "before_training(self, model): \"\"\"Run before training is finished\"\"\" # Update W&B config config =", "integrates XGBoost with wandb. 
Arguments: log_model: (boolean) if True save and upload the", "not None: wandb.log( { \"best_score\": float(cast(str, model.attr(\"best_score\"))), \"best_iteration\": int(cast(str, model.attr(\"best_iteration\"))), } ) return", "False return False def _log_model_as_artifact(self, model): model_name = f\"{wandb.run.id}_model.json\" model_path = Path(wandb.run.dir) /", "in fi] table = wandb.Table(data=fi_data, columns=[\"Feature\", \"Importance\"]) wandb.log( { \"Feature Importance\": wandb.plot.bar( table,", "is None: raise wandb.Error(\"You must call wandb.init() before WandbCallback()\") self.log_model = log_model self.log_feature_importance", "\"\"\"Run after training is finished.\"\"\" # Log the booster model as artifacts if", "metric_name): if \"loss\" in str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MINIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\")", "Weights & Biases - log evaluation metrics collected by XGBoost, such as rmse,", "10, n_estimators = 10, tree_method = 'hist' ) xg_reg = xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train, y_train,", "as artifacts if self.log_model: self._log_model_as_artifact(model) # Plot feature importance if self.log_feature_importance: self._log_feature_importance(model) #", "to eval_set) - log the best score and the best iteration - save", "\"\"\" def __init__( self, log_model: bool = False, log_feature_importance: bool = True, importance_type:", "WandbCallback()\") self.log_model = log_model self.log_feature_importance = log_feature_importance self.importance_type = importance_type self.define_metric = define_metric", "-*- \"\"\" xgboost init \"\"\" import os import json import wandb import warnings", "be deprecated in favor of WandbCallback. 
Please use WandbCallback for more features.\", UserWarning,", "# Log metrics for data, metric in evals_log.items(): for metric_name, log in metric.items():", "typing import cast from pathlib import Path MINIMIZE_METRICS = [ \"rmse\", \"rmsle\", \"mae\",", "metrics for data, metric in evals_log.items(): for metric_name, log in metric.items(): if self.define_metric:", "of training in your `wandb.summary`. Passing `WandbCallback` to XGBoost will: - log the", "weight for linear model. define_metric: (boolean) if True (default) capture model performance at", ") return model def after_iteration(self, model, epoch, evals_log): \"\"\"Run after each iteration. Return", "table, \"Feature\", \"Importance\", title=\"Feature Importance\" ) } ) def _define_metric(self, data, metric_name): if", "features. \"\"\" warnings.warn( \"wandb_callback will be deprecated in favor of WandbCallback. Please use", "data, metric_name): if \"loss\" in str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MINIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\",", "= wandb.Artifact(name=model_name, type=\"model\") model_artifact.add_file(model_path) wandb.log_artifact(model_artifact) def _log_feature_importance(self, model): fi = model.get_score(importance_type=self.importance_type) fi_data =", "='reg:squarederror', colsample_bytree = 0.3, learning_rate = 0.1, max_depth = 5, alpha = 10,", "xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train, y_train, eval_set=[(X_test, y_test)], callbacks=[WandbCallback()]) ) ``` \"\"\" def __init__( self, log_model:", "evals_log.items(): for metric_name, log in metric.items(): if self.define_metric: self._define_metric(data, metric_name) wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False)", "and the best iteration - save and upload your trained model to to", "\"mape\", \"mphe\", \"logloss\", \"error\", \"error@t\", \"merror\", ] MAXIMIZE_METRICS = [\"auc\", \"aucpr\", \"ndcg\", 
\"map\",", "to XGBoost will: - log the booster model configuration to Weights & Biases", "in metric.items(): if self.define_metric: self._define_metric(data, metric_name) wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False) else: wandb.log({f\"{data}-{metric_name}\": log[-1]}, commit=False)", "str.lower(metric_name) in MINIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MAXIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"max\") else: pass", "xgb from typing import cast from pathlib import Path MINIMIZE_METRICS = [ \"rmse\",", "in favor of WandbCallback. Please try the new logger for more features. \"\"\"", "- log the booster model configuration to Weights & Biases - log evaluation", "\"ndcg\", \"map\", \"ndcg@n\", \"map@n\"] def wandb_callback(): \"\"\" Old style callback that will be", "before WandbCallback()\") self.log_model = log_model self.log_feature_importance = log_feature_importance self.importance_type = importance_type self.define_metric =", "def before_training(self, model): \"\"\"Run before training is finished\"\"\" # Update W&B config config", "XGBoost, such as rmse, accuracy etc to Weights & Biases - log training", "model as artifacts if self.log_model: self._log_model_as_artifact(model) # Plot feature importance if self.log_feature_importance: self._log_feature_importance(model)", "importance_type: (str) one of {weight, gain, cover, total_gain, total_cover} for tree model. weight", "import os import json import wandb import warnings import xgboost as xgb from", "= True`) - log feature importance plot when `log_feature_importance=True` (default). 
- Capture the", "training is finished.\"\"\" # Log the booster model as artifacts if self.log_model: self._log_model_as_artifact(model)", "\"\"\" import os import json import wandb import warnings import xgboost as xgb", "return False def _log_model_as_artifact(self, model): model_name = f\"{wandb.run.id}_model.json\" model_path = Path(wandb.run.dir) / model_name", "will be deprecated in favor of WandbCallback. Please try the new logger for", "Path MINIMIZE_METRICS = [ \"rmse\", \"rmsle\", \"mae\", \"mape\", \"mphe\", \"logloss\", \"error\", \"error@t\", \"merror\",", "one of {weight, gain, cover, total_gain, total_cover} for tree model. weight for linear", "True log a feature importance bar plot importance_type: (str) one of {weight, gain,", "the new logger for more features. \"\"\" warnings.warn( \"wandb_callback will be deprecated in", "return callback class WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback` automatically integrates XGBoost with wandb. Arguments: log_model: (boolean)", "the booster model configuration to Weights & Biases - log evaluation metrics collected", "& Biases - log training metric collected by XGBoost (if you provide training", "5, alpha = 10, n_estimators = 10, tree_method = 'hist' ) xg_reg =", "XGBoost with wandb. Arguments: log_model: (boolean) if True save and upload the model", "your `wandb.summary`. 
Passing `WandbCallback` to XGBoost will: - log the booster model configuration", "colsample_bytree = 0.3, learning_rate = 0.1, max_depth = 5, alpha = 10, n_estimators", "cast from pathlib import Path MINIMIZE_METRICS = [ \"rmse\", \"rmsle\", \"mae\", \"mape\", \"mphe\",", "if True log a feature importance bar plot importance_type: (str) one of {weight,", "# Plot feature importance if self.log_feature_importance: self._log_feature_importance(model) # Log the best score and", "\"rmsle\", \"mae\", \"mape\", \"mphe\", \"logloss\", \"error\", \"error@t\", \"merror\", ] MAXIMIZE_METRICS = [\"auc\", \"aucpr\",", "alpha = 10, n_estimators = 10, tree_method = 'hist' ) xg_reg = xgb.XGBRegressor(**bst_params)", "Importance\": wandb.plot.bar( table, \"Feature\", \"Importance\", title=\"Feature Importance\" ) } ) def _define_metric(self, data,", "f\"{wandb.run.id}_model.json\" model_path = Path(wandb.run.dir) / model_name model.save_model(str(model_path)) model_artifact = wandb.Artifact(name=model_name, type=\"model\") model_artifact.add_file(model_path) wandb.log_artifact(model_artifact)", "feature importance plot when `log_feature_importance=True` (default). - Capture the best eval metric in", "tree model. weight for linear model. define_metric: (boolean) if True (default) capture model", "callback class WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback` automatically integrates XGBoost with wandb. Arguments: log_model: (boolean) if", "epoch, evals_log): \"\"\"Run after each iteration. 
Return True when training should stop.\"\"\" #", "etc to Weights & Biases - log training metric collected by XGBoost (if", "\"logloss\", \"error\", \"error@t\", \"merror\", ] MAXIMIZE_METRICS = [\"auc\", \"aucpr\", \"ndcg\", \"map\", \"ndcg@n\", \"map@n\"]", "the best score and the best iteration - save and upload your trained", "Importance\" ) } ) def _define_metric(self, data, metric_name): if \"loss\" in str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\",", "importance_type self.define_metric = define_metric def before_training(self, model): \"\"\"Run before training is finished\"\"\" #", "_log_feature_importance(self, model): fi = model.get_score(importance_type=self.importance_type) fi_data = [[k, fi[k]] for k in fi]", "wandb import warnings import xgboost as xgb from typing import cast from pathlib", "model def after_training(self, model): \"\"\"Run after training is finished.\"\"\" # Log the booster", "Log the best score and best iteration if model.attr(\"best_score\") is not None: wandb.log(", "fi = model.get_score(importance_type=self.importance_type) fi_data = [[k, fi[k]] for k in fi] table =", "'hist' ) xg_reg = xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train, y_train, eval_set=[(X_test, y_test)], callbacks=[WandbCallback()]) ) ``` \"\"\"", "\"rmse\", \"rmsle\", \"mae\", \"mape\", \"mphe\", \"logloss\", \"error\", \"error@t\", \"merror\", ] MAXIMIZE_METRICS = [\"auc\",", "\"mphe\", \"logloss\", \"error\", \"error@t\", \"merror\", ] MAXIMIZE_METRICS = [\"auc\", \"aucpr\", \"ndcg\", \"map\", \"ndcg@n\",", "import Path MINIMIZE_METRICS = [ \"rmse\", \"rmsle\", \"mae\", \"mape\", \"mphe\", \"logloss\", \"error\", \"error@t\",", "} ) return model def after_iteration(self, model, epoch, evals_log): \"\"\"Run after each iteration.", ") def callback(env): for k, v in env.evaluation_result_list: wandb.log({k: v}, commit=False) wandb.log({}) return", "bar plot importance_type: (str) one of {weight, gain, cover, total_gain, total_cover} for 
tree", "= log_model self.log_feature_importance = log_feature_importance self.importance_type = importance_type self.define_metric = define_metric def before_training(self,", "Log the booster model as artifacts if self.log_model: self._log_model_as_artifact(model) # Plot feature importance", "will: - log the booster model configuration to Weights & Biases - log", "features.\", UserWarning, stacklevel=2, ) def callback(env): for k, v in env.evaluation_result_list: wandb.log({k: v},", "XGBoost (if you provide training data to eval_set) - log the best score", "max_depth = 5, alpha = 10, n_estimators = 10, tree_method = 'hist' )", "= model.get_score(importance_type=self.importance_type) fi_data = [[k, fi[k]] for k in fi] table = wandb.Table(data=fi_data,", "= xgb.XGBRegressor(**bst_params) xg_reg.fit(X_train, y_train, eval_set=[(X_test, y_test)], callbacks=[WandbCallback()]) ) ``` \"\"\" def __init__( self,", "wandb.log( { \"Feature Importance\": wandb.plot.bar( table, \"Feature\", \"Importance\", title=\"Feature Importance\" ) } )", "(boolean) if True (default) capture model performance at the best step, instead of", "(when `log_model = True`) - log feature importance plot when `log_feature_importance=True` (default). -", "\"Feature Importance\": wandb.plot.bar( table, \"Feature\", \"Importance\", title=\"Feature Importance\" ) } ) def _define_metric(self,", "training is finished\"\"\" # Update W&B config config = model.save_config() wandb.config.update(json.loads(config)) return model", "Capture the best eval metric in `wandb.summary` when `define_metric=True` (default). Example: ```python bst_params", "model. weight for linear model. 
define_metric: (boolean) if True (default) capture model performance", "WandbCallback for more features.\", UserWarning, stacklevel=2, ) def callback(env): for k, v in", "``` \"\"\" def __init__( self, log_model: bool = False, log_feature_importance: bool = True,", "if \"loss\" in str.lower(metric_name): wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif str.lower(metric_name) in MINIMIZE_METRICS: wandb.define_metric(f\"{data}-{metric_name}\", summary=\"min\") elif", "commit=False) wandb.log({}) return callback class WandbCallback(xgb.callback.TrainingCallback): \"\"\"`WandbCallback` automatically integrates XGBoost with wandb. Arguments:", "performance at the best step, instead of the last step, of training in", "finished\"\"\" # Update W&B config config = model.save_config() wandb.config.update(json.loads(config)) return model def after_training(self," ]
[ "for r in range(1, 6): os.chdir('run_{}'.format(r)) with open('server_stats.json', 'r') as f: data =", "the specific language governing permissions and limitations under the License. \"\"\" import json", "json.load(f) with open('server_stats.json', 'w') as f: data['run_start'] = data['run_start'] - 7200000 data['run_end'] =", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "experiments.items(): os.chdir(dir) for r in range(1, 6): os.chdir('run_{}'.format(r)) with open('server_stats.json', 'r') as f:", "Client': '1Client_IdealBenchmark', '5 Clients': '5Clients_IdealBenchmark', '10 Clients': '10Clients_IdealBenchmark' } for exp, dir in", "'r') as f: data = json.load(f) with open('server_stats.json', 'w') as f: data['run_start'] =", "as f: data = json.load(f) with open('server_stats.json', 'w') as f: data['run_start'] = data['run_start']", "CONDITIONS OF ANY KIND, either express or implied. See the License for the", "OR CONDITIONS OF ANY KIND, either express or implied. See the License for", "OF ANY KIND, either express or implied. See the License for the specific", "to in writing, software distributed under the License is distributed on an \"AS", "'5 Clients': '5Clients_IdealBenchmark', '10 Clients': '10Clients_IdealBenchmark' } for exp, dir in experiments.items(): os.chdir(dir)", "f: data['run_start'] = data['run_start'] - 7200000 data['run_end'] = data['run_end'] - 7200000 json.dump(data, f)", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "not use this file except in compliance with the License. You may obtain", "License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "exp, dir in experiments.items(): os.chdir(dir) for r in range(1, 6): os.chdir('run_{}'.format(r)) with open('server_stats.json',", "except in compliance with the License. 
You may obtain a copy of the", "= json.load(f) with open('server_stats.json', 'w') as f: data['run_start'] = data['run_start'] - 7200000 data['run_end']", "may not use this file except in compliance with the License. You may", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the", "json import os experiments = { '1 Client': '1Client_IdealBenchmark', '5 Clients': '5Clients_IdealBenchmark', '10", "under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "= { '1 Client': '1Client_IdealBenchmark', '5 Clients': '5Clients_IdealBenchmark', '10 Clients': '10Clients_IdealBenchmark' } for", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "2019 <NAME> Licensed under the Apache License, Version 2.0 (the \"License\"); you may", "'1 Client': '1Client_IdealBenchmark', '5 Clients': '5Clients_IdealBenchmark', '10 Clients': '10Clients_IdealBenchmark' } for exp, dir", "os.chdir(dir) for r in range(1, 6): os.chdir('run_{}'.format(r)) with open('server_stats.json', 'r') as f: data", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "the License for the specific language governing permissions and limitations under the License.", "\"\"\" import json import os experiments = { '1 Client': '1Client_IdealBenchmark', '5 Clients':", "ANY KIND, either express or implied. See the License for the specific language", "'10Clients_IdealBenchmark' } for exp, dir in experiments.items(): os.chdir(dir) for r in range(1, 6):", "data = json.load(f) with open('server_stats.json', 'w') as f: data['run_start'] = data['run_start'] - 7200000", "file except in compliance with the License. You may obtain a copy of", "License for the specific language governing permissions and limitations under the License. 
\"\"\"", "Unless required by applicable law or agreed to in writing, software distributed under", "in range(1, 6): os.chdir('run_{}'.format(r)) with open('server_stats.json', 'r') as f: data = json.load(f) with", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing,", "2.0 (the \"License\"); you may not use this file except in compliance with", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "'w') as f: data['run_start'] = data['run_start'] - 7200000 data['run_end'] = data['run_end'] - 7200000", "= data['run_start'] - 7200000 data['run_end'] = data['run_end'] - 7200000 json.dump(data, f) os.chdir('..') os.chdir('..')", "limitations under the License. \"\"\" import json import os experiments = { '1", "See the License for the specific language governing permissions and limitations under the", "Clients': '10Clients_IdealBenchmark' } for exp, dir in experiments.items(): os.chdir(dir) for r in range(1,", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "r in range(1, 6): os.chdir('run_{}'.format(r)) with open('server_stats.json', 'r') as f: data = json.load(f)", "permissions and limitations under the License. \"\"\" import json import os experiments =", "\"\"\" Copyright 2019 <NAME> Licensed under the Apache License, Version 2.0 (the \"License\");", "the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "License, Version 2.0 (the \"License\"); you may not use this file except in", "compliance with the License. 
You may obtain a copy of the License at", "open('server_stats.json', 'w') as f: data['run_start'] = data['run_start'] - 7200000 data['run_end'] = data['run_end'] -", "(the \"License\"); you may not use this file except in compliance with the", "this file except in compliance with the License. You may obtain a copy", "\"License\"); you may not use this file except in compliance with the License.", "express or implied. See the License for the specific language governing permissions and", "} for exp, dir in experiments.items(): os.chdir(dir) for r in range(1, 6): os.chdir('run_{}'.format(r))", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "os experiments = { '1 Client': '1Client_IdealBenchmark', '5 Clients': '5Clients_IdealBenchmark', '10 Clients': '10Clients_IdealBenchmark'", "'5Clients_IdealBenchmark', '10 Clients': '10Clients_IdealBenchmark' } for exp, dir in experiments.items(): os.chdir(dir) for r", "you may not use this file except in compliance with the License. You", "for the specific language governing permissions and limitations under the License. \"\"\" import", "as f: data['run_start'] = data['run_start'] - 7200000 data['run_end'] = data['run_end'] - 7200000 json.dump(data,", "agreed to in writing, software distributed under the License is distributed on an", "the License. 
\"\"\" import json import os experiments = { '1 Client': '1Client_IdealBenchmark',", "distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "os.chdir('run_{}'.format(r)) with open('server_stats.json', 'r') as f: data = json.load(f) with open('server_stats.json', 'w') as", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "f: data = json.load(f) with open('server_stats.json', 'w') as f: data['run_start'] = data['run_start'] -", "software distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT", "by applicable law or agreed to in writing, software distributed under the License", "applicable law or agreed to in writing, software distributed under the License is", "implied. See the License for the specific language governing permissions and limitations under", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "<NAME> Licensed under the Apache License, Version 2.0 (the \"License\"); you may not", "and limitations under the License. 
\"\"\" import json import os experiments = {", "import json import os experiments = { '1 Client': '1Client_IdealBenchmark', '5 Clients': '5Clients_IdealBenchmark',", "with open('server_stats.json', 'w') as f: data['run_start'] = data['run_start'] - 7200000 data['run_end'] = data['run_end']", "range(1, 6): os.chdir('run_{}'.format(r)) with open('server_stats.json', 'r') as f: data = json.load(f) with open('server_stats.json',", "law or agreed to in writing, software distributed under the License is distributed", "open('server_stats.json', 'r') as f: data = json.load(f) with open('server_stats.json', 'w') as f: data['run_start']", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See", "Version 2.0 (the \"License\"); you may not use this file except in compliance", "with open('server_stats.json', 'r') as f: data = json.load(f) with open('server_stats.json', 'w') as f:", "in compliance with the License. You may obtain a copy of the License", "the Apache License, Version 2.0 (the \"License\"); you may not use this file", "use this file except in compliance with the License. You may obtain a", "governing permissions and limitations under the License. \"\"\" import json import os experiments", "{ '1 Client': '1Client_IdealBenchmark', '5 Clients': '5Clients_IdealBenchmark', '10 Clients': '10Clients_IdealBenchmark' } for exp,", "KIND, either express or implied. 
See the License for the specific language governing", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "'1Client_IdealBenchmark', '5 Clients': '5Clients_IdealBenchmark', '10 Clients': '10Clients_IdealBenchmark' } for exp, dir in experiments.items():", "Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use", "for exp, dir in experiments.items(): os.chdir(dir) for r in range(1, 6): os.chdir('run_{}'.format(r)) with", "Copyright 2019 <NAME> Licensed under the Apache License, Version 2.0 (the \"License\"); you", "License. \"\"\" import json import os experiments = { '1 Client': '1Client_IdealBenchmark', '5", "'10 Clients': '10Clients_IdealBenchmark' } for exp, dir in experiments.items(): os.chdir(dir) for r in", "language governing permissions and limitations under the License. \"\"\" import json import os", "in writing, software distributed under the License is distributed on an \"AS IS\"", "6): os.chdir('run_{}'.format(r)) with open('server_stats.json', 'r') as f: data = json.load(f) with open('server_stats.json', 'w')", "experiments = { '1 Client': '1Client_IdealBenchmark', '5 Clients': '5Clients_IdealBenchmark', '10 Clients': '10Clients_IdealBenchmark' }", "under the Apache License, Version 2.0 (the \"License\"); you may not use this", "specific language governing permissions and limitations under the License. \"\"\" import json import", "data['run_start'] = data['run_start'] - 7200000 data['run_end'] = data['run_end'] - 7200000 json.dump(data, f) os.chdir('..')", "writing, software distributed under the License is distributed on an \"AS IS\" BASIS,", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "dir in experiments.items(): os.chdir(dir) for r in range(1, 6): os.chdir('run_{}'.format(r)) with open('server_stats.json', 'r')", "either express or implied. 
See the License for the specific language governing permissions", "in experiments.items(): os.chdir(dir) for r in range(1, 6): os.chdir('run_{}'.format(r)) with open('server_stats.json', 'r') as", "Clients': '5Clients_IdealBenchmark', '10 Clients': '10Clients_IdealBenchmark' } for exp, dir in experiments.items(): os.chdir(dir) for", "or agreed to in writing, software distributed under the License is distributed on", "under the License. \"\"\" import json import os experiments = { '1 Client':", "Apache License, Version 2.0 (the \"License\"); you may not use this file except", "or implied. See the License for the specific language governing permissions and limitations", "import os experiments = { '1 Client': '1Client_IdealBenchmark', '5 Clients': '5Clients_IdealBenchmark', '10 Clients':", "with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "required by applicable law or agreed to in writing, software distributed under the", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software" ]
[ "] operations = [ migrations.AddField( model_name='case', name='abstract', field=models.TextField(blank=True, help_text='Case abstract (Leitsatz) formatted in", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('cases', '0009_auto_20180430_1225'), ]", "'0009_auto_20180430_1225'), ] operations = [ migrations.AddField( model_name='case', name='abstract', field=models.TextField(blank=True, help_text='Case abstract (Leitsatz) formatted", "by Django 2.1.1 on 2018-09-18 08:47 from django.db import migrations, models class Migration(migrations.Migration):", "[ ('cases', '0009_auto_20180430_1225'), ] operations = [ migrations.AddField( model_name='case', name='abstract', field=models.TextField(blank=True, help_text='Case abstract", "Migration(migrations.Migration): dependencies = [ ('cases', '0009_auto_20180430_1225'), ] operations = [ migrations.AddField( model_name='case', name='abstract',", "dependencies = [ ('cases', '0009_auto_20180430_1225'), ] operations = [ migrations.AddField( model_name='case', name='abstract', field=models.TextField(blank=True,", "models class Migration(migrations.Migration): dependencies = [ ('cases', '0009_auto_20180430_1225'), ] operations = [ migrations.AddField(", "import migrations, models class Migration(migrations.Migration): dependencies = [ ('cases', '0009_auto_20180430_1225'), ] operations =", "= [ migrations.AddField( model_name='case', name='abstract', field=models.TextField(blank=True, help_text='Case abstract (Leitsatz) formatted in Legal Markdown',", "2.1.1 on 2018-09-18 08:47 from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "08:47 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('cases', '0009_auto_20180430_1225'),", "migrations, models class Migration(migrations.Migration): dependencies = [ ('cases', '0009_auto_20180430_1225'), ] operations = [", "[ migrations.AddField( 
model_name='case', name='abstract', field=models.TextField(blank=True, help_text='Case abstract (Leitsatz) formatted in Legal Markdown', null=True),", "model_name='case', name='abstract', field=models.TextField(blank=True, help_text='Case abstract (Leitsatz) formatted in Legal Markdown', null=True), ), ]", "Generated by Django 2.1.1 on 2018-09-18 08:47 from django.db import migrations, models class", "= [ ('cases', '0009_auto_20180430_1225'), ] operations = [ migrations.AddField( model_name='case', name='abstract', field=models.TextField(blank=True, help_text='Case", "migrations.AddField( model_name='case', name='abstract', field=models.TextField(blank=True, help_text='Case abstract (Leitsatz) formatted in Legal Markdown', null=True), ),", "<reponame>docsuleman/oldp<filename>oldp/apps/cases/migrations/0010_case_abstract.py # Generated by Django 2.1.1 on 2018-09-18 08:47 from django.db import migrations,", "on 2018-09-18 08:47 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('cases', '0009_auto_20180430_1225'), ] operations", "class Migration(migrations.Migration): dependencies = [ ('cases', '0009_auto_20180430_1225'), ] operations = [ migrations.AddField( model_name='case',", "2018-09-18 08:47 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('cases',", "# Generated by Django 2.1.1 on 2018-09-18 08:47 from django.db import migrations, models", "('cases', '0009_auto_20180430_1225'), ] operations = [ migrations.AddField( model_name='case', name='abstract', field=models.TextField(blank=True, help_text='Case abstract (Leitsatz)", "Django 2.1.1 on 2018-09-18 08:47 from django.db import migrations, models class Migration(migrations.Migration): dependencies", "operations = [ migrations.AddField( model_name='case', name='abstract', field=models.TextField(blank=True, help_text='Case 
abstract (Leitsatz) formatted in Legal" ]
[ "10): obj = next(generator) buf = objects.get(obj[\"@id\"], []) buf.append(obj) objects[obj[\"@id\"]] = buf for", "cannot be earlier than creation\" base_date = datetime.datetime(2000, 1, 1) for i in", "in objlist: if last_obj is None: last_obj = obj else: assert last_obj ==", "\"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\", \"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\",", "test_idempotence(): \"Generate a mock data set multiple times and make sure they are", "mock data. \"\"\" import datetime from papilotte.connectors.mock import mockdata def test_generate_person(): \"Make sure", "\"http://example.com/15\", ] def test_get_modifier_distribution(): \"\"\"Check if distribution of modifier names is close to", "+ 1) assert \"Person\" in factoid[\"person\"][\"@id\"] assert \"Source\" in factoid[\"source\"][\"@id\"] assert \"statement\" in", "= {} for _ in range(num_of_different_objects * 10): obj = next(generator) buf =", "generated_factoids.append(next(generator)) return generated_factoids data_to_compare = make_factoids(250) for _ in range(10): assert data_to_compare ==", "equal and if there are exactly 3 modifiers. 
\"\"\" counter = {} for", "counter.get(\"yyyy-mm\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 2: counter[\"yyyy-mm-dd\"] = counter.get(\"yyyy-mm-dd\", 0) +", "== \"Xxx %d_%d\" % (counter, i + 1) assert obj[\"uri\"] == \"http://example.com/xxx/%d/%d\" %", "range(100): factoid = next(generator) assert factoid[\"@id\"] == \"Factoid %03d\" % (i + 1)", "modifier names is close to equal and if there are exactly 3 modifiers.", "\"2000-01-13T20:38:24+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for i in range(10): assert mockdata.get_datetime(base_date,", "mockdata.get_datetime(base_date, i, True) assert creation_time <= modification_time def test_idempotence(): \"Generate a mock data", "an uri or created as expected.\" for counter in (1, 4): objects =", "mockdata.make_date(6) == {\"label\": \"1806\", \"sortdate\": \"1806\"} assert mockdata.make_date(7) == {\"label\": \"July 1807\", \"sortdate\":", "0) + 1 assert counter[\"Creator 1\"] == counter[\"Creator 2\"] assert counter[\"Creator 1\"] ==", "counter[\"yyyy\"] assert counter[\"None\"] == counter[\"yyyy-mm\"] assert counter[\"None\"] == counter[\"yyyy-mm-dd\"] def test_uris(): \"Test the", "\"User 1\", \"modifiedWhen\": \"2019-10-12\", \"modifiedBy\": \"User 2\", } generator = mockdata.generate_statement(factoid, 1) for", "\"make_factoids is a convenience function to create test data.\" assert len(mockdata.make_factoids(15)) == 15", "= counter.get(\"None\", 0) + 1 elif data == {}: counter[\"empty\"] = counter.get(\"empty\", 0)", "range(999): modifier = mockdata.get_modifier(i) counter[modifier] = counter.get(modifier, 0) + 1 assert counter[\"Modifier 1\"]", "counter) for i, obj in enumerate(objects): assert obj[\"label\"] == \"Xxx %d_%d\" % (counter,", "num_of_different_objects = 15 generator = mockdata.generate_person(num_of_different_objects) objects = {} for _ in range(num_of_different_objects", "obj[\"uri\"] == \"http://example.com/xxx/%d/%d\" % (counter, i + 1) def test_make_date(): \"Make date 
generates", "% (i + 1) def test_make_label_objects(): \"Make sure simple object consisting of a", "i in range(1, 6): assert mockdata.get_creator(i) == \"Creator %d\" % i def test_get_datetime():", "generator. \"\"\" generator = mockdata.generate_factoid() for i in range(100): factoid = next(generator) assert", "== {} def test_make_date_distribution(): \"Check if dates are equally distributed in mockdata.\" counter", "1 elif data[\"sortdate\"].count(\"-\") == 1: counter[\"yyyy-mm\"] = counter.get(\"yyyy-mm\", 0) + 1 elif data[\"sortdate\"].count(\"-\")", "{\"label\": \"8 August 1808\", \"sortdate\": \"1808-08-08\"} assert mockdata.make_date(9) == {} def test_make_date_distribution(): \"Check", "+ 1 elif data[\"sortdate\"].count(\"-\") == 0: counter[\"yyyy\"] = counter.get(\"yyyy\", 0) + 1 elif", "\"1801\", \"sortdate\": \"1801\"} assert mockdata.make_date(2) == {\"label\": \"February 1802\", \"sortdate\": \"1802-02\"} assert mockdata.make_date(3)", "assert stmt[\"modifiedBy\"] == factoid[\"modifiedBy\"] assert stmt[\"modifiedWhen\"] == factoid[\"modifiedWhen\"] def test_generate_factoid(): \"\"\"Test the factoid", "test_get_datetime(): \"Test the mockdata get_date function.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\",", "as expected.\" for counter in (1, 4): objects = mockdata.make_label_objects(3, \"xxx\", counter) for", "factoid assert factoid[\"statement\"][\"@id\"] == \"F%dS1\" % (i + 1) def test_make_label_objects(): \"Make sure", "1) assert obj[\"uri\"] == \"http://example.com/xxx/%d/%d\" % (counter, i + 1) def test_make_date(): \"Make", "test_generate_person(): \"Make sure generate_person() doesn not create more than 15 different persons.\" num_of_different_objects", "== counter[\"Creator 3\"] assert counter[\"Creator 1\"] == counter[\"Creator 4\"] assert counter[\"Creator 1\"] ==", "None: last_obj = obj else: assert last_obj == obj def test_generate_source(): \"Make 
sure", "set multiple times and make sure they are identical\" def make_factoids(num): generated_factoids =", "= mockdata.generate_factoid() for _ in range(num): generated_factoids.append(next(generator)) return generated_factoids data_to_compare = make_factoids(250) for", "= [ \"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\", \"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\",", "3 modifiers. \"\"\" counter = {} for i in range(999): modifier = mockdata.get_modifier(i)", "15 generator = mockdata.generate_person(num_of_different_objects) objects = {} for _ in range(num_of_different_objects * 10):", "sources with sam pid contain same data for pid, objlist in objects.items(): last_obj", "buf for pid in objects: assert len(objects[pid]) == 10 # make sure persons", "% (i + 1) assert \"Person\" in factoid[\"person\"][\"@id\"] assert \"Source\" in factoid[\"source\"][\"@id\"] assert", "3\"] def test_get_modifer(): \"Test creation order of get_modifier().\" assert mockdata.get_modifier(1) == \"Modifier 3\"", "order of get_creator().\" for i in range(1, 6): assert mockdata.get_creator(i) == \"Creator %d\"", "\"@id\": \"Factoid 1\", \"createdWhen\": \"2019-07-21\", \"createdBy\": \"User 1\", \"modifiedWhen\": \"2019-10-12\", \"modifiedBy\": \"User 2\",", "counter = {} for i in range(999): modifier = mockdata.get_modifier(i) counter[modifier] = counter.get(modifier,", "\"\"\" import datetime from papilotte.connectors.mock import mockdata def test_generate_person(): \"Make sure generate_person() doesn", "obj = next(generator) buf = objects.get(obj[\"@id\"], []) buf.append(obj) objects[obj[\"@id\"]] = buf for pid", "sources.\" num_of_different_objects = 25 generator = mockdata.generate_source(num_of_different_objects) objects = {} for _ in", 
"== \"Modifier 3\" assert mockdata.get_modifier(5) == \"Modifier 1\" assert mockdata.get_modifier(6) == \"Modifier 2\"", "1\"] == counter[\"Creator 3\"] assert counter[\"Creator 1\"] == counter[\"Creator 4\"] assert counter[\"Creator 1\"]", "data for pid, objlist in objects.items(): last_obj = None for obj in objlist:", "= mockdata.generate_factoid() for i in range(100): factoid = next(generator) assert factoid[\"@id\"] == \"Factoid", "counter = {} for i in range(1000): modifier = mockdata.get_creator(i) counter[modifier] = counter.get(modifier,", "_ in range(num_of_different_objects * 10): obj = next(generator) buf = objects.get(obj[\"@id\"], []) buf.append(obj)", "expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\",", "10 # make sure sources with sam pid contain same data for pid,", "persons with same pid contain same data for pid, objlist in objects.items(): last_obj", "assert mockdata.get_datetime(base_date, i, True) == expected[i] def test_mod_time_after_creation_time(): \"Assert modification cannot be earlier", "\"\"\"Test creation of mock data. 
\"\"\" import datetime from papilotte.connectors.mock import mockdata def", "1) for i in range(20): assert mockdata.get_datetime(base_date, i, True) == expected[i] def test_mod_time_after_creation_time():", "== expected[i] def test_get_datetime_with_offset(): \"Test if getting a date with offset works.\" expected", "for pid in objects: assert len(objects[pid]) == 10 # make sure sources with", "generate_source() does not create more than 15 different sources.\" num_of_different_objects = 25 generator", "assert counter[\"None\"] == counter[\"empty\"] assert counter[\"None\"] == counter[\"yyyy\"] assert counter[\"None\"] == counter[\"yyyy-mm\"] assert", "next(generator) assert factoid[\"@id\"] == \"Factoid %03d\" % (i + 1) assert \"Person\" in", "len(objects[pid]) == 10 # make sure persons with same pid contain same data", "counter[\"Modifier 3\"] def test_get_modifer(): \"Test creation order of get_modifier().\" assert mockdata.get_modifier(1) == \"Modifier", "== factoid[\"createdBy\"] assert stmt[\"createdWhen\"] == factoid[\"createdWhen\"] assert stmt[\"modifiedBy\"] == factoid[\"modifiedBy\"] assert stmt[\"modifiedWhen\"] ==", "\"http://example.com/14\", \"http://example.com/15\", ] def test_get_modifier_distribution(): \"\"\"Check if distribution of modifier names is close", "stmt[\"createdBy\"] == factoid[\"createdBy\"] assert stmt[\"createdWhen\"] == factoid[\"createdWhen\"] assert stmt[\"modifiedBy\"] == factoid[\"modifiedBy\"] assert stmt[\"modifiedWhen\"]", "generate_person() doesn not create more than 15 different persons.\" num_of_different_objects = 15 generator", "\"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", ] assert mockdata.get_uris(3) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\",", "\"Modifier 3\" assert mockdata.get_modifier(5) == \"Modifier 1\" assert mockdata.get_modifier(6) == \"Modifier 2\" def", "\"\"\" counter = {} for i in 
range(1000): modifier = mockdata.get_creator(i) counter[modifier] =", "with sam pid contain same data for pid, objlist in objects.items(): last_obj =", "assert mockdata.get_datetime(base_date, i) == expected[i] def test_get_datetime_with_offset(): \"Test if getting a date with", "last_obj = obj else: assert last_obj == obj def test_generate_source(): \"Make sure generate_source()", "counter[\"Modifier 2\"] assert counter[\"Modifier 1\"] == counter[\"Modifier 3\"] def test_get_modifer(): \"Test creation order", "2: counter[\"yyyy-mm-dd\"] = counter.get(\"yyyy-mm-dd\", 0) + 1 assert counter[\"None\"] == counter[\"empty\"] assert counter[\"None\"]", "objects = mockdata.make_label_objects(3, \"xxx\", counter) for i, obj in enumerate(objects): assert obj[\"label\"] ==", "= 15 generator = mockdata.generate_person(num_of_different_objects) objects = {} for _ in range(num_of_different_objects *", "\"1806\"} assert mockdata.make_date(7) == {\"label\": \"July 1807\", \"sortdate\": \"1807-07\"} assert mockdata.make_date(8) == {\"label\":", "assert last_obj == obj def test_generate_source(): \"Make sure generate_source() does not create more", "i in range(100): factoid = next(generator) assert factoid[\"@id\"] == \"Factoid %03d\" % (i", "\"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", ] assert mockdata.get_uris(3) == [ \"http://example.com/1\",", "getting a date with offset works.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\",", "\"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for i in range(10):", "next(generator) buf = objects.get(obj[\"@id\"], []) buf.append(obj) objects[obj[\"@id\"]] = buf for pid in objects:", "= [ \"2000-01-01T00:00:00+02:00\", 
\"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\", ]", "def test_get_creator(): \"Test creation order of get_creator().\" for i in range(1, 6): assert", "\"createdWhen\": \"2019-07-21\", \"createdBy\": \"User 1\", \"modifiedWhen\": \"2019-10-12\", \"modifiedBy\": \"User 2\", } generator =", "obj in objlist: if last_obj is None: last_obj = obj else: assert last_obj", "assert mockdata.get_uris(3) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", \"http://example.com/9\",", "if distribution of creator names is close to equal and if there are", "assert mockdata.get_creator(i) == \"Creator %d\" % i def test_get_datetime(): \"Test the mockdata get_date", "def test_get_datetime_with_offset(): \"Test if getting a date with offset works.\" expected = [", "to equal and if there are exactly 3 modifiers. \"\"\" counter = {}", "test_get_creator_distribution(): \"\"\"Check if distribution of creator names is close to equal and if", "def test_generate_factoid(): \"\"\"Test the factoid generator. 
\"\"\" generator = mockdata.generate_factoid() for i in", "{} for i in range(1000): data = mockdata.make_date(i) if data is None: counter[\"None\"]", "data[\"sortdate\"].count(\"-\") == 0: counter[\"yyyy\"] = counter.get(\"yyyy\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 1:", "identical\" def make_factoids(num): generated_factoids = [] generator = mockdata.generate_factoid() for _ in range(num):", "factoid[\"person\"][\"@id\"] assert \"Source\" in factoid[\"source\"][\"@id\"] assert \"statement\" in factoid assert factoid[\"statement\"][\"@id\"] == \"F%dS1\"", "counter[\"Creator 1\"] == counter[\"Creator 2\"] assert counter[\"Creator 1\"] == counter[\"Creator 3\"] assert counter[\"Creator", "1: counter[\"yyyy-mm\"] = counter.get(\"yyyy-mm\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 2: counter[\"yyyy-mm-dd\"] =", "create more than 15 different sources.\" num_of_different_objects = 25 generator = mockdata.generate_source(num_of_different_objects) objects", "pid in objects: assert len(objects[pid]) == 10 # make sure persons with same", "== obj def test_generate_source(): \"Make sure generate_source() does not create more than 15", "datetime.datetime(2000, 1, 1) for i in range(1000): creation_time = mockdata.get_datetime(base_date, i) modification_time =", "in factoid assert factoid[\"statement\"][\"@id\"] == \"F%dS1\" % (i + 1) def test_make_label_objects(): \"Make", "a mock data set multiple times and make sure they are identical\" def", "get_creator().\" for i in range(1, 6): assert mockdata.get_creator(i) == \"Creator %d\" % i", "\"http://example.com/7\", \"http://example.com/8\", \"http://example.com/9\", \"http://example.com/10\", \"http://example.com/11\", \"http://example.com/12\", \"http://example.com/13\", \"http://example.com/14\", \"http://example.com/15\", ] def test_get_modifier_distribution(): \"\"\"Check", "<= modification_time def test_idempotence(): \"Generate a mock data set multiple times and make", "for i in range(20): assert 
mockdata.get_datetime(base_date, i, True) == expected[i] def test_mod_time_after_creation_time(): \"Assert", "\"modifiedBy\": \"User 2\", } generator = mockdata.generate_statement(factoid, 1) for i in range(5): stmt", "make_date might return an empty dict assert mockdata.make_date(0) is None assert mockdata.make_date(1) ==", "%03d\" % (i + 1) assert \"Person\" in factoid[\"person\"][\"@id\"] assert \"Source\" in factoid[\"source\"][\"@id\"]", "1, 1) for i in range(1000): creation_time = mockdata.get_datetime(base_date, i) modification_time = mockdata.get_datetime(base_date,", "= mockdata.generate_statement(factoid, 1) for i in range(5): stmt = next(generator) assert stmt[\"@id\"] ==", "+ 1) assert stmt[\"createdBy\"] == factoid[\"createdBy\"] assert stmt[\"createdWhen\"] == factoid[\"createdWhen\"] assert stmt[\"modifiedBy\"] ==", "\"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for i in range(10): assert", "stmt[\"createdWhen\"] == factoid[\"createdWhen\"] assert stmt[\"modifiedBy\"] == factoid[\"modifiedBy\"] assert stmt[\"modifiedWhen\"] == factoid[\"modifiedWhen\"] def test_generate_factoid():", "0) + 1 elif data[\"sortdate\"].count(\"-\") == 1: counter[\"yyyy-mm\"] = counter.get(\"yyyy-mm\", 0) + 1", "mockdata.generate_factoid() for _ in range(num): generated_factoids.append(next(generator)) return generated_factoids data_to_compare = make_factoids(250) for _", "0) + 1 assert counter[\"Modifier 1\"] == counter[\"Modifier 2\"] assert counter[\"Modifier 1\"] ==", "= mockdata.get_creator(i) counter[modifier] = counter.get(modifier, 0) + 1 assert counter[\"Creator 1\"] == counter[\"Creator", "there are exactly 3 modifiers. 
\"\"\" counter = {} for i in range(999):", "distribution of creator names is close to equal and if there are exactly", "\"Assert modification cannot be earlier than creation\" base_date = datetime.datetime(2000, 1, 1) for", "== [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", \"http://example.com/9\", \"http://example.com/10\", \"http://example.com/11\",", "sure persons with same pid contain same data for pid, objlist in objects.items():", "with same pid contain same data for pid, objlist in objects.items(): last_obj =", "_ in range(num): generated_factoids.append(next(generator)) return generated_factoids data_to_compare = make_factoids(250) for _ in range(10):", "\"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for i", "] assert mockdata.get_uris(2) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\",", "1) for i in range(5): stmt = next(generator) assert stmt[\"@id\"] == \"F1S%d\" %", "base_date = datetime.datetime(2000, 1, 1) for i in range(10): assert mockdata.get_datetime(base_date, i) ==", "\"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", \"http://example.com/9\", \"http://example.com/10\", \"http://example.com/11\", \"http://example.com/12\", \"http://example.com/13\",", "and if there are exactly 3 creators. 
\"\"\" counter = {} for i", "assert stmt[\"modifiedWhen\"] == factoid[\"modifiedWhen\"] def test_generate_factoid(): \"\"\"Test the factoid generator. \"\"\" generator =", "assert mockdata.make_date(3) == {\"label\": \"3 March 1803\", \"sortdate\": \"1803-03-03\"} assert mockdata.make_date(5) is None", "== 10 # make sure persons with same pid contain same data for", "uri or created as expected.\" for counter in (1, 4): objects = mockdata.make_label_objects(3,", "close to equal and if there are exactly 3 creators. \"\"\" counter =", "= {} for i in range(1000): modifier = mockdata.get_creator(i) counter[modifier] = counter.get(modifier, 0)", "counter.get(modifier, 0) + 1 assert counter[\"Creator 1\"] == counter[\"Creator 2\"] assert counter[\"Creator 1\"]", "i, obj in enumerate(objects): assert obj[\"label\"] == \"Xxx %d_%d\" % (counter, i +", "def test_make_date(): \"Make date generates a dict consisting of a date-label and a", "= counter.get(\"yyyy-mm-dd\", 0) + 1 assert counter[\"None\"] == counter[\"empty\"] assert counter[\"None\"] == counter[\"yyyy\"]", "[]) buf.append(obj) objects[obj[\"@id\"]] = buf for pid in objects: assert len(objects[pid]) == 10", "= mockdata.generate_source(num_of_different_objects) objects = {} for _ in range(num_of_different_objects * 10): obj =", "function.\" assert mockdata.get_uris(1) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", ] assert mockdata.get_uris(2) == [", "5\"] def test_get_creator(): \"Test creation order of get_creator().\" for i in range(1, 6):", "1\"] == counter[\"Creator 2\"] assert counter[\"Creator 1\"] == counter[\"Creator 3\"] assert counter[\"Creator 1\"]", "consisting of a label and an uri or created as expected.\" for counter", "mockdata.get_modifier(6) == \"Modifier 2\" def test_get_creator_distribution(): \"\"\"Check if distribution of creator names is", "datetime from papilotte.connectors.mock import mockdata def test_generate_person(): \"Make sure generate_person() 
doesn not create", "range(10): assert mockdata.get_datetime(base_date, i) == expected[i] def test_get_datetime_with_offset(): \"Test if getting a date", "\"http://example.com/11\", \"http://example.com/12\", \"http://example.com/13\", \"http://example.com/14\", \"http://example.com/15\", ] def test_get_modifier_distribution(): \"\"\"Check if distribution of modifier", "modification cannot be earlier than creation\" base_date = datetime.datetime(2000, 1, 1) for i", "1807\", \"sortdate\": \"1807-07\"} assert mockdata.make_date(8) == {\"label\": \"8 August 1808\", \"sortdate\": \"1808-08-08\"} assert", "in mockdata.\" counter = {} for i in range(1000): data = mockdata.make_date(i) if", "counter[\"Creator 1\"] == counter[\"Creator 5\"] def test_get_creator(): \"Test creation order of get_creator().\" for", "\"http://example.com/10\", \"http://example.com/11\", \"http://example.com/12\", \"http://example.com/13\", \"http://example.com/14\", \"http://example.com/15\", ] def test_get_modifier_distribution(): \"\"\"Check if distribution of", "counter[\"yyyy-mm\"] = counter.get(\"yyyy-mm\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 2: counter[\"yyyy-mm-dd\"] = counter.get(\"yyyy-mm-dd\",", "+ 1) assert obj[\"uri\"] == \"http://example.com/xxx/%d/%d\" % (counter, i + 1) def test_make_date():", "\"1803-03-03\"} assert mockdata.make_date(5) is None assert mockdata.make_date(6) == {\"label\": \"1806\", \"sortdate\": \"1806\"} assert", "in range(100): factoid = next(generator) assert factoid[\"@id\"] == \"Factoid %03d\" % (i +", "a dict consisting of a date-label and a date string.\" # make_date might", "\"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", \"http://example.com/9\", \"http://example.com/10\", \"http://example.com/11\", \"http://example.com/12\", \"http://example.com/13\", \"http://example.com/14\", \"http://example.com/15\", ] def", "1 assert counter[\"Creator 1\"] == counter[\"Creator 2\"] assert 
counter[\"Creator 1\"] == counter[\"Creator 3\"]", "== 2: counter[\"yyyy-mm-dd\"] = counter.get(\"yyyy-mm-dd\", 0) + 1 assert counter[\"None\"] == counter[\"empty\"] assert", "\"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for i", "mockdata get_date function.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\",", "factoid[\"modifiedWhen\"] def test_generate_factoid(): \"\"\"Test the factoid generator. \"\"\" generator = mockdata.generate_factoid() for i", "2\" assert mockdata.get_modifier(4) == \"Modifier 3\" assert mockdata.get_modifier(5) == \"Modifier 1\" assert mockdata.get_modifier(6)", "more than 15 different persons.\" num_of_different_objects = 15 generator = mockdata.generate_person(num_of_different_objects) objects =", "\"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\", \"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\",", "\"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\", ] base_date", "same pid contain same data for pid, objlist in objects.items(): last_obj = None", "in range(1000): modifier = mockdata.get_creator(i) counter[modifier] = 
counter.get(modifier, 0) + 1 assert counter[\"Creator", "times and make sure they are identical\" def make_factoids(num): generated_factoids = [] generator", "== [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", ] assert mockdata.get_uris(2) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\",", "assert factoid[\"@id\"] == \"Factoid %03d\" % (i + 1) assert \"Person\" in factoid[\"person\"][\"@id\"]", "dates are equally distributed in mockdata.\" counter = {} for i in range(1000):", "counter = {} for i in range(1000): data = mockdata.make_date(i) if data is", "mockdata.get_creator(i) == \"Creator %d\" % i def test_get_datetime(): \"Test the mockdata get_date function.\"", "0) + 1 elif data[\"sortdate\"].count(\"-\") == 0: counter[\"yyyy\"] = counter.get(\"yyyy\", 0) + 1", "== 10 # make sure sources with sam pid contain same data for", "\"Make date generates a dict consisting of a date-label and a date string.\"", "= datetime.datetime(2000, 1, 1) for i in range(20): assert mockdata.get_datetime(base_date, i, True) ==", "\"Modifier 2\" def test_get_creator_distribution(): \"\"\"Check if distribution of creator names is close to", "== obj def test_generate_statement(): \"Make sure generate_statement() works as expected.\" factoid = {", "data = mockdata.make_date(i) if data is None: counter[\"None\"] = counter.get(\"None\", 0) + 1", "= obj else: assert last_obj == obj def test_generate_statement(): \"Make sure generate_statement() works", "\"\"\" generator = mockdata.generate_factoid() for i in range(100): factoid = next(generator) assert factoid[\"@id\"]", "+ 1 elif data[\"sortdate\"].count(\"-\") == 1: counter[\"yyyy-mm\"] = counter.get(\"yyyy-mm\", 0) + 1 elif", "\"xxx\", counter) for i, obj in enumerate(objects): assert obj[\"label\"] == \"Xxx %d_%d\" %", "1803\", \"sortdate\": \"1803-03-03\"} assert mockdata.make_date(5) is None assert mockdata.make_date(6) == {\"label\": \"1806\", \"sortdate\":", 
"creation order of get_modifier().\" assert mockdata.get_modifier(1) == \"Modifier 3\" assert mockdata.get_modifier(2) == \"Modifier", "range(1, 6): assert mockdata.get_creator(i) == \"Creator %d\" % i def test_get_datetime(): \"Test the", "for _ in range(10): assert data_to_compare == make_factoids(250) def test_make_factoids(): \"make_factoids is a", "counter[\"Creator 4\"] assert counter[\"Creator 1\"] == counter[\"Creator 5\"] def test_get_creator(): \"Test creation order", "\"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\", ] base_date = datetime.datetime(2000, 1,", "last_obj is None: last_obj = obj else: assert last_obj == obj def test_generate_statement():", "elif data == {}: counter[\"empty\"] = counter.get(\"empty\", 0) + 1 elif data[\"sortdate\"].count(\"-\") ==", "data_to_compare == make_factoids(250) def test_make_factoids(): \"make_factoids is a convenience function to create test", "== counter[\"Creator 4\"] assert counter[\"Creator 1\"] == counter[\"Creator 5\"] def test_get_creator(): \"Test creation", "mockdata.make_date(3) == {\"label\": \"3 March 1803\", \"sortdate\": \"1803-03-03\"} assert mockdata.make_date(5) is None assert", "range(1000): data = mockdata.make_date(i) if data is None: counter[\"None\"] = counter.get(\"None\", 0) +", "assert counter[\"None\"] == counter[\"yyyy-mm-dd\"] def test_uris(): \"Test the mockdata get_uri function.\" assert mockdata.get_uris(1)", "\"Test if getting a date with offset works.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\",", "assert counter[\"Creator 1\"] == counter[\"Creator 4\"] assert counter[\"Creator 1\"] == counter[\"Creator 5\"] def", "more than 15 different sources.\" num_of_different_objects = 25 generator = mockdata.generate_source(num_of_different_objects) objects =", 
"sure they are identical\" def make_factoids(num): generated_factoids = [] generator = mockdata.generate_factoid() for", "the mockdata get_uri function.\" assert mockdata.get_uris(1) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", ] assert", "mockdata.get_uris(2) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", ] assert", "{\"label\": \"February 1802\", \"sortdate\": \"1802-02\"} assert mockdata.make_date(3) == {\"label\": \"3 March 1803\", \"sortdate\":", "= mockdata.get_modifier(i) counter[modifier] = counter.get(modifier, 0) + 1 assert counter[\"Modifier 1\"] == counter[\"Modifier", "] def test_get_modifier_distribution(): \"\"\"Check if distribution of modifier names is close to equal", "and a date string.\" # make_date might return an empty dict assert mockdata.make_date(0)", "== \"Modifier 3\" assert mockdata.get_modifier(2) == \"Modifier 1\" assert mockdata.get_modifier(3) == \"Modifier 2\"", "test_make_date_distribution(): \"Check if dates are equally distributed in mockdata.\" counter = {} for", "mockdata.get_datetime(base_date, i) == expected[i] def test_get_datetime_with_offset(): \"Test if getting a date with offset", "\"Modifier 1\" assert mockdata.get_modifier(6) == \"Modifier 2\" def test_get_creator_distribution(): \"\"\"Check if distribution of", "factoid = { \"@id\": \"Factoid 1\", \"createdWhen\": \"2019-07-21\", \"createdBy\": \"User 1\", \"modifiedWhen\": \"2019-10-12\",", "factoid[\"statement\"][\"@id\"] == \"F%dS1\" % (i + 1) def test_make_label_objects(): \"Make sure simple object", "objects = {} for _ in range(num_of_different_objects * 10): obj = next(generator) buf", "\"Generate a mock data set multiple times and make sure they are identical\"", "for i in range(1, 6): assert mockdata.get_creator(i) == \"Creator %d\" % i def", "buf = 
objects.get(obj[\"@id\"], []) buf.append(obj) objects[obj[\"@id\"]] = buf for pid in objects: assert", "counter[\"None\"] == counter[\"yyyy\"] assert counter[\"None\"] == counter[\"yyyy-mm\"] assert counter[\"None\"] == counter[\"yyyy-mm-dd\"] def test_uris():", "for _ in range(num): generated_factoids.append(next(generator)) return generated_factoids data_to_compare = make_factoids(250) for _ in", "_ in range(10): assert data_to_compare == make_factoids(250) def test_make_factoids(): \"make_factoids is a convenience", "\"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\", \"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\",", "does not create more than 15 different sources.\" num_of_different_objects = 25 generator =", "data is None: counter[\"None\"] = counter.get(\"None\", 0) + 1 elif data == {}:", "string.\" # make_date might return an empty dict assert mockdata.make_date(0) is None assert", "] assert mockdata.get_uris(3) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\",", "test_make_date(): \"Make date generates a dict consisting of a date-label and a date", "obj else: assert last_obj == obj def test_generate_statement(): \"Make sure generate_statement() works as", "assert mockdata.make_date(6) == {\"label\": \"1806\", \"sortdate\": \"1806\"} assert mockdata.make_date(7) == {\"label\": \"July 1807\",", "[ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", 
\"http://example.com/9\", \"http://example.com/10\", \"http://example.com/11\", \"http://example.com/12\",", "dict assert mockdata.make_date(0) is None assert mockdata.make_date(1) == {\"label\": \"1801\", \"sortdate\": \"1801\"} assert", "i in range(1000): data = mockdata.make_date(i) if data is None: counter[\"None\"] = counter.get(\"None\",", "generator = mockdata.generate_person(num_of_different_objects) objects = {} for _ in range(num_of_different_objects * 10): obj", "== {\"label\": \"1801\", \"sortdate\": \"1801\"} assert mockdata.make_date(2) == {\"label\": \"February 1802\", \"sortdate\": \"1802-02\"}", "generator = mockdata.generate_factoid() for i in range(100): factoid = next(generator) assert factoid[\"@id\"] ==", "\"Modifier 2\" assert mockdata.get_modifier(4) == \"Modifier 3\" assert mockdata.get_modifier(5) == \"Modifier 1\" assert", "to equal and if there are exactly 3 creators. \"\"\" counter = {}", "= make_factoids(250) for _ in range(10): assert data_to_compare == make_factoids(250) def test_make_factoids(): \"make_factoids", "# make sure sources with sam pid contain same data for pid, objlist", "(counter, i + 1) def test_make_date(): \"Make date generates a dict consisting of", "== make_factoids(250) def test_make_factoids(): \"make_factoids is a convenience function to create test data.\"", "factoid[\"modifiedBy\"] assert stmt[\"modifiedWhen\"] == factoid[\"modifiedWhen\"] def test_generate_factoid(): \"\"\"Test the factoid generator. 
\"\"\" generator", "persons.\" num_of_different_objects = 15 generator = mockdata.generate_person(num_of_different_objects) objects = {} for _ in", "\"February 1802\", \"sortdate\": \"1802-02\"} assert mockdata.make_date(3) == {\"label\": \"3 March 1803\", \"sortdate\": \"1803-03-03\"}", "counter[\"yyyy\"] = counter.get(\"yyyy\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 1: counter[\"yyyy-mm\"] = counter.get(\"yyyy-mm\",", "\"1806\", \"sortdate\": \"1806\"} assert mockdata.make_date(7) == {\"label\": \"July 1807\", \"sortdate\": \"1807-07\"} assert mockdata.make_date(8)", "distribution of modifier names is close to equal and if there are exactly", "\"Factoid 1\", \"createdWhen\": \"2019-07-21\", \"createdBy\": \"User 1\", \"modifiedWhen\": \"2019-10-12\", \"modifiedBy\": \"User 2\", }", "data_to_compare = make_factoids(250) for _ in range(10): assert data_to_compare == make_factoids(250) def test_make_factoids():", "test_generate_source(): \"Make sure generate_source() does not create more than 15 different sources.\" num_of_different_objects", "consisting of a date-label and a date string.\" # make_date might return an", "== \"F%dS1\" % (i + 1) def test_make_label_objects(): \"Make sure simple object consisting", "factoid[\"@id\"] == \"Factoid %03d\" % (i + 1) assert \"Person\" in factoid[\"person\"][\"@id\"] assert", "3\" assert mockdata.get_modifier(2) == \"Modifier 1\" assert mockdata.get_modifier(3) == \"Modifier 2\" assert mockdata.get_modifier(4)", "i, True) assert creation_time <= modification_time def test_idempotence(): \"Generate a mock data set", "in range(num_of_different_objects * 10): obj = next(generator) buf = objects.get(obj[\"@id\"], []) buf.append(obj) objects[obj[\"@id\"]]", "= mockdata.get_datetime(base_date, i) modification_time = mockdata.get_datetime(base_date, i, True) assert creation_time <= modification_time def", "i def test_get_datetime(): \"Test the mockdata get_date function.\" expected = [ \"2000-01-01T00:00:00+02:00\", 
\"2000-01-02T10:17:36+02:00\",", "} generator = mockdata.generate_statement(factoid, 1) for i in range(5): stmt = next(generator) assert", "\"2019-10-12\", \"modifiedBy\": \"User 2\", } generator = mockdata.generate_statement(factoid, 1) for i in range(5):", "mockdata.make_date(i) if data is None: counter[\"None\"] = counter.get(\"None\", 0) + 1 elif data", "objects: assert len(objects[pid]) == 10 # make sure sources with sam pid contain", "creation of mock data. \"\"\" import datetime from papilotte.connectors.mock import mockdata def test_generate_person():", "\"1807-07\"} assert mockdata.make_date(8) == {\"label\": \"8 August 1808\", \"sortdate\": \"1808-08-08\"} assert mockdata.make_date(9) ==", "close to equal and if there are exactly 3 modifiers. \"\"\" counter =", "mockdata.get_modifier(4) == \"Modifier 3\" assert mockdata.get_modifier(5) == \"Modifier 1\" assert mockdata.get_modifier(6) == \"Modifier", "] base_date = datetime.datetime(2000, 1, 1) for i in range(10): assert mockdata.get_datetime(base_date, i)", "1 assert counter[\"None\"] == counter[\"empty\"] assert counter[\"None\"] == counter[\"yyyy\"] assert counter[\"None\"] == counter[\"yyyy-mm\"]", "== factoid[\"modifiedWhen\"] def test_generate_factoid(): \"\"\"Test the factoid generator. 
\"\"\" generator = mockdata.generate_factoid() for", "test_generate_statement(): \"Make sure generate_statement() works as expected.\" factoid = { \"@id\": \"Factoid 1\",", "def test_uris(): \"Test the mockdata get_uri function.\" assert mockdata.get_uris(1) == [ \"http://example.com/1\", \"http://example.com/2\",", "\"http://example.com/3\", ] assert mockdata.get_uris(2) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\",", "\"Test creation order of get_modifier().\" assert mockdata.get_modifier(1) == \"Modifier 3\" assert mockdata.get_modifier(2) ==", "base_date = datetime.datetime(2000, 1, 1) for i in range(20): assert mockdata.get_datetime(base_date, i, True)", "{} def test_make_date_distribution(): \"Check if dates are equally distributed in mockdata.\" counter =", "] base_date = datetime.datetime(2000, 1, 1) for i in range(20): assert mockdata.get_datetime(base_date, i,", "\"sortdate\": \"1803-03-03\"} assert mockdata.make_date(5) is None assert mockdata.make_date(6) == {\"label\": \"1806\", \"sortdate\": \"1806\"}", "\"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", ] assert mockdata.get_uris(3) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\",", "def test_mod_time_after_creation_time(): \"Assert modification cannot be earlier than creation\" base_date = datetime.datetime(2000, 1,", "i + 1) assert obj[\"uri\"] == \"http://example.com/xxx/%d/%d\" % (counter, i + 1) def", "def test_get_modifier_distribution(): \"\"\"Check if distribution of modifier names is close to equal and", "== counter[\"Modifier 2\"] assert counter[\"Modifier 1\"] == counter[\"Modifier 3\"] def test_get_modifer(): \"Test creation", "2\"] assert counter[\"Modifier 1\"] == counter[\"Modifier 3\"] def test_get_modifer(): \"Test creation order of", 
"exactly 3 creators. \"\"\" counter = {} for i in range(1000): modifier =", "== [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", ] assert mockdata.get_uris(3)", "\"http://example.com/xxx/%d/%d\" % (counter, i + 1) def test_make_date(): \"Make date generates a dict", "mockdata get_uri function.\" assert mockdata.get_uris(1) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", ] assert mockdata.get_uris(2)", "mockdata.make_date(2) == {\"label\": \"February 1802\", \"sortdate\": \"1802-02\"} assert mockdata.make_date(3) == {\"label\": \"3 March", "obj in enumerate(objects): assert obj[\"label\"] == \"Xxx %d_%d\" % (counter, i + 1)", "sure generate_statement() works as expected.\" factoid = { \"@id\": \"Factoid 1\", \"createdWhen\": \"2019-07-21\",", "{\"label\": \"1806\", \"sortdate\": \"1806\"} assert mockdata.make_date(7) == {\"label\": \"July 1807\", \"sortdate\": \"1807-07\"} assert", "\"1808-08-08\"} assert mockdata.make_date(9) == {} def test_make_date_distribution(): \"Check if dates are equally distributed", "+ 1 elif data[\"sortdate\"].count(\"-\") == 2: counter[\"yyyy-mm-dd\"] = counter.get(\"yyyy-mm-dd\", 0) + 1 assert", "a date-label and a date string.\" # make_date might return an empty dict", "obj def test_generate_statement(): \"Make sure generate_statement() works as expected.\" factoid = { \"@id\":", "assert obj[\"uri\"] == \"http://example.com/xxx/%d/%d\" % (counter, i + 1) def test_make_date(): \"Make date", "counter.get(\"yyyy\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 1: counter[\"yyyy-mm\"] = counter.get(\"yyyy-mm\", 0) +", "+ 1 assert counter[\"Modifier 1\"] == counter[\"Modifier 2\"] assert counter[\"Modifier 1\"] == counter[\"Modifier", "counter[\"Modifier 1\"] == counter[\"Modifier 2\"] assert counter[\"Modifier 1\"] == counter[\"Modifier 3\"] def 
test_get_modifer():", "= next(generator) assert stmt[\"@id\"] == \"F1S%d\" % (i + 1) assert stmt[\"createdBy\"] ==", "generator = mockdata.generate_factoid() for _ in range(num): generated_factoids.append(next(generator)) return generated_factoids data_to_compare = make_factoids(250)", "def test_generate_source(): \"Make sure generate_source() does not create more than 15 different sources.\"", "last_obj == obj def test_generate_statement(): \"Make sure generate_statement() works as expected.\" factoid =", "get_uri function.\" assert mockdata.get_uris(1) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", ] assert mockdata.get_uris(2) ==", "counter[\"None\"] == counter[\"yyyy-mm-dd\"] def test_uris(): \"Test the mockdata get_uri function.\" assert mockdata.get_uris(1) ==", "\"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", \"http://example.com/9\", \"http://example.com/10\", \"http://example.com/11\", \"http://example.com/12\", \"http://example.com/13\", \"http://example.com/14\", \"http://example.com/15\", ] def test_get_modifier_distribution():", "creation order of get_creator().\" for i in range(1, 6): assert mockdata.get_creator(i) == \"Creator", "a date with offset works.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\",", "test_mod_time_after_creation_time(): \"Assert modification cannot be earlier than creation\" base_date = datetime.datetime(2000, 1, 1)", "make_factoids(num): generated_factoids = [] generator = mockdata.generate_factoid() for _ in range(num): generated_factoids.append(next(generator)) return", "with offset works.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\",", 
"objects.get(obj[\"@id\"], []) buf.append(obj) objects[obj[\"@id\"]] = buf for pid in objects: assert len(objects[pid]) ==", "== \"Modifier 2\" assert mockdata.get_modifier(4) == \"Modifier 3\" assert mockdata.get_modifier(5) == \"Modifier 1\"", "there are exactly 3 creators. \"\"\" counter = {} for i in range(1000):", "make sure sources with sam pid contain same data for pid, objlist in", "counter[\"None\"] == counter[\"empty\"] assert counter[\"None\"] == counter[\"yyyy\"] assert counter[\"None\"] == counter[\"yyyy-mm\"] assert counter[\"None\"]", "make_factoids(250) def test_make_factoids(): \"make_factoids is a convenience function to create test data.\" assert", "if data is None: counter[\"None\"] = counter.get(\"None\", 0) + 1 elif data ==", "next(generator) assert stmt[\"@id\"] == \"F1S%d\" % (i + 1) assert stmt[\"createdBy\"] == factoid[\"createdBy\"]", "\"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\", \"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\",", "{} for i in range(999): modifier = mockdata.get_modifier(i) counter[modifier] = counter.get(modifier, 0) +", "counter[\"Modifier 1\"] == counter[\"Modifier 3\"] def test_get_modifer(): \"Test creation order of get_modifier().\" assert", "assert mockdata.make_date(5) is None assert mockdata.make_date(6) == {\"label\": \"1806\", \"sortdate\": \"1806\"} assert mockdata.make_date(7)", "of modifier names is close to equal and if there are exactly 3", "assert len(objects[pid]) == 10 # make sure sources with sam pid contain same", "assert mockdata.get_uris(2) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", 
\"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", ]", "\"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\", ] base_date = datetime.datetime(2000,", "(1, 4): objects = mockdata.make_label_objects(3, \"xxx\", counter) for i, obj in enumerate(objects): assert", "import mockdata def test_generate_person(): \"Make sure generate_person() doesn not create more than 15", "elif data[\"sortdate\"].count(\"-\") == 1: counter[\"yyyy-mm\"] = counter.get(\"yyyy-mm\", 0) + 1 elif data[\"sortdate\"].count(\"-\") ==", "modification_time def test_idempotence(): \"Generate a mock data set multiple times and make sure", "mockdata.\" counter = {} for i in range(1000): data = mockdata.make_date(i) if data", "1 assert counter[\"Modifier 1\"] == counter[\"Modifier 2\"] assert counter[\"Modifier 1\"] == counter[\"Modifier 3\"]", "generated_factoids data_to_compare = make_factoids(250) for _ in range(10): assert data_to_compare == make_factoids(250) def", "assert mockdata.make_date(8) == {\"label\": \"8 August 1808\", \"sortdate\": \"1808-08-08\"} assert mockdata.make_date(9) == {}", "counter[\"Creator 1\"] == counter[\"Creator 4\"] assert counter[\"Creator 1\"] == counter[\"Creator 5\"] def test_get_creator():", "1 elif data[\"sortdate\"].count(\"-\") == 0: counter[\"yyyy\"] = counter.get(\"yyyy\", 0) + 1 elif data[\"sortdate\"].count(\"-\")", "\"statement\" in factoid assert factoid[\"statement\"][\"@id\"] == \"F%dS1\" % (i + 1) def test_make_label_objects():", "if dates are equally distributed in mockdata.\" counter = {} for i in", "than 15 different sources.\" num_of_different_objects = 25 generator = mockdata.generate_source(num_of_different_objects) objects = {}", "# make_date might return an empty dict assert mockdata.make_date(0) is 
None assert mockdata.make_date(1)", "stmt[\"modifiedWhen\"] == factoid[\"modifiedWhen\"] def test_generate_factoid(): \"\"\"Test the factoid generator. \"\"\" generator = mockdata.generate_factoid()", "factoid generator. \"\"\" generator = mockdata.generate_factoid() for i in range(100): factoid = next(generator)", "\"Test creation order of get_creator().\" for i in range(1, 6): assert mockdata.get_creator(i) ==", "== \"Modifier 1\" assert mockdata.get_modifier(3) == \"Modifier 2\" assert mockdata.get_modifier(4) == \"Modifier 3\"", "not create more than 15 different persons.\" num_of_different_objects = 15 generator = mockdata.generate_person(num_of_different_objects)", "0: counter[\"yyyy\"] = counter.get(\"yyyy\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 1: counter[\"yyyy-mm\"] =", "== counter[\"yyyy-mm\"] assert counter[\"None\"] == counter[\"yyyy-mm-dd\"] def test_uris(): \"Test the mockdata get_uri function.\"", "expected.\" for counter in (1, 4): objects = mockdata.make_label_objects(3, \"xxx\", counter) for i,", "% (counter, i + 1) def test_make_date(): \"Make date generates a dict consisting", "date with offset works.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\",", "generates a dict consisting of a date-label and a date string.\" # make_date", "is close to equal and if there are exactly 3 creators. 
\"\"\" counter", "label and an uri or created as expected.\" for counter in (1, 4):", "\"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", ] assert mockdata.get_uris(2) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\",", "1 elif data[\"sortdate\"].count(\"-\") == 2: counter[\"yyyy-mm-dd\"] = counter.get(\"yyyy-mm-dd\", 0) + 1 assert counter[\"None\"]", "works.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\", \"2000-02-03T15:40:48+02:00\",", "assert mockdata.get_modifier(6) == \"Modifier 2\" def test_get_creator_distribution(): \"\"\"Check if distribution of creator names", "+ 1) def test_make_label_objects(): \"Make sure simple object consisting of a label and", "4\"] assert counter[\"Creator 1\"] == counter[\"Creator 5\"] def test_get_creator(): \"Test creation order of", "mockdata.get_datetime(base_date, i, True) == expected[i] def test_mod_time_after_creation_time(): \"Assert modification cannot be earlier than", "counter[\"Creator 3\"] assert counter[\"Creator 1\"] == counter[\"Creator 4\"] assert counter[\"Creator 1\"] == counter[\"Creator", "range(20): assert mockdata.get_datetime(base_date, i, True) == expected[i] def test_mod_time_after_creation_time(): \"Assert modification cannot be", "assert \"Person\" in factoid[\"person\"][\"@id\"] assert \"Source\" in factoid[\"source\"][\"@id\"] assert \"statement\" in factoid assert", "counter[\"Creator 2\"] assert counter[\"Creator 1\"] == counter[\"Creator 3\"] assert counter[\"Creator 1\"] == counter[\"Creator", "range(num): generated_factoids.append(next(generator)) return generated_factoids data_to_compare = make_factoids(250) for _ in range(10): assert data_to_compare", "= buf for pid in 
objects: assert len(objects[pid]) == 10 # make sure", "= next(generator) assert factoid[\"@id\"] == \"Factoid %03d\" % (i + 1) assert \"Person\"", "\"http://example.com/13\", \"http://example.com/14\", \"http://example.com/15\", ] def test_get_modifier_distribution(): \"\"\"Check if distribution of modifier names is", "are equally distributed in mockdata.\" counter = {} for i in range(1000): data", "in range(20): assert mockdata.get_datetime(base_date, i, True) == expected[i] def test_mod_time_after_creation_time(): \"Assert modification cannot", "are exactly 3 creators. \"\"\" counter = {} for i in range(1000): modifier", "last_obj = obj else: assert last_obj == obj def test_generate_statement(): \"Make sure generate_statement()", "return an empty dict assert mockdata.make_date(0) is None assert mockdata.make_date(1) == {\"label\": \"1801\",", "counter[\"Creator 1\"] == counter[\"Creator 3\"] assert counter[\"Creator 1\"] == counter[\"Creator 4\"] assert counter[\"Creator", "counter.get(\"None\", 0) + 1 elif data == {}: counter[\"empty\"] = counter.get(\"empty\", 0) +", "enumerate(objects): assert obj[\"label\"] == \"Xxx %d_%d\" % (counter, i + 1) assert obj[\"uri\"]", "obj def test_generate_source(): \"Make sure generate_source() does not create more than 15 different", "\"http://example.com/8\", ] assert mockdata.get_uris(3) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\",", "factoid[\"source\"][\"@id\"] assert \"statement\" in factoid assert factoid[\"statement\"][\"@id\"] == \"F%dS1\" % (i + 1)", "pid contain same data for pid, objlist in objects.items(): last_obj = None for", "mockdata def test_generate_person(): \"Make sure generate_person() doesn not create more than 15 different", "1) assert stmt[\"createdBy\"] == factoid[\"createdBy\"] assert stmt[\"createdWhen\"] == factoid[\"createdWhen\"] assert stmt[\"modifiedBy\"] 
== factoid[\"modifiedBy\"]", "\"Creator %d\" % i def test_get_datetime(): \"Test the mockdata get_date function.\" expected =", "creation\" base_date = datetime.datetime(2000, 1, 1) for i in range(1000): creation_time = mockdata.get_datetime(base_date,", "factoid[\"createdWhen\"] assert stmt[\"modifiedBy\"] == factoid[\"modifiedBy\"] assert stmt[\"modifiedWhen\"] == factoid[\"modifiedWhen\"] def test_generate_factoid(): \"\"\"Test the", "None assert mockdata.make_date(1) == {\"label\": \"1801\", \"sortdate\": \"1801\"} assert mockdata.make_date(2) == {\"label\": \"February", "in factoid[\"source\"][\"@id\"] assert \"statement\" in factoid assert factoid[\"statement\"][\"@id\"] == \"F%dS1\" % (i +", "1) for i in range(1000): creation_time = mockdata.get_datetime(base_date, i) modification_time = mockdata.get_datetime(base_date, i,", "for i in range(10): assert mockdata.get_datetime(base_date, i) == expected[i] def test_get_datetime_with_offset(): \"Test if", "1 elif data == {}: counter[\"empty\"] = counter.get(\"empty\", 0) + 1 elif data[\"sortdate\"].count(\"-\")", "dict consisting of a date-label and a date string.\" # make_date might return", "True) == expected[i] def test_mod_time_after_creation_time(): \"Assert modification cannot be earlier than creation\" base_date", "\"8 August 1808\", \"sortdate\": \"1808-08-08\"} assert mockdata.make_date(9) == {} def test_make_date_distribution(): \"Check if", "are exactly 3 modifiers. 
\"\"\" counter = {} for i in range(999): modifier", "get_modifier().\" assert mockdata.get_modifier(1) == \"Modifier 3\" assert mockdata.get_modifier(2) == \"Modifier 1\" assert mockdata.get_modifier(3)", "\"2000-01-01T00:00:00+02:00\", \"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\", ] base_date =", "\"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", \"http://example.com/9\", \"http://example.com/10\", \"http://example.com/11\", \"http://example.com/12\", \"http://example.com/13\", \"http://example.com/14\", \"http://example.com/15\", ]", "mockdata.make_date(7) == {\"label\": \"July 1807\", \"sortdate\": \"1807-07\"} assert mockdata.make_date(8) == {\"label\": \"8 August", "+ 1 elif data == {}: counter[\"empty\"] = counter.get(\"empty\", 0) + 1 elif", "test_get_modifer(): \"Test creation order of get_modifier().\" assert mockdata.get_modifier(1) == \"Modifier 3\" assert mockdata.get_modifier(2)", "is None assert mockdata.make_date(6) == {\"label\": \"1806\", \"sortdate\": \"1806\"} assert mockdata.make_date(7) == {\"label\":", "mockdata.get_modifier(5) == \"Modifier 1\" assert mockdata.get_modifier(6) == \"Modifier 2\" def test_get_creator_distribution(): \"\"\"Check if", "+ 1) def test_make_date(): \"Make date generates a dict consisting of a date-label", "[ \"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\", \"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\",", "def test_make_label_objects(): 
\"Make sure simple object consisting of a label and an uri", "counter.get(modifier, 0) + 1 assert counter[\"Modifier 1\"] == counter[\"Modifier 2\"] assert counter[\"Modifier 1\"]", "names is close to equal and if there are exactly 3 creators. \"\"\"", "objects: assert len(objects[pid]) == 10 # make sure persons with same pid contain", "assert mockdata.make_date(7) == {\"label\": \"July 1807\", \"sortdate\": \"1807-07\"} assert mockdata.make_date(8) == {\"label\": \"8", "% i def test_get_datetime(): \"Test the mockdata get_date function.\" expected = [ \"2000-01-01T00:00:00+02:00\",", "== counter[\"yyyy-mm-dd\"] def test_uris(): \"Test the mockdata get_uri function.\" assert mockdata.get_uris(1) == [", "obj else: assert last_obj == obj def test_generate_source(): \"Make sure generate_source() does not", "sure simple object consisting of a label and an uri or created as", "== \"F1S%d\" % (i + 1) assert stmt[\"createdBy\"] == factoid[\"createdBy\"] assert stmt[\"createdWhen\"] ==", "assert data_to_compare == make_factoids(250) def test_make_factoids(): \"make_factoids is a convenience function to create", "modification_time = mockdata.get_datetime(base_date, i, True) assert creation_time <= modification_time def test_idempotence(): \"Generate a", "distributed in mockdata.\" counter = {} for i in range(1000): data = mockdata.make_date(i)", "test_make_label_objects(): \"Make sure simple object consisting of a label and an uri or", "\"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\", \"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\",", "papilotte.connectors.mock import mockdata def test_generate_person(): \"Make sure generate_person() doesn not create more than", "in (1, 4): 
objects = mockdata.make_label_objects(3, \"xxx\", counter) for i, obj in enumerate(objects):", "range(num_of_different_objects * 10): obj = next(generator) buf = objects.get(obj[\"@id\"], []) buf.append(obj) objects[obj[\"@id\"]] =", "for pid in objects: assert len(objects[pid]) == 10 # make sure persons with", "15 different sources.\" num_of_different_objects = 25 generator = mockdata.generate_source(num_of_different_objects) objects = {} for", "\"F1S%d\" % (i + 1) assert stmt[\"createdBy\"] == factoid[\"createdBy\"] assert stmt[\"createdWhen\"] == factoid[\"createdWhen\"]", "2\" def test_get_creator_distribution(): \"\"\"Check if distribution of creator names is close to equal", "for i in range(999): modifier = mockdata.get_modifier(i) counter[modifier] = counter.get(modifier, 0) + 1", "1, 1) for i in range(20): assert mockdata.get_datetime(base_date, i, True) == expected[i] def", "mockdata.generate_source(num_of_different_objects) objects = {} for _ in range(num_of_different_objects * 10): obj = next(generator)", "True) assert creation_time <= modification_time def test_idempotence(): \"Generate a mock data set multiple", "range(10): assert data_to_compare == make_factoids(250) def test_make_factoids(): \"make_factoids is a convenience function to", "== 1: counter[\"yyyy-mm\"] = counter.get(\"yyyy-mm\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 2: counter[\"yyyy-mm-dd\"]", "is None: counter[\"None\"] = counter.get(\"None\", 0) + 1 elif data == {}: counter[\"empty\"]", "\"sortdate\": \"1801\"} assert mockdata.make_date(2) == {\"label\": \"February 1802\", \"sortdate\": \"1802-02\"} assert mockdata.make_date(3) ==", "15 different persons.\" num_of_different_objects = 15 generator = mockdata.generate_person(num_of_different_objects) objects = {} for", "for obj in objlist: if last_obj is None: last_obj = obj else: assert", "\"Test the mockdata get_uri function.\" assert mockdata.get_uris(1) == [ \"http://example.com/1\", \"http://example.com/2\", 
\"http://example.com/3\", ]", "of creator names is close to equal and if there are exactly 3", "counter[modifier] = counter.get(modifier, 0) + 1 assert counter[\"Creator 1\"] == counter[\"Creator 2\"] assert", "assert mockdata.make_date(2) == {\"label\": \"February 1802\", \"sortdate\": \"1802-02\"} assert mockdata.make_date(3) == {\"label\": \"3", "= mockdata.make_label_objects(3, \"xxx\", counter) for i, obj in enumerate(objects): assert obj[\"label\"] == \"Xxx", "assert mockdata.get_modifier(2) == \"Modifier 1\" assert mockdata.get_modifier(3) == \"Modifier 2\" assert mockdata.get_modifier(4) ==", "counter[\"None\"] == counter[\"yyyy-mm\"] assert counter[\"None\"] == counter[\"yyyy-mm-dd\"] def test_uris(): \"Test the mockdata get_uri", "\"Test the mockdata get_date function.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\",", "\"July 1807\", \"sortdate\": \"1807-07\"} assert mockdata.make_date(8) == {\"label\": \"8 August 1808\", \"sortdate\": \"1808-08-08\"}", "\"\"\"Test the factoid generator. \"\"\" generator = mockdata.generate_factoid() for i in range(100): factoid", "[ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", ] assert mockdata.get_uris(2) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\",", "3 creators. 
\"\"\" counter = {} for i in range(1000): modifier = mockdata.get_creator(i)", "\"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for", "(i + 1) assert stmt[\"createdBy\"] == factoid[\"createdBy\"] assert stmt[\"createdWhen\"] == factoid[\"createdWhen\"] assert stmt[\"modifiedBy\"]", "mockdata.make_date(1) == {\"label\": \"1801\", \"sortdate\": \"1801\"} assert mockdata.make_date(2) == {\"label\": \"February 1802\", \"sortdate\":", "{} for _ in range(num_of_different_objects * 10): obj = next(generator) buf = objects.get(obj[\"@id\"],", "assert \"Source\" in factoid[\"source\"][\"@id\"] assert \"statement\" in factoid assert factoid[\"statement\"][\"@id\"] == \"F%dS1\" %", "mockdata.make_date(8) == {\"label\": \"8 August 1808\", \"sortdate\": \"1808-08-08\"} assert mockdata.make_date(9) == {} def", "if there are exactly 3 modifiers. \"\"\" counter = {} for i in", "1\" assert mockdata.get_modifier(6) == \"Modifier 2\" def test_get_creator_distribution(): \"\"\"Check if distribution of creator", "names is close to equal and if there are exactly 3 modifiers. \"\"\"", "(counter, i + 1) assert obj[\"uri\"] == \"http://example.com/xxx/%d/%d\" % (counter, i + 1)", "mockdata.generate_factoid() for i in range(100): factoid = next(generator) assert factoid[\"@id\"] == \"Factoid %03d\"", "and if there are exactly 3 modifiers. 
\"\"\" counter = {} for i", "= datetime.datetime(2000, 1, 1) for i in range(10): assert mockdata.get_datetime(base_date, i) == expected[i]", "assert counter[\"None\"] == counter[\"yyyy-mm\"] assert counter[\"None\"] == counter[\"yyyy-mm-dd\"] def test_uris(): \"Test the mockdata", "i in range(1000): creation_time = mockdata.get_datetime(base_date, i) modification_time = mockdata.get_datetime(base_date, i, True) assert", "generated_factoids = [] generator = mockdata.generate_factoid() for _ in range(num): generated_factoids.append(next(generator)) return generated_factoids", "assert mockdata.get_uris(1) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", ] assert mockdata.get_uris(2) == [ \"http://example.com/1\",", "def make_factoids(num): generated_factoids = [] generator = mockdata.generate_factoid() for _ in range(num): generated_factoids.append(next(generator))", "\"User 2\", } generator = mockdata.generate_statement(factoid, 1) for i in range(5): stmt =", "elif data[\"sortdate\"].count(\"-\") == 0: counter[\"yyyy\"] = counter.get(\"yyyy\", 0) + 1 elif data[\"sortdate\"].count(\"-\") ==", "+ 1 assert counter[\"None\"] == counter[\"empty\"] assert counter[\"None\"] == counter[\"yyyy\"] assert counter[\"None\"] ==", "test_get_datetime_with_offset(): \"Test if getting a date with offset works.\" expected = [ \"2000-01-01T00:00:00+02:00\",", "range(5): stmt = next(generator) assert stmt[\"@id\"] == \"F1S%d\" % (i + 1) assert", "def test_generate_person(): \"Make sure generate_person() doesn not create more than 15 different persons.\"", "test_get_creator(): \"Test creation order of get_creator().\" for i in range(1, 6): assert mockdata.get_creator(i)", "objects.items(): last_obj = None for obj in objlist: if last_obj is None: last_obj", "factoid[\"createdBy\"] assert stmt[\"createdWhen\"] == factoid[\"createdWhen\"] assert stmt[\"modifiedBy\"] == factoid[\"modifiedBy\"] assert stmt[\"modifiedWhen\"] == factoid[\"modifiedWhen\"]", 
"last_obj is None: last_obj = obj else: assert last_obj == obj def test_generate_source():", "is None: last_obj = obj else: assert last_obj == obj def test_generate_source(): \"Make", "assert stmt[\"createdBy\"] == factoid[\"createdBy\"] assert stmt[\"createdWhen\"] == factoid[\"createdWhen\"] assert stmt[\"modifiedBy\"] == factoid[\"modifiedBy\"] assert", "data == {}: counter[\"empty\"] = counter.get(\"empty\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 0:", "expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\", \"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\",", "= None for obj in objlist: if last_obj is None: last_obj = obj", "data[\"sortdate\"].count(\"-\") == 1: counter[\"yyyy-mm\"] = counter.get(\"yyyy-mm\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 2:", "return generated_factoids data_to_compare = make_factoids(250) for _ in range(10): assert data_to_compare == make_factoids(250)", "= datetime.datetime(2000, 1, 1) for i in range(1000): creation_time = mockdata.get_datetime(base_date, i) modification_time", "= next(generator) buf = objects.get(obj[\"@id\"], []) buf.append(obj) objects[obj[\"@id\"]] = buf for pid in", "= objects.get(obj[\"@id\"], []) buf.append(obj) objects[obj[\"@id\"]] = buf for pid in objects: assert len(objects[pid])", "of a date-label and a date string.\" # make_date might return an empty", "different sources.\" num_of_different_objects = 25 generator = mockdata.generate_source(num_of_different_objects) objects = {} for _", "equally distributed in mockdata.\" counter = {} for i in range(1000): data =", "def test_idempotence(): \"Generate a mock data set multiple times and make sure they", "if last_obj is None: last_obj = obj else: assert last_obj == obj def", "exactly 3 modifiers. 
\"\"\" counter = {} for i in range(999): modifier =", "counter[\"empty\"] = counter.get(\"empty\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 0: counter[\"yyyy\"] = counter.get(\"yyyy\",", "\"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\", \"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\",", "\"Make sure generate_source() does not create more than 15 different sources.\" num_of_different_objects =", "modifier = mockdata.get_creator(i) counter[modifier] = counter.get(modifier, 0) + 1 assert counter[\"Creator 1\"] ==", "in range(10): assert mockdata.get_datetime(base_date, i) == expected[i] def test_get_datetime_with_offset(): \"Test if getting a", "(i + 1) def test_make_label_objects(): \"Make sure simple object consisting of a label", "1) for i in range(10): assert mockdata.get_datetime(base_date, i) == expected[i] def test_get_datetime_with_offset(): \"Test", "1, 1) for i in range(10): assert mockdata.get_datetime(base_date, i) == expected[i] def test_get_datetime_with_offset():", "obj[\"label\"] == \"Xxx %d_%d\" % (counter, i + 1) assert obj[\"uri\"] == \"http://example.com/xxx/%d/%d\"", "of a label and an uri or created as expected.\" for counter in", "1\", \"createdWhen\": \"2019-07-21\", \"createdBy\": \"User 1\", \"modifiedWhen\": \"2019-10-12\", \"modifiedBy\": \"User 2\", } generator", "sure generate_person() doesn not create more than 15 different persons.\" num_of_different_objects = 15", "\"Modifier 1\" assert mockdata.get_modifier(3) == \"Modifier 2\" assert mockdata.get_modifier(4) == \"Modifier 3\" assert", "2\", } generator = mockdata.generate_statement(factoid, 1) for i in range(5): stmt = next(generator)", "== {\"label\": \"February 1802\", 
\"sortdate\": \"1802-02\"} assert mockdata.make_date(3) == {\"label\": \"3 March 1803\",", "same data for pid, objlist in objects.items(): last_obj = None for obj in", "\"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for i in", "August 1808\", \"sortdate\": \"1808-08-08\"} assert mockdata.make_date(9) == {} def test_make_date_distribution(): \"Check if dates", "== counter[\"empty\"] assert counter[\"None\"] == counter[\"yyyy\"] assert counter[\"None\"] == counter[\"yyyy-mm\"] assert counter[\"None\"] ==", "= {} for i in range(1000): data = mockdata.make_date(i) if data is None:", "def test_make_factoids(): \"make_factoids is a convenience function to create test data.\" assert len(mockdata.make_factoids(15))", "mockdata.generate_statement(factoid, 1) for i in range(5): stmt = next(generator) assert stmt[\"@id\"] == \"F1S%d\"", "than 15 different persons.\" num_of_different_objects = 15 generator = mockdata.generate_person(num_of_different_objects) objects = {}", "counter[\"yyyy-mm\"] assert counter[\"None\"] == counter[\"yyyy-mm-dd\"] def test_uris(): \"Test the mockdata get_uri function.\" assert", "mockdata.get_creator(i) counter[modifier] = counter.get(modifier, 0) + 1 assert counter[\"Creator 1\"] == counter[\"Creator 2\"]", "\"F%dS1\" % (i + 1) def test_make_label_objects(): \"Make sure simple object consisting of", "date generates a dict consisting of a date-label and a date string.\" #", "= [] generator = mockdata.generate_factoid() for _ in range(num): generated_factoids.append(next(generator)) return generated_factoids data_to_compare", "def test_get_datetime(): \"Test the mockdata get_date function.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\",", "make_factoids(250) for _ in range(10): assert data_to_compare == make_factoids(250) def test_make_factoids(): \"make_factoids is", 
"in range(10): assert data_to_compare == make_factoids(250) def test_make_factoids(): \"make_factoids is a convenience function", "\"http://example.com/9\", \"http://example.com/10\", \"http://example.com/11\", \"http://example.com/12\", \"http://example.com/13\", \"http://example.com/14\", \"http://example.com/15\", ] def test_get_modifier_distribution(): \"\"\"Check if distribution", "else: assert last_obj == obj def test_generate_source(): \"Make sure generate_source() does not create", "if getting a date with offset works.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\",", "== counter[\"Creator 5\"] def test_get_creator(): \"Test creation order of get_creator().\" for i in", "1) assert \"Person\" in factoid[\"person\"][\"@id\"] assert \"Source\" in factoid[\"source\"][\"@id\"] assert \"statement\" in factoid", "in range(num): generated_factoids.append(next(generator)) return generated_factoids data_to_compare = make_factoids(250) for _ in range(10): assert", "doesn not create more than 15 different persons.\" num_of_different_objects = 15 generator =", "mockdata.get_uris(3) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", \"http://example.com/9\", \"http://example.com/10\",", "3\" assert mockdata.get_modifier(5) == \"Modifier 1\" assert mockdata.get_modifier(6) == \"Modifier 2\" def test_get_creator_distribution():", "modifiers. 
\"\"\" counter = {} for i in range(999): modifier = mockdata.get_modifier(i) counter[modifier]", "= 25 generator = mockdata.generate_source(num_of_different_objects) objects = {} for _ in range(num_of_different_objects *", "\"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for i in", "for i, obj in enumerate(objects): assert obj[\"label\"] == \"Xxx %d_%d\" % (counter, i", "{\"label\": \"3 March 1803\", \"sortdate\": \"1803-03-03\"} assert mockdata.make_date(5) is None assert mockdata.make_date(6) ==", "3\"] assert counter[\"Creator 1\"] == counter[\"Creator 4\"] assert counter[\"Creator 1\"] == counter[\"Creator 5\"]", "works as expected.\" factoid = { \"@id\": \"Factoid 1\", \"createdWhen\": \"2019-07-21\", \"createdBy\": \"User", "in objects: assert len(objects[pid]) == 10 # make sure persons with same pid", "\"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", ] assert mockdata.get_uris(3) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\",", "1\"] == counter[\"Creator 4\"] assert counter[\"Creator 1\"] == counter[\"Creator 5\"] def test_get_creator(): \"Test", "\"Make sure simple object consisting of a label and an uri or created", "mockdata.make_date(9) == {} def test_make_date_distribution(): \"Check if dates are equally distributed in mockdata.\"", "and an uri or created as expected.\" for counter in (1, 4): objects", "test_generate_factoid(): \"\"\"Test the factoid generator. 
\"\"\" generator = mockdata.generate_factoid() for i in range(100):", "%d\" % i def test_get_datetime(): \"Test the mockdata get_date function.\" expected = [", "\"Make sure generate_statement() works as expected.\" factoid = { \"@id\": \"Factoid 1\", \"createdWhen\":", "get_date function.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\",", "== {\"label\": \"July 1807\", \"sortdate\": \"1807-07\"} assert mockdata.make_date(8) == {\"label\": \"8 August 1808\",", "[] generator = mockdata.generate_factoid() for _ in range(num): generated_factoids.append(next(generator)) return generated_factoids data_to_compare =", "of get_creator().\" for i in range(1, 6): assert mockdata.get_creator(i) == \"Creator %d\" %", "== {\"label\": \"3 March 1803\", \"sortdate\": \"1803-03-03\"} assert mockdata.make_date(5) is None assert mockdata.make_date(6)", "in enumerate(objects): assert obj[\"label\"] == \"Xxx %d_%d\" % (counter, i + 1) assert", "\"Person\" in factoid[\"person\"][\"@id\"] assert \"Source\" in factoid[\"source\"][\"@id\"] assert \"statement\" in factoid assert factoid[\"statement\"][\"@id\"]", "\"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\", ]", "\"http://example.com/12\", \"http://example.com/13\", \"http://example.com/14\", \"http://example.com/15\", ] def test_get_modifier_distribution(): \"\"\"Check if distribution of modifier names", "i in range(999): modifier = mockdata.get_modifier(i) counter[modifier] = counter.get(modifier, 0) + 1 
assert", "assert counter[\"Creator 1\"] == counter[\"Creator 5\"] def test_get_creator(): \"Test creation order of get_creator().\"", "expected[i] def test_get_datetime_with_offset(): \"Test if getting a date with offset works.\" expected =", "objlist: if last_obj is None: last_obj = obj else: assert last_obj == obj", "generator = mockdata.generate_source(num_of_different_objects) objects = {} for _ in range(num_of_different_objects * 10): obj", "assert counter[\"Modifier 1\"] == counter[\"Modifier 3\"] def test_get_modifer(): \"Test creation order of get_modifier().\"", "data set multiple times and make sure they are identical\" def make_factoids(num): generated_factoids", "sure generate_source() does not create more than 15 different sources.\" num_of_different_objects = 25", "counter.get(\"empty\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 0: counter[\"yyyy\"] = counter.get(\"yyyy\", 0) +", "= {} for i in range(999): modifier = mockdata.get_modifier(i) counter[modifier] = counter.get(modifier, 0)", "= obj else: assert last_obj == obj def test_generate_source(): \"Make sure generate_source() does", "1) def test_make_date(): \"Make date generates a dict consisting of a date-label and", "assert \"statement\" in factoid assert factoid[\"statement\"][\"@id\"] == \"F%dS1\" % (i + 1) def", "test_uris(): \"Test the mockdata get_uri function.\" assert mockdata.get_uris(1) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\",", "= counter.get(\"yyyy-mm\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 2: counter[\"yyyy-mm-dd\"] = counter.get(\"yyyy-mm-dd\", 0)", "earlier than creation\" base_date = datetime.datetime(2000, 1, 1) for i in range(1000): creation_time", "mockdata.make_label_objects(3, \"xxx\", counter) for i, obj in enumerate(objects): assert obj[\"label\"] == \"Xxx %d_%d\"", "in range(1, 6): assert mockdata.get_creator(i) == \"Creator %d\" % i def test_get_datetime(): \"Test", "the mockdata get_date function.\" expected = [ 
\"2000-01-01T00:00:00+02:00\", \"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\",", "for i in range(100): factoid = next(generator) assert factoid[\"@id\"] == \"Factoid %03d\" %", "multiple times and make sure they are identical\" def make_factoids(num): generated_factoids = []", "for i in range(5): stmt = next(generator) assert stmt[\"@id\"] == \"F1S%d\" % (i", "* 10): obj = next(generator) buf = objects.get(obj[\"@id\"], []) buf.append(obj) objects[obj[\"@id\"]] = buf", "% (i + 1) assert stmt[\"createdBy\"] == factoid[\"createdBy\"] assert stmt[\"createdWhen\"] == factoid[\"createdWhen\"] assert", "\"2000-04-07T12:27:44+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for i in range(20): assert mockdata.get_datetime(base_date,", "assert counter[\"None\"] == counter[\"yyyy\"] assert counter[\"None\"] == counter[\"yyyy-mm\"] assert counter[\"None\"] == counter[\"yyyy-mm-dd\"] def", "def test_make_date_distribution(): \"Check if dates are equally distributed in mockdata.\" counter = {}", "is close to equal and if there are exactly 3 modifiers. 
\"\"\" counter", "for i in range(1000): data = mockdata.make_date(i) if data is None: counter[\"None\"] =", "== counter[\"Modifier 3\"] def test_get_modifer(): \"Test creation order of get_modifier().\" assert mockdata.get_modifier(1) ==", "\"sortdate\": \"1802-02\"} assert mockdata.make_date(3) == {\"label\": \"3 March 1803\", \"sortdate\": \"1803-03-03\"} assert mockdata.make_date(5)", "def test_get_modifer(): \"Test creation order of get_modifier().\" assert mockdata.get_modifier(1) == \"Modifier 3\" assert", "counter[\"Creator 5\"] def test_get_creator(): \"Test creation order of get_creator().\" for i in range(1,", "\"\"\"Check if distribution of creator names is close to equal and if there", "simple object consisting of a label and an uri or created as expected.\"", "None: last_obj = obj else: assert last_obj == obj def test_generate_statement(): \"Make sure", "of mock data. \"\"\" import datetime from papilotte.connectors.mock import mockdata def test_generate_person(): \"Make", "i, True) == expected[i] def test_mod_time_after_creation_time(): \"Assert modification cannot be earlier than creation\"", "counter[\"yyyy-mm-dd\"] def test_uris(): \"Test the mockdata get_uri function.\" assert mockdata.get_uris(1) == [ \"http://example.com/1\",", "\"Xxx %d_%d\" % (counter, i + 1) assert obj[\"uri\"] == \"http://example.com/xxx/%d/%d\" % (counter,", "an empty dict assert mockdata.make_date(0) is None assert mockdata.make_date(1) == {\"label\": \"1801\", \"sortdate\":", "of get_modifier().\" assert mockdata.get_modifier(1) == \"Modifier 3\" assert mockdata.get_modifier(2) == \"Modifier 1\" assert", "= mockdata.get_datetime(base_date, i, True) assert creation_time <= modification_time def test_idempotence(): \"Generate a mock", "1) def test_make_label_objects(): \"Make sure simple object consisting of a label and an", "\"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\", 
\"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\",", "datetime.datetime(2000, 1, 1) for i in range(20): assert mockdata.get_datetime(base_date, i, True) == expected[i]", "== \"Factoid %03d\" % (i + 1) assert \"Person\" in factoid[\"person\"][\"@id\"] assert \"Source\"", "assert counter[\"Creator 1\"] == counter[\"Creator 2\"] assert counter[\"Creator 1\"] == counter[\"Creator 3\"] assert", "0) + 1 elif data == {}: counter[\"empty\"] = counter.get(\"empty\", 0) + 1", "len(objects[pid]) == 10 # make sure sources with sam pid contain same data", "data. \"\"\" import datetime from papilotte.connectors.mock import mockdata def test_generate_person(): \"Make sure generate_person()", "not create more than 15 different sources.\" num_of_different_objects = 25 generator = mockdata.generate_source(num_of_different_objects)", "== 0: counter[\"yyyy\"] = counter.get(\"yyyy\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 1: counter[\"yyyy-mm\"]", "mockdata.get_modifier(3) == \"Modifier 2\" assert mockdata.get_modifier(4) == \"Modifier 3\" assert mockdata.get_modifier(5) == \"Modifier", "\"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\", ] base_date = datetime.datetime(2000,", "base_date = datetime.datetime(2000, 1, 1) for i in range(1000): creation_time = mockdata.get_datetime(base_date, i)", "1\"] == counter[\"Modifier 3\"] def test_get_modifer(): \"Test creation order of get_modifier().\" assert mockdata.get_modifier(1)", "== factoid[\"createdWhen\"] assert stmt[\"modifiedBy\"] == factoid[\"modifiedBy\"] assert 
stmt[\"modifiedWhen\"] == factoid[\"modifiedWhen\"] def test_generate_factoid(): \"\"\"Test", "are identical\" def make_factoids(num): generated_factoids = [] generator = mockdata.generate_factoid() for _ in", "object consisting of a label and an uri or created as expected.\" for", "stmt = next(generator) assert stmt[\"@id\"] == \"F1S%d\" % (i + 1) assert stmt[\"createdBy\"]", "pid, objlist in objects.items(): last_obj = None for obj in objlist: if last_obj", "date string.\" # make_date might return an empty dict assert mockdata.make_date(0) is None", "None for obj in objlist: if last_obj is None: last_obj = obj else:", "[ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", ] assert mockdata.get_uris(3) ==", "i in range(20): assert mockdata.get_datetime(base_date, i, True) == expected[i] def test_mod_time_after_creation_time(): \"Assert modification", "as expected.\" factoid = { \"@id\": \"Factoid 1\", \"createdWhen\": \"2019-07-21\", \"createdBy\": \"User 1\",", "\"Source\" in factoid[\"source\"][\"@id\"] assert \"statement\" in factoid assert factoid[\"statement\"][\"@id\"] == \"F%dS1\" % (i", "assert obj[\"label\"] == \"Xxx %d_%d\" % (counter, i + 1) assert obj[\"uri\"] ==", "assert creation_time <= modification_time def test_idempotence(): \"Generate a mock data set multiple times", "1\"] == counter[\"Modifier 2\"] assert counter[\"Modifier 1\"] == counter[\"Modifier 3\"] def test_get_modifer(): \"Test", "4): objects = mockdata.make_label_objects(3, \"xxx\", counter) for i, obj in enumerate(objects): assert obj[\"label\"]", "== \"http://example.com/xxx/%d/%d\" % (counter, i + 1) def test_make_date(): \"Make date generates a", "== {}: counter[\"empty\"] = counter.get(\"empty\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 0: counter[\"yyyy\"]", "\"http://example.com/3\", \"http://example.com/4\", 
\"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", ] assert mockdata.get_uris(3) == [ \"http://example.com/1\", \"http://example.com/2\",", "pid in objects: assert len(objects[pid]) == 10 # make sure sources with sam", "[ \"2000-01-01T00:00:00+02:00\", \"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\", ] base_date", "\"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\", ] base_date =", "10 # make sure persons with same pid contain same data for pid,", "{}: counter[\"empty\"] = counter.get(\"empty\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 0: counter[\"yyyy\"] =", "\"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for i in range(20): assert", "1808\", \"sortdate\": \"1808-08-08\"} assert mockdata.make_date(9) == {} def test_make_date_distribution(): \"Check if dates are", "assert counter[\"Modifier 1\"] == counter[\"Modifier 2\"] assert counter[\"Modifier 1\"] == counter[\"Modifier 3\"] def", "assert mockdata.get_modifier(4) == \"Modifier 3\" assert mockdata.get_modifier(5) == \"Modifier 1\" assert mockdata.get_modifier(6) ==", "assert last_obj == obj def test_generate_statement(): \"Make sure generate_statement() works as expected.\" factoid", "buf for pid in objects: assert len(objects[pid]) == 10 # make sure sources", "== {\"label\": \"1806\", \"sortdate\": \"1806\"} assert mockdata.make_date(7) == {\"label\": \"July 1807\", \"sortdate\": \"1807-07\"}", "assert 
mockdata.make_date(9) == {} def test_make_date_distribution(): \"Check if dates are equally distributed in", "from papilotte.connectors.mock import mockdata def test_generate_person(): \"Make sure generate_person() doesn not create more", "creators. \"\"\" counter = {} for i in range(1000): modifier = mockdata.get_creator(i) counter[modifier]", "6): assert mockdata.get_creator(i) == \"Creator %d\" % i def test_get_datetime(): \"Test the mockdata", "\"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for i in range(20):", "range(1000): modifier = mockdata.get_creator(i) counter[modifier] = counter.get(modifier, 0) + 1 assert counter[\"Creator 1\"]", "0) + 1 elif data[\"sortdate\"].count(\"-\") == 2: counter[\"yyyy-mm-dd\"] = counter.get(\"yyyy-mm-dd\", 0) + 1", "be earlier than creation\" base_date = datetime.datetime(2000, 1, 1) for i in range(1000):", "creation_time = mockdata.get_datetime(base_date, i) modification_time = mockdata.get_datetime(base_date, i, True) assert creation_time <= modification_time", "mockdata.get_uris(1) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", ] assert mockdata.get_uris(2) == [ \"http://example.com/1\", \"http://example.com/2\",", "\"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\", ] base_date = datetime.datetime(2000, 1,", "stmt[\"modifiedBy\"] == factoid[\"modifiedBy\"] assert stmt[\"modifiedWhen\"] == factoid[\"modifiedWhen\"] def test_generate_factoid(): \"\"\"Test the factoid generator.", "expected[i] def test_mod_time_after_creation_time(): \"Assert modification cannot be earlier than creation\" base_date = datetime.datetime(2000,", "counter[\"yyyy-mm-dd\"] = counter.get(\"yyyy-mm-dd\", 0) + 1 assert 
counter[\"None\"] == counter[\"empty\"] assert counter[\"None\"] ==", "created as expected.\" for counter in (1, 4): objects = mockdata.make_label_objects(3, \"xxx\", counter)", "they are identical\" def make_factoids(num): generated_factoids = [] generator = mockdata.generate_factoid() for _", "{ \"@id\": \"Factoid 1\", \"createdWhen\": \"2019-07-21\", \"createdBy\": \"User 1\", \"modifiedWhen\": \"2019-10-12\", \"modifiedBy\": \"User", "mockdata.generate_person(num_of_different_objects) objects = {} for _ in range(num_of_different_objects * 10): obj = next(generator)", "creation_time <= modification_time def test_idempotence(): \"Generate a mock data set multiple times and", "is None: last_obj = obj else: assert last_obj == obj def test_generate_statement(): \"Make", "if there are exactly 3 creators. \"\"\" counter = {} for i in", "function.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-02T10:17:36+02:00\", \"2000-01-03T20:35:12+02:00\", \"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\",", "assert mockdata.get_modifier(5) == \"Modifier 1\" assert mockdata.get_modifier(6) == \"Modifier 2\" def test_get_creator_distribution(): \"\"\"Check", "mockdata.get_modifier(2) == \"Modifier 1\" assert mockdata.get_modifier(3) == \"Modifier 2\" assert mockdata.get_modifier(4) == \"Modifier", "i) modification_time = mockdata.get_datetime(base_date, i, True) assert creation_time <= modification_time def test_idempotence(): \"Generate", "different persons.\" num_of_different_objects = 15 generator = mockdata.generate_person(num_of_different_objects) objects = {} for _", "empty dict assert mockdata.make_date(0) is None assert mockdata.make_date(1) == {\"label\": \"1801\", \"sortdate\": \"1801\"}", "test_make_factoids(): \"make_factoids is a convenience function to create test data.\" assert len(mockdata.make_factoids(15)) ==", "1\", 
\"modifiedWhen\": \"2019-10-12\", \"modifiedBy\": \"User 2\", } generator = mockdata.generate_statement(factoid, 1) for i", "{} for i in range(1000): modifier = mockdata.get_creator(i) counter[modifier] = counter.get(modifier, 0) +", "1\" assert mockdata.get_modifier(3) == \"Modifier 2\" assert mockdata.get_modifier(4) == \"Modifier 3\" assert mockdata.get_modifier(5)", "0) + 1 assert counter[\"None\"] == counter[\"empty\"] assert counter[\"None\"] == counter[\"yyyy\"] assert counter[\"None\"]", "for pid, objlist in objects.items(): last_obj = None for obj in objlist: if", "else: assert last_obj == obj def test_generate_statement(): \"Make sure generate_statement() works as expected.\"", "in range(5): stmt = next(generator) assert stmt[\"@id\"] == \"F1S%d\" % (i + 1)", "mock data set multiple times and make sure they are identical\" def make_factoids(num):", "in range(1000): data = mockdata.make_date(i) if data is None: counter[\"None\"] = counter.get(\"None\", 0)", "might return an empty dict assert mockdata.make_date(0) is None assert mockdata.make_date(1) == {\"label\":", "def test_generate_statement(): \"Make sure generate_statement() works as expected.\" factoid = { \"@id\": \"Factoid", "assert mockdata.make_date(1) == {\"label\": \"1801\", \"sortdate\": \"1801\"} assert mockdata.make_date(2) == {\"label\": \"February 1802\",", "1\"] == counter[\"Creator 5\"] def test_get_creator(): \"Test creation order of get_creator().\" for i", "assert factoid[\"statement\"][\"@id\"] == \"F%dS1\" % (i + 1) def test_make_label_objects(): \"Make sure simple", "is None assert mockdata.make_date(1) == {\"label\": \"1801\", \"sortdate\": \"1801\"} assert mockdata.make_date(2) == {\"label\":", "assert mockdata.get_modifier(3) == \"Modifier 2\" assert mockdata.get_modifier(4) == \"Modifier 3\" assert mockdata.get_modifier(5) ==", "\"http://example.com/7\", \"http://example.com/8\", ] assert mockdata.get_uris(3) == [ \"http://example.com/1\", \"http://example.com/2\", 
\"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\",", "\"1801\"} assert mockdata.make_date(2) == {\"label\": \"February 1802\", \"sortdate\": \"1802-02\"} assert mockdata.make_date(3) == {\"label\":", "for i in range(1000): modifier = mockdata.get_creator(i) counter[modifier] = counter.get(modifier, 0) + 1", "sure sources with sam pid contain same data for pid, objlist in objects.items():", "counter.get(\"yyyy-mm-dd\", 0) + 1 assert counter[\"None\"] == counter[\"empty\"] assert counter[\"None\"] == counter[\"yyyy\"] assert", "assert mockdata.get_modifier(1) == \"Modifier 3\" assert mockdata.get_modifier(2) == \"Modifier 1\" assert mockdata.get_modifier(3) ==", "the factoid generator. \"\"\" generator = mockdata.generate_factoid() for i in range(100): factoid =", "counter in (1, 4): objects = mockdata.make_label_objects(3, \"xxx\", counter) for i, obj in", "March 1803\", \"sortdate\": \"1803-03-03\"} assert mockdata.make_date(5) is None assert mockdata.make_date(6) == {\"label\": \"1806\",", "== counter[\"yyyy\"] assert counter[\"None\"] == counter[\"yyyy-mm\"] assert counter[\"None\"] == counter[\"yyyy-mm-dd\"] def test_uris(): \"Test", "\"1802-02\"} assert mockdata.make_date(3) == {\"label\": \"3 March 1803\", \"sortdate\": \"1803-03-03\"} assert mockdata.make_date(5) is", "i) == expected[i] def test_get_datetime_with_offset(): \"Test if getting a date with offset works.\"", "= counter.get(\"empty\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 0: counter[\"yyyy\"] = counter.get(\"yyyy\", 0)", "= mockdata.generate_person(num_of_different_objects) objects = {} for _ in range(num_of_different_objects * 10): obj =", "i in range(1000): modifier = mockdata.get_creator(i) counter[modifier] = counter.get(modifier, 0) + 1 assert", "assert mockdata.make_date(0) is None assert mockdata.make_date(1) == {\"label\": \"1801\", \"sortdate\": \"1801\"} assert mockdata.make_date(2)", "= mockdata.make_date(i) if data is None: 
counter[\"None\"] = counter.get(\"None\", 0) + 1 elif", "% (counter, i + 1) assert obj[\"uri\"] == \"http://example.com/xxx/%d/%d\" % (counter, i +", "\"sortdate\": \"1807-07\"} assert mockdata.make_date(8) == {\"label\": \"8 August 1808\", \"sortdate\": \"1808-08-08\"} assert mockdata.make_date(9)", "2\"] assert counter[\"Creator 1\"] == counter[\"Creator 3\"] assert counter[\"Creator 1\"] == counter[\"Creator 4\"]", "\"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\", ] base_date = datetime.datetime(2000, 1, 1) for", "for i in range(1000): creation_time = mockdata.get_datetime(base_date, i) modification_time = mockdata.get_datetime(base_date, i, True)", "equal and if there are exactly 3 creators. \"\"\" counter = {} for", "import datetime from papilotte.connectors.mock import mockdata def test_generate_person(): \"Make sure generate_person() doesn not", "{\"label\": \"1801\", \"sortdate\": \"1801\"} assert mockdata.make_date(2) == {\"label\": \"February 1802\", \"sortdate\": \"1802-02\"} assert", "None: counter[\"None\"] = counter.get(\"None\", 0) + 1 elif data == {}: counter[\"empty\"] =", "and make sure they are identical\" def make_factoids(num): generated_factoids = [] generator =", "than creation\" base_date = datetime.datetime(2000, 1, 1) for i in range(1000): creation_time =", "\"http://example.com/2\", \"http://example.com/3\", ] assert mockdata.get_uris(2) == [ \"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\",", "for _ in range(num_of_different_objects * 10): obj = next(generator) buf = objects.get(obj[\"@id\"], [])", "\"3 March 1803\", \"sortdate\": \"1803-03-03\"} assert mockdata.make_date(5) is None assert mockdata.make_date(6) == {\"label\":", "== \"Creator %d\" % i def test_get_datetime(): \"Test the mockdata 
get_date function.\" expected", "make sure persons with same pid contain same data for pid, objlist in", "counter[\"None\"] = counter.get(\"None\", 0) + 1 elif data == {}: counter[\"empty\"] = counter.get(\"empty\",", "\"2019-07-21\", \"createdBy\": \"User 1\", \"modifiedWhen\": \"2019-10-12\", \"modifiedBy\": \"User 2\", } generator = mockdata.generate_statement(factoid,", "elif data[\"sortdate\"].count(\"-\") == 2: counter[\"yyyy-mm-dd\"] = counter.get(\"yyyy-mm-dd\", 0) + 1 assert counter[\"None\"] ==", "mockdata.get_datetime(base_date, i) modification_time = mockdata.get_datetime(base_date, i, True) assert creation_time <= modification_time def test_idempotence():", "objects[obj[\"@id\"]] = buf for pid in objects: assert len(objects[pid]) == 10 # make", "(i + 1) assert \"Person\" in factoid[\"person\"][\"@id\"] assert \"Source\" in factoid[\"source\"][\"@id\"] assert \"statement\"", "+ 1 assert counter[\"Creator 1\"] == counter[\"Creator 2\"] assert counter[\"Creator 1\"] == counter[\"Creator", "\"Modifier 3\" assert mockdata.get_modifier(2) == \"Modifier 1\" assert mockdata.get_modifier(3) == \"Modifier 2\" assert", "last_obj = None for obj in objlist: if last_obj is None: last_obj =", "in range(1000): creation_time = mockdata.get_datetime(base_date, i) modification_time = mockdata.get_datetime(base_date, i, True) assert creation_time", "\"\"\"Check if distribution of modifier names is close to equal and if there", "range(1000): creation_time = mockdata.get_datetime(base_date, i) modification_time = mockdata.get_datetime(base_date, i, True) assert creation_time <=", "== counter[\"Creator 2\"] assert counter[\"Creator 1\"] == counter[\"Creator 3\"] assert counter[\"Creator 1\"] ==", "contain same data for pid, objlist in objects.items(): last_obj = None for obj", "buf.append(obj) objects[obj[\"@id\"]] = buf for pid in objects: assert len(objects[pid]) == 10 #", "== \"Modifier 1\" assert mockdata.get_modifier(6) == \"Modifier 2\" def 
test_get_creator_distribution(): \"\"\"Check if distribution", "def test_get_creator_distribution(): \"\"\"Check if distribution of creator names is close to equal and", "make sure they are identical\" def make_factoids(num): generated_factoids = [] generator = mockdata.generate_factoid()", "num_of_different_objects = 25 generator = mockdata.generate_source(num_of_different_objects) objects = {} for _ in range(num_of_different_objects", "= counter.get(modifier, 0) + 1 assert counter[\"Creator 1\"] == counter[\"Creator 2\"] assert counter[\"Creator", "\"http://example.com/8\", \"http://example.com/9\", \"http://example.com/10\", \"http://example.com/11\", \"http://example.com/12\", \"http://example.com/13\", \"http://example.com/14\", \"http://example.com/15\", ] def test_get_modifier_distribution(): \"\"\"Check if", "= counter.get(\"yyyy\", 0) + 1 elif data[\"sortdate\"].count(\"-\") == 1: counter[\"yyyy-mm\"] = counter.get(\"yyyy-mm\", 0)", "create more than 15 different persons.\" num_of_different_objects = 15 generator = mockdata.generate_person(num_of_different_objects) objects", "\"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", \"http://example.com/9\", \"http://example.com/10\", \"http://example.com/11\", \"http://example.com/12\", \"http://example.com/13\", \"http://example.com/14\",", "\"2000-01-23T23:09:52+02:00\", \"2000-02-03T15:40:48+02:00\", \"2000-02-16T04:38:24+02:00\", \"2000-01-15T06:56:00+02:00\", \"2000-01-26T21:40:16+02:00\", \"2000-02-09T08:51:12+02:00\", \"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\",", "== expected[i] def test_mod_time_after_creation_time(): \"Assert modification cannot be earlier than creation\" base_date =", "counter[modifier] 
= counter.get(modifier, 0) + 1 assert counter[\"Modifier 1\"] == counter[\"Modifier 2\"] assert", "\"http://example.com/1\", \"http://example.com/2\", \"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", ] assert mockdata.get_uris(3) == [", "in objects.items(): last_obj = None for obj in objlist: if last_obj is None:", "i in range(5): stmt = next(generator) assert stmt[\"@id\"] == \"F1S%d\" % (i +", "== \"Modifier 2\" def test_get_creator_distribution(): \"\"\"Check if distribution of creator names is close", "assert stmt[\"createdWhen\"] == factoid[\"createdWhen\"] assert stmt[\"modifiedBy\"] == factoid[\"modifiedBy\"] assert stmt[\"modifiedWhen\"] == factoid[\"modifiedWhen\"] def", "\"http://example.com/3\", \"http://example.com/4\", \"http://example.com/5\", \"http://example.com/6\", \"http://example.com/7\", \"http://example.com/8\", \"http://example.com/9\", \"http://example.com/10\", \"http://example.com/11\", \"http://example.com/12\", \"http://example.com/13\", \"http://example.com/14\", \"http://example.com/15\",", "datetime.datetime(2000, 1, 1) for i in range(10): assert mockdata.get_datetime(base_date, i) == expected[i] def", "expected.\" factoid = { \"@id\": \"Factoid 1\", \"createdWhen\": \"2019-07-21\", \"createdBy\": \"User 1\", \"modifiedWhen\":", "\"\"\" counter = {} for i in range(999): modifier = mockdata.get_modifier(i) counter[modifier] =", "in factoid[\"person\"][\"@id\"] assert \"Source\" in factoid[\"source\"][\"@id\"] assert \"statement\" in factoid assert factoid[\"statement\"][\"@id\"] ==", "last_obj == obj def test_generate_source(): \"Make sure generate_source() does not create more than", "{\"label\": \"July 1807\", \"sortdate\": \"1807-07\"} assert mockdata.make_date(8) == {\"label\": \"8 August 1808\", \"sortdate\":", "\"Check if dates are equally distributed in mockdata.\" counter = {} for i", "= { \"@id\": \"Factoid 1\", 
\"createdWhen\": \"2019-07-21\", \"createdBy\": \"User 1\", \"modifiedWhen\": \"2019-10-12\", \"modifiedBy\":", "data[\"sortdate\"].count(\"-\") == 2: counter[\"yyyy-mm-dd\"] = counter.get(\"yyyy-mm-dd\", 0) + 1 assert counter[\"None\"] == counter[\"empty\"]", "assert stmt[\"@id\"] == \"F1S%d\" % (i + 1) assert stmt[\"createdBy\"] == factoid[\"createdBy\"] assert", "counter[\"empty\"] assert counter[\"None\"] == counter[\"yyyy\"] assert counter[\"None\"] == counter[\"yyyy-mm\"] assert counter[\"None\"] == counter[\"yyyy-mm-dd\"]", "mockdata.get_modifier(1) == \"Modifier 3\" assert mockdata.get_modifier(2) == \"Modifier 1\" assert mockdata.get_modifier(3) == \"Modifier", "assert counter[\"Creator 1\"] == counter[\"Creator 3\"] assert counter[\"Creator 1\"] == counter[\"Creator 4\"] assert", "\"2000-02-24T16:28:48+02:00\", \"2000-03-12T20:33:04+02:00\", \"2000-01-22T10:24:00+02:00\", \"2000-02-07T16:14:56+02:00\", \"2000-02-25T18:32:32+02:00\", \"2000-03-16T17:16:48+02:00\", \"2000-04-07T12:27:44+02:00\", ] base_date = datetime.datetime(2000, 1, 1)", "\"createdBy\": \"User 1\", \"modifiedWhen\": \"2019-10-12\", \"modifiedBy\": \"User 2\", } generator = mockdata.generate_statement(factoid, 1)", "if distribution of modifier names is close to equal and if there are", "modifier = mockdata.get_modifier(i) counter[modifier] = counter.get(modifier, 0) + 1 assert counter[\"Modifier 1\"] ==", "generate_statement() works as expected.\" factoid = { \"@id\": \"Factoid 1\", \"createdWhen\": \"2019-07-21\", \"createdBy\":", "factoid = next(generator) assert factoid[\"@id\"] == \"Factoid %03d\" % (i + 1) assert", "objlist in objects.items(): last_obj = None for obj in objlist: if last_obj is", "\"2000-01-05T06:52:48+02:00\", \"2000-01-06T17:10:24+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-09T13:45:36+02:00\", \"2000-01-11T00:03:12+02:00\", \"2000-01-12T10:20:48+02:00\", \"2000-01-13T20:38:24+02:00\", ] base_date = datetime.datetime(2000, 1, 1)", "mockdata.make_date(5) is 
None assert mockdata.make_date(6) == {\"label\": \"1806\", \"sortdate\": \"1806\"} assert mockdata.make_date(7) ==", "None assert mockdata.make_date(6) == {\"label\": \"1806\", \"sortdate\": \"1806\"} assert mockdata.make_date(7) == {\"label\": \"July", "date-label and a date string.\" # make_date might return an empty dict assert", "assert len(objects[pid]) == 10 # make sure persons with same pid contain same", "25 generator = mockdata.generate_source(num_of_different_objects) objects = {} for _ in range(num_of_different_objects * 10):", "offset works.\" expected = [ \"2000-01-01T00:00:00+02:00\", \"2000-01-03T08:30:56+02:00\", \"2000-01-07T13:28:32+02:00\", \"2000-01-13T14:52:48+02:00\", \"2000-01-21T12:43:44+02:00\", \"2000-01-08T03:28:00+02:00\", \"2000-01-15T03:05:36+02:00\", \"2000-01-23T23:09:52+02:00\",", "== factoid[\"modifiedBy\"] assert stmt[\"modifiedWhen\"] == factoid[\"modifiedWhen\"] def test_generate_factoid(): \"\"\"Test the factoid generator. \"\"\"", "a date string.\" # make_date might return an empty dict assert mockdata.make_date(0) is", "mockdata.get_modifier(i) counter[modifier] = counter.get(modifier, 0) + 1 assert counter[\"Modifier 1\"] == counter[\"Modifier 2\"]", "1802\", \"sortdate\": \"1802-02\"} assert mockdata.make_date(3) == {\"label\": \"3 March 1803\", \"sortdate\": \"1803-03-03\"} assert", "\"modifiedWhen\": \"2019-10-12\", \"modifiedBy\": \"User 2\", } generator = mockdata.generate_statement(factoid, 1) for i in", "\"Factoid %03d\" % (i + 1) assert \"Person\" in factoid[\"person\"][\"@id\"] assert \"Source\" in", "in objects: assert len(objects[pid]) == 10 # make sure sources with sam pid", "%d_%d\" % (counter, i + 1) assert obj[\"uri\"] == \"http://example.com/xxx/%d/%d\" % (counter, i", "stmt[\"@id\"] == \"F1S%d\" % (i + 1) assert stmt[\"createdBy\"] == factoid[\"createdBy\"] assert stmt[\"createdWhen\"]", "for counter in (1, 4): objects = mockdata.make_label_objects(3, \"xxx\", counter) for i, obj", 
"test_get_modifier_distribution(): \"\"\"Check if distribution of modifier names is close to equal and if", "= counter.get(modifier, 0) + 1 assert counter[\"Modifier 1\"] == counter[\"Modifier 2\"] assert counter[\"Modifier", "i in range(10): assert mockdata.get_datetime(base_date, i) == expected[i] def test_get_datetime_with_offset(): \"Test if getting", "a label and an uri or created as expected.\" for counter in (1,", "\"Make sure generate_person() doesn not create more than 15 different persons.\" num_of_different_objects =", "i + 1) def test_make_date(): \"Make date generates a dict consisting of a", "sam pid contain same data for pid, objlist in objects.items(): last_obj = None", "order of get_modifier().\" assert mockdata.get_modifier(1) == \"Modifier 3\" assert mockdata.get_modifier(2) == \"Modifier 1\"", "creator names is close to equal and if there are exactly 3 creators.", "mockdata.make_date(0) is None assert mockdata.make_date(1) == {\"label\": \"1801\", \"sortdate\": \"1801\"} assert mockdata.make_date(2) ==", "generator = mockdata.generate_statement(factoid, 1) for i in range(5): stmt = next(generator) assert stmt[\"@id\"]", "in range(999): modifier = mockdata.get_modifier(i) counter[modifier] = counter.get(modifier, 0) + 1 assert counter[\"Modifier", "\"sortdate\": \"1808-08-08\"} assert mockdata.make_date(9) == {} def test_make_date_distribution(): \"Check if dates are equally", "== {\"label\": \"8 August 1808\", \"sortdate\": \"1808-08-08\"} assert mockdata.make_date(9) == {} def test_make_date_distribution():", "\"sortdate\": \"1806\"} assert mockdata.make_date(7) == {\"label\": \"July 1807\", \"sortdate\": \"1807-07\"} assert mockdata.make_date(8) ==", "# make sure persons with same pid contain same data for pid, objlist", "or created as expected.\" for counter in (1, 4): objects = mockdata.make_label_objects(3, \"xxx\"," ]
[ "adds tags to instances launched in an Ocean cluster. :param pulumi.Input[str] user_data: Base64-encoded", "from .. import utilities, tables class Ocean(pulumi.CustomResource): autoscaler: pulumi.Output[dict] \"\"\" Describes the Ocean", "the instances. \"\"\" utilize_reserved_instances: pulumi.Output[bool] \"\"\" If Reserved instances exist, OCean will utilize", "not edit by hand unless you're certain you know what you are doing!", "iam role. \"\"\" image_id: pulumi.Output[str] \"\"\" ID of the image used to launch", "The instance profile iam role. :param pulumi.Input[str] image_id: ID of the image used", "not isinstance(resource_name, str): raise TypeError('Expected resource name to be a string') if opts", "of __name__ is deprecated\", DeprecationWarning) resource_name = __name__ if __opts__ is not None:", "you are doing! *** import json import warnings import pulumi import pulumi.runtime from", "instance profile iam role. :param pulumi.Input[str] image_id: ID of the image used to", "The key pair to attach the instances. :param pulumi.Input[int] max_size: The upper limit", "in the Ocean cluster. Cannot be configured if `blacklist` is configured. \"\"\" if", "\"\"\" The key pair to attach the instances. \"\"\" max_size: pulumi.Output[int] \"\"\" The", "deprecated\", DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn(\"explicit use of", "cluster will run in. :param pulumi.Input[list] security_groups: One or more security group ids.", "__opts__ is deprecated, use 'opts' instead\", DeprecationWarning) opts = __opts__ if not resource_name:", "None: raise TypeError('Missing required property subnet_ids') __props__['subnet_ids'] = subnet_ids __props__['tags'] = tags __props__['user_data']", "Min 0, max 100. \"\"\" subnet_ids: pulumi.Output[list] \"\"\" A comma-separated list of subnet", "ids. :param pulumi.Input[float] spot_percentage: The percentage of Spot instances the cluster should maintain.", "the cluster can scale down to. 
\"\"\" name: pulumi.Output[str] \"\"\" The cluster name.", "str): raise TypeError('Expected resource name to be a string') if opts and not", "pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') __props__ = dict()", "make available to the instances. :param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist, OCean", "Spot instance markets are available, enable Ocean to launch On-Demand instances instead. :param", "__name__ is not None: warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning) resource_name =", "is not None: warnings.warn(\"explicit use of __opts__ is deprecated, use 'opts' instead\", DeprecationWarning)", "*** # *** Do not edit by hand unless you're certain you know", "should be configured with auto assign public ip. :param pulumi.Input[list] tags: Optionally adds", "warnings import pulumi import pulumi.runtime from .. import utilities, tables class Ocean(pulumi.CustomResource): autoscaler:", "One or more security group ids. :param pulumi.Input[float] spot_percentage: The percentage of Spot", "security_groups is None: raise TypeError('Missing required property security_groups') __props__['security_groups'] = security_groups __props__['spot_percentage'] =", "If Reserved instances exist, OCean will utilize them before launching Spot instances. \"\"\"", "Base64-encoded MIME user data to make available to the instances. \"\"\" utilize_reserved_instances: pulumi.Output[bool]", "and not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance')", "The lower limit of instances the cluster can scale down to. \"\"\" name:", "percentage of Spot instances the cluster should maintain. Min 0, max 100. 
:param", "pulumi.Input[int] desired_capacity: The number of instances to launch and maintain in the cluster.", "= image_id __props__['key_name'] = key_name __props__['max_size'] = max_size __props__['min_size'] = min_size __props__['name'] =", "Example: `ocean.k8s` :param pulumi.Input[int] desired_capacity: The number of instances to launch and maintain", "instances launched in an Ocean cluster. :param pulumi.Input[str] user_data: Base64-encoded MIME user data", "key pair to attach the instances. \"\"\" max_size: pulumi.Output[int] \"\"\" The upper limit", ":param pulumi.Input[int] min_size: The lower limit of instances the cluster can scale down", "def translate_output_property(self, prop): return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or", "= autoscaler __props__['blacklists'] = blacklists __props__['controller_id'] = controller_id __props__['desired_capacity'] = desired_capacity __props__['fallback_to_ondemand'] =", "__props__['controller_id'] = controller_id __props__['desired_capacity'] = desired_capacity __props__['fallback_to_ondemand'] = fallback_to_ondemand __props__['iam_instance_profile'] = iam_instance_profile __props__['image_id']", "__props__['tags'] = tags __props__['user_data'] = user_data __props__['utilize_reserved_instances'] = utilize_reserved_instances __props__['whitelists'] = whitelists super(Ocean,", "The lower limit of instances the cluster can scale down to. :param pulumi.Input[str]", "string') if opts and not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be", "Ocean cluster. Subnet IDs should be configured with auto assign public ip. :param", "\"\"\" The number of instances to launch and maintain in the cluster. \"\"\"", "launch and maintain in the cluster. 
:param pulumi.Input[bool] fallback_to_ondemand: If not Spot instance", "None: warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning) resource_name = __name__ if __opts__", "__props__['image_id'] = image_id __props__['key_name'] = key_name __props__['max_size'] = max_size __props__['min_size'] = min_size __props__['name']", "import utilities, tables class Ocean(pulumi.CustomResource): autoscaler: pulumi.Output[dict] \"\"\" Describes the Ocean Kubernetes autoscaler.", "instances the cluster can scale down to. \"\"\" name: pulumi.Output[str] \"\"\" The cluster", "class Ocean(pulumi.CustomResource): autoscaler: pulumi.Output[dict] \"\"\" Describes the Ocean Kubernetes autoscaler. \"\"\" blacklists: pulumi.Output[list]", "be configured if `blacklist` is configured. \"\"\" def __init__(__self__, resource_name, opts=None, autoscaler=None, blacklists=None,", "Cannot be configured if `blacklist` is configured. \"\"\" if __name__ is not None:", "__props__['desired_capacity'] = desired_capacity __props__['fallback_to_ondemand'] = fallback_to_ondemand __props__['iam_instance_profile'] = iam_instance_profile __props__['image_id'] = image_id __props__['key_name']", "of subnet identifiers for the Ocean cluster. Subnet IDs should be configured with", "the cluster will run in. :param pulumi.Input[list] security_groups: One or more security group", "instances the cluster should maintain. Min 0, max 100. :param pulumi.Input[list] subnet_ids: A", "The ocean cluster identifier. Example: `ocean.k8s` :param pulumi.Input[int] desired_capacity: The number of instances", "pulumi.Input[list] subnet_ids: A comma-separated list of subnet identifiers for the Ocean cluster. Subnet", "the instances. \"\"\" key_name: pulumi.Output[str] \"\"\" The key pair to attach the instances.", "scale up to. \"\"\" min_size: pulumi.Output[int] \"\"\" The lower limit of instances the", "pulumi.Output[str] \"\"\" The ocean cluster identifier. 
Example: `ocean.k8s` \"\"\" desired_capacity: pulumi.Output[int] \"\"\" The", "spot_percentage if subnet_ids is None: raise TypeError('Missing required property subnet_ids') __props__['subnet_ids'] = subnet_ids", "instances. \"\"\" utilize_reserved_instances: pulumi.Output[bool] \"\"\" If Reserved instances exist, OCean will utilize them", "iam_instance_profile __props__['image_id'] = image_id __props__['key_name'] = key_name __props__['max_size'] = max_size __props__['min_size'] = min_size", "the Ocean cluster. Subnet IDs should be configured with auto assign public ip.", "make available to the instances. \"\"\" utilize_reserved_instances: pulumi.Output[bool] \"\"\" If Reserved instances exist,", "= dict() __props__['autoscaler'] = autoscaler __props__['blacklists'] = blacklists __props__['controller_id'] = controller_id __props__['desired_capacity'] =", "\"\"\" controller_id: pulumi.Output[str] \"\"\" The ocean cluster identifier. Example: `ocean.k8s` \"\"\" desired_capacity: pulumi.Output[int]", "instances the cluster can scale up to. :param pulumi.Input[int] min_size: The lower limit", "adds tags to instances launched in an Ocean cluster. \"\"\" user_data: pulumi.Output[str] \"\"\"", "autoscaler. \"\"\" blacklists: pulumi.Output[list] \"\"\" Instance types not allowed in the Ocean cluster.", "= subnet_ids __props__['tags'] = tags __props__['user_data'] = user_data __props__['utilize_reserved_instances'] = utilize_reserved_instances __props__['whitelists'] =", "(tfgen) Tool. *** # *** Do not edit by hand unless you're certain", "= __opts__ if not resource_name: raise TypeError('Missing resource name argument (for URN creation)')", "pulumi.Output[list] \"\"\" Instance types not allowed in the Ocean cluster. 
Cannot be configured", "name argument (for URN creation)') if not isinstance(resource_name, str): raise TypeError('Expected resource name", "`ocean.k8s` :param pulumi.Input[int] desired_capacity: The number of instances to launch and maintain in", "Provides a Spotinst Ocean AWS resource. :param str resource_name: The name of the", "or more security group ids. \"\"\" spot_percentage: pulumi.Output[float] \"\"\" The percentage of Spot", "subnet_ids: pulumi.Output[list] \"\"\" A comma-separated list of subnet identifiers for the Ocean cluster.", "TypeError('Expected resource options to be a ResourceOptions instance') __props__ = dict() __props__['autoscaler'] =", "to be a ResourceOptions instance') __props__ = dict() __props__['autoscaler'] = autoscaler __props__['blacklists'] =", "name __props__['region'] = region if security_groups is None: raise TypeError('Missing required property security_groups')", "min_size=None, name=None, region=None, security_groups=None, spot_percentage=None, subnet_ids=None, tags=None, user_data=None, utilize_reserved_instances=None, whitelists=None, __name__=None, __opts__=None): \"\"\"", "__props__['name'] = name __props__['region'] = region if security_groups is None: raise TypeError('Missing required", "# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen)", "resource_name, opts=None, autoscaler=None, blacklists=None, controller_id=None, desired_capacity=None, fallback_to_ondemand=None, iam_instance_profile=None, image_id=None, key_name=None, max_size=None, min_size=None, name=None,", ":param pulumi.Input[list] tags: Optionally adds tags to instances launched in an Ocean cluster.", ":param pulumi.Input[str] name: The cluster name. :param pulumi.Input[str] region: The region the cluster", ":param pulumi.Input[dict] autoscaler: Describes the Ocean Kubernetes autoscaler. :param pulumi.Input[list] blacklists: Instance types", "instances. 
\"\"\" whitelists: pulumi.Output[list] \"\"\" Instance types allowed in the Ocean cluster. Cannot", "Spot instance markets are available, enable Ocean to launch On-Demand instances instead. \"\"\"", "The ocean cluster identifier. Example: `ocean.k8s` \"\"\" desired_capacity: pulumi.Output[int] \"\"\" The number of", "pulumi.Output[str] \"\"\" ID of the image used to launch the instances. \"\"\" key_name:", "Ocean to launch On-Demand instances instead. \"\"\" iam_instance_profile: pulumi.Output[str] \"\"\" The instance profile", "of instances the cluster can scale up to. \"\"\" min_size: pulumi.Output[int] \"\"\" The", "pulumi.Input[str] controller_id: The ocean cluster identifier. Example: `ocean.k8s` :param pulumi.Input[int] desired_capacity: The number", "= spot_percentage if subnet_ids is None: raise TypeError('Missing required property subnet_ids') __props__['subnet_ids'] =", "are available, enable Ocean to launch On-Demand instances instead. :param pulumi.Input[str] iam_instance_profile: The", "name=None, region=None, security_groups=None, spot_percentage=None, subnet_ids=None, tags=None, user_data=None, utilize_reserved_instances=None, whitelists=None, __name__=None, __opts__=None): \"\"\" Provides", "to attach the instances. \"\"\" max_size: pulumi.Output[int] \"\"\" The upper limit of instances", "instances exist, OCean will utilize them before launching Spot instances. :param pulumi.Input[list] whitelists:", "whitelists: Instance types allowed in the Ocean cluster. Cannot be configured if `blacklist`", "region=None, security_groups=None, spot_percentage=None, subnet_ids=None, tags=None, user_data=None, utilize_reserved_instances=None, whitelists=None, __name__=None, __opts__=None): \"\"\" Provides a", "\"\"\" A comma-separated list of subnet identifiers for the Ocean cluster. Subnet IDs", "The upper limit of instances the cluster can scale up to. \"\"\" min_size:", "0, max 100. 
:param pulumi.Input[list] subnet_ids: A comma-separated list of subnet identifiers for", "configured. \"\"\" if __name__ is not None: warnings.warn(\"explicit use of __name__ is deprecated\",", ":param pulumi.Input[str] controller_id: The ocean cluster identifier. Example: `ocean.k8s` :param pulumi.Input[int] desired_capacity: The", "__props__, opts) def translate_output_property(self, prop): return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return", "blacklists=None, controller_id=None, desired_capacity=None, fallback_to_ondemand=None, iam_instance_profile=None, image_id=None, key_name=None, max_size=None, min_size=None, name=None, region=None, security_groups=None, spot_percentage=None,", "= desired_capacity __props__['fallback_to_ondemand'] = fallback_to_ondemand __props__['iam_instance_profile'] = iam_instance_profile __props__['image_id'] = image_id __props__['key_name'] =", "certain you know what you are doing! *** import json import warnings import", "cluster can scale down to. :param pulumi.Input[str] name: The cluster name. :param pulumi.Input[str]", "pair to attach the instances. :param pulumi.Input[int] max_size: The upper limit of instances", "tags __props__['user_data'] = user_data __props__['utilize_reserved_instances'] = utilize_reserved_instances __props__['whitelists'] = whitelists super(Ocean, __self__).__init__( 'spotinst:aws/ocean:Ocean',", "the cluster can scale up to. :param pulumi.Input[int] min_size: The lower limit of", "name: pulumi.Output[str] \"\"\" The cluster name. \"\"\" region: pulumi.Output[str] \"\"\" The region the", "opts: Options for the resource. :param pulumi.Input[dict] autoscaler: Describes the Ocean Kubernetes autoscaler.", "OCean will utilize them before launching Spot instances. 
:param pulumi.Input[list] whitelists: Instance types", "resource_name: raise TypeError('Missing resource name argument (for URN creation)') if not isinstance(resource_name, str):", "to make available to the instances. \"\"\" utilize_reserved_instances: pulumi.Output[bool] \"\"\" If Reserved instances", "pulumi.Output[str] \"\"\" The region the cluster will run in. \"\"\" security_groups: pulumi.Output[list] \"\"\"", "= security_groups __props__['spot_percentage'] = spot_percentage if subnet_ids is None: raise TypeError('Missing required property", ":param pulumi.Input[str] user_data: Base64-encoded MIME user data to make available to the instances.", "a Spotinst Ocean AWS resource. :param str resource_name: The name of the resource.", "group ids. \"\"\" spot_percentage: pulumi.Output[float] \"\"\" The percentage of Spot instances the cluster", "resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[dict] autoscaler: Describes the", "key pair to attach the instances. :param pulumi.Input[int] max_size: The upper limit of", "\"\"\" Base64-encoded MIME user data to make available to the instances. \"\"\" utilize_reserved_instances:", "identifier. Example: `ocean.k8s` \"\"\" desired_capacity: pulumi.Output[int] \"\"\" The number of instances to launch", "pulumi.Input[str] name: The cluster name. :param pulumi.Input[str] region: The region the cluster will", "pulumi.Output[int] \"\"\" The upper limit of instances the cluster can scale up to.", "launched in an Ocean cluster. :param pulumi.Input[str] user_data: Base64-encoded MIME user data to", "should maintain. Min 0, max 100. :param pulumi.Input[list] subnet_ids: A comma-separated list of", "pulumi.Output[list] \"\"\" A comma-separated list of subnet identifiers for the Ocean cluster. Subnet", "argument (for URN creation)') if not isinstance(resource_name, str): raise TypeError('Expected resource name to", "name. :param pulumi.Input[str] region: The region the cluster will run in. 
:param pulumi.Input[list]", "\"\"\" max_size: pulumi.Output[int] \"\"\" The upper limit of instances the cluster can scale", ":param pulumi.Input[float] spot_percentage: The percentage of Spot instances the cluster should maintain. Min", "up to. :param pulumi.Input[int] min_size: The lower limit of instances the cluster can", "if not resource_name: raise TypeError('Missing resource name argument (for URN creation)') if not", "\"\"\" desired_capacity: pulumi.Output[int] \"\"\" The number of instances to launch and maintain in", "if security_groups is None: raise TypeError('Missing required property security_groups') __props__['security_groups'] = security_groups __props__['spot_percentage']", "instance markets are available, enable Ocean to launch On-Demand instances instead. :param pulumi.Input[str]", "iam_instance_profile=None, image_id=None, key_name=None, max_size=None, min_size=None, name=None, region=None, security_groups=None, spot_percentage=None, subnet_ids=None, tags=None, user_data=None, utilize_reserved_instances=None,", ":param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[dict] autoscaler: Describes the Ocean", "__props__ = dict() __props__['autoscaler'] = autoscaler __props__['blacklists'] = blacklists __props__['controller_id'] = controller_id __props__['desired_capacity']", "Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless", "\"\"\" ID of the image used to launch the instances. \"\"\" key_name: pulumi.Output[str]", "\"\"\" min_size: pulumi.Output[int] \"\"\" The lower limit of instances the cluster can scale", "is configured. \"\"\" controller_id: pulumi.Output[str] \"\"\" The ocean cluster identifier. Example: `ocean.k8s` \"\"\"", "you're certain you know what you are doing! *** import json import warnings", "The number of instances to launch and maintain in the cluster. :param pulumi.Input[bool]", "launched in an Ocean cluster. 
\"\"\" user_data: pulumi.Output[str] \"\"\" Base64-encoded MIME user data", "security_groups: pulumi.Output[list] \"\"\" One or more security group ids. \"\"\" spot_percentage: pulumi.Output[float] \"\"\"", "On-Demand instances instead. \"\"\" iam_instance_profile: pulumi.Output[str] \"\"\" The instance profile iam role. \"\"\"", "know what you are doing! *** import json import warnings import pulumi import", "to the instances. :param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist, OCean will utilize", "controller_id=None, desired_capacity=None, fallback_to_ondemand=None, iam_instance_profile=None, image_id=None, key_name=None, max_size=None, min_size=None, name=None, region=None, security_groups=None, spot_percentage=None, subnet_ids=None,", "to launch and maintain in the cluster. :param pulumi.Input[bool] fallback_to_ondemand: If not Spot", "iam_instance_profile: pulumi.Output[str] \"\"\" The instance profile iam role. \"\"\" image_id: pulumi.Output[str] \"\"\" ID", "\"\"\" The cluster name. \"\"\" region: pulumi.Output[str] \"\"\" The region the cluster will", "the Ocean Kubernetes autoscaler. \"\"\" blacklists: pulumi.Output[list] \"\"\" Instance types not allowed in", "role. \"\"\" image_id: pulumi.Output[str] \"\"\" ID of the image used to launch the", "pulumi.Output[list] \"\"\" Optionally adds tags to instances launched in an Ocean cluster. \"\"\"", "will utilize them before launching Spot instances. \"\"\" whitelists: pulumi.Output[list] \"\"\" Instance types", "\"\"\" One or more security group ids. \"\"\" spot_percentage: pulumi.Output[float] \"\"\" The percentage", "role. :param pulumi.Input[str] image_id: ID of the image used to launch the instances.", "pulumi.Input[list] security_groups: One or more security group ids. :param pulumi.Input[float] spot_percentage: The percentage", "the instances. :param pulumi.Input[str] key_name: The key pair to attach the instances. :param", "subnet identifiers for the Ocean cluster. 
Subnet IDs should be configured with auto", "pulumi.Output[list] \"\"\" Instance types allowed in the Ocean cluster. Cannot be configured if", "pulumi.Input[list] whitelists: Instance types allowed in the Ocean cluster. Cannot be configured if", "opts and not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions", "to launch the instances. :param pulumi.Input[str] key_name: The key pair to attach the", "user_data: pulumi.Output[str] \"\"\" Base64-encoded MIME user data to make available to the instances.", "\"\"\" def __init__(__self__, resource_name, opts=None, autoscaler=None, blacklists=None, controller_id=None, desired_capacity=None, fallback_to_ondemand=None, iam_instance_profile=None, image_id=None, key_name=None,", "to launch On-Demand instances instead. :param pulumi.Input[str] iam_instance_profile: The instance profile iam role.", "lower limit of instances the cluster can scale down to. :param pulumi.Input[str] name:", "with auto assign public ip. :param pulumi.Input[list] tags: Optionally adds tags to instances", "is None: raise TypeError('Missing required property security_groups') __props__['security_groups'] = security_groups __props__['spot_percentage'] = spot_percentage", "is configured. \"\"\" def __init__(__self__, resource_name, opts=None, autoscaler=None, blacklists=None, controller_id=None, desired_capacity=None, fallback_to_ondemand=None, iam_instance_profile=None,", "pulumi.Output[int] \"\"\" The lower limit of instances the cluster can scale down to.", "ip. \"\"\" tags: pulumi.Output[list] \"\"\" Optionally adds tags to instances launched in an", "controller_id: pulumi.Output[str] \"\"\" The ocean cluster identifier. Example: `ocean.k8s` \"\"\" desired_capacity: pulumi.Output[int] \"\"\"", "allowed in the Ocean cluster. Cannot be configured if `whitelist` is configured. 
:param", "__props__['max_size'] = max_size __props__['min_size'] = min_size __props__['name'] = name __props__['region'] = region if", "__name__ if __opts__ is not None: warnings.warn(\"explicit use of __opts__ is deprecated, use", "the cluster can scale down to. :param pulumi.Input[str] name: The cluster name. :param", "DeprecationWarning) opts = __opts__ if not resource_name: raise TypeError('Missing resource name argument (for", "ID of the image used to launch the instances. :param pulumi.Input[str] key_name: The", "autoscaler. :param pulumi.Input[list] blacklists: Instance types not allowed in the Ocean cluster. Cannot", "identifier. Example: `ocean.k8s` :param pulumi.Input[int] desired_capacity: The number of instances to launch and", "the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by", "of the image used to launch the instances. :param pulumi.Input[str] key_name: The key", "utilize them before launching Spot instances. :param pulumi.Input[list] whitelists: Instance types allowed in", "instances. \"\"\" key_name: pulumi.Output[str] \"\"\" The key pair to attach the instances. \"\"\"", "Base64-encoded MIME user data to make available to the instances. :param pulumi.Input[bool] utilize_reserved_instances:", "be configured with auto assign public ip. :param pulumi.Input[list] tags: Optionally adds tags", "image used to launch the instances. :param pulumi.Input[str] key_name: The key pair to", "Ocean to launch On-Demand instances instead. :param pulumi.Input[str] iam_instance_profile: The instance profile iam", "= region if security_groups is None: raise TypeError('Missing required property security_groups') __props__['security_groups'] =", "resource. :param pulumi.Input[dict] autoscaler: Describes the Ocean Kubernetes autoscaler. :param pulumi.Input[list] blacklists: Instance", "Spot instances. :param pulumi.Input[list] whitelists: Instance types allowed in the Ocean cluster. Cannot", "instances launched in an Ocean cluster. 
\"\"\" user_data: pulumi.Output[str] \"\"\" Base64-encoded MIME user", "number of instances to launch and maintain in the cluster. :param pulumi.Input[bool] fallback_to_ondemand:", "image_id: ID of the image used to launch the instances. :param pulumi.Input[str] key_name:", "max_size __props__['min_size'] = min_size __props__['name'] = name __props__['region'] = region if security_groups is", "the cluster should maintain. Min 0, max 100. \"\"\" subnet_ids: pulumi.Output[list] \"\"\" A", "max 100. \"\"\" subnet_ids: pulumi.Output[list] \"\"\" A comma-separated list of subnet identifiers for", "profile iam role. :param pulumi.Input[str] image_id: ID of the image used to launch", "\"\"\" The instance profile iam role. \"\"\" image_id: pulumi.Output[str] \"\"\" ID of the", "instances. :param pulumi.Input[str] key_name: The key pair to attach the instances. :param pulumi.Input[int]", "The percentage of Spot instances the cluster should maintain. Min 0, max 100.", "the resource. :param pulumi.Input[dict] autoscaler: Describes the Ocean Kubernetes autoscaler. :param pulumi.Input[list] blacklists:", "are doing! *** import json import warnings import pulumi import pulumi.runtime from ..", "ocean cluster identifier. Example: `ocean.k8s` \"\"\" desired_capacity: pulumi.Output[int] \"\"\" The number of instances", "hand unless you're certain you know what you are doing! *** import json", "by hand unless you're certain you know what you are doing! *** import", "instances to launch and maintain in the cluster. :param pulumi.Input[bool] fallback_to_ondemand: If not", "cluster should maintain. Min 0, max 100. \"\"\" subnet_ids: pulumi.Output[list] \"\"\" A comma-separated", "use 'opts' instead\", DeprecationWarning) opts = __opts__ if not resource_name: raise TypeError('Missing resource", "types allowed in the Ocean cluster. 
Cannot be configured if `blacklist` is configured.", "__props__['user_data'] = user_data __props__['utilize_reserved_instances'] = utilize_reserved_instances __props__['whitelists'] = whitelists super(Ocean, __self__).__init__( 'spotinst:aws/ocean:Ocean', resource_name,", "pulumi.Input[bool] fallback_to_ondemand: If not Spot instance markets are available, enable Ocean to launch", "__props__['fallback_to_ondemand'] = fallback_to_ondemand __props__['iam_instance_profile'] = iam_instance_profile __props__['image_id'] = image_id __props__['key_name'] = key_name __props__['max_size']", "pulumi.Output[float] \"\"\" The percentage of Spot instances the cluster should maintain. Min 0,", "\"\"\" tags: pulumi.Output[list] \"\"\" Optionally adds tags to instances launched in an Ocean", "launch On-Demand instances instead. :param pulumi.Input[str] iam_instance_profile: The instance profile iam role. :param", "to instances launched in an Ocean cluster. :param pulumi.Input[str] user_data: Base64-encoded MIME user", "upper limit of instances the cluster can scale up to. \"\"\" min_size: pulumi.Output[int]", "__opts__ if not resource_name: raise TypeError('Missing resource name argument (for URN creation)') if", "configured. \"\"\" def __init__(__self__, resource_name, opts=None, autoscaler=None, blacklists=None, controller_id=None, desired_capacity=None, fallback_to_ondemand=None, iam_instance_profile=None, image_id=None,", "Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're", "pulumi import pulumi.runtime from .. import utilities, tables class Ocean(pulumi.CustomResource): autoscaler: pulumi.Output[dict] \"\"\"", "Instance types allowed in the Ocean cluster. Cannot be configured if `blacklist` is", "desired_capacity: The number of instances to launch and maintain in the cluster. :param", "launching Spot instances. :param pulumi.Input[list] whitelists: Instance types allowed in the Ocean cluster.", "the Ocean cluster. 
Cannot be configured if `blacklist` is configured. \"\"\" if __name__", "import warnings import pulumi import pulumi.runtime from .. import utilities, tables class Ocean(pulumi.CustomResource):", "tags to instances launched in an Ocean cluster. :param pulumi.Input[str] user_data: Base64-encoded MIME", "to the instances. \"\"\" utilize_reserved_instances: pulumi.Output[bool] \"\"\" If Reserved instances exist, OCean will", "\"\"\" If Reserved instances exist, OCean will utilize them before launching Spot instances.", "of instances the cluster can scale up to. :param pulumi.Input[int] min_size: The lower", "cluster should maintain. Min 0, max 100. :param pulumi.Input[list] subnet_ids: A comma-separated list", "configured if `whitelist` is configured. :param pulumi.Input[str] controller_id: The ocean cluster identifier. Example:", "whitelists=None, __name__=None, __opts__=None): \"\"\" Provides a Spotinst Ocean AWS resource. :param str resource_name:", "controller_id: The ocean cluster identifier. Example: `ocean.k8s` :param pulumi.Input[int] desired_capacity: The number of", "the cluster will run in. \"\"\" security_groups: pulumi.Output[list] \"\"\" One or more security", "instances. \"\"\" max_size: pulumi.Output[int] \"\"\" The upper limit of instances the cluster can", "for the resource. :param pulumi.Input[dict] autoscaler: Describes the Ocean Kubernetes autoscaler. :param pulumi.Input[list]", "more security group ids. :param pulumi.Input[float] spot_percentage: The percentage of Spot instances the", "# *** Do not edit by hand unless you're certain you know what", "min_size: pulumi.Output[int] \"\"\" The lower limit of instances the cluster can scale down", "the cluster should maintain. Min 0, max 100. 
:param pulumi.Input[list] subnet_ids: A comma-separated", "__props__['subnet_ids'] = subnet_ids __props__['tags'] = tags __props__['user_data'] = user_data __props__['utilize_reserved_instances'] = utilize_reserved_instances __props__['whitelists']", ":param pulumi.Input[str] key_name: The key pair to attach the instances. :param pulumi.Input[int] max_size:", "IDs should be configured with auto assign public ip. \"\"\" tags: pulumi.Output[list] \"\"\"", "instances the cluster can scale down to. :param pulumi.Input[str] name: The cluster name.", "max_size=None, min_size=None, name=None, region=None, security_groups=None, spot_percentage=None, subnet_ids=None, tags=None, user_data=None, utilize_reserved_instances=None, whitelists=None, __name__=None, __opts__=None):", "coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge", "= min_size __props__['name'] = name __props__['region'] = region if security_groups is None: raise", "*** Do not edit by hand unless you're certain you know what you", "__props__['iam_instance_profile'] = iam_instance_profile __props__['image_id'] = image_id __props__['key_name'] = key_name __props__['max_size'] = max_size __props__['min_size']", "tags=None, user_data=None, utilize_reserved_instances=None, whitelists=None, __name__=None, __opts__=None): \"\"\" Provides a Spotinst Ocean AWS resource.", "to launch and maintain in the cluster. \"\"\" fallback_to_ondemand: pulumi.Output[bool] \"\"\" If not", "pulumi.Output[str] \"\"\" The instance profile iam role. \"\"\" image_id: pulumi.Output[str] \"\"\" ID of", "Describes the Ocean Kubernetes autoscaler. :param pulumi.Input[list] blacklists: Instance types not allowed in", "\"\"\" utilize_reserved_instances: pulumi.Output[bool] \"\"\" If Reserved instances exist, OCean will utilize them before", "IDs should be configured with auto assign public ip. :param pulumi.Input[list] tags: Optionally", "\"\"\" The percentage of Spot instances the cluster should maintain. 
Min 0, max", "def __init__(__self__, resource_name, opts=None, autoscaler=None, blacklists=None, controller_id=None, desired_capacity=None, fallback_to_ondemand=None, iam_instance_profile=None, image_id=None, key_name=None, max_size=None,", "markets are available, enable Ocean to launch On-Demand instances instead. \"\"\" iam_instance_profile: pulumi.Output[str]", "will utilize them before launching Spot instances. :param pulumi.Input[list] whitelists: Instance types allowed", "Kubernetes autoscaler. \"\"\" blacklists: pulumi.Output[list] \"\"\" Instance types not allowed in the Ocean", "TypeError('Missing required property security_groups') __props__['security_groups'] = security_groups __props__['spot_percentage'] = spot_percentage if subnet_ids is", "\"\"\" security_groups: pulumi.Output[list] \"\"\" One or more security group ids. \"\"\" spot_percentage: pulumi.Output[float]", "security_groups') __props__['security_groups'] = security_groups __props__['spot_percentage'] = spot_percentage if subnet_ids is None: raise TypeError('Missing", "resource_name = __name__ if __opts__ is not None: warnings.warn(\"explicit use of __opts__ is", "\"\"\" if __name__ is not None: warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning)", "is deprecated, use 'opts' instead\", DeprecationWarning) opts = __opts__ if not resource_name: raise", "to. :param pulumi.Input[str] name: The cluster name. :param pulumi.Input[str] region: The region the", "before launching Spot instances. \"\"\" whitelists: pulumi.Output[list] \"\"\" Instance types allowed in the", "resource name argument (for URN creation)') if not isinstance(resource_name, str): raise TypeError('Expected resource", "Ocean cluster. :param pulumi.Input[str] user_data: Base64-encoded MIME user data to make available to", "will run in. 
\"\"\" security_groups: pulumi.Output[list] \"\"\" One or more security group ids.", "opts = __opts__ if not resource_name: raise TypeError('Missing resource name argument (for URN", "\"\"\" blacklists: pulumi.Output[list] \"\"\" Instance types not allowed in the Ocean cluster. Cannot", ":param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for", "maintain in the cluster. :param pulumi.Input[bool] fallback_to_ondemand: If not Spot instance markets are", "resource_name, __props__, opts) def translate_output_property(self, prop): return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop):", "in the cluster. :param pulumi.Input[bool] fallback_to_ondemand: If not Spot instance markets are available,", "Tool. *** # *** Do not edit by hand unless you're certain you", ":param pulumi.Input[int] desired_capacity: The number of instances to launch and maintain in the", "fallback_to_ondemand __props__['iam_instance_profile'] = iam_instance_profile __props__['image_id'] = image_id __props__['key_name'] = key_name __props__['max_size'] = max_size", "DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn(\"explicit use of __opts__", "is None: raise TypeError('Missing required property subnet_ids') __props__['subnet_ids'] = subnet_ids __props__['tags'] = tags", "warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning) resource_name = __name__ if __opts__ is", "be configured if `whitelist` is configured. :param pulumi.Input[str] controller_id: The ocean cluster identifier.", "if `blacklist` is configured. 
\"\"\" def __init__(__self__, resource_name, opts=None, autoscaler=None, blacklists=None, controller_id=None, desired_capacity=None,", "If not Spot instance markets are available, enable Ocean to launch On-Demand instances", "blacklists __props__['controller_id'] = controller_id __props__['desired_capacity'] = desired_capacity __props__['fallback_to_ondemand'] = fallback_to_ondemand __props__['iam_instance_profile'] = iam_instance_profile", "markets are available, enable Ocean to launch On-Demand instances instead. :param pulumi.Input[str] iam_instance_profile:", "edit by hand unless you're certain you know what you are doing! ***", "run in. \"\"\" security_groups: pulumi.Output[list] \"\"\" One or more security group ids. \"\"\"", "\"\"\" iam_instance_profile: pulumi.Output[str] \"\"\" The instance profile iam role. \"\"\" image_id: pulumi.Output[str] \"\"\"", "AWS resource. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts:", "resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource.", "opts=None, autoscaler=None, blacklists=None, controller_id=None, desired_capacity=None, fallback_to_ondemand=None, iam_instance_profile=None, image_id=None, key_name=None, max_size=None, min_size=None, name=None, region=None,", "None: warnings.warn(\"explicit use of __opts__ is deprecated, use 'opts' instead\", DeprecationWarning) opts =", "public ip. \"\"\" tags: pulumi.Output[list] \"\"\" Optionally adds tags to instances launched in", "in the Ocean cluster. Cannot be configured if `whitelist` is configured. \"\"\" controller_id:", "key_name __props__['max_size'] = max_size __props__['min_size'] = min_size __props__['name'] = name __props__['region'] = region", "Kubernetes autoscaler. :param pulumi.Input[list] blacklists: Instance types not allowed in the Ocean cluster.", "percentage of Spot instances the cluster should maintain. Min 0, max 100. 
\"\"\"", "pulumi.Input[str] key_name: The key pair to attach the instances. :param pulumi.Input[int] max_size: The", "\"\"\" subnet_ids: pulumi.Output[list] \"\"\" A comma-separated list of subnet identifiers for the Ocean", "The upper limit of instances the cluster can scale up to. :param pulumi.Input[int]", "ocean cluster identifier. Example: `ocean.k8s` :param pulumi.Input[int] desired_capacity: The number of instances to", "pulumi.Output[str] \"\"\" The cluster name. \"\"\" region: pulumi.Output[str] \"\"\" The region the cluster", "is not None: warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning) resource_name = __name__", "raise TypeError('Missing required property subnet_ids') __props__['subnet_ids'] = subnet_ids __props__['tags'] = tags __props__['user_data'] =", "cluster. Subnet IDs should be configured with auto assign public ip. \"\"\" tags:", "pair to attach the instances. \"\"\" max_size: pulumi.Output[int] \"\"\" The upper limit of", "instances the cluster can scale up to. \"\"\" min_size: pulumi.Output[int] \"\"\" The lower", "pulumi.Output[dict] \"\"\" Describes the Ocean Kubernetes autoscaler. \"\"\" blacklists: pulumi.Output[list] \"\"\" Instance types", "__props__['region'] = region if security_groups is None: raise TypeError('Missing required property security_groups') __props__['security_groups']", "in. \"\"\" security_groups: pulumi.Output[list] \"\"\" One or more security group ids. \"\"\" spot_percentage:", "Ocean Kubernetes autoscaler. :param pulumi.Input[list] blacklists: Instance types not allowed in the Ocean", "cluster. Cannot be configured if `blacklist` is configured. \"\"\" if __name__ is not", "autoscaler=None, blacklists=None, controller_id=None, desired_capacity=None, fallback_to_ondemand=None, iam_instance_profile=None, image_id=None, key_name=None, max_size=None, min_size=None, name=None, region=None, security_groups=None,", "cluster. Cannot be configured if `whitelist` is configured. 
\"\"\" controller_id: pulumi.Output[str] \"\"\" The", "of instances the cluster can scale down to. \"\"\" name: pulumi.Output[str] \"\"\" The", "cluster identifier. Example: `ocean.k8s` \"\"\" desired_capacity: pulumi.Output[int] \"\"\" The number of instances to", "= key_name __props__['max_size'] = max_size __props__['min_size'] = min_size __props__['name'] = name __props__['region'] =", "Reserved instances exist, OCean will utilize them before launching Spot instances. :param pulumi.Input[list]", "instances exist, OCean will utilize them before launching Spot instances. \"\"\" whitelists: pulumi.Output[list]", "tags: pulumi.Output[list] \"\"\" Optionally adds tags to instances launched in an Ocean cluster.", "required property subnet_ids') __props__['subnet_ids'] = subnet_ids __props__['tags'] = tags __props__['user_data'] = user_data __props__['utilize_reserved_instances']", "limit of instances the cluster can scale down to. :param pulumi.Input[str] name: The", "Reserved instances exist, OCean will utilize them before launching Spot instances. \"\"\" whitelists:", "the instances. :param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist, OCean will utilize them", ":param pulumi.Input[str] image_id: ID of the image used to launch the instances. :param", "can scale down to. \"\"\" name: pulumi.Output[str] \"\"\" The cluster name. \"\"\" region:", "be configured if `blacklist` is configured. \"\"\" if __name__ is not None: warnings.warn(\"explicit", "utilize_reserved_instances __props__['whitelists'] = whitelists super(Ocean, __self__).__init__( 'spotinst:aws/ocean:Ocean', resource_name, __props__, opts) def translate_output_property(self, prop):", "and maintain in the cluster. 
\"\"\" fallback_to_ondemand: pulumi.Output[bool] \"\"\" If not Spot instance", "raise TypeError('Expected resource name to be a string') if opts and not isinstance(opts,", "fallback_to_ondemand: If not Spot instance markets are available, enable Ocean to launch On-Demand", "configured if `whitelist` is configured. \"\"\" controller_id: pulumi.Output[str] \"\"\" The ocean cluster identifier.", "be configured if `whitelist` is configured. \"\"\" controller_id: pulumi.Output[str] \"\"\" The ocean cluster", "subnet_ids __props__['tags'] = tags __props__['user_data'] = user_data __props__['utilize_reserved_instances'] = utilize_reserved_instances __props__['whitelists'] = whitelists", "if `whitelist` is configured. :param pulumi.Input[str] controller_id: The ocean cluster identifier. Example: `ocean.k8s`", "The instance profile iam role. \"\"\" image_id: pulumi.Output[str] \"\"\" ID of the image", "\"\"\" name: pulumi.Output[str] \"\"\" The cluster name. \"\"\" region: pulumi.Output[str] \"\"\" The region", "name to be a string') if opts and not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected", "launch On-Demand instances instead. \"\"\" iam_instance_profile: pulumi.Output[str] \"\"\" The instance profile iam role.", "OCean will utilize them before launching Spot instances. \"\"\" whitelists: pulumi.Output[list] \"\"\" Instance", "in the Ocean cluster. Cannot be configured if `blacklist` is configured. \"\"\" def", "user data to make available to the instances. \"\"\" utilize_reserved_instances: pulumi.Output[bool] \"\"\" If", "Ocean(pulumi.CustomResource): autoscaler: pulumi.Output[dict] \"\"\" Describes the Ocean Kubernetes autoscaler. \"\"\" blacklists: pulumi.Output[list] \"\"\"", "to instances launched in an Ocean cluster. \"\"\" user_data: pulumi.Output[str] \"\"\" Base64-encoded MIME", "number of instances to launch and maintain in the cluster. 
\"\"\" fallback_to_ondemand: pulumi.Output[bool]", "security_groups=None, spot_percentage=None, subnet_ids=None, tags=None, user_data=None, utilize_reserved_instances=None, whitelists=None, __name__=None, __opts__=None): \"\"\" Provides a Spotinst", "user_data __props__['utilize_reserved_instances'] = utilize_reserved_instances __props__['whitelists'] = whitelists super(Ocean, __self__).__init__( 'spotinst:aws/ocean:Ocean', resource_name, __props__, opts)", "user data to make available to the instances. :param pulumi.Input[bool] utilize_reserved_instances: If Reserved", "cluster name. :param pulumi.Input[str] region: The region the cluster will run in. :param", "list of subnet identifiers for the Ocean cluster. Subnet IDs should be configured", "fallback_to_ondemand=None, iam_instance_profile=None, image_id=None, key_name=None, max_size=None, min_size=None, name=None, region=None, security_groups=None, spot_percentage=None, subnet_ids=None, tags=None, user_data=None,", "= whitelists super(Ocean, __self__).__init__( 'spotinst:aws/ocean:Ocean', resource_name, __props__, opts) def translate_output_property(self, prop): return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop)", "iam_instance_profile: The instance profile iam role. :param pulumi.Input[str] image_id: ID of the image", "pulumi.Output[int] \"\"\" The number of instances to launch and maintain in the cluster.", "the image used to launch the instances. :param pulumi.Input[str] key_name: The key pair", "Example: `ocean.k8s` \"\"\" desired_capacity: pulumi.Output[int] \"\"\" The number of instances to launch and", "Ocean cluster. Cannot be configured if `whitelist` is configured. :param pulumi.Input[str] controller_id: The", ":param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist, OCean will utilize them before launching", "name. \"\"\" region: pulumi.Output[str] \"\"\" The region the cluster will run in. \"\"\"", "security_groups: One or more security group ids. 
:param pulumi.Input[float] spot_percentage: The percentage of", "instance profile iam role. \"\"\" image_id: pulumi.Output[str] \"\"\" ID of the image used", "raise TypeError('Missing required property security_groups') __props__['security_groups'] = security_groups __props__['spot_percentage'] = spot_percentage if subnet_ids", "instead. :param pulumi.Input[str] iam_instance_profile: The instance profile iam role. :param pulumi.Input[str] image_id: ID", "cluster. \"\"\" fallback_to_ondemand: pulumi.Output[bool] \"\"\" If not Spot instance markets are available, enable", ":param pulumi.Input[list] security_groups: One or more security group ids. :param pulumi.Input[float] spot_percentage: The", "not None: warnings.warn(\"explicit use of __opts__ is deprecated, use 'opts' instead\", DeprecationWarning) opts", "available, enable Ocean to launch On-Demand instances instead. \"\"\" iam_instance_profile: pulumi.Output[str] \"\"\" The", "instance markets are available, enable Ocean to launch On-Demand instances instead. \"\"\" iam_instance_profile:", "utilize_reserved_instances: If Reserved instances exist, OCean will utilize them before launching Spot instances.", "Ocean cluster. Cannot be configured if `blacklist` is configured. \"\"\" def __init__(__self__, resource_name,", "not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') __props__", "are available, enable Ocean to launch On-Demand instances instead. \"\"\" iam_instance_profile: pulumi.Output[str] \"\"\"", "of the image used to launch the instances. \"\"\" key_name: pulumi.Output[str] \"\"\" The", "Subnet IDs should be configured with auto assign public ip. \"\"\" tags: pulumi.Output[list]", "can scale up to. \"\"\" min_size: pulumi.Output[int] \"\"\" The lower limit of instances", "Options for the resource. :param pulumi.Input[dict] autoscaler: Describes the Ocean Kubernetes autoscaler. :param", "ids. 
\"\"\" spot_percentage: pulumi.Output[float] \"\"\" The percentage of Spot instances the cluster should", "data to make available to the instances. \"\"\" utilize_reserved_instances: pulumi.Output[bool] \"\"\" If Reserved", "use of __name__ is deprecated\", DeprecationWarning) resource_name = __name__ if __opts__ is not", "desired_capacity __props__['fallback_to_ondemand'] = fallback_to_ondemand __props__['iam_instance_profile'] = iam_instance_profile __props__['image_id'] = image_id __props__['key_name'] = key_name", "Ocean cluster. \"\"\" user_data: pulumi.Output[str] \"\"\" Base64-encoded MIME user data to make available", "image used to launch the instances. \"\"\" key_name: pulumi.Output[str] \"\"\" The key pair", "of Spot instances the cluster should maintain. Min 0, max 100. :param pulumi.Input[list]", "if not isinstance(resource_name, str): raise TypeError('Expected resource name to be a string') if", "\"\"\" image_id: pulumi.Output[str] \"\"\" ID of the image used to launch the instances.", "*** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool.", "Spot instances. \"\"\" whitelists: pulumi.Output[list] \"\"\" Instance types allowed in the Ocean cluster.", "Spotinst Ocean AWS resource. :param str resource_name: The name of the resource. :param", "blacklists: pulumi.Output[list] \"\"\" Instance types not allowed in the Ocean cluster. Cannot be", "of instances to launch and maintain in the cluster. :param pulumi.Input[bool] fallback_to_ondemand: If", "a string') if opts and not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to", "The cluster name. 
:param pulumi.Input[str] region: The region the cluster will run in.", "property subnet_ids') __props__['subnet_ids'] = subnet_ids __props__['tags'] = tags __props__['user_data'] = user_data __props__['utilize_reserved_instances'] =", "__props__['whitelists'] = whitelists super(Ocean, __self__).__init__( 'spotinst:aws/ocean:Ocean', resource_name, __props__, opts) def translate_output_property(self, prop): return", "by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit", "before launching Spot instances. :param pulumi.Input[list] whitelists: Instance types allowed in the Ocean", "spot_percentage: pulumi.Output[float] \"\"\" The percentage of Spot instances the cluster should maintain. Min", "limit of instances the cluster can scale up to. \"\"\" min_size: pulumi.Output[int] \"\"\"", "cluster. Cannot be configured if `blacklist` is configured. \"\"\" def __init__(__self__, resource_name, opts=None,", "key_name=None, max_size=None, min_size=None, name=None, region=None, security_groups=None, spot_percentage=None, subnet_ids=None, tags=None, user_data=None, utilize_reserved_instances=None, whitelists=None, __name__=None,", "assign public ip. :param pulumi.Input[list] tags: Optionally adds tags to instances launched in", "if __opts__ is not None: warnings.warn(\"explicit use of __opts__ is deprecated, use 'opts'", ":param pulumi.Input[list] whitelists: Instance types allowed in the Ocean cluster. Cannot be configured", "in. :param pulumi.Input[list] security_groups: One or more security group ids. :param pulumi.Input[float] spot_percentage:", "spot_percentage: The percentage of Spot instances the cluster should maintain. Min 0, max", "of instances the cluster can scale down to. :param pulumi.Input[str] name: The cluster", "the instances. 
:param pulumi.Input[int] max_size: The upper limit of instances the cluster can", "= fallback_to_ondemand __props__['iam_instance_profile'] = iam_instance_profile __props__['image_id'] = image_id __props__['key_name'] = key_name __props__['max_size'] =", "translate_output_property(self, prop): return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop", "instances instead. \"\"\" iam_instance_profile: pulumi.Output[str] \"\"\" The instance profile iam role. \"\"\" image_id:", "with auto assign public ip. \"\"\" tags: pulumi.Output[list] \"\"\" Optionally adds tags to", "up to. \"\"\" min_size: pulumi.Output[int] \"\"\" The lower limit of instances the cluster", "TypeError('Expected resource name to be a string') if opts and not isinstance(opts, pulumi.ResourceOptions):", "exist, OCean will utilize them before launching Spot instances. :param pulumi.Input[list] whitelists: Instance", "`blacklist` is configured. \"\"\" def __init__(__self__, resource_name, opts=None, autoscaler=None, blacklists=None, controller_id=None, desired_capacity=None, fallback_to_ondemand=None,", "doing! *** import json import warnings import pulumi import pulumi.runtime from .. import", "is configured. :param pulumi.Input[str] controller_id: The ocean cluster identifier. Example: `ocean.k8s` :param pulumi.Input[int]", "= tags __props__['user_data'] = user_data __props__['utilize_reserved_instances'] = utilize_reserved_instances __props__['whitelists'] = whitelists super(Ocean, __self__).__init__(", "100. \"\"\" subnet_ids: pulumi.Output[list] \"\"\" A comma-separated list of subnet identifiers for the", "0, max 100. \"\"\" subnet_ids: pulumi.Output[list] \"\"\" A comma-separated list of subnet identifiers", "Pulumi Terraform Bridge (tfgen) Tool. 
*** # *** Do not edit by hand", "super(Ocean, __self__).__init__( 'spotinst:aws/ocean:Ocean', resource_name, __props__, opts) def translate_output_property(self, prop): return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop", "user_data=None, utilize_reserved_instances=None, whitelists=None, __name__=None, __opts__=None): \"\"\" Provides a Spotinst Ocean AWS resource. :param", "pulumi.Input[list] blacklists: Instance types not allowed in the Ocean cluster. Cannot be configured", "upper limit of instances the cluster can scale up to. :param pulumi.Input[int] min_size:", "if `blacklist` is configured. \"\"\" if __name__ is not None: warnings.warn(\"explicit use of", "(for URN creation)') if not isinstance(resource_name, str): raise TypeError('Expected resource name to be", "cluster name. \"\"\" region: pulumi.Output[str] \"\"\" The region the cluster will run in.", "image_id=None, key_name=None, max_size=None, min_size=None, name=None, region=None, security_groups=None, spot_percentage=None, subnet_ids=None, tags=None, user_data=None, utilize_reserved_instances=None, whitelists=None,", "used to launch the instances. \"\"\" key_name: pulumi.Output[str] \"\"\" The key pair to", "the Ocean cluster. Cannot be configured if `whitelist` is configured. :param pulumi.Input[str] controller_id:", "if `whitelist` is configured. \"\"\" controller_id: pulumi.Output[str] \"\"\" The ocean cluster identifier. Example:", "of Spot instances the cluster should maintain. Min 0, max 100. \"\"\" subnet_ids:", "Cannot be configured if `whitelist` is configured. \"\"\" controller_id: pulumi.Output[str] \"\"\" The ocean", "instances instead. :param pulumi.Input[str] iam_instance_profile: The instance profile iam role. 
:param pulumi.Input[str] image_id:", "if opts and not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a", "desired_capacity=None, fallback_to_ondemand=None, iam_instance_profile=None, image_id=None, key_name=None, max_size=None, min_size=None, name=None, region=None, security_groups=None, spot_percentage=None, subnet_ids=None, tags=None,", "instances the cluster should maintain. Min 0, max 100. \"\"\" subnet_ids: pulumi.Output[list] \"\"\"", "Min 0, max 100. :param pulumi.Input[list] subnet_ids: A comma-separated list of subnet identifiers", "them before launching Spot instances. \"\"\" whitelists: pulumi.Output[list] \"\"\" Instance types allowed in", "deprecated, use 'opts' instead\", DeprecationWarning) opts = __opts__ if not resource_name: raise TypeError('Missing", "autoscaler: pulumi.Output[dict] \"\"\" Describes the Ocean Kubernetes autoscaler. \"\"\" blacklists: pulumi.Output[list] \"\"\" Instance", "whitelists super(Ocean, __self__).__init__( 'spotinst:aws/ocean:Ocean', resource_name, __props__, opts) def translate_output_property(self, prop): return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or", "# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform", "One or more security group ids. \"\"\" spot_percentage: pulumi.Output[float] \"\"\" The percentage of", "raise TypeError('Missing resource name argument (for URN creation)') if not isinstance(resource_name, str): raise", "the image used to launch the instances. \"\"\" key_name: pulumi.Output[str] \"\"\" The key", "URN creation)') if not isinstance(resource_name, str): raise TypeError('Expected resource name to be a", "key_name: pulumi.Output[str] \"\"\" The key pair to attach the instances. \"\"\" max_size: pulumi.Output[int]", "to. :param pulumi.Input[int] min_size: The lower limit of instances the cluster can scale", "Ocean cluster. Cannot be configured if `whitelist` is configured. 
\"\"\" controller_id: pulumi.Output[str] \"\"\"", "or more security group ids. :param pulumi.Input[float] spot_percentage: The percentage of Spot instances", "region: pulumi.Output[str] \"\"\" The region the cluster will run in. \"\"\" security_groups: pulumi.Output[list]", "A comma-separated list of subnet identifiers for the Ocean cluster. Subnet IDs should", "import pulumi.runtime from .. import utilities, tables class Ocean(pulumi.CustomResource): autoscaler: pulumi.Output[dict] \"\"\" Describes", "max_size: The upper limit of instances the cluster can scale up to. :param", "The region the cluster will run in. :param pulumi.Input[list] security_groups: One or more", "pulumi.Output[str] \"\"\" The key pair to attach the instances. \"\"\" max_size: pulumi.Output[int] \"\"\"", "required property security_groups') __props__['security_groups'] = security_groups __props__['spot_percentage'] = spot_percentage if subnet_ids is None:", "min_size: The lower limit of instances the cluster can scale down to. :param", "options to be a ResourceOptions instance') __props__ = dict() __props__['autoscaler'] = autoscaler __props__['blacklists']", "controller_id __props__['desired_capacity'] = desired_capacity __props__['fallback_to_ondemand'] = fallback_to_ondemand __props__['iam_instance_profile'] = iam_instance_profile __props__['image_id'] = image_id", ":param pulumi.Input[list] subnet_ids: A comma-separated list of subnet identifiers for the Ocean cluster.", "__opts__=None): \"\"\" Provides a Spotinst Ocean AWS resource. :param str resource_name: The name", "str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the", "is deprecated\", DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn(\"explicit use", "in an Ocean cluster. 
\"\"\" user_data: pulumi.Output[str] \"\"\" Base64-encoded MIME user data to", "creation)') if not isinstance(resource_name, str): raise TypeError('Expected resource name to be a string')", "profile iam role. \"\"\" image_id: pulumi.Output[str] \"\"\" ID of the image used to", "the cluster can scale up to. \"\"\" min_size: pulumi.Output[int] \"\"\" The lower limit", "region: The region the cluster will run in. :param pulumi.Input[list] security_groups: One or", "cluster identifier. Example: `ocean.k8s` :param pulumi.Input[int] desired_capacity: The number of instances to launch", "Instance types not allowed in the Ocean cluster. Cannot be configured if `whitelist`", "limit of instances the cluster can scale down to. \"\"\" name: pulumi.Output[str] \"\"\"", ".. import utilities, tables class Ocean(pulumi.CustomResource): autoscaler: pulumi.Output[dict] \"\"\" Describes the Ocean Kubernetes", "\"\"\" spot_percentage: pulumi.Output[float] \"\"\" The percentage of Spot instances the cluster should maintain.", "cluster. \"\"\" user_data: pulumi.Output[str] \"\"\" Base64-encoded MIME user data to make available to", "property security_groups') __props__['security_groups'] = security_groups __props__['spot_percentage'] = spot_percentage if subnet_ids is None: raise", "scale down to. :param pulumi.Input[str] name: The cluster name. :param pulumi.Input[str] region: The", "dict() __props__['autoscaler'] = autoscaler __props__['blacklists'] = blacklists __props__['controller_id'] = controller_id __props__['desired_capacity'] = desired_capacity", ":param pulumi.Input[int] max_size: The upper limit of instances the cluster can scale up", "pulumi.Output[str] \"\"\" Base64-encoded MIME user data to make available to the instances. \"\"\"", "the Ocean cluster. Cannot be configured if `blacklist` is configured. \"\"\" def __init__(__self__,", "\"\"\" Instance types not allowed in the Ocean cluster. Cannot be configured if", "cluster. 
:param pulumi.Input[str] user_data: Base64-encoded MIME user data to make available to the", "cluster can scale up to. \"\"\" min_size: pulumi.Output[int] \"\"\" The lower limit of", "of instances to launch and maintain in the cluster. \"\"\" fallback_to_ondemand: pulumi.Output[bool] \"\"\"", "opts) def translate_output_property(self, prop): return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return tables._SNAKE_TO_CAMEL_CASE_TABLE.get(prop)", "of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[dict] autoscaler:", "pulumi.Input[str] iam_instance_profile: The instance profile iam role. :param pulumi.Input[str] image_id: ID of the", "the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[dict] autoscaler: Describes", "\"\"\" The region the cluster will run in. \"\"\" security_groups: pulumi.Output[list] \"\"\" One", "available, enable Ocean to launch On-Demand instances instead. :param pulumi.Input[str] iam_instance_profile: The instance", "The key pair to attach the instances. \"\"\" max_size: pulumi.Output[int] \"\"\" The upper", "__props__['utilize_reserved_instances'] = utilize_reserved_instances __props__['whitelists'] = whitelists super(Ocean, __self__).__init__( 'spotinst:aws/ocean:Ocean', resource_name, __props__, opts) def", "= user_data __props__['utilize_reserved_instances'] = utilize_reserved_instances __props__['whitelists'] = whitelists super(Ocean, __self__).__init__( 'spotinst:aws/ocean:Ocean', resource_name, __props__,", "instead. \"\"\" iam_instance_profile: pulumi.Output[str] \"\"\" The instance profile iam role. \"\"\" image_id: pulumi.Output[str]", "launch and maintain in the cluster. \"\"\" fallback_to_ondemand: pulumi.Output[bool] \"\"\" If not Spot", "user_data: Base64-encoded MIME user data to make available to the instances. :param pulumi.Input[bool]", "name of the resource. 
:param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[dict]", "100. :param pulumi.Input[list] subnet_ids: A comma-separated list of subnet identifiers for the Ocean", "configured if `blacklist` is configured. \"\"\" if __name__ is not None: warnings.warn(\"explicit use", "to. \"\"\" min_size: pulumi.Output[int] \"\"\" The lower limit of instances the cluster can", "pulumi.Input[int] max_size: The upper limit of instances the cluster can scale up to.", "TypeError('Missing resource name argument (for URN creation)') if not isinstance(resource_name, str): raise TypeError('Expected", "pulumi.Input[str] user_data: Base64-encoded MIME user data to make available to the instances. :param", "security group ids. \"\"\" spot_percentage: pulumi.Output[float] \"\"\" The percentage of Spot instances the", "subnet_ids is None: raise TypeError('Missing required property subnet_ids') __props__['subnet_ids'] = subnet_ids __props__['tags'] =", "desired_capacity: pulumi.Output[int] \"\"\" The number of instances to launch and maintain in the", "__props__['blacklists'] = blacklists __props__['controller_id'] = controller_id __props__['desired_capacity'] = desired_capacity __props__['fallback_to_ondemand'] = fallback_to_ondemand __props__['iam_instance_profile']", "tables class Ocean(pulumi.CustomResource): autoscaler: pulumi.Output[dict] \"\"\" Describes the Ocean Kubernetes autoscaler. \"\"\" blacklists:", "Ocean AWS resource. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions", "`whitelist` is configured. :param pulumi.Input[str] controller_id: The ocean cluster identifier. Example: `ocean.k8s` :param", "the cluster. \"\"\" fallback_to_ondemand: pulumi.Output[bool] \"\"\" If not Spot instance markets are available,", "lower limit of instances the cluster can scale down to. 
\"\"\" name: pulumi.Output[str]", "__props__['spot_percentage'] = spot_percentage if subnet_ids is None: raise TypeError('Missing required property subnet_ids') __props__['subnet_ids']", "Cannot be configured if `whitelist` is configured. :param pulumi.Input[str] controller_id: The ocean cluster", "instances to launch and maintain in the cluster. \"\"\" fallback_to_ondemand: pulumi.Output[bool] \"\"\" If", "subnet_ids') __props__['subnet_ids'] = subnet_ids __props__['tags'] = tags __props__['user_data'] = user_data __props__['utilize_reserved_instances'] = utilize_reserved_instances", "\"\"\" Instance types allowed in the Ocean cluster. Cannot be configured if `blacklist`", "If Reserved instances exist, OCean will utilize them before launching Spot instances. :param", "allowed in the Ocean cluster. Cannot be configured if `blacklist` is configured. \"\"\"", "ID of the image used to launch the instances. \"\"\" key_name: pulumi.Output[str] \"\"\"", "to launch the instances. \"\"\" key_name: pulumi.Output[str] \"\"\" The key pair to attach", "configured. :param pulumi.Input[str] controller_id: The ocean cluster identifier. Example: `ocean.k8s` :param pulumi.Input[int] desired_capacity:", "data to make available to the instances. :param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances", "attach the instances. \"\"\" max_size: pulumi.Output[int] \"\"\" The upper limit of instances the", "`whitelist` is configured. \"\"\" controller_id: pulumi.Output[str] \"\"\" The ocean cluster identifier. Example: `ocean.k8s`", "cluster. :param pulumi.Input[bool] fallback_to_ondemand: If not Spot instance markets are available, enable Ocean", "instances. :param pulumi.Input[list] whitelists: Instance types allowed in the Ocean cluster. Cannot be", "cluster can scale up to. 
:param pulumi.Input[int] min_size: The lower limit of instances", "__props__['autoscaler'] = autoscaler __props__['blacklists'] = blacklists __props__['controller_id'] = controller_id __props__['desired_capacity'] = desired_capacity __props__['fallback_to_ondemand']", "\"\"\" Describes the Ocean Kubernetes autoscaler. \"\"\" blacklists: pulumi.Output[list] \"\"\" Instance types not", "MIME user data to make available to the instances. \"\"\" utilize_reserved_instances: pulumi.Output[bool] \"\"\"", "pulumi.Input[float] spot_percentage: The percentage of Spot instances the cluster should maintain. Min 0,", "be a string') if opts and not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options", "\"\"\" user_data: pulumi.Output[str] \"\"\" Base64-encoded MIME user data to make available to the", "fallback_to_ondemand: pulumi.Output[bool] \"\"\" If not Spot instance markets are available, enable Ocean to", "'spotinst:aws/ocean:Ocean', resource_name, __props__, opts) def translate_output_property(self, prop): return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self,", "image_id __props__['key_name'] = key_name __props__['max_size'] = max_size __props__['min_size'] = min_size __props__['name'] = name", "`ocean.k8s` \"\"\" desired_capacity: pulumi.Output[int] \"\"\" The number of instances to launch and maintain", "to launch On-Demand instances instead. \"\"\" iam_instance_profile: pulumi.Output[str] \"\"\" The instance profile iam", "= max_size __props__['min_size'] = min_size __props__['name'] = name __props__['region'] = region if security_groups", "key_name: The key pair to attach the instances. :param pulumi.Input[int] max_size: The upper", "\"\"\" whitelists: pulumi.Output[list] \"\"\" Instance types allowed in the Ocean cluster. Cannot be", "in the cluster. \"\"\" fallback_to_ondemand: pulumi.Output[bool] \"\"\" If not Spot instance markets are", "Ocean Kubernetes autoscaler. 
\"\"\" blacklists: pulumi.Output[list] \"\"\" Instance types not allowed in the", "Do not edit by hand unless you're certain you know what you are", "not Spot instance markets are available, enable Ocean to launch On-Demand instances instead.", "enable Ocean to launch On-Demand instances instead. :param pulumi.Input[str] iam_instance_profile: The instance profile", "will run in. :param pulumi.Input[list] security_groups: One or more security group ids. :param", "launching Spot instances. \"\"\" whitelists: pulumi.Output[list] \"\"\" Instance types allowed in the Ocean", "to. \"\"\" name: pulumi.Output[str] \"\"\" The cluster name. \"\"\" region: pulumi.Output[str] \"\"\" The", "configured if `blacklist` is configured. \"\"\" def __init__(__self__, resource_name, opts=None, autoscaler=None, blacklists=None, controller_id=None,", "__self__).__init__( 'spotinst:aws/ocean:Ocean', resource_name, __props__, opts) def translate_output_property(self, prop): return tables._CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def", "configured with auto assign public ip. \"\"\" tags: pulumi.Output[list] \"\"\" Optionally adds tags", "group ids. :param pulumi.Input[float] spot_percentage: The percentage of Spot instances the cluster should", "instead\", DeprecationWarning) opts = __opts__ if not resource_name: raise TypeError('Missing resource name argument", "auto assign public ip. :param pulumi.Input[list] tags: Optionally adds tags to instances launched", "generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not", "WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***", "min_size __props__['name'] = name __props__['region'] = region if security_groups is None: raise TypeError('Missing", "the instances. \"\"\" max_size: pulumi.Output[int] \"\"\" The upper limit of instances the cluster", "whitelists: pulumi.Output[list] \"\"\" Instance types allowed in the Ocean cluster. 
Cannot be configured", "autoscaler: Describes the Ocean Kubernetes autoscaler. :param pulumi.Input[list] blacklists: Instance types not allowed", "auto assign public ip. \"\"\" tags: pulumi.Output[list] \"\"\" Optionally adds tags to instances", "utilize_reserved_instances=None, whitelists=None, __name__=None, __opts__=None): \"\"\" Provides a Spotinst Ocean AWS resource. :param str", "run in. :param pulumi.Input[list] security_groups: One or more security group ids. :param pulumi.Input[float]", "region the cluster will run in. \"\"\" security_groups: pulumi.Output[list] \"\"\" One or more", "utilities, tables class Ocean(pulumi.CustomResource): autoscaler: pulumi.Output[dict] \"\"\" Describes the Ocean Kubernetes autoscaler. \"\"\"", "The number of instances to launch and maintain in the cluster. \"\"\" fallback_to_ondemand:", "pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist, OCean will utilize them before launching Spot", "the Ocean cluster. Cannot be configured if `whitelist` is configured. \"\"\" controller_id: pulumi.Output[str]", "region if security_groups is None: raise TypeError('Missing required property security_groups') __props__['security_groups'] = security_groups", "launch the instances. \"\"\" key_name: pulumi.Output[str] \"\"\" The key pair to attach the", "in an Ocean cluster. :param pulumi.Input[str] user_data: Base64-encoded MIME user data to make", "raise TypeError('Expected resource options to be a ResourceOptions instance') __props__ = dict() __props__['autoscaler']", "= name __props__['region'] = region if security_groups is None: raise TypeError('Missing required property", "= blacklists __props__['controller_id'] = controller_id __props__['desired_capacity'] = desired_capacity __props__['fallback_to_ondemand'] = fallback_to_ondemand __props__['iam_instance_profile'] =", "available to the instances. 
\"\"\" utilize_reserved_instances: pulumi.Output[bool] \"\"\" If Reserved instances exist, OCean", "identifiers for the Ocean cluster. Subnet IDs should be configured with auto assign", "= iam_instance_profile __props__['image_id'] = image_id __props__['key_name'] = key_name __props__['max_size'] = max_size __props__['min_size'] =", "not allowed in the Ocean cluster. Cannot be configured if `whitelist` is configured.", "pulumi.Output[list] \"\"\" One or more security group ids. \"\"\" spot_percentage: pulumi.Output[float] \"\"\" The", "TypeError('Missing required property subnet_ids') __props__['subnet_ids'] = subnet_ids __props__['tags'] = tags __props__['user_data'] = user_data", "warnings.warn(\"explicit use of __opts__ is deprecated, use 'opts' instead\", DeprecationWarning) opts = __opts__", "__props__['security_groups'] = security_groups __props__['spot_percentage'] = spot_percentage if subnet_ids is None: raise TypeError('Missing required", "Ocean cluster. Subnet IDs should be configured with auto assign public ip. \"\"\"", "can scale up to. :param pulumi.Input[int] min_size: The lower limit of instances the", "a ResourceOptions instance') __props__ = dict() __props__['autoscaler'] = autoscaler __props__['blacklists'] = blacklists __props__['controller_id']", "available to the instances. :param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist, OCean will", "The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param", "import pulumi import pulumi.runtime from .. import utilities, tables class Ocean(pulumi.CustomResource): autoscaler: pulumi.Output[dict]", "not None: warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning) resource_name = __name__ if", "comma-separated list of subnet identifiers for the Ocean cluster. Subnet IDs should be", "should maintain. Min 0, max 100. 
\"\"\" subnet_ids: pulumi.Output[list] \"\"\" A comma-separated list", "pulumi.Input[str] image_id: ID of the image used to launch the instances. :param pulumi.Input[str]", "pulumi.Output[bool] \"\"\" If Reserved instances exist, OCean will utilize them before launching Spot", "an Ocean cluster. :param pulumi.Input[str] user_data: Base64-encoded MIME user data to make available", "max_size: pulumi.Output[int] \"\"\" The upper limit of instances the cluster can scale up", "maintain. Min 0, max 100. \"\"\" subnet_ids: pulumi.Output[list] \"\"\" A comma-separated list of", "unless you're certain you know what you are doing! *** import json import", "*** import json import warnings import pulumi import pulumi.runtime from .. import utilities,", "\"\"\" The upper limit of instances the cluster can scale up to. \"\"\"", "__name__=None, __opts__=None): \"\"\" Provides a Spotinst Ocean AWS resource. :param str resource_name: The", "an Ocean cluster. \"\"\" user_data: pulumi.Output[str] \"\"\" Base64-encoded MIME user data to make", "assign public ip. \"\"\" tags: pulumi.Output[list] \"\"\" Optionally adds tags to instances launched", "import json import warnings import pulumi import pulumi.runtime from .. import utilities, tables", "None: raise TypeError('Missing required property security_groups') __props__['security_groups'] = security_groups __props__['spot_percentage'] = spot_percentage if", "configured with auto assign public ip. :param pulumi.Input[list] tags: Optionally adds tags to", "__name__ is deprecated\", DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn(\"explicit", ":param pulumi.Input[str] region: The region the cluster will run in. :param pulumi.Input[list] security_groups:", "The region the cluster will run in. \"\"\" security_groups: pulumi.Output[list] \"\"\" One or", "enable Ocean to launch On-Demand instances instead. 
\"\"\" iam_instance_profile: pulumi.Output[str] \"\"\" The instance", "\"\"\" If not Spot instance markets are available, enable Ocean to launch On-Demand", "blacklists: Instance types not allowed in the Ocean cluster. Cannot be configured if", "allowed in the Ocean cluster. Cannot be configured if `whitelist` is configured. \"\"\"", "MIME user data to make available to the instances. :param pulumi.Input[bool] utilize_reserved_instances: If", "can scale down to. :param pulumi.Input[str] name: The cluster name. :param pulumi.Input[str] region:", "exist, OCean will utilize them before launching Spot instances. \"\"\" whitelists: pulumi.Output[list] \"\"\"", "public ip. :param pulumi.Input[list] tags: Optionally adds tags to instances launched in an", "subnet_ids=None, tags=None, user_data=None, utilize_reserved_instances=None, whitelists=None, __name__=None, __opts__=None): \"\"\" Provides a Spotinst Ocean AWS", "attach the instances. :param pulumi.Input[int] max_size: The upper limit of instances the cluster", "cluster. Cannot be configured if `whitelist` is configured. :param pulumi.Input[str] controller_id: The ocean", "\"\"\" key_name: pulumi.Output[str] \"\"\" The key pair to attach the instances. \"\"\" max_size:", "down to. :param pulumi.Input[str] name: The cluster name. :param pulumi.Input[str] region: The region", "scale up to. :param pulumi.Input[int] min_size: The lower limit of instances the cluster", "pulumi.Input[str] region: The region the cluster will run in. :param pulumi.Input[list] security_groups: One", "subnet_ids: A comma-separated list of subnet identifiers for the Ocean cluster. Subnet IDs", "json import warnings import pulumi import pulumi.runtime from .. import utilities, tables class", "utilize them before launching Spot instances. \"\"\" whitelists: pulumi.Output[list] \"\"\" Instance types allowed", "this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. 
*** #", "\"\"\" Optionally adds tags to instances launched in an Ocean cluster. \"\"\" user_data:", "region the cluster will run in. :param pulumi.Input[list] security_groups: One or more security", "security group ids. :param pulumi.Input[float] spot_percentage: The percentage of Spot instances the cluster", "resource. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options", "= __name__ if __opts__ is not None: warnings.warn(\"explicit use of __opts__ is deprecated,", "The cluster name. \"\"\" region: pulumi.Output[str] \"\"\" The region the cluster will run", "\"\"\" The ocean cluster identifier. Example: `ocean.k8s` \"\"\" desired_capacity: pulumi.Output[int] \"\"\" The number", "maintain in the cluster. \"\"\" fallback_to_ondemand: pulumi.Output[bool] \"\"\" If not Spot instance markets", "down to. \"\"\" name: pulumi.Output[str] \"\"\" The cluster name. \"\"\" region: pulumi.Output[str] \"\"\"", "\"\"\" Provides a Spotinst Ocean AWS resource. :param str resource_name: The name of", "used to launch the instances. :param pulumi.Input[str] key_name: The key pair to attach", "'opts' instead\", DeprecationWarning) opts = __opts__ if not resource_name: raise TypeError('Missing resource name", "iam role. :param pulumi.Input[str] image_id: ID of the image used to launch the", "instance') __props__ = dict() __props__['autoscaler'] = autoscaler __props__['blacklists'] = blacklists __props__['controller_id'] = controller_id", "instances. :param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist, OCean will utilize them before", "pulumi.Input[list] tags: Optionally adds tags to instances launched in an Ocean cluster. :param", "name: The cluster name. :param pulumi.Input[str] region: The region the cluster will run", "to make available to the instances. 
:param pulumi.Input[bool] utilize_reserved_instances: If Reserved instances exist,", "utilize_reserved_instances: pulumi.Output[bool] \"\"\" If Reserved instances exist, OCean will utilize them before launching", "isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') __props__ =", "__init__(__self__, resource_name, opts=None, autoscaler=None, blacklists=None, controller_id=None, desired_capacity=None, fallback_to_ondemand=None, iam_instance_profile=None, image_id=None, key_name=None, max_size=None, min_size=None,", "resource name to be a string') if opts and not isinstance(opts, pulumi.ResourceOptions): raise", "Subnet IDs should be configured with auto assign public ip. :param pulumi.Input[list] tags:", "maintain. Min 0, max 100. :param pulumi.Input[list] subnet_ids: A comma-separated list of subnet", "image_id: pulumi.Output[str] \"\"\" ID of the image used to launch the instances. \"\"\"", "\"\"\" The lower limit of instances the cluster can scale down to. \"\"\"", "the cluster. :param pulumi.Input[bool] fallback_to_ondemand: If not Spot instance markets are available, enable", "= utilize_reserved_instances __props__['whitelists'] = whitelists super(Ocean, __self__).__init__( 'spotinst:aws/ocean:Ocean', resource_name, __props__, opts) def translate_output_property(self,", "ResourceOptions instance') __props__ = dict() __props__['autoscaler'] = autoscaler __props__['blacklists'] = blacklists __props__['controller_id'] =", "and maintain in the cluster. :param pulumi.Input[bool] fallback_to_ondemand: If not Spot instance markets", "pulumi.Input[dict] autoscaler: Describes the Ocean Kubernetes autoscaler. :param pulumi.Input[list] blacklists: Instance types not", "scale down to. \"\"\" name: pulumi.Output[str] \"\"\" The cluster name. \"\"\" region: pulumi.Output[str]", "max 100. 
:param pulumi.Input[list] subnet_ids: A comma-separated list of subnet identifiers for the", "to be a string') if opts and not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource", "`blacklist` is configured. \"\"\" if __name__ is not None: warnings.warn(\"explicit use of __name__", "__props__['min_size'] = min_size __props__['name'] = name __props__['region'] = region if security_groups is None:", "security_groups __props__['spot_percentage'] = spot_percentage if subnet_ids is None: raise TypeError('Missing required property subnet_ids')", "Optionally adds tags to instances launched in an Ocean cluster. :param pulumi.Input[str] user_data:", "in the Ocean cluster. Cannot be configured if `whitelist` is configured. :param pulumi.Input[str]", "pulumi.Input[int] min_size: The lower limit of instances the cluster can scale down to.", "__props__['key_name'] = key_name __props__['max_size'] = max_size __props__['min_size'] = min_size __props__['name'] = name __props__['region']", "tags: Optionally adds tags to instances launched in an Ocean cluster. :param pulumi.Input[str]", "pulumi.runtime from .. import utilities, tables class Ocean(pulumi.CustomResource): autoscaler: pulumi.Output[dict] \"\"\" Describes the", "the Ocean Kubernetes autoscaler. :param pulumi.Input[list] blacklists: Instance types not allowed in the", ":param pulumi.Input[str] iam_instance_profile: The instance profile iam role. :param pulumi.Input[str] image_id: ID of", "be configured with auto assign public ip. \"\"\" tags: pulumi.Output[list] \"\"\" Optionally adds", "them before launching Spot instances. :param pulumi.Input[list] whitelists: Instance types allowed in the", "cluster. Subnet IDs should be configured with auto assign public ip. :param pulumi.Input[list]", "On-Demand instances instead. :param pulumi.Input[str] iam_instance_profile: The instance profile iam role. 
:param pulumi.Input[str]", "pulumi.Output[bool] \"\"\" If not Spot instance markets are available, enable Ocean to launch", "spot_percentage=None, subnet_ids=None, tags=None, user_data=None, utilize_reserved_instances=None, whitelists=None, __name__=None, __opts__=None): \"\"\" Provides a Spotinst Ocean", "configured. \"\"\" controller_id: pulumi.Output[str] \"\"\" The ocean cluster identifier. Example: `ocean.k8s` \"\"\" desired_capacity:", "Describes the Ocean Kubernetes autoscaler. \"\"\" blacklists: pulumi.Output[list] \"\"\" Instance types not allowed", "cluster can scale down to. \"\"\" name: pulumi.Output[str] \"\"\" The cluster name. \"\"\"", "Cannot be configured if `blacklist` is configured. \"\"\" def __init__(__self__, resource_name, opts=None, autoscaler=None,", "limit of instances the cluster can scale up to. :param pulumi.Input[int] min_size: The", "use of __opts__ is deprecated, use 'opts' instead\", DeprecationWarning) opts = __opts__ if", "of __opts__ is deprecated, use 'opts' instead\", DeprecationWarning) opts = __opts__ if not", "Optionally adds tags to instances launched in an Ocean cluster. \"\"\" user_data: pulumi.Output[str]", "more security group ids. \"\"\" spot_percentage: pulumi.Output[float] \"\"\" The percentage of Spot instances", "ip. :param pulumi.Input[list] tags: Optionally adds tags to instances launched in an Ocean", "tags to instances launched in an Ocean cluster. \"\"\" user_data: pulumi.Output[str] \"\"\" Base64-encoded", "file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # ***", "autoscaler __props__['blacklists'] = blacklists __props__['controller_id'] = controller_id __props__['desired_capacity'] = desired_capacity __props__['fallback_to_ondemand'] = fallback_to_ondemand", "you know what you are doing! *** import json import warnings import pulumi", "\"\"\" region: pulumi.Output[str] \"\"\" The region the cluster will run in. \"\"\" security_groups:", "Spot instances the cluster should maintain. 
Min 0, max 100. \"\"\" subnet_ids: pulumi.Output[list]", "not resource_name: raise TypeError('Missing resource name argument (for URN creation)') if not isinstance(resource_name,", "instances. :param pulumi.Input[int] max_size: The upper limit of instances the cluster can scale", "<reponame>346/pulumi-spotinst<filename>sdk/python/pulumi_spotinst/aws/ocean.py<gh_stars>1-10 # coding=utf-8 # *** WARNING: this file was generated by the Pulumi", "cluster will run in. \"\"\" security_groups: pulumi.Output[list] \"\"\" One or more security group", "Spot instances the cluster should maintain. Min 0, max 100. :param pulumi.Input[list] subnet_ids:", "resource options to be a ResourceOptions instance') __props__ = dict() __props__['autoscaler'] = autoscaler", "for the Ocean cluster. Subnet IDs should be configured with auto assign public", "is configured. \"\"\" if __name__ is not None: warnings.warn(\"explicit use of __name__ is", "if __name__ is not None: warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning) resource_name", "isinstance(resource_name, str): raise TypeError('Expected resource name to be a string') if opts and", "was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do", "launch the instances. :param pulumi.Input[str] key_name: The key pair to attach the instances.", "\"\"\" fallback_to_ondemand: pulumi.Output[bool] \"\"\" If not Spot instance markets are available, enable Ocean", "if subnet_ids is None: raise TypeError('Missing required property subnet_ids') __props__['subnet_ids'] = subnet_ids __props__['tags']", "Ocean cluster. Cannot be configured if `blacklist` is configured. \"\"\" if __name__ is", "pulumi.ResourceOptions opts: Options for the resource. 
:param pulumi.Input[dict] autoscaler: Describes the Ocean Kubernetes", ":param pulumi.Input[bool] fallback_to_ondemand: If not Spot instance markets are available, enable Ocean to", ":param pulumi.Input[list] blacklists: Instance types not allowed in the Ocean cluster. Cannot be", "to attach the instances. :param pulumi.Input[int] max_size: The upper limit of instances the", "types not allowed in the Ocean cluster. Cannot be configured if `whitelist` is", "be a ResourceOptions instance') __props__ = dict() __props__['autoscaler'] = autoscaler __props__['blacklists'] = blacklists", "what you are doing! *** import json import warnings import pulumi import pulumi.runtime", "should be configured with auto assign public ip. \"\"\" tags: pulumi.Output[list] \"\"\" Optionally", "= controller_id __props__['desired_capacity'] = desired_capacity __props__['fallback_to_ondemand'] = fallback_to_ondemand __props__['iam_instance_profile'] = iam_instance_profile __props__['image_id'] =", "__opts__ is not None: warnings.warn(\"explicit use of __opts__ is deprecated, use 'opts' instead\"," ]
[ "for endless # mzs=['matzi/',''] # sub_topics =[mzs[nb]+'#','#'] # pub_topics = [mzs[nb]+'test', 'test'] #", "= 'data\\\\homedata_05_2.db' # SQLite db_init = False #False # True if we need", "# pub_topic = mzs[nb]+'system/state' msg_system = ['normal', 'issue','No issue'] wait_time = 5 broker_ip=brokers[nb]", "False issave = False # DSP init data percen_thr=0.05 # 5% of max", "= 5 broker_ip=brokers[nb] broker_port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] # sub_topic =", "db_name = 'data\\\\homedata_05_2.db' # SQLite db_init = False #False # True if we", "0- HIT-\"192.168.3.11\", 1 - open HiveMQ - broker.hivemq.com brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"] ports=['80','1883','1883'] usernames", "socket nb=1 # 0- HIT-\"192.168.3.11\", 1 - open HiveMQ - broker.hivemq.com brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18", "module init data isplot = False issave = False # DSP init data", "<PASSWORD>[nb] # sub_topic = sub_topics[nb] # pub_topic = pub_topics[nb] # Common conn_time =", "<filename>init.py # configuration module import socket nb=1 # 0- HIT-\"192.168.3.11\", 1 - open", "sub_topic = [mzs[nb]+'bearer/accel/status', mzs[nb]+'bearer/belt/status'] # pub_topic = mzs[nb]+'system/state' msg_system = ['normal', 'issue','No issue']", "# ext_man = mzs[nb]+'system/command' # sub_topic = [mzs[nb]+'bearer/accel/status', mzs[nb]+'bearer/belt/status'] # pub_topic = mzs[nb]+'system/state'", "issave = False # DSP init data percen_thr=0.05 # 5% of max energy", "= 2048.0 deviation_percentage = 10 max_eucl = 0.5 # Acq init data acqtime", "sub_topic = sub_topics[nb] # pub_topic = pub_topics[nb] # Common conn_time = 0 #", "'data\\\\homedata_05_2.db' # SQLite db_init = False #False # True if we need reinit", "stands for endless # mzs=['matzi/',''] # sub_topics =[mzs[nb]+'#','#'] # pub_topics = [mzs[nb]+'test', 
'test']", "= sub_topics[nb] # pub_topic = pub_topics[nb] # Common conn_time = 0 # 0", "# pub_topics = [mzs[nb]+'test', 'test'] # ext_man = mzs[nb]+'system/command' # sub_topic = [mzs[nb]+'bearer/accel/status',", "# FFT module init data isplot = False issave = False # DSP", "HiveMQ - broker.hivemq.com brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"] ports=['80','1883','1883'] usernames = ['','',''] # should be", "pub_topics[nb] # Common conn_time = 0 # 0 stands for endless loop comm_topic", "5% of max energy holds Fs = 2048.0 deviation_percentage = 10 max_eucl =", "energy holds Fs = 2048.0 deviation_percentage = 10 max_eucl = 0.5 # Acq", "init data db_name = 'data\\\\homedata_05_2.db' # SQLite db_init = False #False # True", "HIT passwords = ['','',''] # should be modified for HIT broker_ip=brokers[nb] port=ports[nb] username", "= 60.0 # sec manag_time = 10 # sec # DB init data", "wait_time = 5 broker_ip=brokers[nb] broker_port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] # sub_topic", "10 # sec # DB init data db_name = 'data\\\\homedata_05_2.db' # SQLite db_init", "endless loop comm_topic = 'pr/Smart/' #comm_topic = 'pr/Smart/Home/' # FFT module init data", "broker_port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] # sub_topic = sub_topics[nb] # pub_topic", "#comm_topic = 'pr/Smart/Home/' # FFT module init data isplot = False issave =", "= 'pr/Smart/Home/' # FFT module init data isplot = False issave = False", "0 # 0 stands for endless # mzs=['matzi/',''] # sub_topics =[mzs[nb]+'#','#'] # pub_topics", "be modified for HIT passwords = ['','',''] # should be modified for HIT", "# 0 stands for endless # mzs=['matzi/',''] # sub_topics =[mzs[nb]+'#','#'] # pub_topics =", "usernames[nb] password = <PASSWORD>[nb] # sub_topic = sub_topics[nb] # pub_topic = pub_topics[nb] #", "percen_thr=0.05 # 5% of max energy holds Fs = 2048.0 deviation_percentage = 10", "= 
['','',''] # should be modified for HIT passwords = ['','',''] # should", "False # DSP init data percen_thr=0.05 # 5% of max energy holds Fs", "= 0.5 # Acq init data acqtime = 60.0 # sec manag_time =", "# 0 stands for endless loop comm_topic = 'pr/Smart/' #comm_topic = 'pr/Smart/Home/' #", "[mzs[nb]+'test', 'test'] # ext_man = mzs[nb]+'system/command' # sub_topic = [mzs[nb]+'bearer/accel/status', mzs[nb]+'bearer/belt/status'] # pub_topic", "data db_name = 'data\\\\homedata_05_2.db' # SQLite db_init = False #False # True if", "usernames[nb] password = <PASSWORD>[nb] conn_time = 0 # 0 stands for endless #", "endless # mzs=['matzi/',''] # sub_topics =[mzs[nb]+'#','#'] # pub_topics = [mzs[nb]+'test', 'test'] # ext_man", "pub_topic = pub_topics[nb] # Common conn_time = 0 # 0 stands for endless", "10 max_eucl = 0.5 # Acq init data acqtime = 60.0 # sec", "conn_time = 0 # 0 stands for endless # mzs=['matzi/',''] # sub_topics =[mzs[nb]+'#','#']", "= 0 # 0 stands for endless loop comm_topic = 'pr/Smart/' #comm_topic =", "for HIT passwords = ['','',''] # should be modified for HIT broker_ip=brokers[nb] port=ports[nb]", "=[mzs[nb]+'#','#'] # pub_topics = [mzs[nb]+'test', 'test'] # ext_man = mzs[nb]+'system/command' # sub_topic =", "- open HiveMQ - broker.hivemq.com brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"] ports=['80','1883','1883'] usernames = ['','',''] #", "= False # DSP init data percen_thr=0.05 # 5% of max energy holds", "[mzs[nb]+'bearer/accel/status', mzs[nb]+'bearer/belt/status'] # pub_topic = mzs[nb]+'system/state' msg_system = ['normal', 'issue','No issue'] wait_time =", "= [mzs[nb]+'test', 'test'] # ext_man = mzs[nb]+'system/command' # sub_topic = [mzs[nb]+'bearer/accel/status', mzs[nb]+'bearer/belt/status'] #", "60.0 # sec manag_time = 10 # sec # DB init data db_name", "= usernames[nb] password = <PASSWORD>[nb] conn_time = 0 # 0 stands for endless", "broker_ip=brokers[nb] 
port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] conn_time = 0 # 0", "# Common conn_time = 0 # 0 stands for endless loop comm_topic =", "should be modified for HIT passwords = ['','',''] # should be modified for", ".194.176.210\"] ports=['80','1883','1883'] usernames = ['','',''] # should be modified for HIT passwords =", "init data acqtime = 60.0 # sec manag_time = 10 # sec #", "db_init = False #False # True if we need reinit smart home setup", "sec manag_time = 10 # sec # DB init data db_name = 'data\\\\homedata_05_2.db'", "= 10 max_eucl = 0.5 # Acq init data acqtime = 60.0 #", "# sub_topic = sub_topics[nb] # pub_topic = pub_topics[nb] # Common conn_time = 0", "open HiveMQ - broker.hivemq.com brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"] ports=['80','1883','1883'] usernames = ['','',''] # should", "= usernames[nb] password = <PASSWORD>[nb] # sub_topic = sub_topics[nb] # pub_topic = pub_topics[nb]", "issue'] wait_time = 5 broker_ip=brokers[nb] broker_port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] #", "# sub_topics =[mzs[nb]+'#','#'] # pub_topics = [mzs[nb]+'test', 'test'] # ext_man = mzs[nb]+'system/command' #", "0 # 0 stands for endless loop comm_topic = 'pr/Smart/' #comm_topic = 'pr/Smart/Home/'", "max energy holds Fs = 2048.0 deviation_percentage = 10 max_eucl = 0.5 #", "True if we need reinit smart home setup # Meters consuption limits\" Water_max=0.02", "loop comm_topic = 'pr/Smart/' #comm_topic = 'pr/Smart/Home/' # FFT module init data isplot", "= mzs[nb]+'system/state' msg_system = ['normal', 'issue','No issue'] wait_time = 5 broker_ip=brokers[nb] broker_port=ports[nb] username", "Fs = 2048.0 deviation_percentage = 10 max_eucl = 0.5 # Acq init data", "module import socket nb=1 # 0- HIT-\"192.168.3.11\", 1 - open HiveMQ - broker.hivemq.com", "= 0 # 0 stands for endless # mzs=['matzi/',''] # sub_topics =[mzs[nb]+'#','#'] #", "# Acq init data 
acqtime = 60.0 # sec manag_time = 10 #", "# sec # DB init data db_name = 'data\\\\homedata_05_2.db' # SQLite db_init =", "mzs[nb]+'system/state' msg_system = ['normal', 'issue','No issue'] wait_time = 5 broker_ip=brokers[nb] broker_port=ports[nb] username =", "conn_time = 0 # 0 stands for endless loop comm_topic = 'pr/Smart/' #comm_topic", "'issue','No issue'] wait_time = 5 broker_ip=brokers[nb] broker_port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb]", "'test'] # ext_man = mzs[nb]+'system/command' # sub_topic = [mzs[nb]+'bearer/accel/status', mzs[nb]+'bearer/belt/status'] # pub_topic =", "broker_ip=brokers[nb] broker_port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] # sub_topic = sub_topics[nb] #", "DB init data db_name = 'data\\\\homedata_05_2.db' # SQLite db_init = False #False #", "= ['','',''] # should be modified for HIT broker_ip=brokers[nb] port=ports[nb] username = usernames[nb]", "#False # True if we need reinit smart home setup # Meters consuption", "comm_topic = 'pr/Smart/' #comm_topic = 'pr/Smart/Home/' # FFT module init data isplot =", "deviation_percentage = 10 max_eucl = 0.5 # Acq init data acqtime = 60.0", "= mzs[nb]+'system/command' # sub_topic = [mzs[nb]+'bearer/accel/status', mzs[nb]+'bearer/belt/status'] # pub_topic = mzs[nb]+'system/state' msg_system =", "# should be modified for HIT passwords = ['','',''] # should be modified", "SQLite db_init = False #False # True if we need reinit smart home", "sub_topics[nb] # pub_topic = pub_topics[nb] # Common conn_time = 0 # 0 stands", "['','',''] # should be modified for HIT passwords = ['','',''] # should be", "['normal', 'issue','No issue'] wait_time = 5 broker_ip=brokers[nb] broker_port=ports[nb] username = usernames[nb] password =", "= 10 # sec # DB init data db_name = 'data\\\\homedata_05_2.db' # SQLite", "= <PASSWORD>[nb] # sub_topic = sub_topics[nb] # pub_topic = pub_topics[nb] # Common conn_time", "# configuration module import socket nb=1 # 0- 
HIT-\"192.168.3.11\", 1 - open HiveMQ", "= ['normal', 'issue','No issue'] wait_time = 5 broker_ip=brokers[nb] broker_port=ports[nb] username = usernames[nb] password", "usernames = ['','',''] # should be modified for HIT passwords = ['','',''] #", "0.5 # Acq init data acqtime = 60.0 # sec manag_time = 10", "stands for endless loop comm_topic = 'pr/Smart/' #comm_topic = 'pr/Smart/Home/' # FFT module", "= [mzs[nb]+'bearer/accel/status', mzs[nb]+'bearer/belt/status'] # pub_topic = mzs[nb]+'system/state' msg_system = ['normal', 'issue','No issue'] wait_time", "ports=['80','1883','1883'] usernames = ['','',''] # should be modified for HIT passwords = ['','','']", "Common conn_time = 0 # 0 stands for endless loop comm_topic = 'pr/Smart/'", "passwords = ['','',''] # should be modified for HIT broker_ip=brokers[nb] port=ports[nb] username =", "init data percen_thr=0.05 # 5% of max energy holds Fs = 2048.0 deviation_percentage", "import socket nb=1 # 0- HIT-\"192.168.3.11\", 1 - open HiveMQ - broker.hivemq.com brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')),", "str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"] ports=['80','1883','1883'] usernames = ['','',''] # should be modified for HIT passwords", "isplot = False issave = False # DSP init data percen_thr=0.05 # 5%", "# 5% of max energy holds Fs = 2048.0 deviation_percentage = 10 max_eucl", "2048.0 deviation_percentage = 10 max_eucl = 0.5 # Acq init data acqtime =", "username = usernames[nb] password = <PASSWORD>[nb] conn_time = 0 # 0 stands for", "holds Fs = 2048.0 deviation_percentage = 10 max_eucl = 0.5 # Acq init", "max_eucl = 0.5 # Acq init data acqtime = 60.0 # sec manag_time", "# sec manag_time = 10 # sec # DB init data db_name =", "password = <PASSWORD>[nb] conn_time = 0 # 0 stands for endless # mzs=['matzi/','']", "data acqtime = 60.0 # sec manag_time = 10 # sec # DB", "nb=1 # 0- HIT-\"192.168.3.11\", 1 - open HiveMQ - broker.hivemq.com 
brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"]", "False #False # True if we need reinit smart home setup # Meters", "modified for HIT passwords = ['','',''] # should be modified for HIT broker_ip=brokers[nb]", "FFT module init data isplot = False issave = False # DSP init", "1 - open HiveMQ - broker.hivemq.com brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"] ports=['80','1883','1883'] usernames = ['','','']", "modified for HIT broker_ip=brokers[nb] port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] conn_time =", "= False issave = False # DSP init data percen_thr=0.05 # 5% of", "msg_system = ['normal', 'issue','No issue'] wait_time = 5 broker_ip=brokers[nb] broker_port=ports[nb] username = usernames[nb]", "mzs=['matzi/',''] # sub_topics =[mzs[nb]+'#','#'] # pub_topics = [mzs[nb]+'test', 'test'] # ext_man = mzs[nb]+'system/command'", "<PASSWORD>[nb] conn_time = 0 # 0 stands for endless # mzs=['matzi/',''] # sub_topics", "ext_man = mzs[nb]+'system/command' # sub_topic = [mzs[nb]+'bearer/accel/status', mzs[nb]+'bearer/belt/status'] # pub_topic = mzs[nb]+'system/state' msg_system", "for endless loop comm_topic = 'pr/Smart/' #comm_topic = 'pr/Smart/Home/' # FFT module init", "acqtime = 60.0 # sec manag_time = 10 # sec # DB init", "HIT broker_ip=brokers[nb] port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] conn_time = 0 #", "mzs[nb]+'bearer/belt/status'] # pub_topic = mzs[nb]+'system/state' msg_system = ['normal', 'issue','No issue'] wait_time = 5", "= pub_topics[nb] # Common conn_time = 0 # 0 stands for endless loop", "Acq init data acqtime = 60.0 # sec manag_time = 10 # sec", "brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"] ports=['80','1883','1883'] usernames = ['','',''] # should be modified for 
HIT", "0 stands for endless # mzs=['matzi/',''] # sub_topics =[mzs[nb]+'#','#'] # pub_topics = [mzs[nb]+'test',", "# 0- HIT-\"192.168.3.11\", 1 - open HiveMQ - broker.hivemq.com brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"] ports=['80','1883','1883']", "sec # DB init data db_name = 'data\\\\homedata_05_2.db' # SQLite db_init = False", "of max energy holds Fs = 2048.0 deviation_percentage = 10 max_eucl = 0.5", "- broker.hivemq.com brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"] ports=['80','1883','1883'] usernames = ['','',''] # should be modified", "HIT-\"192.168.3.11\", 1 - open HiveMQ - broker.hivemq.com brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"] ports=['80','1883','1883'] usernames =", "for HIT broker_ip=brokers[nb] port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] conn_time = 0", "pub_topics = [mzs[nb]+'test', 'test'] # ext_man = mzs[nb]+'system/command' # sub_topic = [mzs[nb]+'bearer/accel/status', mzs[nb]+'bearer/belt/status']", "= <PASSWORD>[nb] conn_time = 0 # 0 stands for endless # mzs=['matzi/',''] #", "username = usernames[nb] password = <PASSWORD>[nb] # sub_topic = sub_topics[nb] # pub_topic =", "# SQLite db_init = False #False # True if we need reinit smart", "'pr/Smart/' #comm_topic = 'pr/Smart/Home/' # FFT module init data isplot = False issave", "should be modified for HIT broker_ip=brokers[nb] port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb]", "port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] conn_time = 0 # 0 stands", "# DB init data db_name = 'data\\\\homedata_05_2.db' # SQLite db_init = False #False", "sub_topics =[mzs[nb]+'#','#'] # pub_topics = [mzs[nb]+'test', 'test'] # ext_man = mzs[nb]+'system/command' # sub_topic", "# pub_topic = 
pub_topics[nb] # Common conn_time = 0 # 0 stands for", "data percen_thr=0.05 # 5% of max energy holds Fs = 2048.0 deviation_percentage =", "init data isplot = False issave = False # DSP init data percen_thr=0.05", "be modified for HIT broker_ip=brokers[nb] port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] conn_time", "5 broker_ip=brokers[nb] broker_port=ports[nb] username = usernames[nb] password = <PASSWORD>[nb] # sub_topic = sub_topics[nb]", "password = <PASSWORD>[nb] # sub_topic = sub_topics[nb] # pub_topic = pub_topics[nb] # Common", "# should be modified for HIT broker_ip=brokers[nb] port=ports[nb] username = usernames[nb] password =", "= 'pr/Smart/' #comm_topic = 'pr/Smart/Home/' # FFT module init data isplot = False", "manag_time = 10 # sec # DB init data db_name = 'data\\\\homedata_05_2.db' #", "'pr/Smart/Home/' # FFT module init data isplot = False issave = False #", "= False #False # True if we need reinit smart home setup #", "# mzs=['matzi/',''] # sub_topics =[mzs[nb]+'#','#'] # pub_topics = [mzs[nb]+'test', 'test'] # ext_man =", "if we need reinit smart home setup # Meters consuption limits\" Water_max=0.02 Elec_max=1.8", "pub_topic = mzs[nb]+'system/state' msg_system = ['normal', 'issue','No issue'] wait_time = 5 broker_ip=brokers[nb] broker_port=ports[nb]", "data isplot = False issave = False # DSP init data percen_thr=0.05 #", "mzs[nb]+'system/command' # sub_topic = [mzs[nb]+'bearer/accel/status', mzs[nb]+'bearer/belt/status'] # pub_topic = mzs[nb]+'system/state' msg_system = ['normal',", "configuration module import socket nb=1 # 0- HIT-\"192.168.3.11\", 1 - open HiveMQ -", "broker.hivemq.com brokers=[str(socket.gethostbyname('vmm1.saaintertrade.com')), str(socket.gethostbyname('broker.hivemq.com')),\"18 .194.176.210\"] ports=['80','1883','1883'] usernames = ['','',''] # should be modified for", "# DSP init data percen_thr=0.05 # 5% of max energy holds Fs =", "# sub_topic = [mzs[nb]+'bearer/accel/status', 
mzs[nb]+'bearer/belt/status'] # pub_topic = mzs[nb]+'system/state' msg_system = ['normal', 'issue','No", "# True if we need reinit smart home setup # Meters consuption limits\"", "DSP init data percen_thr=0.05 # 5% of max energy holds Fs = 2048.0", "0 stands for endless loop comm_topic = 'pr/Smart/' #comm_topic = 'pr/Smart/Home/' # FFT", "['','',''] # should be modified for HIT broker_ip=brokers[nb] port=ports[nb] username = usernames[nb] password" ]
[ "DM_mode.append(i[1]) DM_font.append(i[2]) DM_color.append(i[3]) DM_realTime.append(i[4]) DM_pool.append(i[5]) DM_userID.append(i[6]) DM_id.append(i[7]) for i in dm_text: DM_text.append(i) dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color,", "Cids=[] # Cid数组,用于填充url with open('Urls/Cid.txt', 'r') as f: for line in f.readlines(): Cids.append(line.strip())", "dates.append(str(d.strftime(\"%Y-%m-%d\"))) d += delta Cids=[] # Cid数组,用于填充url with open('Urls/Cid.txt', 'r') as f: for", "DM_color.append(i[3]) DM_realTime.append(i[4]) DM_pool.append(i[5]) DM_userID.append(i[6]) DM_id.append(i[7]) for i in dm_text: DM_text.append(i) dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color, \"DM_realTime\":DM_realTime,\"DM_pool\":DM_pool,\"DM_userID\":DM_userID,\"DM_id\":DM_id,\"DM_text\":DM_text} d=pd.DataFrame(dt)", "url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers) #返回文本信息 html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml') #建立soup对象 all=soup.find_all(\"d\") for d in all: # 弹幕数据", "遍历日期 包括begin和end的日期 生成类似2020-05-03的格式的日期 begin = datetime.date(2020,5,3) end = datetime.date(2020,6,9) d = begin delta", "DM_text = [] print(\"正在爬取第\" + str(sets) + \"期的《睡前消息》弹幕...\") for date in dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date", "open('Urls/Cid.txt', 'r') as f: for line in f.readlines(): Cids.append(line.strip()) for cid in Cids:", "# 弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\")) # 弹幕本体 dm_text.append(d.get_text()) # 分别把数据存进这几个数组 for i in dm_data: DM_time.append(i[0])", "DM_userID.append(i[6]) DM_id.append(i[7]) for i in dm_text: DM_text.append(i) dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color, \"DM_realTime\":DM_realTime,\"DM_pool\":DM_pool,\"DM_userID\":DM_userID,\"DM_id\":DM_id,\"DM_text\":DM_text} d=pd.DataFrame(dt) 
d.to_csv('./Danmu/Danmu-'+str(sets)+'.csv',encoding='utf-8-sig') #存储弹幕信息 print(\"已将弹幕放入到Danmu-\"+str(sets)+\".csv文件中\")", "= [] DM_font = [] DM_color = [] DM_realTime = [] DM_pool =", "soup=BeautifulSoup(html.text,'lxml') #建立soup对象 all=soup.find_all(\"d\") for d in all: # 弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\")) # 弹幕本体 dm_text.append(d.get_text())", "datetime.timedelta(days=1) while d <= end: dates.append(str(d.strftime(\"%Y-%m-%d\"))) d += delta Cids=[] # Cid数组,用于填充url with", "pd import requests import datetime headers={ \"User-Agent\":\"\", \"Connection\": \"keep-alive\", # 这个cookie的获取方法在文档中已说明 \"Cookie\":\"\" }", "while d <= end: dates.append(str(d.strftime(\"%Y-%m-%d\"))) d += delta Cids=[] # Cid数组,用于填充url with open('Urls/Cid.txt',", "all: # 弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\")) # 弹幕本体 dm_text.append(d.get_text()) # 分别把数据存进这几个数组 for i in dm_data:", "dm_data.append(str(d.get(\"p\")).split(\",\")) # 弹幕本体 dm_text.append(d.get_text()) # 分别把数据存进这几个数组 for i in dm_data: DM_time.append(i[0]) DM_mode.append(i[1]) DM_font.append(i[2])", "DM_time = [] DM_mode = [] DM_font = [] DM_color = [] DM_realTime", "import pandas as pd import requests import datetime headers={ \"User-Agent\":\"\", \"Connection\": \"keep-alive\", #", "from bs4 import BeautifulSoup import time import pandas as pd import requests import", "# 弹幕本体 dm_text.append(d.get_text()) # 分别把数据存进这几个数组 for i in dm_data: DM_time.append(i[0]) DM_mode.append(i[1]) DM_font.append(i[2]) DM_color.append(i[3])", "delta = datetime.timedelta(days=1) while d <= end: dates.append(str(d.strftime(\"%Y-%m-%d\"))) d += delta Cids=[] #", "in Cids: # 每次都要重置这些数据 dm_data = [] # 弹幕数据 dm_text = [] #", "[] DM_id = [] DM_text = [] print(\"正在爬取第\" + str(sets) + \"期的《睡前消息》弹幕...\") for", "begin delta = datetime.timedelta(days=1) while d <= end: dates.append(str(d.strftime(\"%Y-%m-%d\"))) d += delta Cids=[]", "DM_id = [] DM_text = [] print(\"正在爬取第\" + str(sets) + \"期的《睡前消息》弹幕...\") for date", "for line in 
f.readlines(): Cids.append(line.strip()) for cid in Cids: # 每次都要重置这些数据 dm_data =", "datetime.date(2020,5,3) end = datetime.date(2020,6,9) d = begin delta = datetime.timedelta(days=1) while d <=", "\"User-Agent\":\"\", \"Connection\": \"keep-alive\", # 这个cookie的获取方法在文档中已说明 \"Cookie\":\"\" } sets=124 # 最新一期的数字 dates=[] # 日期数组,用于填充url", "html=requests.get(url=url,headers=headers) #返回文本信息 html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml') #建立soup对象 all=soup.find_all(\"d\") for d in all: # 弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\"))", "f: for line in f.readlines(): Cids.append(line.strip()) for cid in Cids: # 每次都要重置这些数据 dm_data", "= [] DM_userID = [] DM_id = [] DM_text = [] print(\"正在爬取第\" +", "DM_time.append(i[0]) DM_mode.append(i[1]) DM_font.append(i[2]) DM_color.append(i[3]) DM_realTime.append(i[4]) DM_pool.append(i[5]) DM_userID.append(i[6]) DM_id.append(i[7]) for i in dm_text: DM_text.append(i)", "'r') as f: for line in f.readlines(): Cids.append(line.strip()) for cid in Cids: #", "#建立soup对象 all=soup.find_all(\"d\") for d in all: # 弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\")) # 弹幕本体 dm_text.append(d.get_text()) #", "import BeautifulSoup import time import pandas as pd import requests import datetime headers={", "# 弹幕的八个参数和弹幕本体 DM_time = [] DM_mode = [] DM_font = [] DM_color =", "= [] DM_mode = [] DM_font = [] DM_color = [] DM_realTime =", "DM_mode = [] DM_font = [] DM_color = [] DM_realTime = [] DM_pool", "最新一期的数字 dates=[] # 日期数组,用于填充url # 遍历日期 包括begin和end的日期 生成类似2020-05-03的格式的日期 begin = datetime.date(2020,5,3) end =", "= [] # 弹幕本体 # 弹幕的八个参数和弹幕本体 DM_time = [] DM_mode = [] DM_font", "\"期的《睡前消息》弹幕...\") for date in dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers) #返回文本信息 html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml') #建立soup对象 all=soup.find_all(\"d\") for", "with open('Urls/Cid.txt', 'r') as f: for line in f.readlines(): Cids.append(line.strip()) for cid 
in", "time import pandas as pd import requests import datetime headers={ \"User-Agent\":\"\", \"Connection\": \"keep-alive\",", "in f.readlines(): Cids.append(line.strip()) for cid in Cids: # 每次都要重置这些数据 dm_data = [] #", "= [] DM_text = [] print(\"正在爬取第\" + str(sets) + \"期的《睡前消息》弹幕...\") for date in", "bs4 import BeautifulSoup import time import pandas as pd import requests import datetime", "[] DM_userID = [] DM_id = [] DM_text = [] print(\"正在爬取第\" + str(sets)", "# 这个cookie的获取方法在文档中已说明 \"Cookie\":\"\" } sets=124 # 最新一期的数字 dates=[] # 日期数组,用于填充url # 遍历日期 包括begin和end的日期", "end: dates.append(str(d.strftime(\"%Y-%m-%d\"))) d += delta Cids=[] # Cid数组,用于填充url with open('Urls/Cid.txt', 'r') as f:", "d in all: # 弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\")) # 弹幕本体 dm_text.append(d.get_text()) # 分别把数据存进这几个数组 for i", "[] DM_mode = [] DM_font = [] DM_color = [] DM_realTime = []", "日期数组,用于填充url # 遍历日期 包括begin和end的日期 生成类似2020-05-03的格式的日期 begin = datetime.date(2020,5,3) end = datetime.date(2020,6,9) d =", "print(\"正在爬取第\" + str(sets) + \"期的《睡前消息》弹幕...\") for date in dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers) #返回文本信息 html.encoding='utf8'", "[] DM_font = [] DM_color = [] DM_realTime = [] DM_pool = []", "+ str(sets) + \"期的《睡前消息》弹幕...\") for date in dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers) #返回文本信息 html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml')", "= begin delta = datetime.timedelta(days=1) while d <= end: dates.append(str(d.strftime(\"%Y-%m-%d\"))) d += delta", "end = datetime.date(2020,6,9) d = begin delta = datetime.timedelta(days=1) while d <= end:", "import datetime headers={ \"User-Agent\":\"\", \"Connection\": \"keep-alive\", # 这个cookie的获取方法在文档中已说明 \"Cookie\":\"\" } sets=124 # 最新一期的数字", "f.readlines(): Cids.append(line.strip()) for cid in Cids: # 每次都要重置这些数据 dm_data = [] # 弹幕数据", 
"str(sets) + \"期的《睡前消息》弹幕...\") for date in dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers) #返回文本信息 html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml') #建立soup对象", "DM_font.append(i[2]) DM_color.append(i[3]) DM_realTime.append(i[4]) DM_pool.append(i[5]) DM_userID.append(i[6]) DM_id.append(i[7]) for i in dm_text: DM_text.append(i) dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color, \"DM_realTime\":DM_realTime,\"DM_pool\":DM_pool,\"DM_userID\":DM_userID,\"DM_id\":DM_id,\"DM_text\":DM_text}", "# 弹幕本体 # 弹幕的八个参数和弹幕本体 DM_time = [] DM_mode = [] DM_font = []", "for d in all: # 弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\")) # 弹幕本体 dm_text.append(d.get_text()) # 分别把数据存进这几个数组 for", "html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml') #建立soup对象 all=soup.find_all(\"d\") for d in all: # 弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\")) # 弹幕本体", "\"Cookie\":\"\" } sets=124 # 最新一期的数字 dates=[] # 日期数组,用于填充url # 遍历日期 包括begin和end的日期 生成类似2020-05-03的格式的日期 begin", "包括begin和end的日期 生成类似2020-05-03的格式的日期 begin = datetime.date(2020,5,3) end = datetime.date(2020,6,9) d = begin delta =", "弹幕数据 dm_text = [] # 弹幕本体 # 弹幕的八个参数和弹幕本体 DM_time = [] DM_mode =", "弹幕的八个参数和弹幕本体 DM_time = [] DM_mode = [] DM_font = [] DM_color = []", "import requests import datetime headers={ \"User-Agent\":\"\", \"Connection\": \"keep-alive\", # 这个cookie的获取方法在文档中已说明 \"Cookie\":\"\" } sets=124", "[] # 弹幕数据 dm_text = [] # 弹幕本体 # 弹幕的八个参数和弹幕本体 DM_time = []", "d <= end: dates.append(str(d.strftime(\"%Y-%m-%d\"))) d += delta Cids=[] # Cid数组,用于填充url with open('Urls/Cid.txt', 'r')", "requests import datetime headers={ \"User-Agent\":\"\", \"Connection\": \"keep-alive\", # 这个cookie的获取方法在文档中已说明 \"Cookie\":\"\" } sets=124 #", "= datetime.date(2020,6,9) d = begin delta = datetime.timedelta(days=1) while d <= end: dates.append(str(d.strftime(\"%Y-%m-%d\")))", "[] DM_color = [] DM_realTime = [] DM_pool 
= [] DM_userID = []", "# 弹幕数据 dm_text = [] # 弹幕本体 # 弹幕的八个参数和弹幕本体 DM_time = [] DM_mode", "dm_text: DM_text.append(i) dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color, \"DM_realTime\":DM_realTime,\"DM_pool\":DM_pool,\"DM_userID\":DM_userID,\"DM_id\":DM_id,\"DM_text\":DM_text} d=pd.DataFrame(dt) d.to_csv('./Danmu/Danmu-'+str(sets)+'.csv',encoding='utf-8-sig') #存储弹幕信息 print(\"已将弹幕放入到Danmu-\"+str(sets)+\".csv文件中\") sets-=1 # 每抓完一个网页休眠7秒 print(\"缓冲中...\") time.sleep(7)", "pandas as pd import requests import datetime headers={ \"User-Agent\":\"\", \"Connection\": \"keep-alive\", # 这个cookie的获取方法在文档中已说明", "in dm_data: DM_time.append(i[0]) DM_mode.append(i[1]) DM_font.append(i[2]) DM_color.append(i[3]) DM_realTime.append(i[4]) DM_pool.append(i[5]) DM_userID.append(i[6]) DM_id.append(i[7]) for i in", "as f: for line in f.readlines(): Cids.append(line.strip()) for cid in Cids: # 每次都要重置这些数据", "DM_userID = [] DM_id = [] DM_text = [] print(\"正在爬取第\" + str(sets) +", "这个cookie的获取方法在文档中已说明 \"Cookie\":\"\" } sets=124 # 最新一期的数字 dates=[] # 日期数组,用于填充url # 遍历日期 包括begin和end的日期 生成类似2020-05-03的格式的日期", "d = begin delta = datetime.timedelta(days=1) while d <= end: dates.append(str(d.strftime(\"%Y-%m-%d\"))) d +=", "# 日期数组,用于填充url # 遍历日期 包括begin和end的日期 生成类似2020-05-03的格式的日期 begin = datetime.date(2020,5,3) end = datetime.date(2020,6,9) d", "datetime headers={ \"User-Agent\":\"\", \"Connection\": \"keep-alive\", # 这个cookie的获取方法在文档中已说明 \"Cookie\":\"\" } sets=124 # 最新一期的数字 dates=[]", "in all: # 弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\")) # 弹幕本体 dm_text.append(d.get_text()) # 分别把数据存进这几个数组 for i in", "datetime.date(2020,6,9) d = begin delta = datetime.timedelta(days=1) while d <= end: dates.append(str(d.strftime(\"%Y-%m-%d\"))) d", "\"keep-alive\", # 这个cookie的获取方法在文档中已说明 \"Cookie\":\"\" } sets=124 # 最新一期的数字 dates=[] # 日期数组,用于填充url # 遍历日期", "# Cid数组,用于填充url with open('Urls/Cid.txt', 'r') as f: for line in f.readlines(): Cids.append(line.strip()) for", 
"headers={ \"User-Agent\":\"\", \"Connection\": \"keep-alive\", # 这个cookie的获取方法在文档中已说明 \"Cookie\":\"\" } sets=124 # 最新一期的数字 dates=[] #", "= [] DM_pool = [] DM_userID = [] DM_id = [] DM_text =", "d += delta Cids=[] # Cid数组,用于填充url with open('Urls/Cid.txt', 'r') as f: for line", "弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\")) # 弹幕本体 dm_text.append(d.get_text()) # 分别把数据存进这几个数组 for i in dm_data: DM_time.append(i[0]) DM_mode.append(i[1])", "i in dm_text: DM_text.append(i) dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color, \"DM_realTime\":DM_realTime,\"DM_pool\":DM_pool,\"DM_userID\":DM_userID,\"DM_id\":DM_id,\"DM_text\":DM_text} d=pd.DataFrame(dt) d.to_csv('./Danmu/Danmu-'+str(sets)+'.csv',encoding='utf-8-sig') #存储弹幕信息 print(\"已将弹幕放入到Danmu-\"+str(sets)+\".csv文件中\") sets-=1 # 每抓完一个网页休眠7秒", "Cids: # 每次都要重置这些数据 dm_data = [] # 弹幕数据 dm_text = [] # 弹幕本体", "DM_text.append(i) dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color, \"DM_realTime\":DM_realTime,\"DM_pool\":DM_pool,\"DM_userID\":DM_userID,\"DM_id\":DM_id,\"DM_text\":DM_text} d=pd.DataFrame(dt) d.to_csv('./Danmu/Danmu-'+str(sets)+'.csv',encoding='utf-8-sig') #存储弹幕信息 print(\"已将弹幕放入到Danmu-\"+str(sets)+\".csv文件中\") sets-=1 # 每抓完一个网页休眠7秒 print(\"缓冲中...\") time.sleep(7) print(\"已将《睡前消息》第110-124期的弹幕爬取完毕\")", "delta Cids=[] # Cid数组,用于填充url with open('Urls/Cid.txt', 'r') as f: for line in f.readlines():", "for i in dm_data: DM_time.append(i[0]) DM_mode.append(i[1]) DM_font.append(i[2]) DM_color.append(i[3]) DM_realTime.append(i[4]) DM_pool.append(i[5]) DM_userID.append(i[6]) DM_id.append(i[7]) for", "cid in Cids: # 每次都要重置这些数据 dm_data = [] # 弹幕数据 dm_text = []", "dm_text = [] # 弹幕本体 # 弹幕的八个参数和弹幕本体 DM_time = [] DM_mode = []", "生成类似2020-05-03的格式的日期 begin = datetime.date(2020,5,3) end = datetime.date(2020,6,9) d = begin delta = datetime.timedelta(days=1)", "for cid in Cids: # 每次都要重置这些数据 dm_data = [] # 弹幕数据 dm_text =", "in dm_text: DM_text.append(i) 
dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color, \"DM_realTime\":DM_realTime,\"DM_pool\":DM_pool,\"DM_userID\":DM_userID,\"DM_id\":DM_id,\"DM_text\":DM_text} d=pd.DataFrame(dt) d.to_csv('./Danmu/Danmu-'+str(sets)+'.csv',encoding='utf-8-sig') #存储弹幕信息 print(\"已将弹幕放入到Danmu-\"+str(sets)+\".csv文件中\") sets-=1 # 每抓完一个网页休眠7秒 print(\"缓冲中...\")", "as pd import requests import datetime headers={ \"User-Agent\":\"\", \"Connection\": \"keep-alive\", # 这个cookie的获取方法在文档中已说明 \"Cookie\":\"\"", "弹幕本体 dm_text.append(d.get_text()) # 分别把数据存进这几个数组 for i in dm_data: DM_time.append(i[0]) DM_mode.append(i[1]) DM_font.append(i[2]) DM_color.append(i[3]) DM_realTime.append(i[4])", "#返回文本信息 html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml') #建立soup对象 all=soup.find_all(\"d\") for d in all: # 弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\")) #", "DM_realTime = [] DM_pool = [] DM_userID = [] DM_id = [] DM_text", "dates=[] # 日期数组,用于填充url # 遍历日期 包括begin和end的日期 生成类似2020-05-03的格式的日期 begin = datetime.date(2020,5,3) end = datetime.date(2020,6,9)", "= [] # 弹幕数据 dm_text = [] # 弹幕本体 # 弹幕的八个参数和弹幕本体 DM_time =", "for date in dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers) #返回文本信息 html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml') #建立soup对象 all=soup.find_all(\"d\") for d", "分别把数据存进这几个数组 for i in dm_data: DM_time.append(i[0]) DM_mode.append(i[1]) DM_font.append(i[2]) DM_color.append(i[3]) DM_realTime.append(i[4]) DM_pool.append(i[5]) DM_userID.append(i[6]) DM_id.append(i[7])", "[] DM_text = [] print(\"正在爬取第\" + str(sets) + \"期的《睡前消息》弹幕...\") for date in dates:", "line in f.readlines(): Cids.append(line.strip()) for cid in Cids: # 每次都要重置这些数据 dm_data = []", "Cids.append(line.strip()) for cid in Cids: # 每次都要重置这些数据 dm_data = [] # 弹幕数据 dm_text", "= datetime.timedelta(days=1) while d <= end: dates.append(str(d.strftime(\"%Y-%m-%d\"))) d += delta Cids=[] # Cid数组,用于填充url", "} 
sets=124 # 最新一期的数字 dates=[] # 日期数组,用于填充url # 遍历日期 包括begin和end的日期 生成类似2020-05-03的格式的日期 begin =", "dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers) #返回文本信息 html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml') #建立soup对象 all=soup.find_all(\"d\") for d in all: #", "import time import pandas as pd import requests import datetime headers={ \"User-Agent\":\"\", \"Connection\":", "DM_id.append(i[7]) for i in dm_text: DM_text.append(i) dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color, \"DM_realTime\":DM_realTime,\"DM_pool\":DM_pool,\"DM_userID\":DM_userID,\"DM_id\":DM_id,\"DM_text\":DM_text} d=pd.DataFrame(dt) d.to_csv('./Danmu/Danmu-'+str(sets)+'.csv',encoding='utf-8-sig') #存储弹幕信息 print(\"已将弹幕放入到Danmu-\"+str(sets)+\".csv文件中\") sets-=1", "[] print(\"正在爬取第\" + str(sets) + \"期的《睡前消息》弹幕...\") for date in dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers) #返回文本信息", "dm_data: DM_time.append(i[0]) DM_mode.append(i[1]) DM_font.append(i[2]) DM_color.append(i[3]) DM_realTime.append(i[4]) DM_pool.append(i[5]) DM_userID.append(i[6]) DM_id.append(i[7]) for i in dm_text:", "\"Connection\": \"keep-alive\", # 这个cookie的获取方法在文档中已说明 \"Cookie\":\"\" } sets=124 # 最新一期的数字 dates=[] # 日期数组,用于填充url #", "sets=124 # 最新一期的数字 dates=[] # 日期数组,用于填充url # 遍历日期 包括begin和end的日期 生成类似2020-05-03的格式的日期 begin = datetime.date(2020,5,3)", "all=soup.find_all(\"d\") for d in all: # 弹幕数据 dm_data.append(str(d.get(\"p\")).split(\",\")) # 弹幕本体 dm_text.append(d.get_text()) # 分别把数据存进这几个数组", "# 分别把数据存进这几个数组 for i in dm_data: DM_time.append(i[0]) DM_mode.append(i[1]) DM_font.append(i[2]) DM_color.append(i[3]) DM_realTime.append(i[4]) DM_pool.append(i[5]) DM_userID.append(i[6])", "每次都要重置这些数据 dm_data = [] # 弹幕数据 dm_text = [] # 弹幕本体 # 弹幕的八个参数和弹幕本体", "+= delta Cids=[] # Cid数组,用于填充url with open('Urls/Cid.txt', 'r') as f: for line 
in", "dm_text.append(d.get_text()) # 分别把数据存进这几个数组 for i in dm_data: DM_time.append(i[0]) DM_mode.append(i[1]) DM_font.append(i[2]) DM_color.append(i[3]) DM_realTime.append(i[4]) DM_pool.append(i[5])", "in dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers) #返回文本信息 html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml') #建立soup对象 all=soup.find_all(\"d\") for d in all:", "= [] DM_id = [] DM_text = [] print(\"正在爬取第\" + str(sets) + \"期的《睡前消息》弹幕...\")", "Cid数组,用于填充url with open('Urls/Cid.txt', 'r') as f: for line in f.readlines(): Cids.append(line.strip()) for cid", "[] DM_pool = [] DM_userID = [] DM_id = [] DM_text = []", "DM_pool = [] DM_userID = [] DM_id = [] DM_text = [] print(\"正在爬取第\"", "弹幕本体 # 弹幕的八个参数和弹幕本体 DM_time = [] DM_mode = [] DM_font = [] DM_color", "= [] DM_realTime = [] DM_pool = [] DM_userID = [] DM_id =", "DM_realTime.append(i[4]) DM_pool.append(i[5]) DM_userID.append(i[6]) DM_id.append(i[7]) for i in dm_text: DM_text.append(i) dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color, \"DM_realTime\":DM_realTime,\"DM_pool\":DM_pool,\"DM_userID\":DM_userID,\"DM_id\":DM_id,\"DM_text\":DM_text} d=pd.DataFrame(dt) d.to_csv('./Danmu/Danmu-'+str(sets)+'.csv',encoding='utf-8-sig')", "DM_color = [] DM_realTime = [] DM_pool = [] DM_userID = [] DM_id", "BeautifulSoup import time import pandas as pd import requests import datetime headers={ \"User-Agent\":\"\",", "# 最新一期的数字 dates=[] # 日期数组,用于填充url # 遍历日期 包括begin和end的日期 生成类似2020-05-03的格式的日期 begin = datetime.date(2020,5,3) end", "DM_font = [] DM_color = [] DM_realTime = [] DM_pool = [] DM_userID", "# 遍历日期 包括begin和end的日期 生成类似2020-05-03的格式的日期 begin = datetime.date(2020,5,3) end = datetime.date(2020,6,9) d = begin", "dm_data = [] # 弹幕数据 dm_text = [] # 弹幕本体 # 弹幕的八个参数和弹幕本体 DM_time", "begin = datetime.date(2020,5,3) end = datetime.date(2020,6,9) d = begin delta = datetime.timedelta(days=1) while", "= [] 
print(\"正在爬取第\" + str(sets) + \"期的《睡前消息》弹幕...\") for date in dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers)", "for i in dm_text: DM_text.append(i) dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color, \"DM_realTime\":DM_realTime,\"DM_pool\":DM_pool,\"DM_userID\":DM_userID,\"DM_id\":DM_id,\"DM_text\":DM_text} d=pd.DataFrame(dt) d.to_csv('./Danmu/Danmu-'+str(sets)+'.csv',encoding='utf-8-sig') #存储弹幕信息 print(\"已将弹幕放入到Danmu-\"+str(sets)+\".csv文件中\") sets-=1 #", "date in dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers) #返回文本信息 html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml') #建立soup对象 all=soup.find_all(\"d\") for d in", "i in dm_data: DM_time.append(i[0]) DM_mode.append(i[1]) DM_font.append(i[2]) DM_color.append(i[3]) DM_realTime.append(i[4]) DM_pool.append(i[5]) DM_userID.append(i[6]) DM_id.append(i[7]) for i", "# 每次都要重置这些数据 dm_data = [] # 弹幕数据 dm_text = [] # 弹幕本体 #", "<= end: dates.append(str(d.strftime(\"%Y-%m-%d\"))) d += delta Cids=[] # Cid数组,用于填充url with open('Urls/Cid.txt', 'r') as", "+ \"期的《睡前消息》弹幕...\") for date in dates: url=\"https://api.bilibili.com/x/v2/dm/history?type=1&oid=\"+cid+\"&date=\"+date html=requests.get(url=url,headers=headers) #返回文本信息 html.encoding='utf8' soup=BeautifulSoup(html.text,'lxml') #建立soup对象 all=soup.find_all(\"d\")", "[] DM_realTime = [] DM_pool = [] DM_userID = [] DM_id = []", "DM_pool.append(i[5]) DM_userID.append(i[6]) DM_id.append(i[7]) for i in dm_text: DM_text.append(i) dt={\"DM_time\":DM_time,\"DM_mode\":DM_mode,\"DM_font\":DM_font,\"DM_color\":DM_color, \"DM_realTime\":DM_realTime,\"DM_pool\":DM_pool,\"DM_userID\":DM_userID,\"DM_id\":DM_id,\"DM_text\":DM_text} d=pd.DataFrame(dt) d.to_csv('./Danmu/Danmu-'+str(sets)+'.csv',encoding='utf-8-sig') #存储弹幕信息", "[] # 弹幕本体 # 弹幕的八个参数和弹幕本体 DM_time = [] DM_mode = [] DM_font =", "= [] 
DM_color = [] DM_realTime = [] DM_pool = [] DM_userID =", "= datetime.date(2020,5,3) end = datetime.date(2020,6,9) d = begin delta = datetime.timedelta(days=1) while d" ]
[ "output, timeout=3): cwd, script_file = os.path.split(script) args = [sys.executable, \"-u\", script_file] try: child", "\"\"\" run all ogs5py benchmarks \"\"\" import sys import os import fnmatch import", "files in os.walk(path): for name in files: if fnmatch.fnmatch(name, pattern): result.append(os.path.join(root, name)) return", "timeout=3): cwd, script_file = os.path.split(script) args = [sys.executable, \"-u\", script_file] try: child =", "scripts = find(\"*.py\", out_dir) log_name = os.path.join( out_dir, \"run_log_\" + time.strftime(\"%Y-%m-%d_%H-%M-%S\") + \".txt\"", "else: CmdRun = pexpect.spawn def call_script(script, output, timeout=3): cwd, script_file = os.path.split(script) args", "Output # pexpect.spawn just runs on unix-like systems if sys.platform == \"win32\": CmdRun", "-*- coding: utf-8 -*- \"\"\" run all ogs5py benchmarks \"\"\" import sys import", "output.write(\"...timeout\\n\".encode()) def find(pattern, path): result = [] for root, dirs, files in os.walk(path):", "\".join(args), timeout=timeout, logfile=output, cwd=cwd ) # wait for ogs to finish child.expect(pexpect.EOF) except", "files: if fnmatch.fnmatch(name, pattern): result.append(os.path.join(root, name)) return result if __name__ == \"__main__\": timeout", "import pexpect from ogs5py.tools.tools import Output # pexpect.spawn just runs on unix-like systems", "path): result = [] for root, dirs, files in os.walk(path): for name in", "pexpect.popen_spawn import PopenSpawn import pexpect from ogs5py.tools.tools import Output # pexpect.spawn just runs", "import PopenSpawn import pexpect from ogs5py.tools.tools import Output # pexpect.spawn just runs on", "try: child = CmdRun( \" \".join(args), timeout=timeout, logfile=output, cwd=cwd ) # wait for", "CmdRun( \" \".join(args), timeout=timeout, logfile=output, cwd=cwd ) # wait for ogs to finish", "on unix-like systems if sys.platform == \"win32\": CmdRun = PopenSpawn else: CmdRun =", "for no timeout out_dir = os.path.join(os.getcwd(), 
\"benchmarks\") # out_dir = os.path.join(os.getcwd(), \"benchmarks_FEM_active\") scripts", "out_dir = os.path.join(os.getcwd(), \"benchmarks\") # out_dir = os.path.join(os.getcwd(), \"benchmarks_FEM_active\") scripts = find(\"*.py\", out_dir)", "[sys.executable, \"-u\", script_file] try: child = CmdRun( \" \".join(args), timeout=timeout, logfile=output, cwd=cwd )", "os import fnmatch import time from pexpect.popen_spawn import PopenSpawn import pexpect from ogs5py.tools.tools", "ogs5py benchmarks \"\"\" import sys import os import fnmatch import time from pexpect.popen_spawn", "for ogs to finish child.expect(pexpect.EOF) except pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode()) def find(pattern, path): result =", "finish child.expect(pexpect.EOF) except pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode()) def find(pattern, path): result = [] for root,", "return result if __name__ == \"__main__\": timeout = 3 # None for no", "# wait for ogs to finish child.expect(pexpect.EOF) except pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode()) def find(pattern, path):", "if fnmatch.fnmatch(name, pattern): result.append(os.path.join(root, name)) return result if __name__ == \"__main__\": timeout =", "result if __name__ == \"__main__\": timeout = 3 # None for no timeout", "all ogs5py benchmarks \"\"\" import sys import os import fnmatch import time from", "# None for no timeout out_dir = os.path.join(os.getcwd(), \"benchmarks\") # out_dir = os.path.join(os.getcwd(),", "script_file = os.path.split(script) args = [sys.executable, \"-u\", script_file] try: child = CmdRun( \"", "# pexpect.spawn just runs on unix-like systems if sys.platform == \"win32\": CmdRun =", "find(\"*.py\", out_dir) log_name = os.path.join( out_dir, \"run_log_\" + time.strftime(\"%Y-%m-%d_%H-%M-%S\") + \".txt\" ) output", "+ time.strftime(\"%Y-%m-%d_%H-%M-%S\") + \".txt\" ) output = Output(log_name, print_log=True) for script in scripts:", "timeout = 3 # None for no timeout out_dir = 
os.path.join(os.getcwd(), \"benchmarks\") #", "sys import os import fnmatch import time from pexpect.popen_spawn import PopenSpawn import pexpect", "= [sys.executable, \"-u\", script_file] try: child = CmdRun( \" \".join(args), timeout=timeout, logfile=output, cwd=cwd", "result.append(os.path.join(root, name)) return result if __name__ == \"__main__\": timeout = 3 # None", "to finish child.expect(pexpect.EOF) except pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode()) def find(pattern, path): result = [] for", "logfile=output, cwd=cwd ) # wait for ogs to finish child.expect(pexpect.EOF) except pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode())", "\"run_log_\" + time.strftime(\"%Y-%m-%d_%H-%M-%S\") + \".txt\" ) output = Output(log_name, print_log=True) for script in", "= os.path.join(os.getcwd(), \"benchmarks_FEM_active\") scripts = find(\"*.py\", out_dir) log_name = os.path.join( out_dir, \"run_log_\" +", "\"win32\": CmdRun = PopenSpawn else: CmdRun = pexpect.spawn def call_script(script, output, timeout=3): cwd,", "os.path.split(script) args = [sys.executable, \"-u\", script_file] try: child = CmdRun( \" \".join(args), timeout=timeout,", "== \"win32\": CmdRun = PopenSpawn else: CmdRun = pexpect.spawn def call_script(script, output, timeout=3):", "None for no timeout out_dir = os.path.join(os.getcwd(), \"benchmarks\") # out_dir = os.path.join(os.getcwd(), \"benchmarks_FEM_active\")", ") output = Output(log_name, print_log=True) for script in scripts: print(script) call_script(script, output, timeout=timeout)", "from ogs5py.tools.tools import Output # pexpect.spawn just runs on unix-like systems if sys.platform", "dirs, files in os.walk(path): for name in files: if fnmatch.fnmatch(name, pattern): result.append(os.path.join(root, name))", "timeout=timeout, logfile=output, cwd=cwd ) # wait for ogs to finish child.expect(pexpect.EOF) except pexpect.TIMEOUT:", "import Output # pexpect.spawn just runs on unix-like systems if sys.platform == \"win32\":", "time 
from pexpect.popen_spawn import PopenSpawn import pexpect from ogs5py.tools.tools import Output # pexpect.spawn", "fnmatch import time from pexpect.popen_spawn import PopenSpawn import pexpect from ogs5py.tools.tools import Output", "PopenSpawn else: CmdRun = pexpect.spawn def call_script(script, output, timeout=3): cwd, script_file = os.path.split(script)", "= PopenSpawn else: CmdRun = pexpect.spawn def call_script(script, output, timeout=3): cwd, script_file =", "+ \".txt\" ) output = Output(log_name, print_log=True) for script in scripts: print(script) call_script(script,", "def call_script(script, output, timeout=3): cwd, script_file = os.path.split(script) args = [sys.executable, \"-u\", script_file]", "\"-u\", script_file] try: child = CmdRun( \" \".join(args), timeout=timeout, logfile=output, cwd=cwd ) #", "== \"__main__\": timeout = 3 # None for no timeout out_dir = os.path.join(os.getcwd(),", "log_name = os.path.join( out_dir, \"run_log_\" + time.strftime(\"%Y-%m-%d_%H-%M-%S\") + \".txt\" ) output = Output(log_name,", "out_dir = os.path.join(os.getcwd(), \"benchmarks_FEM_active\") scripts = find(\"*.py\", out_dir) log_name = os.path.join( out_dir, \"run_log_\"", "os.path.join(os.getcwd(), \"benchmarks\") # out_dir = os.path.join(os.getcwd(), \"benchmarks_FEM_active\") scripts = find(\"*.py\", out_dir) log_name =", "3 # None for no timeout out_dir = os.path.join(os.getcwd(), \"benchmarks\") # out_dir =", "\"\"\" import sys import os import fnmatch import time from pexpect.popen_spawn import PopenSpawn", "os.walk(path): for name in files: if fnmatch.fnmatch(name, pattern): result.append(os.path.join(root, name)) return result if", "for name in files: if fnmatch.fnmatch(name, pattern): result.append(os.path.join(root, name)) return result if __name__", "name)) return result if __name__ == \"__main__\": timeout = 3 # None for", "# out_dir = os.path.join(os.getcwd(), \"benchmarks_FEM_active\") scripts = find(\"*.py\", out_dir) log_name = os.path.join( 
out_dir,", "call_script(script, output, timeout=3): cwd, script_file = os.path.split(script) args = [sys.executable, \"-u\", script_file] try:", "name in files: if fnmatch.fnmatch(name, pattern): result.append(os.path.join(root, name)) return result if __name__ ==", "ogs to finish child.expect(pexpect.EOF) except pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode()) def find(pattern, path): result = []", "# -*- coding: utf-8 -*- \"\"\" run all ogs5py benchmarks \"\"\" import sys", "args = [sys.executable, \"-u\", script_file] try: child = CmdRun( \" \".join(args), timeout=timeout, logfile=output,", "cwd=cwd ) # wait for ogs to finish child.expect(pexpect.EOF) except pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode()) def", "utf-8 -*- \"\"\" run all ogs5py benchmarks \"\"\" import sys import os import", "result = [] for root, dirs, files in os.walk(path): for name in files:", "import os import fnmatch import time from pexpect.popen_spawn import PopenSpawn import pexpect from", "runs on unix-like systems if sys.platform == \"win32\": CmdRun = PopenSpawn else: CmdRun", "child = CmdRun( \" \".join(args), timeout=timeout, logfile=output, cwd=cwd ) # wait for ogs", "pattern): result.append(os.path.join(root, name)) return result if __name__ == \"__main__\": timeout = 3 #", "fnmatch.fnmatch(name, pattern): result.append(os.path.join(root, name)) return result if __name__ == \"__main__\": timeout = 3", "script_file] try: child = CmdRun( \" \".join(args), timeout=timeout, logfile=output, cwd=cwd ) # wait", "= CmdRun( \" \".join(args), timeout=timeout, logfile=output, cwd=cwd ) # wait for ogs to", "= os.path.split(script) args = [sys.executable, \"-u\", script_file] try: child = CmdRun( \" \".join(args),", "unix-like systems if sys.platform == \"win32\": CmdRun = PopenSpawn else: CmdRun = pexpect.spawn", "in os.walk(path): for name in files: if fnmatch.fnmatch(name, pattern): result.append(os.path.join(root, name)) return result", "\" \".join(args), 
timeout=timeout, logfile=output, cwd=cwd ) # wait for ogs to finish child.expect(pexpect.EOF)", "for root, dirs, files in os.walk(path): for name in files: if fnmatch.fnmatch(name, pattern):", "CmdRun = pexpect.spawn def call_script(script, output, timeout=3): cwd, script_file = os.path.split(script) args =", "no timeout out_dir = os.path.join(os.getcwd(), \"benchmarks\") # out_dir = os.path.join(os.getcwd(), \"benchmarks_FEM_active\") scripts =", ") # wait for ogs to finish child.expect(pexpect.EOF) except pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode()) def find(pattern,", "root, dirs, files in os.walk(path): for name in files: if fnmatch.fnmatch(name, pattern): result.append(os.path.join(root,", "timeout out_dir = os.path.join(os.getcwd(), \"benchmarks\") # out_dir = os.path.join(os.getcwd(), \"benchmarks_FEM_active\") scripts = find(\"*.py\",", "= 3 # None for no timeout out_dir = os.path.join(os.getcwd(), \"benchmarks\") # out_dir", "-*- \"\"\" run all ogs5py benchmarks \"\"\" import sys import os import fnmatch", "output = Output(log_name, print_log=True) for script in scripts: print(script) call_script(script, output, timeout=timeout) output.close()", "systems if sys.platform == \"win32\": CmdRun = PopenSpawn else: CmdRun = pexpect.spawn def", "just runs on unix-like systems if sys.platform == \"win32\": CmdRun = PopenSpawn else:", "wait for ogs to finish child.expect(pexpect.EOF) except pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode()) def find(pattern, path): result", "os.path.join(os.getcwd(), \"benchmarks_FEM_active\") scripts = find(\"*.py\", out_dir) log_name = os.path.join( out_dir, \"run_log_\" + time.strftime(\"%Y-%m-%d_%H-%M-%S\")", "pexpect from ogs5py.tools.tools import Output # pexpect.spawn just runs on unix-like systems if", "__name__ == \"__main__\": timeout = 3 # None for no timeout out_dir =", "os.path.join( out_dir, \"run_log_\" + time.strftime(\"%Y-%m-%d_%H-%M-%S\") + \".txt\" ) output = Output(log_name, print_log=True) 
for", "= os.path.join( out_dir, \"run_log_\" + time.strftime(\"%Y-%m-%d_%H-%M-%S\") + \".txt\" ) output = Output(log_name, print_log=True)", "= pexpect.spawn def call_script(script, output, timeout=3): cwd, script_file = os.path.split(script) args = [sys.executable,", "CmdRun = PopenSpawn else: CmdRun = pexpect.spawn def call_script(script, output, timeout=3): cwd, script_file", "\"__main__\": timeout = 3 # None for no timeout out_dir = os.path.join(os.getcwd(), \"benchmarks\")", "= find(\"*.py\", out_dir) log_name = os.path.join( out_dir, \"run_log_\" + time.strftime(\"%Y-%m-%d_%H-%M-%S\") + \".txt\" )", "benchmarks \"\"\" import sys import os import fnmatch import time from pexpect.popen_spawn import", "out_dir, \"run_log_\" + time.strftime(\"%Y-%m-%d_%H-%M-%S\") + \".txt\" ) output = Output(log_name, print_log=True) for script", "pexpect.spawn def call_script(script, output, timeout=3): cwd, script_file = os.path.split(script) args = [sys.executable, \"-u\",", "\".txt\" ) output = Output(log_name, print_log=True) for script in scripts: print(script) call_script(script, output,", "\"benchmarks_FEM_active\") scripts = find(\"*.py\", out_dir) log_name = os.path.join( out_dir, \"run_log_\" + time.strftime(\"%Y-%m-%d_%H-%M-%S\") +", "child.expect(pexpect.EOF) except pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode()) def find(pattern, path): result = [] for root, dirs,", "in files: if fnmatch.fnmatch(name, pattern): result.append(os.path.join(root, name)) return result if __name__ == \"__main__\":", "except pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode()) def find(pattern, path): result = [] for root, dirs, files", "ogs5py.tools.tools import Output # pexpect.spawn just runs on unix-like systems if sys.platform ==", "find(pattern, path): result = [] for root, dirs, files in os.walk(path): for name", "pexpect.TIMEOUT: output.write(\"...timeout\\n\".encode()) def find(pattern, path): result = [] for root, dirs, files in", "coding: utf-8 -*- \"\"\" run all 
ogs5py benchmarks \"\"\" import sys import os", "from pexpect.popen_spawn import PopenSpawn import pexpect from ogs5py.tools.tools import Output # pexpect.spawn just", "PopenSpawn import pexpect from ogs5py.tools.tools import Output # pexpect.spawn just runs on unix-like", "\"benchmarks\") # out_dir = os.path.join(os.getcwd(), \"benchmarks_FEM_active\") scripts = find(\"*.py\", out_dir) log_name = os.path.join(", "import sys import os import fnmatch import time from pexpect.popen_spawn import PopenSpawn import", "time.strftime(\"%Y-%m-%d_%H-%M-%S\") + \".txt\" ) output = Output(log_name, print_log=True) for script in scripts: print(script)", "= os.path.join(os.getcwd(), \"benchmarks\") # out_dir = os.path.join(os.getcwd(), \"benchmarks_FEM_active\") scripts = find(\"*.py\", out_dir) log_name", "out_dir) log_name = os.path.join( out_dir, \"run_log_\" + time.strftime(\"%Y-%m-%d_%H-%M-%S\") + \".txt\" ) output =", "sys.platform == \"win32\": CmdRun = PopenSpawn else: CmdRun = pexpect.spawn def call_script(script, output,", "cwd, script_file = os.path.split(script) args = [sys.executable, \"-u\", script_file] try: child = CmdRun(", "def find(pattern, path): result = [] for root, dirs, files in os.walk(path): for", "if __name__ == \"__main__\": timeout = 3 # None for no timeout out_dir", "= [] for root, dirs, files in os.walk(path): for name in files: if", "[] for root, dirs, files in os.walk(path): for name in files: if fnmatch.fnmatch(name,", "run all ogs5py benchmarks \"\"\" import sys import os import fnmatch import time", "pexpect.spawn just runs on unix-like systems if sys.platform == \"win32\": CmdRun = PopenSpawn", "import time from pexpect.popen_spawn import PopenSpawn import pexpect from ogs5py.tools.tools import Output #", "import fnmatch import time from pexpect.popen_spawn import PopenSpawn import pexpect from ogs5py.tools.tools import", "if sys.platform == \"win32\": CmdRun = PopenSpawn else: CmdRun = pexpect.spawn def call_script(script," ]
[ "condition def __str__(self): return \"RepeatUntil: %s %s\" % (str(self.statements), str(self.condition)) class While(BaseNode): def", "concatenation operator. \"\"\" return self.bit_size def __str__(self): return str(self.value) class List(BaseNode): def __init__(self,", "__str__(self): args = [str(self.expr1)] if self.expr2: args.append(str(self.expr2)) if self.expr3: args.append(str(self.expr3)) return \"ArrayAccess: %s[%s]\"", "len(self.values) def __str__(self): return \"(%s)\" % \", \".join(map(str, self.values)) class Enumeration(BaseNode): def __init__(self,", "def __str__(self): return \"BitExtraction: %s %s\" % (str(self.identifier), str(self.range)) class ArrayAccess(BaseNode): def __init__(self,", "class NumberValue(BaseNode): def __init__(self, value, bit_size=32): self.value = value self.bit_size = bit_size def", "return \"ImplementationDefined\" class SubArchitectureDefined(BaseNode): def __str__(self): return \"SubArchitectureDefined\" class Return(BaseNode): def __init__(self, value):", "= \"\" def __str__(self): return \"Unpredictable\" class See(BaseNode): def __init__(self, msg): self.msg =", "__init__(self, type_, left_expr, right_expr): self.type = type_ self.left_expr = left_expr self.right_expr = right_expr", "= [str(self.expr1)] if self.expr2: args.append(str(self.expr2)) if self.expr3: args.append(str(self.expr3)) return \"ArrayAccess: %s[%s]\" % (str(self.name),", "return \"ArrayAccess: %s[%s]\" % (str(self.name), \" \".join(args)) class MaskedBinary(BaseNode): def __init__(self, value): self.value", "def __str__(self): return \"Unpredictable\" class See(BaseNode): def __init__(self, msg): self.msg = msg.strip('\"') def", "% (str(self.type), str(self.expr)) class BinaryExpression(BaseNode): def __init__(self, type_, left_expr, right_expr): self.type = type_", "def __init__(self, msg): self.msg = msg.strip('\"') def __str__(self): return \"See: %s\" % (str(self.msg))", "% (str(self.expr), str(self.cases)) class Undefined(BaseNode): def 
__init__(self): self.reason = \"\" def __str__(self): return", "falseValue): self.condition = condition self.trueValue = trueValue self.falseValue = falseValue def __str__(self): return", "__str__(self): return \"IfExpression: %s %s %s\" % (str(self.condition), str(self.trueValue), str(self.falseValue)) class CaseElement(BaseNode): def", "of the number. Important for things like the concatenation operator. \"\"\" return self.bit_size", "map(str, self.if_statements), map(str, self.else_statements)) class BitExtraction(BaseNode): def __init__(self, identifier_, range_): self.identifier = identifier_", "(str(self.name), \" \".join(args)) class MaskedBinary(BaseNode): def __init__(self, value): self.value = value def __str__(self):", "__init__(self, values): self.values = values def __len__(self): return len(self.values) def __str__(self): return \"{%s}\"", "def __init__(self, value): self.value = value def __str__(self): return \"MaskedBinary: %s\" % (str(self.value))", "self.range = range_ def __str__(self): return \"BitExtraction: %s %s\" % (str(self.identifier), str(self.range)) class", "%s %s %s\" % (str(self.condition), str(self.trueValue), str(self.falseValue)) class CaseElement(BaseNode): def __init__(self, value, statements):", "__str__(self): return \"%s %s %s\" % (str(self.type), str(self.left_expr), str(self.right_expr)) class ProcedureCall(BaseNode): def __init__(self,", "ArrayAccess(BaseNode): def __init__(self, name, expr1, expr2, expr3): self.name = name self.expr1 = expr1", "(str(self.value)) class Ignore(BaseNode): def __str__(self): return \"Ignore\" class IfExpression(BaseNode): def __init__(self, condition, trueValue,", "class Identifier(BaseNode): def __init__(self, name): self.name = name def __str__(self): return str(self.name) class", "def __init__(self, type_, left_expr, right_expr): self.type = type_ self.left_expr = left_expr self.right_expr =", "%s %s\" % (str(self.statements), str(self.condition)) class While(BaseNode): def __init__(self, 
condition, statements): self.condition =", "class CaseElement(BaseNode): def __init__(self, value, statements): self.value = value self.statements = statements def", "__str__(self): return \"{%s}\" % \", \".join(map(str, self.values)) class UnaryExpression(BaseNode): def __init__(self, type_, expr):", "return \"While: %s %s\" % (str(self.condition), str(self.statements)) class For(BaseNode): def __init__(self, from_, to,", "def __str__(self): return \"IfExpression: %s %s %s\" % (str(self.condition), str(self.trueValue), str(self.falseValue)) class CaseElement(BaseNode):", "expr1 self.expr2 = expr2 self.expr3 = expr3 def __str__(self): args = [str(self.expr1)] if", "__str__(self): return \"%s(%s)\" % (str(self.name), \", \".join(map(str, self.arguments))) class RepeatUntil(BaseNode): def __init__(self, statements,", "class Enumeration(BaseNode): def __init__(self, values): self.values = values def __len__(self): return len(self.values) def", "= range_ def __str__(self): return \"BitExtraction: %s %s\" % (str(self.identifier), str(self.range)) class ArrayAccess(BaseNode):", "\"\"\" Return the bitsize of the number. 
Important for things like the concatenation", "BinaryExpression(BaseNode): def __init__(self, type_, left_expr, right_expr): self.type = type_ self.left_expr = left_expr self.right_expr", "len(self.values) def __str__(self): return \"{%s}\" % \", \".join(map(str, self.values)) class UnaryExpression(BaseNode): def __init__(self,", "class SubArchitectureDefined(BaseNode): def __str__(self): return \"SubArchitectureDefined\" class Return(BaseNode): def __init__(self, value): self.value =", "__init__(self, condition, statements): self.condition = condition self.statements = statements def __str__(self): return \"While:", "List(BaseNode): def __init__(self, values): self.values = values def __len__(self): return len(self.values) def __str__(self):", "% (str(self.from_), str(self.to), str(self.statements)) class If(BaseNode): def __init__(self, condition, if_statements, else_statements): self.condition =", "def __str__(self): return \"If: %s %s %s\" % (str(self.condition), map(str, self.if_statements), map(str, self.else_statements))", "return \"RepeatUntil: %s %s\" % (str(self.statements), str(self.condition)) class While(BaseNode): def __init__(self, condition, statements):", "Unpredictable(BaseNode): def __init__(self): self.reason = \"\" def __str__(self): return \"Unpredictable\" class See(BaseNode): def", "left_expr self.right_expr = right_expr def __str__(self): return \"%s %s %s\" % (str(self.type), str(self.left_expr),", "value, statements): self.value = value self.statements = statements def __str__(self): return \"CaseElement: %s", "= name_ self.arguments = arguments def __str__(self): return \"%s(%s)\" % (str(self.name), \", \".join(map(str,", "self.if_statements = if_statements self.else_statements = else_statements def __str__(self): return \"If: %s %s %s\"", "return \"BitExtraction: %s %s\" % (str(self.identifier), str(self.range)) class ArrayAccess(BaseNode): def __init__(self, name, expr1,", "RepeatUntil(BaseNode): def __init__(self, statements, 
condition): self.statements = statements self.condition = condition def __str__(self):", "def __str__(self): return \"See: %s\" % (str(self.msg)) class ImplementationDefined(BaseNode): def __str__(self): return \"ImplementationDefined\"", "statements): self.value = value self.statements = statements def __str__(self): return \"CaseElement: %s %s\"", "self.cases = cases def __str__(self): return \"Case: %s %s\" % (str(self.expr), str(self.cases)) class", "__str__(self): return \"Case: %s %s\" % (str(self.expr), str(self.cases)) class Undefined(BaseNode): def __init__(self): self.reason", "class BooleanValue(BaseNode): def __init__(self, value): self.value = value def __str__(self): return str(\"true\" if", "= arguments def __str__(self): return \"%s(%s)\" % (str(self.name), \", \".join(map(str, self.arguments))) class RepeatUntil(BaseNode):", "self.expr2: args.append(str(self.expr2)) if self.expr3: args.append(str(self.expr3)) return \"ArrayAccess: %s[%s]\" % (str(self.name), \" \".join(args)) class", "statements): self.from_ = from_ self.to = to self.statements = statements def __str__(self): return", "(str(self.condition), map(str, self.if_statements), map(str, self.else_statements)) class BitExtraction(BaseNode): def __init__(self, identifier_, range_): self.identifier =", "name): self.name = name def __str__(self): return str(self.name) class NumberValue(BaseNode): def __init__(self, value,", "def __str__(self): return str(self.value) class List(BaseNode): def __init__(self, values): self.values = values def", "def __str__(self): return \"(%s)\" % \", \".join(map(str, self.values)) class Enumeration(BaseNode): def __init__(self, values):", "class BinaryExpression(BaseNode): def __init__(self, type_, left_expr, right_expr): self.type = type_ self.left_expr = left_expr", "__init__(self, name, expr1, expr2, expr3): self.name = name self.expr1 = expr1 self.expr2 =", "self.expr = expr def __str__(self): return \"%s%s\" % (str(self.type), str(self.expr)) class 
BinaryExpression(BaseNode): def", "(str(self.condition), str(self.trueValue), str(self.falseValue)) class CaseElement(BaseNode): def __init__(self, value, statements): self.value = value self.statements", "self.falseValue = falseValue def __str__(self): return \"IfExpression: %s %s %s\" % (str(self.condition), str(self.trueValue),", "str(self.range)) class ArrayAccess(BaseNode): def __init__(self, name, expr1, expr2, expr3): self.name = name self.expr1", "\"SubArchitectureDefined\" class Return(BaseNode): def __init__(self, value): self.value = value def __str__(self): return \"Return:", "\"Undefined\" class Unpredictable(BaseNode): def __init__(self): self.reason = \"\" def __str__(self): return \"Unpredictable\" class", "return \"IfExpression: %s %s %s\" % (str(self.condition), str(self.trueValue), str(self.falseValue)) class CaseElement(BaseNode): def __init__(self,", "self.statements = statements def __str__(self): return \"For: %s %s %s\" % (str(self.from_), str(self.to),", "UnaryExpression(BaseNode): def __init__(self, type_, expr): self.type = type_ self.expr = expr def __str__(self):", "return str(\"true\" if self.value else \"false\") class Identifier(BaseNode): def __init__(self, name): self.name =", "def __len__(self): return len(self.values) def __str__(self): return \"(%s)\" % \", \".join(map(str, self.values)) class", "expr3 def __str__(self): args = [str(self.expr1)] if self.expr2: args.append(str(self.expr2)) if self.expr3: args.append(str(self.expr3)) return", "ImplementationDefined(BaseNode): def __str__(self): return \"ImplementationDefined\" class SubArchitectureDefined(BaseNode): def __str__(self): return \"SubArchitectureDefined\" class Return(BaseNode):", "%s\" % (str(self.type), str(self.left_expr), str(self.right_expr)) class ProcedureCall(BaseNode): def __init__(self, name_, arguments): self.name =", "def __str__(self): return str(self.name) class NumberValue(BaseNode): def __init__(self, value, bit_size=32): self.value = value", 
"(str(self.value), str(self.statements)) class Case(BaseNode): def __init__(self, expr, cases): self.expr = expr self.cases =", "self.expr = expr self.cases = cases def __str__(self): return \"Case: %s %s\" %", "__init__(self, msg): self.msg = msg.strip('\"') def __str__(self): return \"See: %s\" % (str(self.msg)) class", "def __len__(self): \"\"\" Return the bitsize of the number. Important for things like", "def __str__(self): return \"{%s}\" % \", \".join(map(str, self.values)) class UnaryExpression(BaseNode): def __init__(self, type_,", "= left_expr self.right_expr = right_expr def __str__(self): return \"%s %s %s\" % (str(self.type),", "\"%s(%s)\" % (str(self.name), \", \".join(map(str, self.arguments))) class RepeatUntil(BaseNode): def __init__(self, statements, condition): self.statements", "__init__(self, type_, expr): self.type = type_ self.expr = expr def __str__(self): return \"%s%s\"", "__len__(self): \"\"\" Return the bitsize of the number. Important for things like the", "__init__(self, identifier_, range_): self.identifier = identifier_ self.range = range_ def __str__(self): return \"BitExtraction:", "\"\" def __str__(self): return \"Undefined\" class Unpredictable(BaseNode): def __init__(self): self.reason = \"\" def", "self.trueValue = trueValue self.falseValue = falseValue def __str__(self): return \"IfExpression: %s %s %s\"", "return str(self.value) class List(BaseNode): def __init__(self, values): self.values = values def __len__(self): return", "range_ def __str__(self): return \"BitExtraction: %s %s\" % (str(self.identifier), str(self.range)) class ArrayAccess(BaseNode): def", "\", \".join(map(str, self.values)) class Enumeration(BaseNode): def __init__(self, values): self.values = values def __len__(self):", "__str__(self): return str(\"true\" if self.value else \"false\") class Identifier(BaseNode): def __init__(self, name): self.name", "= expr1 self.expr2 = expr2 self.expr3 = expr3 def __str__(self): args = [str(self.expr1)]", "def 
__str__(self): return \"Case: %s %s\" % (str(self.expr), str(self.cases)) class Undefined(BaseNode): def __init__(self):", "self.arguments))) class RepeatUntil(BaseNode): def __init__(self, statements, condition): self.statements = statements self.condition = condition", "class MaskedBinary(BaseNode): def __init__(self, value): self.value = value def __str__(self): return \"MaskedBinary: %s\"", "= trueValue self.falseValue = falseValue def __str__(self): return \"IfExpression: %s %s %s\" %", "else \"false\") class Identifier(BaseNode): def __init__(self, name): self.name = name def __str__(self): return", "self.expr3 = expr3 def __str__(self): args = [str(self.expr1)] if self.expr2: args.append(str(self.expr2)) if self.expr3:", "self.arguments = arguments def __str__(self): return \"%s(%s)\" % (str(self.name), \", \".join(map(str, self.arguments))) class", "name self.expr1 = expr1 self.expr2 = expr2 self.expr3 = expr3 def __str__(self): args", "map(str, self.else_statements)) class BitExtraction(BaseNode): def __init__(self, identifier_, range_): self.identifier = identifier_ self.range =", "str(self.statements)) class For(BaseNode): def __init__(self, from_, to, statements): self.from_ = from_ self.to =", "%s\" % (str(self.value)) class Ignore(BaseNode): def __str__(self): return \"Ignore\" class IfExpression(BaseNode): def __init__(self,", "def __init__(self, name): self.name = name def __str__(self): return str(self.name) class NumberValue(BaseNode): def", "__str__(self): return \"While: %s %s\" % (str(self.condition), str(self.statements)) class For(BaseNode): def __init__(self, from_,", "if self.expr3: args.append(str(self.expr3)) return \"ArrayAccess: %s[%s]\" % (str(self.name), \" \".join(args)) class MaskedBinary(BaseNode): def", "def __init__(self, value): self.value = value def __str__(self): return str(\"true\" if self.value else", "= expr3 def __str__(self): args = [str(self.expr1)] if self.expr2: args.append(str(self.expr2)) if self.expr3: 
args.append(str(self.expr3))", "left_expr, right_expr): self.type = type_ self.left_expr = left_expr self.right_expr = right_expr def __str__(self):", "class UnaryExpression(BaseNode): def __init__(self, type_, expr): self.type = type_ self.expr = expr def", "self.reason = \"\" def __str__(self): return \"Unpredictable\" class See(BaseNode): def __init__(self, msg): self.msg", "return \"If: %s %s %s\" % (str(self.condition), map(str, self.if_statements), map(str, self.else_statements)) class BitExtraction(BaseNode):", "self.expr2 = expr2 self.expr3 = expr3 def __str__(self): args = [str(self.expr1)] if self.expr2:", "\"{%s}\" % \", \".join(map(str, self.values)) class UnaryExpression(BaseNode): def __init__(self, type_, expr): self.type =", "def __str__(self): return \"%s %s %s\" % (str(self.type), str(self.left_expr), str(self.right_expr)) class ProcedureCall(BaseNode): def", "self.if_statements), map(str, self.else_statements)) class BitExtraction(BaseNode): def __init__(self, identifier_, range_): self.identifier = identifier_ self.range", "= value def __str__(self): return \"MaskedBinary: %s\" % (str(self.value)) class Ignore(BaseNode): def __str__(self):", "return \"Case: %s %s\" % (str(self.expr), str(self.cases)) class Undefined(BaseNode): def __init__(self): self.reason =", "self.values)) class UnaryExpression(BaseNode): def __init__(self, type_, expr): self.type = type_ self.expr = expr", "= name def __str__(self): return str(self.name) class NumberValue(BaseNode): def __init__(self, value, bit_size=32): self.value", "BaseNode(object): def accept(self, visitor): return visitor.accept(self) class BooleanValue(BaseNode): def __init__(self, value): self.value =", "(str(self.type), str(self.left_expr), str(self.right_expr)) class ProcedureCall(BaseNode): def __init__(self, name_, arguments): self.name = name_ self.arguments", "\"%s%s\" % (str(self.type), str(self.expr)) class BinaryExpression(BaseNode): def __init__(self, type_, left_expr, right_expr): 
self.type =", "self.statements = statements def __str__(self): return \"CaseElement: %s %s\" % (str(self.value), str(self.statements)) class", "class ImplementationDefined(BaseNode): def __str__(self): return \"ImplementationDefined\" class SubArchitectureDefined(BaseNode): def __str__(self): return \"SubArchitectureDefined\" class", "range_): self.identifier = identifier_ self.range = range_ def __str__(self): return \"BitExtraction: %s %s\"", "__str__(self): return \"See: %s\" % (str(self.msg)) class ImplementationDefined(BaseNode): def __str__(self): return \"ImplementationDefined\" class", "__init__(self, name_, arguments): self.name = name_ self.arguments = arguments def __str__(self): return \"%s(%s)\"", "= statements def __str__(self): return \"While: %s %s\" % (str(self.condition), str(self.statements)) class For(BaseNode):", "__str__(self): return \"(%s)\" % \", \".join(map(str, self.values)) class Enumeration(BaseNode): def __init__(self, values): self.values", "%s\" % (str(self.value), str(self.statements)) class Case(BaseNode): def __init__(self, expr, cases): self.expr = expr", "value def __str__(self): return \"MaskedBinary: %s\" % (str(self.value)) class Ignore(BaseNode): def __str__(self): return", "\"ArrayAccess: %s[%s]\" % (str(self.name), \" \".join(args)) class MaskedBinary(BaseNode): def __init__(self, value): self.value =", "arguments): self.name = name_ self.arguments = arguments def __str__(self): return \"%s(%s)\" % (str(self.name),", "str(self.trueValue), str(self.falseValue)) class CaseElement(BaseNode): def __init__(self, value, statements): self.value = value self.statements =", "\"For: %s %s %s\" % (str(self.from_), str(self.to), str(self.statements)) class If(BaseNode): def __init__(self, condition,", "Enumeration(BaseNode): def __init__(self, values): self.values = values def __len__(self): return len(self.values) def __str__(self):", "return \"(%s)\" % \", \".join(map(str, self.values)) class Enumeration(BaseNode): def 
__init__(self, values): self.values =", "= condition self.trueValue = trueValue self.falseValue = falseValue def __str__(self): return \"IfExpression: %s", "value, bit_size=32): self.value = value self.bit_size = bit_size def __len__(self): \"\"\" Return the", "\"See: %s\" % (str(self.msg)) class ImplementationDefined(BaseNode): def __str__(self): return \"ImplementationDefined\" class SubArchitectureDefined(BaseNode): def", "% (str(self.value)) class Ignore(BaseNode): def __str__(self): return \"Ignore\" class IfExpression(BaseNode): def __init__(self, condition,", "expr, cases): self.expr = expr self.cases = cases def __str__(self): return \"Case: %s", "operator. \"\"\" return self.bit_size def __str__(self): return str(self.value) class List(BaseNode): def __init__(self, values):", "msg.strip('\"') def __str__(self): return \"See: %s\" % (str(self.msg)) class ImplementationDefined(BaseNode): def __str__(self): return", "\", \".join(map(str, self.values)) class UnaryExpression(BaseNode): def __init__(self, type_, expr): self.type = type_ self.expr", "type_ self.left_expr = left_expr self.right_expr = right_expr def __str__(self): return \"%s %s %s\"", "condition, trueValue, falseValue): self.condition = condition self.trueValue = trueValue self.falseValue = falseValue def", "%s %s\" % (str(self.condition), map(str, self.if_statements), map(str, self.else_statements)) class BitExtraction(BaseNode): def __init__(self, identifier_,", "%s %s %s\" % (str(self.from_), str(self.to), str(self.statements)) class If(BaseNode): def __init__(self, condition, if_statements,", "def __str__(self): return \"For: %s %s %s\" % (str(self.from_), str(self.to), str(self.statements)) class If(BaseNode):", "% \", \".join(map(str, self.values)) class UnaryExpression(BaseNode): def __init__(self, type_, expr): self.type = type_", "__str__(self): return \"SubArchitectureDefined\" class Return(BaseNode): def __init__(self, value): self.value = value def __str__(self):", "class 
List(BaseNode): def __init__(self, values): self.values = values def __len__(self): return len(self.values) def", "[str(self.expr1)] if self.expr2: args.append(str(self.expr2)) if self.expr3: args.append(str(self.expr3)) return \"ArrayAccess: %s[%s]\" % (str(self.name), \"", "self.condition = condition self.statements = statements def __str__(self): return \"While: %s %s\" %", "__init__(self, name): self.name = name def __str__(self): return str(self.name) class NumberValue(BaseNode): def __init__(self,", "def __str__(self): return \"Ignore\" class IfExpression(BaseNode): def __init__(self, condition, trueValue, falseValue): self.condition =", "%s\" % (str(self.identifier), str(self.range)) class ArrayAccess(BaseNode): def __init__(self, name, expr1, expr2, expr3): self.name", "= expr2 self.expr3 = expr3 def __str__(self): args = [str(self.expr1)] if self.expr2: args.append(str(self.expr2))", "def __init__(self, value): self.value = value def __str__(self): return \"Return: %s\" % (str(self.value))", "from_, to, statements): self.from_ = from_ self.to = to self.statements = statements def", "__str__(self): return str(self.value) class List(BaseNode): def __init__(self, values): self.values = values def __len__(self):", "__str__(self): return \"Ignore\" class IfExpression(BaseNode): def __init__(self, condition, trueValue, falseValue): self.condition = condition", "%s[%s]\" % (str(self.name), \" \".join(args)) class MaskedBinary(BaseNode): def __init__(self, value): self.value = value", "statements): self.condition = condition self.statements = statements def __str__(self): return \"While: %s %s\"", "\".join(map(str, self.values)) class UnaryExpression(BaseNode): def __init__(self, type_, expr): self.type = type_ self.expr =", "value def __str__(self): return str(\"true\" if self.value else \"false\") class Identifier(BaseNode): def __init__(self,", "def __init__(self, values): self.values = values def __len__(self): return len(self.values) def __str__(self): 
return", "ProcedureCall(BaseNode): def __init__(self, name_, arguments): self.name = name_ self.arguments = arguments def __str__(self):", "statements def __str__(self): return \"CaseElement: %s %s\" % (str(self.value), str(self.statements)) class Case(BaseNode): def", "expr3): self.name = name self.expr1 = expr1 self.expr2 = expr2 self.expr3 = expr3", "__init__(self, condition, trueValue, falseValue): self.condition = condition self.trueValue = trueValue self.falseValue = falseValue", "identifier_, range_): self.identifier = identifier_ self.range = range_ def __str__(self): return \"BitExtraction: %s", "= statements def __str__(self): return \"CaseElement: %s %s\" % (str(self.value), str(self.statements)) class Case(BaseNode):", "__str__(self): return \"CaseElement: %s %s\" % (str(self.value), str(self.statements)) class Case(BaseNode): def __init__(self, expr,", "return \"%s%s\" % (str(self.type), str(self.expr)) class BinaryExpression(BaseNode): def __init__(self, type_, left_expr, right_expr): self.type", "__init__(self, statements, condition): self.statements = statements self.condition = condition def __str__(self): return \"RepeatUntil:", "value self.statements = statements def __str__(self): return \"CaseElement: %s %s\" % (str(self.value), str(self.statements))", "values def __len__(self): return len(self.values) def __str__(self): return \"(%s)\" % \", \".join(map(str, self.values))", "condition self.statements = statements def __str__(self): return \"While: %s %s\" % (str(self.condition), str(self.statements))", "self.value = value self.statements = statements def __str__(self): return \"CaseElement: %s %s\" %", "\"ImplementationDefined\" class SubArchitectureDefined(BaseNode): def __str__(self): return \"SubArchitectureDefined\" class Return(BaseNode): def __init__(self, value): self.value", "% (str(self.identifier), str(self.range)) class ArrayAccess(BaseNode): def __init__(self, name, expr1, expr2, expr3): self.name =", "str(\"true\" if self.value 
else \"false\") class Identifier(BaseNode): def __init__(self, name): self.name = name", "return \"%s(%s)\" % (str(self.name), \", \".join(map(str, self.arguments))) class RepeatUntil(BaseNode): def __init__(self, statements, condition):", "the bitsize of the number. Important for things like the concatenation operator. \"\"\"", "return \"CaseElement: %s %s\" % (str(self.value), str(self.statements)) class Case(BaseNode): def __init__(self, expr, cases):", "def __init__(self, expr, cases): self.expr = expr self.cases = cases def __str__(self): return", "def __str__(self): return \"%s(%s)\" % (str(self.name), \", \".join(map(str, self.arguments))) class RepeatUntil(BaseNode): def __init__(self,", "self.condition = condition def __str__(self): return \"RepeatUntil: %s %s\" % (str(self.statements), str(self.condition)) class", "\"Case: %s %s\" % (str(self.expr), str(self.cases)) class Undefined(BaseNode): def __init__(self): self.reason = \"\"", "BitExtraction(BaseNode): def __init__(self, identifier_, range_): self.identifier = identifier_ self.range = range_ def __str__(self):", "= value self.bit_size = bit_size def __len__(self): \"\"\" Return the bitsize of the", "%s\" % (str(self.msg)) class ImplementationDefined(BaseNode): def __str__(self): return \"ImplementationDefined\" class SubArchitectureDefined(BaseNode): def __str__(self):", "__init__(self, value, statements): self.value = value self.statements = statements def __str__(self): return \"CaseElement:", "def __str__(self): return \"While: %s %s\" % (str(self.condition), str(self.statements)) class For(BaseNode): def __init__(self,", "bitsize of the number. Important for things like the concatenation operator. 
\"\"\" return", "%s %s\" % (str(self.condition), str(self.statements)) class For(BaseNode): def __init__(self, from_, to, statements): self.from_", "expr2 self.expr3 = expr3 def __str__(self): args = [str(self.expr1)] if self.expr2: args.append(str(self.expr2)) if", "% (str(self.condition), map(str, self.if_statements), map(str, self.else_statements)) class BitExtraction(BaseNode): def __init__(self, identifier_, range_): self.identifier", "__str__(self): return str(self.name) class NumberValue(BaseNode): def __init__(self, value, bit_size=32): self.value = value self.bit_size", "def __init__(self, value, statements): self.value = value self.statements = statements def __str__(self): return", "to, statements): self.from_ = from_ self.to = to self.statements = statements def __str__(self):", "return \"{%s}\" % \", \".join(map(str, self.values)) class UnaryExpression(BaseNode): def __init__(self, type_, expr): self.type", "the number. Important for things like the concatenation operator. \"\"\" return self.bit_size def", "value): self.value = value def __str__(self): return \"MaskedBinary: %s\" % (str(self.value)) class Ignore(BaseNode):", "MaskedBinary(BaseNode): def __init__(self, value): self.value = value def __str__(self): return \"MaskedBinary: %s\" %", "return len(self.values) def __str__(self): return \"(%s)\" % \", \".join(map(str, self.values)) class Enumeration(BaseNode): def", "__init__(self, values): self.values = values def __len__(self): return len(self.values) def __str__(self): return \"(%s)\"", "condition): self.statements = statements self.condition = condition def __str__(self): return \"RepeatUntil: %s %s\"", "def __str__(self): args = [str(self.expr1)] if self.expr2: args.append(str(self.expr2)) if self.expr3: args.append(str(self.expr3)) return \"ArrayAccess:", "__str__(self): return \"If: %s %s %s\" % (str(self.condition), map(str, self.if_statements), map(str, self.else_statements)) class", "= condition self.if_statements = if_statements 
self.else_statements = else_statements def __str__(self): return \"If: %s", "class Return(BaseNode): def __init__(self, value): self.value = value def __str__(self): return \"Return: %s\"", "str(self.statements)) class Case(BaseNode): def __init__(self, expr, cases): self.expr = expr self.cases = cases", "class If(BaseNode): def __init__(self, condition, if_statements, else_statements): self.condition = condition self.if_statements = if_statements", "expr1, expr2, expr3): self.name = name self.expr1 = expr1 self.expr2 = expr2 self.expr3", "= type_ self.left_expr = left_expr self.right_expr = right_expr def __str__(self): return \"%s %s", "NumberValue(BaseNode): def __init__(self, value, bit_size=32): self.value = value self.bit_size = bit_size def __len__(self):", "\"IfExpression: %s %s %s\" % (str(self.condition), str(self.trueValue), str(self.falseValue)) class CaseElement(BaseNode): def __init__(self, value,", "else_statements def __str__(self): return \"If: %s %s %s\" % (str(self.condition), map(str, self.if_statements), map(str,", "__str__(self): return \"%s%s\" % (str(self.type), str(self.expr)) class BinaryExpression(BaseNode): def __init__(self, type_, left_expr, right_expr):", "cases def __str__(self): return \"Case: %s %s\" % (str(self.expr), str(self.cases)) class Undefined(BaseNode): def", "\".join(map(str, self.values)) class Enumeration(BaseNode): def __init__(self, values): self.values = values def __len__(self): return", "= else_statements def __str__(self): return \"If: %s %s %s\" % (str(self.condition), map(str, self.if_statements),", "def __init__(self, identifier_, range_): self.identifier = identifier_ self.range = range_ def __str__(self): return", "value self.bit_size = bit_size def __len__(self): \"\"\" Return the bitsize of the number.", "def __init__(self, name_, arguments): self.name = name_ self.arguments = arguments def __str__(self): return", "for things like the concatenation operator. 
\"\"\" return self.bit_size def __str__(self): return str(self.value)", "str(self.right_expr)) class ProcedureCall(BaseNode): def __init__(self, name_, arguments): self.name = name_ self.arguments = arguments", "%s %s %s\" % (str(self.condition), map(str, self.if_statements), map(str, self.else_statements)) class BitExtraction(BaseNode): def __init__(self,", "def __len__(self): return len(self.values) def __str__(self): return \"{%s}\" % \", \".join(map(str, self.values)) class", "__str__(self): return \"Undefined\" class Unpredictable(BaseNode): def __init__(self): self.reason = \"\" def __str__(self): return", "= statements self.condition = condition def __str__(self): return \"RepeatUntil: %s %s\" % (str(self.statements),", "number. Important for things like the concatenation operator. \"\"\" return self.bit_size def __str__(self):", "Identifier(BaseNode): def __init__(self, name): self.name = name def __str__(self): return str(self.name) class NumberValue(BaseNode):", "__str__(self): return \"BitExtraction: %s %s\" % (str(self.identifier), str(self.range)) class ArrayAccess(BaseNode): def __init__(self, name,", "self.expr1 = expr1 self.expr2 = expr2 self.expr3 = expr3 def __str__(self): args =", "return \"For: %s %s %s\" % (str(self.from_), str(self.to), str(self.statements)) class If(BaseNode): def __init__(self,", "\"RepeatUntil: %s %s\" % (str(self.statements), str(self.condition)) class While(BaseNode): def __init__(self, condition, statements): self.condition", "trueValue, falseValue): self.condition = condition self.trueValue = trueValue self.falseValue = falseValue def __str__(self):", "return len(self.values) def __str__(self): return \"{%s}\" % \", \".join(map(str, self.values)) class UnaryExpression(BaseNode): def", "return \"SubArchitectureDefined\" class Return(BaseNode): def __init__(self, value): self.value = value def __str__(self): return", "self.values = values def __len__(self): return len(self.values) def __str__(self): return \"(%s)\" % 
\",", "= from_ self.to = to self.statements = statements def __str__(self): return \"For: %s", "% (str(self.value), str(self.statements)) class Case(BaseNode): def __init__(self, expr, cases): self.expr = expr self.cases", "__len__(self): return len(self.values) def __str__(self): return \"(%s)\" % \", \".join(map(str, self.values)) class Enumeration(BaseNode):", "args.append(str(self.expr2)) if self.expr3: args.append(str(self.expr3)) return \"ArrayAccess: %s[%s]\" % (str(self.name), \" \".join(args)) class MaskedBinary(BaseNode):", "visitor.accept(self) class BooleanValue(BaseNode): def __init__(self, value): self.value = value def __str__(self): return str(\"true\"", "str(self.to), str(self.statements)) class If(BaseNode): def __init__(self, condition, if_statements, else_statements): self.condition = condition self.if_statements", "trueValue self.falseValue = falseValue def __str__(self): return \"IfExpression: %s %s %s\" % (str(self.condition),", "return \"%s %s %s\" % (str(self.type), str(self.left_expr), str(self.right_expr)) class ProcedureCall(BaseNode): def __init__(self, name_,", "class BaseNode(object): def accept(self, visitor): return visitor.accept(self) class BooleanValue(BaseNode): def __init__(self, value): self.value", "def __init__(self): self.reason = \"\" def __str__(self): return \"Unpredictable\" class See(BaseNode): def __init__(self,", "self.name = name_ self.arguments = arguments def __str__(self): return \"%s(%s)\" % (str(self.name), \",", "self.from_ = from_ self.to = to self.statements = statements def __str__(self): return \"For:", "(str(self.name), \", \".join(map(str, self.arguments))) class RepeatUntil(BaseNode): def __init__(self, statements, condition): self.statements = statements", "class IfExpression(BaseNode): def __init__(self, condition, trueValue, falseValue): self.condition = condition self.trueValue = trueValue", "def __init__(self, condition, statements): self.condition = condition self.statements = statements def 
__str__(self): return", "__init__(self, value): self.value = value def __str__(self): return \"MaskedBinary: %s\" % (str(self.value)) class", "self.statements = statements self.condition = condition def __str__(self): return \"RepeatUntil: %s %s\" %", "__str__(self): return \"MaskedBinary: %s\" % (str(self.value)) class Ignore(BaseNode): def __str__(self): return \"Ignore\" class", "condition self.if_statements = if_statements self.else_statements = else_statements def __str__(self): return \"If: %s %s", "self.values = values def __len__(self): return len(self.values) def __str__(self): return \"{%s}\" % \",", "\"If: %s %s %s\" % (str(self.condition), map(str, self.if_statements), map(str, self.else_statements)) class BitExtraction(BaseNode): def", "\"%s %s %s\" % (str(self.type), str(self.left_expr), str(self.right_expr)) class ProcedureCall(BaseNode): def __init__(self, name_, arguments):", "self.msg = msg.strip('\"') def __str__(self): return \"See: %s\" % (str(self.msg)) class ImplementationDefined(BaseNode): def", "% (str(self.statements), str(self.condition)) class While(BaseNode): def __init__(self, condition, statements): self.condition = condition self.statements", "def __init__(self, statements, condition): self.statements = statements self.condition = condition def __str__(self): return", "= right_expr def __str__(self): return \"%s %s %s\" % (str(self.type), str(self.left_expr), str(self.right_expr)) class", "def __str__(self): return \"Undefined\" class Unpredictable(BaseNode): def __init__(self): self.reason = \"\" def __str__(self):", "%s %s\" % (str(self.from_), str(self.to), str(self.statements)) class If(BaseNode): def __init__(self, condition, if_statements, else_statements):", "(str(self.identifier), str(self.range)) class ArrayAccess(BaseNode): def __init__(self, name, expr1, expr2, expr3): self.name = name", "def __str__(self): return \"CaseElement: %s %s\" % (str(self.value), str(self.statements)) class Case(BaseNode): def __init__(self,", 
"identifier_ self.range = range_ def __str__(self): return \"BitExtraction: %s %s\" % (str(self.identifier), str(self.range))", "Return the bitsize of the number. Important for things like the concatenation operator.", "BooleanValue(BaseNode): def __init__(self, value): self.value = value def __str__(self): return str(\"true\" if self.value", "value): self.value = value def __str__(self): return str(\"true\" if self.value else \"false\") class", "self.value = value def __str__(self): return \"MaskedBinary: %s\" % (str(self.value)) class Ignore(BaseNode): def", "__init__(self): self.reason = \"\" def __str__(self): return \"Undefined\" class Unpredictable(BaseNode): def __init__(self): self.reason", "return \"See: %s\" % (str(self.msg)) class ImplementationDefined(BaseNode): def __str__(self): return \"ImplementationDefined\" class SubArchitectureDefined(BaseNode):", "__str__(self): return \"Unpredictable\" class See(BaseNode): def __init__(self, msg): self.msg = msg.strip('\"') def __str__(self):", "= values def __len__(self): return len(self.values) def __str__(self): return \"{%s}\" % \", \".join(map(str,", "__str__(self): return \"ImplementationDefined\" class SubArchitectureDefined(BaseNode): def __str__(self): return \"SubArchitectureDefined\" class Return(BaseNode): def __init__(self,", "= condition self.statements = statements def __str__(self): return \"While: %s %s\" % (str(self.condition),", "class Unpredictable(BaseNode): def __init__(self): self.reason = \"\" def __str__(self): return \"Unpredictable\" class See(BaseNode):", "Important for things like the concatenation operator. 
\"\"\" return self.bit_size def __str__(self): return", "def __init__(self, name, expr1, expr2, expr3): self.name = name self.expr1 = expr1 self.expr2", "(str(self.from_), str(self.to), str(self.statements)) class If(BaseNode): def __init__(self, condition, if_statements, else_statements): self.condition = condition", "= expr self.cases = cases def __str__(self): return \"Case: %s %s\" % (str(self.expr),", "arguments def __str__(self): return \"%s(%s)\" % (str(self.name), \", \".join(map(str, self.arguments))) class RepeatUntil(BaseNode): def", "condition self.trueValue = trueValue self.falseValue = falseValue def __str__(self): return \"IfExpression: %s %s", "if self.expr2: args.append(str(self.expr2)) if self.expr3: args.append(str(self.expr3)) return \"ArrayAccess: %s[%s]\" % (str(self.name), \" \".join(args))", "accept(self, visitor): return visitor.accept(self) class BooleanValue(BaseNode): def __init__(self, value): self.value = value def", "str(self.cases)) class Undefined(BaseNode): def __init__(self): self.reason = \"\" def __str__(self): return \"Undefined\" class", "(str(self.condition), str(self.statements)) class For(BaseNode): def __init__(self, from_, to, statements): self.from_ = from_ self.to", "__init__(self, value, bit_size=32): self.value = value self.bit_size = bit_size def __len__(self): \"\"\" Return", "self.type = type_ self.left_expr = left_expr self.right_expr = right_expr def __str__(self): return \"%s", "%s\" % (str(self.expr), str(self.cases)) class Undefined(BaseNode): def __init__(self): self.reason = \"\" def __str__(self):", "%s\" % (str(self.statements), str(self.condition)) class While(BaseNode): def __init__(self, condition, statements): self.condition = condition", "def __init__(self, condition, if_statements, else_statements): self.condition = condition self.if_statements = if_statements self.else_statements =", "class While(BaseNode): def __init__(self, condition, statements): self.condition = condition self.statements = 
statements def", "% (str(self.name), \" \".join(args)) class MaskedBinary(BaseNode): def __init__(self, value): self.value = value def", "SubArchitectureDefined(BaseNode): def __str__(self): return \"SubArchitectureDefined\" class Return(BaseNode): def __init__(self, value): self.value = value", "def __str__(self): return \"MaskedBinary: %s\" % (str(self.value)) class Ignore(BaseNode): def __str__(self): return \"Ignore\"", "the concatenation operator. \"\"\" return self.bit_size def __str__(self): return str(self.value) class List(BaseNode): def", "like the concatenation operator. \"\"\" return self.bit_size def __str__(self): return str(self.value) class List(BaseNode):", "self.left_expr = left_expr self.right_expr = right_expr def __str__(self): return \"%s %s %s\" %", "def __str__(self): return str(\"true\" if self.value else \"false\") class Identifier(BaseNode): def __init__(self, name):", "\"\"\" return self.bit_size def __str__(self): return str(self.value) class List(BaseNode): def __init__(self, values): self.values", "\"BitExtraction: %s %s\" % (str(self.identifier), str(self.range)) class ArrayAccess(BaseNode): def __init__(self, name, expr1, expr2,", "expr): self.type = type_ self.expr = expr def __str__(self): return \"%s%s\" % (str(self.type),", "\"\" def __str__(self): return \"Unpredictable\" class See(BaseNode): def __init__(self, msg): self.msg = msg.strip('\"')", "def __init__(self): self.reason = \"\" def __str__(self): return \"Undefined\" class Unpredictable(BaseNode): def __init__(self):", "__init__(self, value): self.value = value def __str__(self): return str(\"true\" if self.value else \"false\")", "Ignore(BaseNode): def __str__(self): return \"Ignore\" class IfExpression(BaseNode): def __init__(self, condition, trueValue, falseValue): self.condition", "condition, statements): self.condition = condition self.statements = statements def __str__(self): return \"While: %s", "= if_statements self.else_statements = else_statements def 
__str__(self): return \"If: %s %s %s\" %", "class For(BaseNode): def __init__(self, from_, to, statements): self.from_ = from_ self.to = to", "self.type = type_ self.expr = expr def __str__(self): return \"%s%s\" % (str(self.type), str(self.expr))", "Case(BaseNode): def __init__(self, expr, cases): self.expr = expr self.cases = cases def __str__(self):", "= falseValue def __str__(self): return \"IfExpression: %s %s %s\" % (str(self.condition), str(self.trueValue), str(self.falseValue))", "= value def __str__(self): return str(\"true\" if self.value else \"false\") class Identifier(BaseNode): def", "return str(self.name) class NumberValue(BaseNode): def __init__(self, value, bit_size=32): self.value = value self.bit_size =", "condition, if_statements, else_statements): self.condition = condition self.if_statements = if_statements self.else_statements = else_statements def", "return \"Unpredictable\" class See(BaseNode): def __init__(self, msg): self.msg = msg.strip('\"') def __str__(self): return", "__str__(self): return \"RepeatUntil: %s %s\" % (str(self.statements), str(self.condition)) class While(BaseNode): def __init__(self, condition,", "type_, left_expr, right_expr): self.type = type_ self.left_expr = left_expr self.right_expr = right_expr def", "= statements def __str__(self): return \"For: %s %s %s\" % (str(self.from_), str(self.to), str(self.statements))", "def __str__(self): return \"ImplementationDefined\" class SubArchitectureDefined(BaseNode): def __str__(self): return \"SubArchitectureDefined\" class Return(BaseNode): def", "\"Ignore\" class IfExpression(BaseNode): def __init__(self, condition, trueValue, falseValue): self.condition = condition self.trueValue =", "def __init__(self, condition, trueValue, falseValue): self.condition = condition self.trueValue = trueValue self.falseValue =", "bit_size def __len__(self): \"\"\" Return the bitsize of the number. 
Important for things", "statements, condition): self.statements = statements self.condition = condition def __str__(self): return \"RepeatUntil: %s", "name, expr1, expr2, expr3): self.name = name self.expr1 = expr1 self.expr2 = expr2", "%s\" % (str(self.condition), str(self.trueValue), str(self.falseValue)) class CaseElement(BaseNode): def __init__(self, value, statements): self.value =", "self.condition = condition self.trueValue = trueValue self.falseValue = falseValue def __str__(self): return \"IfExpression:", "__len__(self): return len(self.values) def __str__(self): return \"{%s}\" % \", \".join(map(str, self.values)) class UnaryExpression(BaseNode):", "\"While: %s %s\" % (str(self.condition), str(self.statements)) class For(BaseNode): def __init__(self, from_, to, statements):", "__init__(self, condition, if_statements, else_statements): self.condition = condition self.if_statements = if_statements self.else_statements = else_statements", "str(self.expr)) class BinaryExpression(BaseNode): def __init__(self, type_, left_expr, right_expr): self.type = type_ self.left_expr =", "\".join(map(str, self.arguments))) class RepeatUntil(BaseNode): def __init__(self, statements, condition): self.statements = statements self.condition =", "return visitor.accept(self) class BooleanValue(BaseNode): def __init__(self, value): self.value = value def __str__(self): return", "str(self.statements)) class If(BaseNode): def __init__(self, condition, if_statements, else_statements): self.condition = condition self.if_statements =", "self.values)) class Enumeration(BaseNode): def __init__(self, values): self.values = values def __len__(self): return len(self.values)", "if_statements, else_statements): self.condition = condition self.if_statements = if_statements self.else_statements = else_statements def __str__(self):", "name_, arguments): self.name = name_ self.arguments = arguments def __str__(self): return \"%s(%s)\" %", "= identifier_ self.range = range_ def __str__(self): 
return \"BitExtraction: %s %s\" % (str(self.identifier),", "str(self.falseValue)) class CaseElement(BaseNode): def __init__(self, value, statements): self.value = value self.statements = statements", "type_ self.expr = expr def __str__(self): return \"%s%s\" % (str(self.type), str(self.expr)) class BinaryExpression(BaseNode):", "self.right_expr = right_expr def __str__(self): return \"%s %s %s\" % (str(self.type), str(self.left_expr), str(self.right_expr))", "self.else_statements)) class BitExtraction(BaseNode): def __init__(self, identifier_, range_): self.identifier = identifier_ self.range = range_", "self.identifier = identifier_ self.range = range_ def __str__(self): return \"BitExtraction: %s %s\" %", "= name self.expr1 = expr1 self.expr2 = expr2 self.expr3 = expr3 def __str__(self):", "expr2, expr3): self.name = name self.expr1 = expr1 self.expr2 = expr2 self.expr3 =", "= expr def __str__(self): return \"%s%s\" % (str(self.type), str(self.expr)) class BinaryExpression(BaseNode): def __init__(self,", "def __str__(self): return \"%s%s\" % (str(self.type), str(self.expr)) class BinaryExpression(BaseNode): def __init__(self, type_, left_expr,", "statements def __str__(self): return \"For: %s %s %s\" % (str(self.from_), str(self.to), str(self.statements)) class", "class Ignore(BaseNode): def __str__(self): return \"Ignore\" class IfExpression(BaseNode): def __init__(self, condition, trueValue, falseValue):", "expr self.cases = cases def __str__(self): return \"Case: %s %s\" % (str(self.expr), str(self.cases))", "\"false\") class Identifier(BaseNode): def __init__(self, name): self.name = name def __str__(self): return str(self.name)", "CaseElement(BaseNode): def __init__(self, value, statements): self.value = value self.statements = statements def __str__(self):", "class ProcedureCall(BaseNode): def __init__(self, name_, arguments): self.name = name_ self.arguments = arguments def", "str(self.value) class List(BaseNode): def __init__(self, values): self.values = 
values def __len__(self): return len(self.values)", "self.bit_size def __str__(self): return str(self.value) class List(BaseNode): def __init__(self, values): self.values = values", "return \"Undefined\" class Unpredictable(BaseNode): def __init__(self): self.reason = \"\" def __str__(self): return \"Unpredictable\"", "__init__(self): self.reason = \"\" def __str__(self): return \"Unpredictable\" class See(BaseNode): def __init__(self, msg):", "self.name = name self.expr1 = expr1 self.expr2 = expr2 self.expr3 = expr3 def", "statements self.condition = condition def __str__(self): return \"RepeatUntil: %s %s\" % (str(self.statements), str(self.condition))", "name_ self.arguments = arguments def __str__(self): return \"%s(%s)\" % (str(self.name), \", \".join(map(str, self.arguments)))", "IfExpression(BaseNode): def __init__(self, condition, trueValue, falseValue): self.condition = condition self.trueValue = trueValue self.falseValue", "= cases def __str__(self): return \"Case: %s %s\" % (str(self.expr), str(self.cases)) class Undefined(BaseNode):", "self.value else \"false\") class Identifier(BaseNode): def __init__(self, name): self.name = name def __str__(self):", "things like the concatenation operator. 
\"\"\" return self.bit_size def __str__(self): return str(self.value) class", "name def __str__(self): return str(self.name) class NumberValue(BaseNode): def __init__(self, value, bit_size=32): self.value =", "self.statements = statements def __str__(self): return \"While: %s %s\" % (str(self.condition), str(self.statements)) class", "% (str(self.condition), str(self.statements)) class For(BaseNode): def __init__(self, from_, to, statements): self.from_ = from_", "args.append(str(self.expr3)) return \"ArrayAccess: %s[%s]\" % (str(self.name), \" \".join(args)) class MaskedBinary(BaseNode): def __init__(self, value):", "def __init__(self, type_, expr): self.type = type_ self.expr = expr def __str__(self): return", "= condition def __str__(self): return \"RepeatUntil: %s %s\" % (str(self.statements), str(self.condition)) class While(BaseNode):", "(str(self.type), str(self.expr)) class BinaryExpression(BaseNode): def __init__(self, type_, left_expr, right_expr): self.type = type_ self.left_expr", "%s %s\" % (str(self.value), str(self.statements)) class Case(BaseNode): def __init__(self, expr, cases): self.expr =", "self.name = name def __str__(self): return str(self.name) class NumberValue(BaseNode): def __init__(self, value, bit_size=32):", "\"Unpredictable\" class See(BaseNode): def __init__(self, msg): self.msg = msg.strip('\"') def __str__(self): return \"See:", "self.else_statements = else_statements def __str__(self): return \"If: %s %s %s\" % (str(self.condition), map(str,", "self.to = to self.statements = statements def __str__(self): return \"For: %s %s %s\"", "return \"Ignore\" class IfExpression(BaseNode): def __init__(self, condition, trueValue, falseValue): self.condition = condition self.trueValue", "str(self.name) class NumberValue(BaseNode): def __init__(self, value, bit_size=32): self.value = value self.bit_size = bit_size", "self.bit_size = bit_size def __len__(self): \"\"\" Return the bitsize of the number. 
Important", "If(BaseNode): def __init__(self, condition, if_statements, else_statements): self.condition = condition self.if_statements = if_statements self.else_statements", "<filename>src/libspec/arm/scripts/ast/nodes.py<gh_stars>10-100 class BaseNode(object): def accept(self, visitor): return visitor.accept(self) class BooleanValue(BaseNode): def __init__(self, value):", "str(self.left_expr), str(self.right_expr)) class ProcedureCall(BaseNode): def __init__(self, name_, arguments): self.name = name_ self.arguments =", "= \"\" def __str__(self): return \"Undefined\" class Unpredictable(BaseNode): def __init__(self): self.reason = \"\"", "def __init__(self, from_, to, statements): self.from_ = from_ self.to = to self.statements =", "(str(self.expr), str(self.cases)) class Undefined(BaseNode): def __init__(self): self.reason = \"\" def __str__(self): return \"Undefined\"", "See(BaseNode): def __init__(self, msg): self.msg = msg.strip('\"') def __str__(self): return \"See: %s\" %", "(str(self.msg)) class ImplementationDefined(BaseNode): def __str__(self): return \"ImplementationDefined\" class SubArchitectureDefined(BaseNode): def __str__(self): return \"SubArchitectureDefined\"", "return \"MaskedBinary: %s\" % (str(self.value)) class Ignore(BaseNode): def __str__(self): return \"Ignore\" class IfExpression(BaseNode):", "= value self.statements = statements def __str__(self): return \"CaseElement: %s %s\" % (str(self.value),", "Undefined(BaseNode): def __init__(self): self.reason = \"\" def __str__(self): return \"Undefined\" class Unpredictable(BaseNode): def", "statements def __str__(self): return \"While: %s %s\" % (str(self.condition), str(self.statements)) class For(BaseNode): def", "to self.statements = statements def __str__(self): return \"For: %s %s %s\" % (str(self.from_),", "def __str__(self): return \"SubArchitectureDefined\" class Return(BaseNode): def __init__(self, value): self.value = value def", "__str__(self): return \"For: %s %s %s\" % 
(str(self.from_), str(self.to), str(self.statements)) class If(BaseNode): def", "% (str(self.type), str(self.left_expr), str(self.right_expr)) class ProcedureCall(BaseNode): def __init__(self, name_, arguments): self.name = name_", "right_expr): self.type = type_ self.left_expr = left_expr self.right_expr = right_expr def __str__(self): return", "%s\" % (str(self.from_), str(self.to), str(self.statements)) class If(BaseNode): def __init__(self, condition, if_statements, else_statements): self.condition", "values def __len__(self): return len(self.values) def __str__(self): return \"{%s}\" % \", \".join(map(str, self.values))", "%s\" % (str(self.condition), map(str, self.if_statements), map(str, self.else_statements)) class BitExtraction(BaseNode): def __init__(self, identifier_, range_):", "def accept(self, visitor): return visitor.accept(self) class BooleanValue(BaseNode): def __init__(self, value): self.value = value", "cases): self.expr = expr self.cases = cases def __str__(self): return \"Case: %s %s\"", "class RepeatUntil(BaseNode): def __init__(self, statements, condition): self.statements = statements self.condition = condition def", "args = [str(self.expr1)] if self.expr2: args.append(str(self.expr2)) if self.expr3: args.append(str(self.expr3)) return \"ArrayAccess: %s[%s]\" %", "self.value = value def __str__(self): return str(\"true\" if self.value else \"false\") class Identifier(BaseNode):", "%s %s\" % (str(self.identifier), str(self.range)) class ArrayAccess(BaseNode): def __init__(self, name, expr1, expr2, expr3):", "%s %s\" % (str(self.expr), str(self.cases)) class Undefined(BaseNode): def __init__(self): self.reason = \"\" def", "= bit_size def __len__(self): \"\"\" Return the bitsize of the number. 
Important for", "visitor): return visitor.accept(self) class BooleanValue(BaseNode): def __init__(self, value): self.value = value def __str__(self):", "def __str__(self): return \"RepeatUntil: %s %s\" % (str(self.statements), str(self.condition)) class While(BaseNode): def __init__(self,", "While(BaseNode): def __init__(self, condition, statements): self.condition = condition self.statements = statements def __str__(self):", "if_statements self.else_statements = else_statements def __str__(self): return \"If: %s %s %s\" % (str(self.condition),", "\"(%s)\" % \", \".join(map(str, self.values)) class Enumeration(BaseNode): def __init__(self, values): self.values = values", "class ArrayAccess(BaseNode): def __init__(self, name, expr1, expr2, expr3): self.name = name self.expr1 =", "__init__(self, expr, cases): self.expr = expr self.cases = cases def __str__(self): return \"Case:", "%s %s\" % (str(self.type), str(self.left_expr), str(self.right_expr)) class ProcedureCall(BaseNode): def __init__(self, name_, arguments): self.name", "\" \".join(args)) class MaskedBinary(BaseNode): def __init__(self, value): self.value = value def __str__(self): return", "values): self.values = values def __len__(self): return len(self.values) def __str__(self): return \"{%s}\" %", "class Undefined(BaseNode): def __init__(self): self.reason = \"\" def __str__(self): return \"Undefined\" class Unpredictable(BaseNode):", "%s %s\" % (str(self.condition), str(self.trueValue), str(self.falseValue)) class CaseElement(BaseNode): def __init__(self, value, statements): self.value", "= values def __len__(self): return len(self.values) def __str__(self): return \"(%s)\" % \", \".join(map(str,", "\", \".join(map(str, self.arguments))) class RepeatUntil(BaseNode): def __init__(self, statements, condition): self.statements = statements self.condition", "class Case(BaseNode): def __init__(self, expr, cases): self.expr = expr self.cases = cases def", "if self.value else \"false\") class 
Identifier(BaseNode): def __init__(self, name): self.name = name def", "% (str(self.msg)) class ImplementationDefined(BaseNode): def __str__(self): return \"ImplementationDefined\" class SubArchitectureDefined(BaseNode): def __str__(self): return", "from_ self.to = to self.statements = statements def __str__(self): return \"For: %s %s", "% \", \".join(map(str, self.values)) class Enumeration(BaseNode): def __init__(self, values): self.values = values def", "\"MaskedBinary: %s\" % (str(self.value)) class Ignore(BaseNode): def __str__(self): return \"Ignore\" class IfExpression(BaseNode): def", "values): self.values = values def __len__(self): return len(self.values) def __str__(self): return \"(%s)\" %", "type_, expr): self.type = type_ self.expr = expr def __str__(self): return \"%s%s\" %", "self.condition = condition self.if_statements = if_statements self.else_statements = else_statements def __str__(self): return \"If:", "falseValue def __str__(self): return \"IfExpression: %s %s %s\" % (str(self.condition), str(self.trueValue), str(self.falseValue)) class", "For(BaseNode): def __init__(self, from_, to, statements): self.from_ = from_ self.to = to self.statements", "self.expr3: args.append(str(self.expr3)) return \"ArrayAccess: %s[%s]\" % (str(self.name), \" \".join(args)) class MaskedBinary(BaseNode): def __init__(self,", "self.value = value self.bit_size = bit_size def __len__(self): \"\"\" Return the bitsize of", "self.reason = \"\" def __str__(self): return \"Undefined\" class Unpredictable(BaseNode): def __init__(self): self.reason =", "class See(BaseNode): def __init__(self, msg): self.msg = msg.strip('\"') def __str__(self): return \"See: %s\"", "% (str(self.name), \", \".join(map(str, self.arguments))) class RepeatUntil(BaseNode): def __init__(self, statements, condition): self.statements =", "%s\" % (str(self.condition), str(self.statements)) class For(BaseNode): def __init__(self, from_, to, statements): self.from_ =", "right_expr def __str__(self): 
return \"%s %s %s\" % (str(self.type), str(self.left_expr), str(self.right_expr)) class ProcedureCall(BaseNode):", "return self.bit_size def __str__(self): return str(self.value) class List(BaseNode): def __init__(self, values): self.values =", "class BitExtraction(BaseNode): def __init__(self, identifier_, range_): self.identifier = identifier_ self.range = range_ def", "(str(self.statements), str(self.condition)) class While(BaseNode): def __init__(self, condition, statements): self.condition = condition self.statements =", "% (str(self.condition), str(self.trueValue), str(self.falseValue)) class CaseElement(BaseNode): def __init__(self, value, statements): self.value = value", "__init__(self, from_, to, statements): self.from_ = from_ self.to = to self.statements = statements", "= msg.strip('\"') def __str__(self): return \"See: %s\" % (str(self.msg)) class ImplementationDefined(BaseNode): def __str__(self):", "Return(BaseNode): def __init__(self, value): self.value = value def __str__(self): return \"Return: %s\" %", "expr def __str__(self): return \"%s%s\" % (str(self.type), str(self.expr)) class BinaryExpression(BaseNode): def __init__(self, type_,", "\".join(args)) class MaskedBinary(BaseNode): def __init__(self, value): self.value = value def __str__(self): return \"MaskedBinary:", "str(self.condition)) class While(BaseNode): def __init__(self, condition, statements): self.condition = condition self.statements = statements", "def __init__(self, value, bit_size=32): self.value = value self.bit_size = bit_size def __len__(self): \"\"\"", "= to self.statements = statements def __str__(self): return \"For: %s %s %s\" %", "else_statements): self.condition = condition self.if_statements = if_statements self.else_statements = else_statements def __str__(self): return", "\"CaseElement: %s %s\" % (str(self.value), str(self.statements)) class Case(BaseNode): def __init__(self, expr, cases): self.expr", "bit_size=32): self.value = value self.bit_size = bit_size def 
__len__(self): \"\"\" Return the bitsize", "msg): self.msg = msg.strip('\"') def __str__(self): return \"See: %s\" % (str(self.msg)) class ImplementationDefined(BaseNode):", "= type_ self.expr = expr def __str__(self): return \"%s%s\" % (str(self.type), str(self.expr)) class" ]
[ "return _('Update') return super().get_submit_button() def get_title(self): if self.style: return _('Update Style') return super().get_title()", "from django.views.generic import TemplateView from django.views.generic.edit import FormView import mimetypes from .models import", "action = '.' def get_action(self): return self.action def get_submit_button(self): return self.submit_button def get_delete_button(self):", "from django.views.generic.edit import FormView import mimetypes from .models import Style from .forms import", "}) return ims def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['styles'] = self.get_image_styles() return context", "= super().get_form_kwargs(*args,**kwargs) if self.style: data['instance'] = self.style return data def get_action(self): if self.style:", "get_object_or_404(Style,id=style_id) effect_id = self.kwargs.get('effect_id') if effect_id and self.effect_name: from image_styles import models self.effect", "Http404 from django.utils.decorators import method_decorator from django.contrib.auth.decorators import login_required from django.contrib.admin.views.decorators import staff_member_required", "context class EffectFormMixin: effect = None style = None title = _('Create Effect')", "self.title def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['action'] = self.get_action() context['submit_button'] = self.get_submit_button() context['delete_button']", "def dispatch(self,request,*args,**kwargs): style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) self.delete_button = _('Delete')", "effect_id and self.effect_name: from image_styles import models self.effect = get_object_or_404(getattr(models,self.effect_name),id=effect_id) return super().dispatch(request,*args,**kwargs) def", "Effect') def dispatch(self,request,*args,**kwargs): self.style = get_object_or_404(Style,id=self.kwargs.get('style_id')) return 
super().dispatch(request,*args,**kwargs) def get_form(self,**kwargs): form = super().get_form(**kwargs)", "View from django.views.generic import TemplateView from django.views.generic.edit import FormView import mimetypes from .models", "= super().get_form_kwargs(*args,**kwargs) if self.effect: data['instance'] = self.effect return data def get_submit_button(self): if self.effect:", "Found\") def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.effect: data['instance'] = self.effect return data", "context = super().get_context_data(**kwargs) context['action'] = self.get_action() context['submit_button'] = self.get_submit_button() context['delete_button'] = self.get_delete_button() context['title']", "return _('Update') return super().get_submit_button() def get_title(self): if self.effect: return _('Update Effect') return super().get_title()", "form_class = get_effect_form_class(self.effect_name) if form_class: return form_class raise Http404(\"Not Found\") def get_form_kwargs(self,*args,**kwargs): data", "def get_action(self): return self.action def get_submit_button(self): return self.submit_button def get_delete_button(self): return self.delete_button def", ") ims.append({ 'style':s, 'effects':effects, }) return ims def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['styles']", "= self.style return form def get_submit_button(self): if self.form_class != EffectForm: return _('Create') return", "data['instance'] = self.style return data def get_action(self): if self.style: return reverse( 'image_styles:style_update', kwargs={'style_id':self.style.id}", "HttpResponse, get_object_or_404 from django.http import Http404 from django.utils.decorators import method_decorator from django.contrib.auth.decorators import", "open(image.image.path,'rb') r = HttpResponse(f,content_type=content_type[0]) f.close() return r class ModalForm(FormView): template_name = 'image_styles/modal_form.html' 
submit_button", "def form_valid(self,form): form.save() return HttpResponse(_('Style Created!')) def delete(self,*args,**kwargs): if self.style: self.style.delete() return HttpResponse(_('Style", ") self.request.method = 'GET' return super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch') class EffectCreateView(EffectFormMixin,ModalForm): title = _('Create Effect')", "from django.shortcuts import render, HttpResponse, get_object_or_404 from django.http import Http404 from django.utils.decorators import", "from .models import Style from .forms import EffectForm,StyleForm from .utils import get_effect_form_class,render_image class", "content_type = mimetypes.guess_type(image.image.path) f = open(image.image.path,'rb') r = HttpResponse(f,content_type=content_type[0]) f.close() return r class", "return data def get_action(self): if self.style: return reverse( 'image_styles:style_update', kwargs={'style_id':self.style.id} ) return reverse('image_styles:style_create')", "self.style return form def get_submit_button(self): if self.form_class != EffectForm: return _('Create') return super().get_submit_button()", "s.get_effects() for i in range(len(effects)): form = get_effect_form_class(effect_model=effects[i]['object']) if form: effects[i]['form'] = form(instance=effects[i]['object'])", "@method_decorator(staff_member_required(),name='dispatch') class ManageImageStylesView(TemplateView): template_name = 'image_styles/home.html' def get_image_styles(self): ims = [] for s", "EffectCreateView(EffectFormMixin,ModalForm): title = _('Create Effect') submit_button = _('Create') def get_form(self,**kwargs): form = super().get_form(**kwargs)", "kwargs={'style_id':self.style.id,'effect_name':self.effect_name} ) return reverse( 'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name} ) def form_valid(self,form): form.save() return HttpResponse(_('Effect Created!'))", "self.action == '.': return 
reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return self.action def form_valid(self,form): effect_name = form.cleaned_data.get('effect') self.form_class", "_('Create') def dispatch(self,request,*args,**kwargs): self.effect_name = self.kwargs.get('effect_name') style_id = self.kwargs.get('style_id') if style_id: self.style =", "= mimetypes.guess_type(image.image.path) f = open(image.image.path,'rb') r = HttpResponse(f,content_type=content_type[0]) f.close() return r class ModalForm(FormView):", "[] for s in Style.objects.all(): effects = s.get_effects() for i in range(len(effects)): form", "get_submit_button(self): return self.submit_button def get_delete_button(self): return self.delete_button def get_title(self): return self.title def get_context_data(self,**kwargs):", "def get_submit_button(self): if self.style: return _('Update') return super().get_submit_button() def get_title(self): if self.style: return", "if self.effect: data['instance'] = self.effect return data def get_submit_button(self): if self.effect: return _('Update')", "submit_button = _('Save') delete_button = '' title = _('Create') action = '.' def", "super().get_title() def form_valid(self,form): form.save() return HttpResponse(_('Style Created!')) def delete(self,*args,**kwargs): if self.style: self.style.delete() return", "'' title = _('Create') action = '.' 
def get_action(self): return self.action def get_submit_button(self):", "RenderImageView(View): def get(self,request,style_name,path): image = render_image(style_name,path) content_type = mimetypes.guess_type(image.image.path) f = open(image.image.path,'rb') r", "Effect') submit_button = _('Create') def dispatch(self,request,*args,**kwargs): self.effect_name = self.kwargs.get('effect_name') style_id = self.kwargs.get('style_id') if", "self.style = get_object_or_404(Style,id=style_id) self.delete_button = _('Delete') return super().dispatch(request,*args,**kwargs) def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs)", "data def get_submit_button(self): if self.effect: return _('Update') return super().get_submit_button() def get_title(self): if self.effect:", "return _('Create Effect') return super().get_title() def get_action(self): if self.action == '.': return reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id})", "def dispatch(self,request,*args,**kwargs): self.effect_name = self.kwargs.get('effect_name') style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id)", "def get_submit_button(self): if self.effect: return _('Update') return super().get_submit_button() def get_title(self): if self.effect: return", "None form_class = StyleForm def dispatch(self,request,*args,**kwargs): style_id = self.kwargs.get('style_id') if style_id: self.style =", "None title = _('Create Effect') submit_button = _('Create') def dispatch(self,request,*args,**kwargs): self.effect_name = self.kwargs.get('effect_name')", "class ManageImageStylesView(TemplateView): template_name = 'image_styles/home.html' def get_image_styles(self): ims = [] for s in", "self.request.method = 'GET' return super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch') class EffectCreateView(EffectFormMixin,ModalForm): title = _('Create Effect') submit_button", 
"login_required from django.contrib.admin.views.decorators import staff_member_required from django.urls import reverse,reverse_lazy from django.utils.translation import ugettext_lazy", "title = _('Create Effect') submit_button = _('Create') def dispatch(self,request,*args,**kwargs): self.effect_name = self.kwargs.get('effect_name') style_id", "_('Update') return super().get_submit_button() def get_title(self): if self.style: return _('Update Style') return super().get_title() def", "from django.contrib.admin.views.decorators import staff_member_required from django.urls import reverse,reverse_lazy from django.utils.translation import ugettext_lazy as", "super().get_form_kwargs(*args,**kwargs) if self.style: data['instance'] = self.style return data def get_action(self): if self.style: return", "if self.effect: self.effect.delete() return HttpResponse(_('Effect Removed!')) return HttpResponse(_('Delete failed!')) class StyleFormMixin: style =", "= reverse( 'image_styles:effect_update', kwargs = { 'effect_id':effects[i]['object'].id, 'effect_name':effects[i]['object'].get_name() } ) ims.append({ 'style':s, 'effects':effects,", "def get_submit_button(self): return self.submit_button def get_delete_button(self): return self.delete_button def get_title(self): return self.title def", "data['instance'] = self.effect return data def get_submit_button(self): if self.effect: return _('Update') return super().get_submit_button()", "def get(self,request,style_name,path): image = render_image(style_name,path) content_type = mimetypes.guess_type(image.image.path) f = open(image.image.path,'rb') r =", "def form_valid(self,form): effect_name = form.cleaned_data.get('effect') self.form_class = get_effect_form_class(effect_name=effect_name) self.action = reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name}", "get_action(self): if self.style: return reverse( 'image_styles:effect_create', 
kwargs={'style_id':self.style.id,'effect_name':self.effect_name} ) return reverse( 'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name} )", "Effect') return super().get_title() def get_action(self): if self.action == '.': return reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return self.action", "def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return form @method_decorator(staff_member_required(),name='dispatch') class EffectUpdateView(EffectFormMixin,ModalForm):", "def get_action(self): if self.style: return reverse( 'image_styles:style_update', kwargs={'style_id':self.style.id} ) return reverse('image_styles:style_create') def get_submit_button(self):", "return form_class raise Http404(\"Not Found\") def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.effect: data['instance']", "return super().dispatch(request,*args,**kwargs) def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return form def", "raise Http404(\"Not Found\") def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.effect: data['instance'] = self.effect", "StyleForm def dispatch(self,request,*args,**kwargs): style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) self.delete_button =", "HttpResponse(_('Effect Removed!')) return HttpResponse(_('Delete failed!')) class StyleFormMixin: style = None form_class = StyleForm", "effects[i]['form'] = form(instance=effects[i]['object']) effects[i]['action'] = reverse( 'image_styles:effect_update', kwargs = { 'effect_id':effects[i]['object'].id, 'effect_name':effects[i]['object'].get_name() }", "super().get_context_data(**kwargs) context['styles'] = self.get_image_styles() return context @method_decorator(staff_member_required(),name='dispatch') class 
EffectCreateInitView(ModalForm): form_class = EffectForm submit_button", "super().get_form_kwargs(*args,**kwargs) if self.effect: data['instance'] = self.effect return data def get_submit_button(self): if self.effect: return", "Effect') submit_button = _('Create') def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return", "form = get_effect_form_class(effect_model=effects[i]['object']) if form: effects[i]['form'] = form(instance=effects[i]['object']) effects[i]['action'] = reverse( 'image_styles:effect_update', kwargs", "self.style: return _('Update Style') return super().get_title() def form_valid(self,form): form.save() return HttpResponse(_('Style Created!')) def", "ims = [] for s in Style.objects.all(): effects = s.get_effects() for i in", "= self.get_image_styles() return context @method_decorator(staff_member_required(),name='dispatch') class EffectCreateInitView(ModalForm): form_class = EffectForm submit_button = _('Next')", "form.save() return HttpResponse(_('Effect Created!')) def delete(self,*args,**kwargs): if self.effect: self.effect.delete() return HttpResponse(_('Effect Removed!')) return", "dispatch(self,request,*args,**kwargs): self.effect_name = self.kwargs.get('effect_name') style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) effect_id", "failed!')) @method_decorator(staff_member_required(),name='dispatch') class ManageImageStylesView(TemplateView): template_name = 'image_styles/home.html' def get_image_styles(self): ims = [] for", "get_object_or_404(getattr(models,self.effect_name),id=effect_id) return super().dispatch(request,*args,**kwargs) def get_form_class(self): form_class = get_effect_form_class(self.effect_name) if form_class: return form_class raise", "= self.kwargs.get('effect_id') if effect_id and self.effect_name: from image_styles import models self.effect = get_object_or_404(getattr(models,self.effect_name),id=effect_id)", 
"'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':self.effect_name} ) return reverse( 'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name} ) def form_valid(self,form): form.save() return HttpResponse(_('Effect", "django.contrib.admin.views.decorators import staff_member_required from django.urls import reverse,reverse_lazy from django.utils.translation import ugettext_lazy as _", "HttpResponse(_('Delete failed!')) class StyleFormMixin: style = None form_class = StyleForm def dispatch(self,request,*args,**kwargs): style_id", "= self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) self.delete_button = _('Delete') return super().dispatch(request,*args,**kwargs) def", "self.get_action() context['submit_button'] = self.get_submit_button() context['delete_button'] = self.get_delete_button() context['title'] = self.get_title() return context class", "r = HttpResponse(f,content_type=content_type[0]) f.close() return r class ModalForm(FormView): template_name = 'image_styles/modal_form.html' submit_button =", "from django.urls import reverse,reverse_lazy from django.utils.translation import ugettext_lazy as _ from django.views import", "self.delete_button def get_title(self): return self.title def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['action'] = self.get_action()", "Style.objects.all(): effects = s.get_effects() for i in range(len(effects)): form = get_effect_form_class(effect_model=effects[i]['object']) if form:", "return super().get_submit_button() def get_title(self): if self.effect: return _('Update Effect') return super().get_title() def get_action(self):", "title = _('Create') action = '.' 
def get_action(self): return self.action def get_submit_button(self): return", "Removed!')) return HttpResponse(_('Delete failed!')) class StyleFormMixin: style = None form_class = StyleForm def", "import Style from .forms import EffectForm,StyleForm from .utils import get_effect_form_class,render_image class RenderImageView(View): def", "self.style = get_object_or_404(Style,id=style_id) effect_id = self.kwargs.get('effect_id') if effect_id and self.effect_name: from image_styles import", "= '.' def get_action(self): return self.action def get_submit_button(self): return self.submit_button def get_delete_button(self): return", "def form_valid(self,form): form.save() return HttpResponse(_('Effect Created!')) def delete(self,*args,**kwargs): if self.effect: self.effect.delete() return HttpResponse(_('Effect", "get_title(self): if self.form_class != EffectForm: return _('Create Effect') return super().get_title() def get_action(self): if", "get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.style: data['instance'] = self.style return data def get_action(self):", "kwargs={'style_id':self.style.id} ) return reverse('image_styles:style_create') def get_submit_button(self): if self.style: return _('Update') return super().get_submit_button() def", "get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['action'] = self.get_action() context['submit_button'] = self.get_submit_button() context['delete_button'] = self.get_delete_button()", "django.http import Http404 from django.utils.decorators import method_decorator from django.contrib.auth.decorators import login_required from django.contrib.admin.views.decorators", "data = super().get_form_kwargs(*args,**kwargs) if self.style: data['instance'] = self.style return data def get_action(self): if", "context = super().get_context_data(**kwargs) context['styles'] = self.get_image_styles() return context 
@method_decorator(staff_member_required(),name='dispatch') class EffectCreateInitView(ModalForm): form_class =", "range(len(effects)): form = get_effect_form_class(effect_model=effects[i]['object']) if form: effects[i]['form'] = form(instance=effects[i]['object']) effects[i]['action'] = reverse( 'image_styles:effect_update',", "self.style: return _('Update') return super().get_submit_button() def get_title(self): if self.style: return _('Update Style') return", "HttpResponse(_('Style Removed!')) return HttpResponse(_('Delete failed!')) @method_decorator(staff_member_required(),name='dispatch') class ManageImageStylesView(TemplateView): template_name = 'image_styles/home.html' def get_image_styles(self):", "= '' title = _('Create') action = '.' def get_action(self): return self.action def", "'effect_id':effects[i]['object'].id, 'effect_name':effects[i]['object'].get_name() } ) ims.append({ 'style':s, 'effects':effects, }) return ims def get_context_data(self,**kwargs): context", "if self.style: return _('Update Style') return super().get_title() def form_valid(self,form): form.save() return HttpResponse(_('Style Created!'))", "= get_object_or_404(Style,id=style_id) effect_id = self.kwargs.get('effect_id') if effect_id and self.effect_name: from image_styles import models", "dispatch(self,request,*args,**kwargs): style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) self.delete_button = _('Delete') return", "= 'image_styles/modal_form.html' submit_button = _('Save') delete_button = '' title = _('Create') action =", "effects = s.get_effects() for i in range(len(effects)): form = get_effect_form_class(effect_model=effects[i]['object']) if form: effects[i]['form']", "reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name} ) self.request.method = 'GET' return super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch') class 
EffectCreateView(EffectFormMixin,ModalForm): title", ") return reverse( 'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name} ) def form_valid(self,form): form.save() return HttpResponse(_('Effect Created!')) def", "django.utils.decorators import method_decorator from django.contrib.auth.decorators import login_required from django.contrib.admin.views.decorators import staff_member_required from django.urls", "form.cleaned_data.get('effect') self.form_class = get_effect_form_class(effect_name=effect_name) self.action = reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name} ) self.request.method = 'GET'", "template_name = 'image_styles/modal_form.html' submit_button = _('Save') delete_button = '' title = _('Create') action", "title = _('Create Effect') submit_button = _('Create') def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style']", "from django.utils.decorators import method_decorator from django.contrib.auth.decorators import login_required from django.contrib.admin.views.decorators import staff_member_required from", "effect_name = form.cleaned_data.get('effect') self.form_class = get_effect_form_class(effect_name=effect_name) self.action = reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name} ) self.request.method", "mimetypes from .models import Style from .forms import EffectForm,StyleForm from .utils import get_effect_form_class,render_image", "super().dispatch(request,*args,**kwargs) def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.style: data['instance'] = self.style return data", "self.form_class = get_effect_form_class(effect_name=effect_name) self.action = reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name} ) self.request.method = 'GET' return", "def delete(self,*args,**kwargs): if self.effect: 
self.effect.delete() return HttpResponse(_('Effect Removed!')) return HttpResponse(_('Delete failed!')) class StyleFormMixin:", "get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return form def get_submit_button(self): if self.form_class", "get_title(self): if self.effect: return _('Update Effect') return super().get_title() def get_action(self): if self.style: return", "self.effect_name = self.kwargs.get('effect_name') style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) effect_id =", "delete(self,*args,**kwargs): if self.style: self.style.delete() return HttpResponse(_('Style Removed!')) return HttpResponse(_('Delete failed!')) @method_decorator(staff_member_required(),name='dispatch') class ManageImageStylesView(TemplateView):", "= get_object_or_404(Style,id=self.kwargs.get('style_id')) return super().dispatch(request,*args,**kwargs) def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return", "reverse,reverse_lazy from django.utils.translation import ugettext_lazy as _ from django.views import View from django.views.generic", "= form(instance=effects[i]['object']) effects[i]['action'] = reverse( 'image_styles:effect_update', kwargs = { 'effect_id':effects[i]['object'].id, 'effect_name':effects[i]['object'].get_name() } )", "super().get_context_data(**kwargs) context['action'] = self.get_action() context['submit_button'] = self.get_submit_button() context['delete_button'] = self.get_delete_button() context['title'] = self.get_title()", "return context class EffectFormMixin: effect = None style = None title = _('Create", "django.utils.translation import ugettext_lazy as _ from django.views import View from django.views.generic import TemplateView", "Style from .forms import EffectForm,StyleForm from .utils import get_effect_form_class,render_image class RenderImageView(View): def get(self,request,style_name,path):", 
"get_effect_form_class(self.effect_name) if form_class: return form_class raise Http404(\"Not Found\") def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs)", "return self.delete_button def get_title(self): return self.title def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['action'] =", "reverse( 'image_styles:style_update', kwargs={'style_id':self.style.id} ) return reverse('image_styles:style_create') def get_submit_button(self): if self.style: return _('Update') return", "return HttpResponse(_('Delete failed!')) @method_decorator(staff_member_required(),name='dispatch') class ManageImageStylesView(TemplateView): template_name = 'image_styles/home.html' def get_image_styles(self): ims =", "ugettext_lazy as _ from django.views import View from django.views.generic import TemplateView from django.views.generic.edit", "get_form_class(self): form_class = get_effect_form_class(self.effect_name) if form_class: return form_class raise Http404(\"Not Found\") def get_form_kwargs(self,*args,**kwargs):", "get_title(self): if self.style: return _('Update Style') return super().get_title() def form_valid(self,form): form.save() return HttpResponse(_('Style", "get_object_or_404 from django.http import Http404 from django.utils.decorators import method_decorator from django.contrib.auth.decorators import login_required", "_('Create') return super().get_submit_button() def get_title(self): if self.form_class != EffectForm: return _('Create Effect') return", "self.get_image_styles() return context @method_decorator(staff_member_required(),name='dispatch') class EffectCreateInitView(ModalForm): form_class = EffectForm submit_button = _('Next') title", "= get_object_or_404(Style,id=style_id) self.delete_button = _('Delete') return super().dispatch(request,*args,**kwargs) def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if", "ims def get_context_data(self,**kwargs): 
context = super().get_context_data(**kwargs) context['styles'] = self.get_image_styles() return context @method_decorator(staff_member_required(),name='dispatch') class", "= render_image(style_name,path) content_type = mimetypes.guess_type(image.image.path) f = open(image.image.path,'rb') r = HttpResponse(f,content_type=content_type[0]) f.close() return", "def dispatch(self,request,*args,**kwargs): self.style = get_object_or_404(Style,id=self.kwargs.get('style_id')) return super().dispatch(request,*args,**kwargs) def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style']", "data def get_action(self): if self.style: return reverse( 'image_styles:style_update', kwargs={'style_id':self.style.id} ) return reverse('image_styles:style_create') def", "} ) ims.append({ 'style':s, 'effects':effects, }) return ims def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs)", "self.style return data def get_action(self): if self.style: return reverse( 'image_styles:style_update', kwargs={'style_id':self.style.id} ) return", "return super().dispatch(request,*args,**kwargs) def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.style: data['instance'] = self.style return", "_('Create') action = '.' 
def get_action(self): return self.action def get_submit_button(self): return self.submit_button def", "= _('Delete') return super().dispatch(request,*args,**kwargs) def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.style: data['instance'] =", "get_action(self): return self.action def get_submit_button(self): return self.submit_button def get_delete_button(self): return self.delete_button def get_title(self):", "ims.append({ 'style':s, 'effects':effects, }) return ims def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['styles'] =", "def get_submit_button(self): if self.form_class != EffectForm: return _('Create') return super().get_submit_button() def get_title(self): if", "= get_effect_form_class(effect_name=effect_name) self.action = reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name} ) self.request.method = 'GET' return super().get(self.request,style_id=self.style.id)", "i in range(len(effects)): form = get_effect_form_class(effect_model=effects[i]['object']) if form: effects[i]['form'] = form(instance=effects[i]['object']) effects[i]['action'] =", "reverse( 'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name} ) def form_valid(self,form): form.save() return HttpResponse(_('Effect Created!')) def delete(self,*args,**kwargs): if", "if self.style: return _('Update') return super().get_submit_button() def get_title(self): if self.style: return _('Update Style')", "return self.title def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['action'] = self.get_action() context['submit_button'] = self.get_submit_button()", "if self.form_class != EffectForm: return _('Create') return super().get_submit_button() def get_title(self): if self.form_class !=", "image = render_image(style_name,path) content_type = mimetypes.guess_type(image.image.path) f = 
open(image.image.path,'rb') r = HttpResponse(f,content_type=content_type[0]) f.close()", "self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) self.delete_button = _('Delete') return super().dispatch(request,*args,**kwargs) def get_form_kwargs(self,*args,**kwargs):", "data = super().get_form_kwargs(*args,**kwargs) if self.effect: data['instance'] = self.effect return data def get_submit_button(self): if", "class EffectFormMixin: effect = None style = None title = _('Create Effect') submit_button", "return self.submit_button def get_delete_button(self): return self.delete_button def get_title(self): return self.title def get_context_data(self,**kwargs): context", "super().get_title() def get_action(self): if self.style: return reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':self.effect_name} ) return reverse( 'image_styles:effect_update',", "self.style: return reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':self.effect_name} ) return reverse( 'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name} ) def form_valid(self,form):", ".models import Style from .forms import EffectForm,StyleForm from .utils import get_effect_form_class,render_image class RenderImageView(View):", "context['action'] = self.get_action() context['submit_button'] = self.get_submit_button() context['delete_button'] = self.get_delete_button() context['title'] = self.get_title() return", "= _('Create') def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return form @method_decorator(staff_member_required(),name='dispatch')", "self.effect_name: from image_styles import models self.effect = get_object_or_404(getattr(models,self.effect_name),id=effect_id) return super().dispatch(request,*args,**kwargs) def get_form_class(self): form_class", "self.style: self.style.delete() return HttpResponse(_('Style 
Removed!')) return HttpResponse(_('Delete failed!')) @method_decorator(staff_member_required(),name='dispatch') class ManageImageStylesView(TemplateView): template_name =", "return super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch') class EffectCreateView(EffectFormMixin,ModalForm): title = _('Create Effect') submit_button = _('Create') def", "if self.style: data['instance'] = self.style return data def get_action(self): if self.style: return reverse(", "in Style.objects.all(): effects = s.get_effects() for i in range(len(effects)): form = get_effect_form_class(effect_model=effects[i]['object']) if", "if self.style: return reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':self.effect_name} ) return reverse( 'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name} ) def", "get_image_styles(self): ims = [] for s in Style.objects.all(): effects = s.get_effects() for i", "get_action(self): if self.style: return reverse( 'image_styles:style_update', kwargs={'style_id':self.style.id} ) return reverse('image_styles:style_create') def get_submit_button(self): if", "= StyleForm def dispatch(self,request,*args,**kwargs): style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) self.delete_button", "in range(len(effects)): form = get_effect_form_class(effect_model=effects[i]['object']) if form: effects[i]['form'] = form(instance=effects[i]['object']) effects[i]['action'] = reverse(", "= _('Select Effect') def dispatch(self,request,*args,**kwargs): self.style = get_object_or_404(Style,id=self.kwargs.get('style_id')) return super().dispatch(request,*args,**kwargs) def get_form(self,**kwargs): form", "django.urls import reverse,reverse_lazy from django.utils.translation import ugettext_lazy as _ from django.views import View", "return super().get_submit_button() def get_title(self): if self.style: return 
_('Update Style') return super().get_title() def form_valid(self,form):", "get_effect_form_class(effect_name=effect_name) self.action = reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name} ) self.request.method = 'GET' return super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch')", "= get_effect_form_class(effect_model=effects[i]['object']) if form: effects[i]['form'] = form(instance=effects[i]['object']) effects[i]['action'] = reverse( 'image_styles:effect_update', kwargs =", "'style':s, 'effects':effects, }) return ims def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['styles'] = self.get_image_styles()", "return reverse( 'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name} ) def form_valid(self,form): form.save() return HttpResponse(_('Effect Created!')) def delete(self,*args,**kwargs):", "EffectForm: return _('Create Effect') return super().get_title() def get_action(self): if self.action == '.': return", "return reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':self.effect_name} ) return reverse( 'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name} ) def form_valid(self,form): form.save()", "= s.get_effects() for i in range(len(effects)): form = get_effect_form_class(effect_model=effects[i]['object']) if form: effects[i]['form'] =", "_('Create Effect') return super().get_title() def get_action(self): if self.action == '.': return reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return", "get(self,request,style_name,path): image = render_image(style_name,path) content_type = mimetypes.guess_type(image.image.path) f = open(image.image.path,'rb') r = HttpResponse(f,content_type=content_type[0])", "def get_title(self): if self.style: return _('Update Style') return super().get_title() 
def form_valid(self,form): form.save() return", "def get_action(self): if self.style: return reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':self.effect_name} ) return reverse( 'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name}", "!= EffectForm: return _('Create Effect') return super().get_title() def get_action(self): if self.action == '.':", "from django.http import Http404 from django.utils.decorators import method_decorator from django.contrib.auth.decorators import login_required from", "self.effect.delete() return HttpResponse(_('Effect Removed!')) return HttpResponse(_('Delete failed!')) class StyleFormMixin: style = None form_class", "EffectForm submit_button = _('Next') title = _('Select Effect') def dispatch(self,request,*args,**kwargs): self.style = get_object_or_404(Style,id=self.kwargs.get('style_id'))", "staff_member_required from django.urls import reverse,reverse_lazy from django.utils.translation import ugettext_lazy as _ from django.views", "= self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) effect_id = self.kwargs.get('effect_id') if effect_id and", "self.effect: data['instance'] = self.effect return data def get_submit_button(self): if self.effect: return _('Update') return", "'GET' return super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch') class EffectCreateView(EffectFormMixin,ModalForm): title = _('Create Effect') submit_button = _('Create')", "Http404(\"Not Found\") def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.effect: data['instance'] = self.effect return", "= HttpResponse(f,content_type=content_type[0]) f.close() return r class ModalForm(FormView): template_name = 'image_styles/modal_form.html' submit_button = _('Save')", "def get_form_class(self): form_class = get_effect_form_class(self.effect_name) if 
form_class: return form_class raise Http404(\"Not Found\") def", "_('Update Effect') return super().get_title() def get_action(self): if self.style: return reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':self.effect_name} )", "return HttpResponse(_('Style Created!')) def delete(self,*args,**kwargs): if self.style: self.style.delete() return HttpResponse(_('Style Removed!')) return HttpResponse(_('Delete", "_('Create Effect') submit_button = _('Create') def dispatch(self,request,*args,**kwargs): self.effect_name = self.kwargs.get('effect_name') style_id = self.kwargs.get('style_id')", "= reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name} ) self.request.method = 'GET' return super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch') class EffectCreateView(EffectFormMixin,ModalForm):", "def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return form def get_submit_button(self): if", "style_id: self.style = get_object_or_404(Style,id=style_id) effect_id = self.kwargs.get('effect_id') if effect_id and self.effect_name: from image_styles", "models self.effect = get_object_or_404(getattr(models,self.effect_name),id=effect_id) return super().dispatch(request,*args,**kwargs) def get_form_class(self): form_class = get_effect_form_class(self.effect_name) if form_class:", "import View from django.views.generic import TemplateView from django.views.generic.edit import FormView import mimetypes from", "return HttpResponse(_('Effect Removed!')) return HttpResponse(_('Delete failed!')) class StyleFormMixin: style = None form_class =", "super().dispatch(request,*args,**kwargs) def get_form_class(self): form_class = get_effect_form_class(self.effect_name) if form_class: return form_class raise Http404(\"Not Found\")", "'effects':effects, }) return ims def get_context_data(self,**kwargs): context = 
super().get_context_data(**kwargs) context['styles'] = self.get_image_styles() return", "Style') return super().get_title() def form_valid(self,form): form.save() return HttpResponse(_('Style Created!')) def delete(self,*args,**kwargs): if self.style:", "title = _('Select Effect') def dispatch(self,request,*args,**kwargs): self.style = get_object_or_404(Style,id=self.kwargs.get('style_id')) return super().dispatch(request,*args,**kwargs) def get_form(self,**kwargs):", "class RenderImageView(View): def get(self,request,style_name,path): image = render_image(style_name,path) content_type = mimetypes.guess_type(image.image.path) f = open(image.image.path,'rb')", "= super().get_form(**kwargs) form.initial['style'] = self.style return form def get_submit_button(self): if self.form_class != EffectForm:", "import TemplateView from django.views.generic.edit import FormView import mimetypes from .models import Style from", "get_action(self): if self.action == '.': return reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return self.action def form_valid(self,form): effect_name =", "Effect') return super().get_title() def get_action(self): if self.style: return reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':self.effect_name} ) return", "self.kwargs.get('effect_name') style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) effect_id = self.kwargs.get('effect_id') if", "import method_decorator from django.contrib.auth.decorators import login_required from django.contrib.admin.views.decorators import staff_member_required from django.urls import", "ManageImageStylesView(TemplateView): template_name = 'image_styles/home.html' def get_image_styles(self): ims = [] for s in Style.objects.all():", "return ims def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['styles'] = self.get_image_styles() return context 
@method_decorator(staff_member_required(),name='dispatch')", ".forms import EffectForm,StyleForm from .utils import get_effect_form_class,render_image class RenderImageView(View): def get(self,request,style_name,path): image =", "reverse( 'image_styles:effect_update', kwargs = { 'effect_id':effects[i]['object'].id, 'effect_name':effects[i]['object'].get_name() } ) ims.append({ 'style':s, 'effects':effects, })", "if self.style: return reverse( 'image_styles:style_update', kwargs={'style_id':self.style.id} ) return reverse('image_styles:style_create') def get_submit_button(self): if self.style:", "import FormView import mimetypes from .models import Style from .forms import EffectForm,StyleForm from", "for s in Style.objects.all(): effects = s.get_effects() for i in range(len(effects)): form =", "= open(image.image.path,'rb') r = HttpResponse(f,content_type=content_type[0]) f.close() return r class ModalForm(FormView): template_name = 'image_styles/modal_form.html'", "= EffectForm submit_button = _('Next') title = _('Select Effect') def dispatch(self,request,*args,**kwargs): self.style =", "FormView import mimetypes from .models import Style from .forms import EffectForm,StyleForm from .utils", "= super().get_context_data(**kwargs) context['styles'] = self.get_image_styles() return context @method_decorator(staff_member_required(),name='dispatch') class EffectCreateInitView(ModalForm): form_class = EffectForm", "def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.style: data['instance'] = self.style return data def", "return HttpResponse(_('Effect Created!')) def delete(self,*args,**kwargs): if self.effect: self.effect.delete() return HttpResponse(_('Effect Removed!')) return HttpResponse(_('Delete", "style = None title = _('Create Effect') submit_button = _('Create') def dispatch(self,request,*args,**kwargs): self.effect_name", "class StyleFormMixin: style = None form_class = StyleForm def 
dispatch(self,request,*args,**kwargs): style_id = self.kwargs.get('style_id')", "reverse('image_styles:style_create') def get_submit_button(self): if self.style: return _('Update') return super().get_submit_button() def get_title(self): if self.style:", "context['styles'] = self.get_image_styles() return context @method_decorator(staff_member_required(),name='dispatch') class EffectCreateInitView(ModalForm): form_class = EffectForm submit_button =", "reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':self.effect_name} ) return reverse( 'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name} ) def form_valid(self,form): form.save() return", "self.effect: return _('Update') return super().get_submit_button() def get_title(self): if self.effect: return _('Update Effect') return", "form.initial['style'] = self.style return form def get_submit_button(self): if self.form_class != EffectForm: return _('Create')", "submit_button = _('Create') def dispatch(self,request,*args,**kwargs): self.effect_name = self.kwargs.get('effect_name') style_id = self.kwargs.get('style_id') if style_id:", "from .utils import get_effect_form_class,render_image class RenderImageView(View): def get(self,request,style_name,path): image = render_image(style_name,path) content_type =", "'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name} ) self.request.method = 'GET' return super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch') class EffectCreateView(EffectFormMixin,ModalForm): title =", "EffectFormMixin: effect = None style = None title = _('Create Effect') submit_button =", "if self.form_class != EffectForm: return _('Create Effect') return super().get_title() def get_action(self): if self.action", "import login_required from django.contrib.admin.views.decorators import staff_member_required from django.urls import 
reverse,reverse_lazy from django.utils.translation import", "r class ModalForm(FormView): template_name = 'image_styles/modal_form.html' submit_button = _('Save') delete_button = '' title", "self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) effect_id = self.kwargs.get('effect_id') if effect_id and self.effect_name:", "self.style: return reverse( 'image_styles:style_update', kwargs={'style_id':self.style.id} ) return reverse('image_styles:style_create') def get_submit_button(self): if self.style: return", "from django.utils.translation import ugettext_lazy as _ from django.views import View from django.views.generic import", "if effect_id and self.effect_name: from image_styles import models self.effect = get_object_or_404(getattr(models,self.effect_name),id=effect_id) return super().dispatch(request,*args,**kwargs)", "def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['styles'] = self.get_image_styles() return context @method_decorator(staff_member_required(),name='dispatch') class EffectCreateInitView(ModalForm):", "import staff_member_required from django.urls import reverse,reverse_lazy from django.utils.translation import ugettext_lazy as _ from", "'.': return reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return self.action def form_valid(self,form): effect_name = form.cleaned_data.get('effect') self.form_class = get_effect_form_class(effect_name=effect_name)", "EffectCreateInitView(ModalForm): form_class = EffectForm submit_button = _('Next') title = _('Select Effect') def dispatch(self,request,*args,**kwargs):", "form_class: return form_class raise Http404(\"Not Found\") def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.effect:", "return context @method_decorator(staff_member_required(),name='dispatch') class EffectCreateInitView(ModalForm): form_class = EffectForm submit_button = _('Next') title =", 
"'image_styles/home.html' def get_image_styles(self): ims = [] for s in Style.objects.all(): effects = s.get_effects()", "kwargs={'effect':self.effect.id,'effect_name':self.effect_name} ) def form_valid(self,form): form.save() return HttpResponse(_('Effect Created!')) def delete(self,*args,**kwargs): if self.effect: self.effect.delete()", "method_decorator from django.contrib.auth.decorators import login_required from django.contrib.admin.views.decorators import staff_member_required from django.urls import reverse,reverse_lazy", "super().get_submit_button() def get_title(self): if self.style: return _('Update Style') return super().get_title() def form_valid(self,form): form.save()", "self.delete_button = _('Delete') return super().dispatch(request,*args,**kwargs) def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.style: data['instance']", "StyleFormMixin: style = None form_class = StyleForm def dispatch(self,request,*args,**kwargs): style_id = self.kwargs.get('style_id') if", "= [] for s in Style.objects.all(): effects = s.get_effects() for i in range(len(effects)):", "'image_styles:effect_update', kwargs = { 'effect_id':effects[i]['object'].id, 'effect_name':effects[i]['object'].get_name() } ) ims.append({ 'style':s, 'effects':effects, }) return", "import Http404 from django.utils.decorators import method_decorator from django.contrib.auth.decorators import login_required from django.contrib.admin.views.decorators import", "def delete(self,*args,**kwargs): if self.style: self.style.delete() return HttpResponse(_('Style Removed!')) return HttpResponse(_('Delete failed!')) @method_decorator(staff_member_required(),name='dispatch') class", "HttpResponse(_('Style Created!')) def delete(self,*args,**kwargs): if self.style: self.style.delete() return HttpResponse(_('Style Removed!')) return HttpResponse(_('Delete failed!'))", "django.contrib.auth.decorators import login_required from django.contrib.admin.views.decorators 
import staff_member_required from django.urls import reverse,reverse_lazy from django.utils.translation", "= 'GET' return super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch') class EffectCreateView(EffectFormMixin,ModalForm): title = _('Create Effect') submit_button =", "_('Save') delete_button = '' title = _('Create') action = '.' def get_action(self): return", "return self.action def get_submit_button(self): return self.submit_button def get_delete_button(self): return self.delete_button def get_title(self): return", "@method_decorator(staff_member_required(),name='dispatch') class EffectCreateInitView(ModalForm): form_class = EffectForm submit_button = _('Next') title = _('Select Effect')", "return HttpResponse(_('Delete failed!')) class StyleFormMixin: style = None form_class = StyleForm def dispatch(self,request,*args,**kwargs):", "None style = None title = _('Create Effect') submit_button = _('Create') def dispatch(self,request,*args,**kwargs):", "delete(self,*args,**kwargs): if self.effect: self.effect.delete() return HttpResponse(_('Effect Removed!')) return HttpResponse(_('Delete failed!')) class StyleFormMixin: style", "if style_id: self.style = get_object_or_404(Style,id=style_id) self.delete_button = _('Delete') return super().dispatch(request,*args,**kwargs) def get_form_kwargs(self,*args,**kwargs): data", "return reverse('image_styles:style_create') def get_submit_button(self): if self.style: return _('Update') return super().get_submit_button() def get_title(self): if", ") def form_valid(self,form): form.save() return HttpResponse(_('Effect Created!')) def delete(self,*args,**kwargs): if self.effect: self.effect.delete() return", "def get_title(self): if self.effect: return _('Update Effect') return super().get_title() def get_action(self): if self.style:", "= _('Create Effect') submit_button = _('Create') def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] =", 
"Created!')) def delete(self,*args,**kwargs): if self.style: self.style.delete() return HttpResponse(_('Style Removed!')) return HttpResponse(_('Delete failed!')) @method_decorator(staff_member_required(),name='dispatch')", "self.effect: return _('Update Effect') return super().get_title() def get_action(self): if self.style: return reverse( 'image_styles:effect_create',", "template_name = 'image_styles/home.html' def get_image_styles(self): ims = [] for s in Style.objects.all(): effects", "from django.views import View from django.views.generic import TemplateView from django.views.generic.edit import FormView import", "Created!')) def delete(self,*args,**kwargs): if self.effect: self.effect.delete() return HttpResponse(_('Effect Removed!')) return HttpResponse(_('Delete failed!')) class", "return data def get_submit_button(self): if self.effect: return _('Update') return super().get_submit_button() def get_title(self): if", "return reverse( 'image_styles:style_update', kwargs={'style_id':self.style.id} ) return reverse('image_styles:style_create') def get_submit_button(self): if self.style: return _('Update')", "return r class ModalForm(FormView): template_name = 'image_styles/modal_form.html' submit_button = _('Save') delete_button = ''", "class ModalForm(FormView): template_name = 'image_styles/modal_form.html' submit_button = _('Save') delete_button = '' title =", "def get_image_styles(self): ims = [] for s in Style.objects.all(): effects = s.get_effects() for", "self.kwargs.get('effect_id') if effect_id and self.effect_name: from image_styles import models self.effect = get_object_or_404(getattr(models,self.effect_name),id=effect_id) return", "form_valid(self,form): effect_name = form.cleaned_data.get('effect') self.form_class = get_effect_form_class(effect_name=effect_name) self.action = reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name} )", "self.get_submit_button() context['delete_button'] = 
self.get_delete_button() context['title'] = self.get_title() return context class EffectFormMixin: effect =", "import get_effect_form_class,render_image class RenderImageView(View): def get(self,request,style_name,path): image = render_image(style_name,path) content_type = mimetypes.guess_type(image.image.path) f", "self.effect: self.effect.delete() return HttpResponse(_('Effect Removed!')) return HttpResponse(_('Delete failed!')) class StyleFormMixin: style = None", "get_object_or_404(Style,id=style_id) self.delete_button = _('Delete') return super().dispatch(request,*args,**kwargs) def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.style:", "super().get_form(**kwargs) form.initial['style'] = self.style return form @method_decorator(staff_member_required(),name='dispatch') class EffectUpdateView(EffectFormMixin,ModalForm): pass @method_decorator(staff_member_required(),name='dispatch') class StyleView(StyleFormMixin,ModalForm):", "import render, HttpResponse, get_object_or_404 from django.http import Http404 from django.utils.decorators import method_decorator from", "if self.action == '.': return reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return self.action def form_valid(self,form): effect_name = form.cleaned_data.get('effect')", "self.style = get_object_or_404(Style,id=self.kwargs.get('style_id')) return super().dispatch(request,*args,**kwargs) def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style", "dispatch(self,request,*args,**kwargs): self.style = get_object_or_404(Style,id=self.kwargs.get('style_id')) return super().dispatch(request,*args,**kwargs) def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] =", "style_id: self.style = get_object_or_404(Style,id=style_id) self.delete_button = _('Delete') return super().dispatch(request,*args,**kwargs) def get_form_kwargs(self,*args,**kwargs): data =", 
"HttpResponse(_('Delete failed!')) @method_decorator(staff_member_required(),name='dispatch') class ManageImageStylesView(TemplateView): template_name = 'image_styles/home.html' def get_image_styles(self): ims = []", "form_class = StyleForm def dispatch(self,request,*args,**kwargs): style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id)", "'image_styles:effect_update', kwargs={'effect':self.effect.id,'effect_name':self.effect_name} ) def form_valid(self,form): form.save() return HttpResponse(_('Effect Created!')) def delete(self,*args,**kwargs): if self.effect:", "return reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return self.action def form_valid(self,form): effect_name = form.cleaned_data.get('effect') self.form_class = get_effect_form_class(effect_name=effect_name) self.action", "form = super().get_form(**kwargs) form.initial['style'] = self.style return form def get_submit_button(self): if self.form_class !=", "if self.effect: return _('Update') return super().get_submit_button() def get_title(self): if self.effect: return _('Update Effect')", "== '.': return reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return self.action def form_valid(self,form): effect_name = form.cleaned_data.get('effect') self.form_class =", "get_submit_button(self): if self.effect: return _('Update') return super().get_submit_button() def get_title(self): if self.effect: return _('Update", "HttpResponse(_('Effect Created!')) def delete(self,*args,**kwargs): if self.effect: self.effect.delete() return HttpResponse(_('Effect Removed!')) return HttpResponse(_('Delete failed!'))", "get_title(self): return self.title def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['action'] = self.get_action() context['submit_button'] =", "image_styles import models self.effect = get_object_or_404(getattr(models,self.effect_name),id=effect_id) return 
super().dispatch(request,*args,**kwargs) def get_form_class(self): form_class = get_effect_form_class(self.effect_name)", "= _('Create') action = '.' def get_action(self): return self.action def get_submit_button(self): return self.submit_button", "= get_object_or_404(getattr(models,self.effect_name),id=effect_id) return super().dispatch(request,*args,**kwargs) def get_form_class(self): form_class = get_effect_form_class(self.effect_name) if form_class: return form_class", "from image_styles import models self.effect = get_object_or_404(getattr(models,self.effect_name),id=effect_id) return super().dispatch(request,*args,**kwargs) def get_form_class(self): form_class =", "get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return form @method_decorator(staff_member_required(),name='dispatch') class EffectUpdateView(EffectFormMixin,ModalForm): pass", "effect = None style = None title = _('Create Effect') submit_button = _('Create')", "kwargs={'style_id':self.style.id,'effect_name':effect_name} ) self.request.method = 'GET' return super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch') class EffectCreateView(EffectFormMixin,ModalForm): title = _('Create", "f.close() return r class ModalForm(FormView): template_name = 'image_styles/modal_form.html' submit_button = _('Save') delete_button =", "for i in range(len(effects)): form = get_effect_form_class(effect_model=effects[i]['object']) if form: effects[i]['form'] = form(instance=effects[i]['object']) effects[i]['action']", "f = open(image.image.path,'rb') r = HttpResponse(f,content_type=content_type[0]) f.close() return r class ModalForm(FormView): template_name =", "super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch') class EffectCreateView(EffectFormMixin,ModalForm): title = _('Create Effect') submit_button = _('Create') def get_form(self,**kwargs):", "form_class raise 
Http404(\"Not Found\") def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.effect: data['instance'] =", "mimetypes.guess_type(image.image.path) f = open(image.image.path,'rb') r = HttpResponse(f,content_type=content_type[0]) f.close() return r class ModalForm(FormView): template_name", "return _('Update Effect') return super().get_title() def get_action(self): if self.style: return reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':self.effect_name}", "super().get_submit_button() def get_title(self): if self.form_class != EffectForm: return _('Create Effect') return super().get_title() def", "super().get_form(**kwargs) form.initial['style'] = self.style return form def get_submit_button(self): if self.form_class != EffectForm: return", "= self.get_title() return context class EffectFormMixin: effect = None style = None title", "style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) self.delete_button = _('Delete') return super().dispatch(request,*args,**kwargs)", "= self.style return data def get_action(self): if self.style: return reverse( 'image_styles:style_update', kwargs={'style_id':self.style.id} )", "self.style: data['instance'] = self.style return data def get_action(self): if self.style: return reverse( 'image_styles:style_update',", "return super().get_title() def get_action(self): if self.style: return reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':self.effect_name} ) return reverse(", "django.views import View from django.views.generic import TemplateView from django.views.generic.edit import FormView import mimetypes", "= _('Next') title = _('Select Effect') def dispatch(self,request,*args,**kwargs): self.style = get_object_or_404(Style,id=self.kwargs.get('style_id')) return super().dispatch(request,*args,**kwargs)", "render, HttpResponse, get_object_or_404 from django.http import Http404 
from django.utils.decorators import method_decorator from django.contrib.auth.decorators", "as _ from django.views import View from django.views.generic import TemplateView from django.views.generic.edit import", ") return reverse('image_styles:style_create') def get_submit_button(self): if self.style: return _('Update') return super().get_submit_button() def get_title(self):", "get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.effect: data['instance'] = self.effect return data def get_submit_button(self):", "from django.contrib.auth.decorators import login_required from django.contrib.admin.views.decorators import staff_member_required from django.urls import reverse,reverse_lazy from", "context['title'] = self.get_title() return context class EffectFormMixin: effect = None style = None", "if self.style: self.style.delete() return HttpResponse(_('Style Removed!')) return HttpResponse(_('Delete failed!')) @method_decorator(staff_member_required(),name='dispatch') class ManageImageStylesView(TemplateView): template_name", "= None form_class = StyleForm def dispatch(self,request,*args,**kwargs): style_id = self.kwargs.get('style_id') if style_id: self.style", "HttpResponse(f,content_type=content_type[0]) f.close() return r class ModalForm(FormView): template_name = 'image_styles/modal_form.html' submit_button = _('Save') delete_button", "= form.cleaned_data.get('effect') self.form_class = get_effect_form_class(effect_name=effect_name) self.action = reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name} ) self.request.method =", "import mimetypes from .models import Style from .forms import EffectForm,StyleForm from .utils import", "form(instance=effects[i]['object']) effects[i]['action'] = reverse( 'image_styles:effect_update', kwargs = { 'effect_id':effects[i]['object'].id, 'effect_name':effects[i]['object'].get_name() } ) ims.append({", "_('Delete') return 
super().dispatch(request,*args,**kwargs) def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.style: data['instance'] = self.style", "context['delete_button'] = self.get_delete_button() context['title'] = self.get_title() return context class EffectFormMixin: effect = None", "get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['styles'] = self.get_image_styles() return context @method_decorator(staff_member_required(),name='dispatch') class EffectCreateInitView(ModalForm): form_class", "get_object_or_404(Style,id=self.kwargs.get('style_id')) return super().dispatch(request,*args,**kwargs) def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return form", "and self.effect_name: from image_styles import models self.effect = get_object_or_404(getattr(models,self.effect_name),id=effect_id) return super().dispatch(request,*args,**kwargs) def get_form_class(self):", "if form_class: return form_class raise Http404(\"Not Found\") def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if", "import ugettext_lazy as _ from django.views import View from django.views.generic import TemplateView from", "= None title = _('Create Effect') submit_button = _('Create') def dispatch(self,request,*args,**kwargs): self.effect_name =", "form.save() return HttpResponse(_('Style Created!')) def delete(self,*args,**kwargs): if self.style: self.style.delete() return HttpResponse(_('Style Removed!')) return", "class EffectCreateInitView(ModalForm): form_class = EffectForm submit_button = _('Next') title = _('Select Effect') def", "return super().get_title() def form_valid(self,form): form.save() return HttpResponse(_('Style Created!')) def delete(self,*args,**kwargs): if self.style: self.style.delete()", "self.action def get_submit_button(self): return self.submit_button def get_delete_button(self): return self.delete_button def 
get_title(self): return self.title", "_('Create') def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return form @method_decorator(staff_member_required(),name='dispatch') class", "= self.get_delete_button() context['title'] = self.get_title() return context class EffectFormMixin: effect = None style", "submit_button = _('Next') title = _('Select Effect') def dispatch(self,request,*args,**kwargs): self.style = get_object_or_404(Style,id=self.kwargs.get('style_id')) return", "Removed!')) return HttpResponse(_('Delete failed!')) @method_decorator(staff_member_required(),name='dispatch') class ManageImageStylesView(TemplateView): template_name = 'image_styles/home.html' def get_image_styles(self): ims", "@method_decorator(staff_member_required(),name='dispatch') class EffectCreateView(EffectFormMixin,ModalForm): title = _('Create Effect') submit_button = _('Create') def get_form(self,**kwargs): form", "self.action def form_valid(self,form): effect_name = form.cleaned_data.get('effect') self.form_class = get_effect_form_class(effect_name=effect_name) self.action = reverse( 'image_styles:effect_create',", "self.submit_button def get_delete_button(self): return self.delete_button def get_title(self): return self.title def get_context_data(self,**kwargs): context =", "context['submit_button'] = self.get_submit_button() context['delete_button'] = self.get_delete_button() context['title'] = self.get_title() return context class EffectFormMixin:", "{ 'effect_id':effects[i]['object'].id, 'effect_name':effects[i]['object'].get_name() } ) ims.append({ 'style':s, 'effects':effects, }) return ims def get_context_data(self,**kwargs):", "django.views.generic import TemplateView from django.views.generic.edit import FormView import mimetypes from .models import Style", "import models self.effect = get_object_or_404(getattr(models,self.effect_name),id=effect_id) return super().dispatch(request,*args,**kwargs) def get_form_class(self): 
form_class = get_effect_form_class(self.effect_name) if", "_('Update Style') return super().get_title() def form_valid(self,form): form.save() return HttpResponse(_('Style Created!')) def delete(self,*args,**kwargs): if", "get_effect_form_class(effect_model=effects[i]['object']) if form: effects[i]['form'] = form(instance=effects[i]['object']) effects[i]['action'] = reverse( 'image_styles:effect_update', kwargs = {", "def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['action'] = self.get_action() context['submit_button'] = self.get_submit_button() context['delete_button'] =", "def get_title(self): if self.form_class != EffectForm: return _('Create Effect') return super().get_title() def get_action(self):", "import reverse,reverse_lazy from django.utils.translation import ugettext_lazy as _ from django.views import View from", "get_delete_button(self): return self.delete_button def get_title(self): return self.title def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['action']", "'image_styles:style_update', kwargs={'style_id':self.style.id} ) return reverse('image_styles:style_create') def get_submit_button(self): if self.style: return _('Update') return super().get_submit_button()", "super().get_title() def get_action(self): if self.action == '.': return reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return self.action def form_valid(self,form):", "if form: effects[i]['form'] = form(instance=effects[i]['object']) effects[i]['action'] = reverse( 'image_styles:effect_update', kwargs = { 'effect_id':effects[i]['object'].id,", "return _('Create') return super().get_submit_button() def get_title(self): if self.form_class != EffectForm: return _('Create Effect')", "style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) effect_id = self.kwargs.get('effect_id') if effect_id", "context 
@method_decorator(staff_member_required(),name='dispatch') class EffectCreateInitView(ModalForm): form_class = EffectForm submit_button = _('Next') title = _('Select", "return _('Update Style') return super().get_title() def form_valid(self,form): form.save() return HttpResponse(_('Style Created!')) def delete(self,*args,**kwargs):", "self.effect return data def get_submit_button(self): if self.effect: return _('Update') return super().get_submit_button() def get_title(self):", "render_image(style_name,path) content_type = mimetypes.guess_type(image.image.path) f = open(image.image.path,'rb') r = HttpResponse(f,content_type=content_type[0]) f.close() return r", "s in Style.objects.all(): effects = s.get_effects() for i in range(len(effects)): form = get_effect_form_class(effect_model=effects[i]['object'])", "TemplateView from django.views.generic.edit import FormView import mimetypes from .models import Style from .forms", "form_valid(self,form): form.save() return HttpResponse(_('Effect Created!')) def delete(self,*args,**kwargs): if self.effect: self.effect.delete() return HttpResponse(_('Effect Removed!'))", "= self.get_action() context['submit_button'] = self.get_submit_button() context['delete_button'] = self.get_delete_button() context['title'] = self.get_title() return context", "def get_title(self): return self.title def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs) context['action'] = self.get_action() context['submit_button']", "self.effect = get_object_or_404(getattr(models,self.effect_name),id=effect_id) return super().dispatch(request,*args,**kwargs) def get_form_class(self): form_class = get_effect_form_class(self.effect_name) if form_class: return", "def get_form_kwargs(self,*args,**kwargs): data = super().get_form_kwargs(*args,**kwargs) if self.effect: data['instance'] = self.effect return data def", "effects[i]['action'] = reverse( 'image_styles:effect_update', kwargs = { 'effect_id':effects[i]['object'].id, 
'effect_name':effects[i]['object'].get_name() } ) ims.append({ 'style':s,", "kwargs = { 'effect_id':effects[i]['object'].id, 'effect_name':effects[i]['object'].get_name() } ) ims.append({ 'style':s, 'effects':effects, }) return ims", "django.shortcuts import render, HttpResponse, get_object_or_404 from django.http import Http404 from django.utils.decorators import method_decorator", "EffectForm: return _('Create') return super().get_submit_button() def get_title(self): if self.form_class != EffectForm: return _('Create", "class EffectCreateView(EffectFormMixin,ModalForm): title = _('Create Effect') submit_button = _('Create') def get_form(self,**kwargs): form =", "django.views.generic.edit import FormView import mimetypes from .models import Style from .forms import EffectForm,StyleForm", "if self.effect: return _('Update Effect') return super().get_title() def get_action(self): if self.style: return reverse(", "form_valid(self,form): form.save() return HttpResponse(_('Style Created!')) def delete(self,*args,**kwargs): if self.style: self.style.delete() return HttpResponse(_('Style Removed!'))", "'.' 
def get_action(self): return self.action def get_submit_button(self): return self.submit_button def get_delete_button(self): return self.delete_button", "_('Update') return super().get_submit_button() def get_title(self): if self.effect: return _('Update Effect') return super().get_title() def", "from .forms import EffectForm,StyleForm from .utils import get_effect_form_class,render_image class RenderImageView(View): def get(self,request,style_name,path): image", "return super().get_title() def get_action(self): if self.action == '.': return reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return self.action def", "form.initial['style'] = self.style return form @method_decorator(staff_member_required(),name='dispatch') class EffectUpdateView(EffectFormMixin,ModalForm): pass @method_decorator(staff_member_required(),name='dispatch') class StyleView(StyleFormMixin,ModalForm): pass", "form: effects[i]['form'] = form(instance=effects[i]['object']) effects[i]['action'] = reverse( 'image_styles:effect_update', kwargs = { 'effect_id':effects[i]['object'].id, 'effect_name':effects[i]['object'].get_name()", "super().dispatch(request,*args,**kwargs) def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return form def get_submit_button(self):", "def get_action(self): if self.action == '.': return reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return self.action def form_valid(self,form): effect_name", "= _('Save') delete_button = '' title = _('Create') action = '.' 
def get_action(self):", "= _('Create') def dispatch(self,request,*args,**kwargs): self.effect_name = self.kwargs.get('effect_name') style_id = self.kwargs.get('style_id') if style_id: self.style", "form def get_submit_button(self): if self.form_class != EffectForm: return _('Create') return super().get_submit_button() def get_title(self):", "import EffectForm,StyleForm from .utils import get_effect_form_class,render_image class RenderImageView(View): def get(self,request,style_name,path): image = render_image(style_name,path)", "super().get_submit_button() def get_title(self): if self.effect: return _('Update Effect') return super().get_title() def get_action(self): if", "= self.kwargs.get('effect_name') style_id = self.kwargs.get('style_id') if style_id: self.style = get_object_or_404(Style,id=style_id) effect_id = self.kwargs.get('effect_id')", "reverse('image_styles:effect_create_init',kwargs={'style_id':self.style.id}) return self.action def form_valid(self,form): effect_name = form.cleaned_data.get('effect') self.form_class = get_effect_form_class(effect_name=effect_name) self.action =", "failed!')) class StyleFormMixin: style = None form_class = StyleForm def dispatch(self,request,*args,**kwargs): style_id =", "= _('Create Effect') submit_button = _('Create') def dispatch(self,request,*args,**kwargs): self.effect_name = self.kwargs.get('effect_name') style_id =", "'effect_name':effects[i]['object'].get_name() } ) ims.append({ 'style':s, 'effects':effects, }) return ims def get_context_data(self,**kwargs): context =", "get_effect_form_class,render_image class RenderImageView(View): def get(self,request,style_name,path): image = render_image(style_name,path) content_type = mimetypes.guess_type(image.image.path) f =", "return form def get_submit_button(self): if self.form_class != EffectForm: return _('Create') return super().get_submit_button() def", "return super().get_submit_button() def get_title(self): if self.form_class != EffectForm: return _('Create 
Effect') return super().get_title()", "!= EffectForm: return _('Create') return super().get_submit_button() def get_title(self): if self.form_class != EffectForm: return", "get_submit_button(self): if self.form_class != EffectForm: return _('Create') return super().get_submit_button() def get_title(self): if self.form_class", "_('Create Effect') submit_button = _('Create') def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style", "= super().get_form(**kwargs) form.initial['style'] = self.style return form @method_decorator(staff_member_required(),name='dispatch') class EffectUpdateView(EffectFormMixin,ModalForm): pass @method_decorator(staff_member_required(),name='dispatch') class", "return super().dispatch(request,*args,**kwargs) def get_form_class(self): form_class = get_effect_form_class(self.effect_name) if form_class: return form_class raise Http404(\"Not", "return HttpResponse(_('Style Removed!')) return HttpResponse(_('Delete failed!')) @method_decorator(staff_member_required(),name='dispatch') class ManageImageStylesView(TemplateView): template_name = 'image_styles/home.html' def", "_('Select Effect') def dispatch(self,request,*args,**kwargs): self.style = get_object_or_404(Style,id=self.kwargs.get('style_id')) return super().dispatch(request,*args,**kwargs) def get_form(self,**kwargs): form =", "= self.effect return data def get_submit_button(self): if self.effect: return _('Update') return super().get_submit_button() def", "EffectForm,StyleForm from .utils import get_effect_form_class,render_image class RenderImageView(View): def get(self,request,style_name,path): image = render_image(style_name,path) content_type", "'image_styles/modal_form.html' submit_button = _('Save') delete_button = '' title = _('Create') action = '.'", "self.form_class != EffectForm: return _('Create Effect') return super().get_title() def get_action(self): if self.action ==", "self.get_title() return context class EffectFormMixin: effect 
= None style = None title =", "form_class = EffectForm submit_button = _('Next') title = _('Select Effect') def dispatch(self,request,*args,**kwargs): self.style", "delete_button = '' title = _('Create') action = '.' def get_action(self): return self.action", "get_submit_button(self): if self.style: return _('Update') return super().get_submit_button() def get_title(self): if self.style: return _('Update", "= get_effect_form_class(self.effect_name) if form_class: return form_class raise Http404(\"Not Found\") def get_form_kwargs(self,*args,**kwargs): data =", "_('Next') title = _('Select Effect') def dispatch(self,request,*args,**kwargs): self.style = get_object_or_404(Style,id=self.kwargs.get('style_id')) return super().dispatch(request,*args,**kwargs) def", "submit_button = _('Create') def get_form(self,**kwargs): form = super().get_form(**kwargs) form.initial['style'] = self.style return form", "effect_id = self.kwargs.get('effect_id') if effect_id and self.effect_name: from image_styles import models self.effect =", "= 'image_styles/home.html' def get_image_styles(self): ims = [] for s in Style.objects.all(): effects =", "form = super().get_form(**kwargs) form.initial['style'] = self.style return form @method_decorator(staff_member_required(),name='dispatch') class EffectUpdateView(EffectFormMixin,ModalForm): pass @method_decorator(staff_member_required(),name='dispatch')", "return self.action def form_valid(self,form): effect_name = form.cleaned_data.get('effect') self.form_class = get_effect_form_class(effect_name=effect_name) self.action = reverse(", "def get_delete_button(self): return self.delete_button def get_title(self): return self.title def get_context_data(self,**kwargs): context = super().get_context_data(**kwargs)", "_ from django.views import View from django.views.generic import TemplateView from django.views.generic.edit import FormView", "self.get_delete_button() context['title'] = self.get_title() return context class EffectFormMixin: effect = 
None style =", ".utils import get_effect_form_class,render_image class RenderImageView(View): def get(self,request,style_name,path): image = render_image(style_name,path) content_type = mimetypes.guess_type(image.image.path)", "= { 'effect_id':effects[i]['object'].id, 'effect_name':effects[i]['object'].get_name() } ) ims.append({ 'style':s, 'effects':effects, }) return ims def", "self.style.delete() return HttpResponse(_('Style Removed!')) return HttpResponse(_('Delete failed!')) @method_decorator(staff_member_required(),name='dispatch') class ManageImageStylesView(TemplateView): template_name = 'image_styles/home.html'", "= self.get_submit_button() context['delete_button'] = self.get_delete_button() context['title'] = self.get_title() return context class EffectFormMixin: effect", "if style_id: self.style = get_object_or_404(Style,id=style_id) effect_id = self.kwargs.get('effect_id') if effect_id and self.effect_name: from", "self.form_class != EffectForm: return _('Create') return super().get_submit_button() def get_title(self): if self.form_class != EffectForm:", "= super().get_context_data(**kwargs) context['action'] = self.get_action() context['submit_button'] = self.get_submit_button() context['delete_button'] = self.get_delete_button() context['title'] =", "self.action = reverse( 'image_styles:effect_create', kwargs={'style_id':self.style.id,'effect_name':effect_name} ) self.request.method = 'GET' return super().get(self.request,style_id=self.style.id) @method_decorator(staff_member_required(),name='dispatch') class", "ModalForm(FormView): template_name = 'image_styles/modal_form.html' submit_button = _('Save') delete_button = '' title = _('Create')", "= None style = None title = _('Create Effect') submit_button = _('Create') def", "style = None form_class = StyleForm def dispatch(self,request,*args,**kwargs): style_id = self.kwargs.get('style_id') if style_id:" ]
[ "from math import pi system = System() pynamics.set_system(__name__,system) tol=1e-5 lO = Constant(.5,'lO',system) lA", "= [initialvalues[item] for item in variables] result = constraint_system.solve_numeric(variables,guess,system.constant_values) ini = [] for", "preload6 = Constant(0*pi/180,'preload6',system) x,x_d,x_dd = Differentiable('x',system) y,y_d,y_dd = Differentiable('y',system) qO,qO_d,qO_dd = Differentiable('qO',system) qA,qA_d,qA_dd", "stretch2_s = (stretch2+abs(stretch2)) on = stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2) heelforce = k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB)", "qO: 5*pi/180, qO_d: 0, qA: -0.89, qA_d: 0, qB: -2.64, qB_d: 0, qC:", "system.get_state_variables() ini0 = [initialvalues[item] for item in statevariables] N = Frame('N',system) O =", "[initialvalues[item] for item in statevariables] N = Frame('N',system) O = Frame('O',system) A =", "[initialvalues[item] for item in variables] result = constraint_system.solve_numeric(variables,guess,system.constant_values) ini = [] for item", "Constant(.1,'mE',system) I_main = Constant(1,'I_main',system) I_leg = Constant(.1,'I_leg',system) g = Constant(9.81,'g',system) b = Constant(1e0,'b',system)", "pOcm+lO/2*O.x pOC = pOcm-lO/2*O.x pAB = pOA+lA*A.x pBtip = pAB + lB*B.x #vBtip", "ini.append(result[item]) else: ini.append(initialvalues[item]) points = PointsOutput(points, constant_values=system.constant_values) points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time() pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x pDcm=pCD+lD/2*D.x", "from pynamics.particle import Particle import pynamics.integration from pynamics.constraint import KinematicConstraint,AccelerationConstraint import sympy import", "Body from pynamics.dyadic import Dyadic from pynamics.output import Output,PointsOutput from pynamics.particle import 
Particle", "O.get_w_to(A) wAB = A.get_w_to(B) wOC = O.get_w_to(C) wCD = C.get_w_to(D) wBD = B.get_w_to(D)", "#vBtip = pBtip.time_derivative(N,system) pCD = pOC + lC*C.x pDtip = pCD + lD*D.x", "Output,PointsOutput from pynamics.particle import Particle import pynamics.integration from pynamics.constraint import KinematicConstraint,AccelerationConstraint import sympy", "for item in system.get_state_variables(): if item in variables: ini.append(result[item]) else: ini.append(initialvalues[item]) points =", "on = stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1) toeforce = k_constraint*-stretch1_s stretch2 = -pE2.dot(N.y) stretch2_s =", "Constant(1,'lE',system) mO = Constant(2,'mO',system) mA = Constant(.1,'mA',system) mB = Constant(.1,'mB',system) mC = Constant(.1,'mC',system)", "variables = [qO, qA, qB, qC, qD] guess = [initialvalues[item] for item in", "= Frame('D',system) E = Frame('E',system) system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system) C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin", "= Frame('B',system) C = Frame('C',system) D = Frame('D',system) E = Frame('E',system) system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system)", "#BodyA = Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB = Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC = Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD = Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE", "Constant(2,'mO',system) mA = Constant(.1,'mA',system) mB = Constant(.1,'mB',system) mC = Constant(.1,'mC',system) mD = Constant(.1,'mD',system)", "= Constant(.5,'lO',system) lA = Constant(.75,'lA',system) lB = Constant(1,'lB',system) lC = Constant(.75,'lC',system) lD =", 
"pynamics from pynamics.frame import Frame from pynamics.variable_types import Differentiable,Constant from pynamics.system import System", "Constant(1e1,'b_ankle',system) stall_torque = Constant(2e2,'stall_torque',system) k_constraint = Constant(1e4,'k_constraint',system) b_constraint = Constant(1e2,'b_constraint',system) tinitial = 0", "import sympy import numpy import matplotlib.pyplot as plt plt.ion() from math import pi", "from pynamics.body import Body from pynamics.dyadic import Dyadic from pynamics.output import Output,PointsOutput from", ".1) torque = my_signal*stall_torque system.addforce(torque*O.z,wOA) system.addforce(-torque*O.z,wOC) # eq = [] eq.append(pBtip-pDtip) eq_d= [item.time_derivative()", "} statevariables = system.get_state_variables() ini0 = [initialvalues[item] for item in statevariables] N =", "qC,qC_d,qC_dd = Differentiable('qC',system) qD,qD_d,qD_dd = Differentiable('qD',system) qE,qE_d,qE_dd = Differentiable('qE',system) initialvalues={ x: 0, x_d:", "qE,qE_d,qE_dd = Differentiable('qE',system) initialvalues={ x: 0, x_d: .5, y: 2, y_d: 0, qO:", "= Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE) # stretch1 = -pE1.dot(N.y) stretch1_s =", "k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y) import pynamics.time_series x = [0,5,5,7,7,9,9,10]", "for full license. 
\"\"\" import pynamics from pynamics.frame import Frame from pynamics.variable_types import", "= O.get_w_to(C) wCD = C.get_w_to(D) wBD = B.get_w_to(D) wOE = O.get_w_to(E) BodyO =", "pynamics.integration from pynamics.constraint import KinematicConstraint,AccelerationConstraint import sympy import numpy import matplotlib.pyplot as plt", "= constraint_system.solve_numeric(variables,guess,system.constant_values) ini = [] for item in system.get_state_variables(): if item in variables:", "= numpy.r_[tinitial:tfinal:tstep] preload1 = Constant(0*pi/180,'preload1',system) preload2 = Constant(0*pi/180,'preload2',system) preload3 = Constant(-180*pi/180,'preload3',system) preload4 =", "-pE2.dot(N.y) stretch2_s = (stretch2+abs(stretch2)) on = stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2) heelforce = k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA)", "5*pi/180, qO_d: 0, qA: -0.89, qA_d: 0, qB: -2.64, qB_d: 0, qC: -pi+0.89,", "x_d: .5, y: 2, y_d: 0, qO: 5*pi/180, qO_d: 0, qA: -0.89, qA_d:", "ParticleD = Particle(pDcm,mD,'ParticleD') #ParticleE = Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE) # stretch1", "import Differentiable,Constant from pynamics.system import System from pynamics.body import Body from pynamics.dyadic import", "= Constant(2e2,'stall_torque',system) k_constraint = Constant(1e4,'k_constraint',system) b_constraint = Constant(1e2,'b_constraint',system) tinitial = 0 tfinal =", "Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB = Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC = Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD = Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE = 
Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system)", "system.getdynamics() func1 = system.state_space_post_invert(f,ma,constants = system.constant_values,variable_functions = {my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE = system.get_KE() PE", "Frame('D',system) E = Frame('E',system) system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system) C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin =", "ft2 = pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width = .1) torque = my_signal*stall_torque system.addforce(torque*O.z,wOA) system.addforce(-torque*O.z,wOC) # eq =", "Dyadic from pynamics.output import Output,PointsOutput from pynamics.particle import Particle import pynamics.integration from pynamics.constraint", "pDcm=pCD+lD/2*D.x pEcm=pBtip -.1*E.y pE1 = pEcm+lE/2*E.x vE1 = pE1.time_derivative(N,system) pE2 = pEcm-lE/2*E.x vE2", "system.getPEGravity(0*N.x) - system.getPESprings() energy = Output([KE-PE,toeforce,heelforce]) energy.calc(states,t) energy.plot_time() #torque_plot = Output([torque]) #torque_plot.calc(states,t) #torque_plot.plot_time()", "lD = Constant(1,'lD',system) lE = Constant(1,'lE',system) mO = Constant(2,'mO',system) mA = Constant(.1,'mA',system) mB", "[pDtip,pCD,pOC,pOA,pAB,pBtip] eqs = [] eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y)) constraint_system=KinematicConstraint(eqs) variables = [qO, qA, qB, qC,", "= Constant(9.81,'g',system) b = Constant(1e0,'b',system) k = Constant(1e2,'k',system) k_ankle = Constant(1e3,'k_ankle',system) b_ankle =", "0, qC: -pi+0.89, qC_d: 0, qD: -pi+2.64, qD_d: 0, qE: 0, qE_d: 0,", "constraint_system.solve_numeric(variables,guess,system.constant_values) ini = [] for item in system.get_state_variables(): if item in variables: 
ini.append(result[item])", "= [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] points = PointsOutput(points) y = points.calc(states,t) y = y.reshape((-1,9,2)) plt.figure() for", "Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE = Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA = Particle(pAcm,mA,'ParticleA') ParticleB = Particle(pBcm,mB,'ParticleB') ParticleC = Particle(pCcm,mC,'ParticleC')", "Constant(.75,'lC',system) lD = Constant(1,'lD',system) lE = Constant(1,'lE',system) mO = Constant(2,'mO',system) mA = Constant(.1,'mA',system)", "Particle(pDcm,mD,'ParticleD') #ParticleE = Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE) # stretch1 = -pE1.dot(N.y)", "import Body from pynamics.dyadic import Dyadic from pynamics.output import Output,PointsOutput from pynamics.particle import", "eq.append(pBtip-pDtip) eq_d= [item.time_derivative() for item in eq] eq_dd= [item.time_derivative() for item in eq_d]", "pynamics.dyadic import Dyadic from pynamics.output import Output,PointsOutput from pynamics.particle import Particle import pynamics.integration", "pE2.time_derivative(N,system) wOA = O.get_w_to(A) wAB = A.get_w_to(B) wOC = O.get_w_to(C) wCD = C.get_w_to(D)", "= Frame('C',system) D = Frame('D',system) E = Frame('E',system) system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system) C.rotate_fixed_axis(N,[0,0,1],qC,system)", "= system.get_state_variables() ini0 = [initialvalues[item] for item in statevariables] N = Frame('N',system) O", "wOE = O.get_w_to(E) BodyO = Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA = Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB = Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC", "in 
system.get_state_variables(): if item in variables: ini.append(result[item]) else: ini.append(initialvalues[item]) points = PointsOutput(points, constant_values=system.constant_values)", "[] eq.append(pBtip-pDtip) eq_d= [item.time_derivative() for item in eq] eq_dd= [item.time_derivative() for item in", "E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin = 0*N.x+0*N.y pOcm=x*N.x+y*N.y pOA = pOcm+lO/2*O.x pOC = pOcm-lO/2*O.x pAB =", "qD_d: 0, qE: 0, qE_d: 0, } statevariables = system.get_state_variables() ini0 = [initialvalues[item]", "Differentiable('x',system) y,y_d,y_dd = Differentiable('y',system) qO,qO_d,qO_dd = Differentiable('qO',system) qA,qA_d,qA_dd = Differentiable('qA',system) qB,qB_d,qB_dd = Differentiable('qB',system)", "Constant(2e2,'stall_torque',system) k_constraint = Constant(1e4,'k_constraint',system) b_constraint = Constant(1e2,'b_constraint',system) tinitial = 0 tfinal = 10", "Constant(180*pi/180,'preload5',system) preload6 = Constant(0*pi/180,'preload6',system) x,x_d,x_dd = Differentiable('x',system) y,y_d,y_dd = Differentiable('y',system) qO,qO_d,qO_dd = Differentiable('qO',system)", "qO_d: 0, qA: -0.89, qA_d: 0, qB: -2.64, qB_d: 0, qC: -pi+0.89, qC_d:", "pOC + lC*C.x pDtip = pCD + lD*D.x points = [pDtip,pCD,pOC,pOA,pAB,pBtip] eqs =", "wOA = O.get_w_to(A) wAB = A.get_w_to(B) wOC = O.get_w_to(C) wCD = C.get_w_to(D) wBD", "pBtip = pAB + lB*B.x #vBtip = pBtip.time_derivative(N,system) pCD = pOC + lC*C.x", "Frame('N',system) O = Frame('O',system) A = Frame('A',system) B = Frame('B',system) C = Frame('C',system)", "= C.get_w_to(D) wBD = B.get_w_to(D) wOE = O.get_w_to(E) BodyO = Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA =", "pEcm+lE/2*E.x vE1 = pE1.time_derivative(N,system) pE2 = pEcm-lE/2*E.x vE2 = pE2.time_derivative(N,system) wOA = O.get_w_to(A)", "[0,0,1,1,-1,-1,0,0] my_signal, ft2 = pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width = .1) torque = my_signal*stall_torque 
system.addforce(torque*O.z,wOA) system.addforce(-torque*O.z,wOC) #", "energy.plot_time() #torque_plot = Output([torque]) #torque_plot.calc(states,t) #torque_plot.plot_time() points = [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] points = PointsOutput(points) y", "Differentiable('qO',system) qA,qA_d,qA_dd = Differentiable('qA',system) qB,qB_d,qB_dd = Differentiable('qB',system) qC,qC_d,qC_dd = Differentiable('qC',system) qD,qD_d,qD_dd = Differentiable('qD',system)", "#torque_plot = Output([torque]) #torque_plot.calc(states,t) #torque_plot.plot_time() points = [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] points = PointsOutput(points) y =", "qA, qB, qC, qD] guess = [initialvalues[item] for item in variables] result =", "Constant(0*pi/180,'preload2',system) preload3 = Constant(-180*pi/180,'preload3',system) preload4 = Constant(0*pi/180,'preload4',system) preload5 = Constant(180*pi/180,'preload5',system) preload6 = Constant(0*pi/180,'preload6',system)", "= PointsOutput(points, constant_values=system.constant_values) points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time() pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x pDcm=pCD+lD/2*D.x pEcm=pBtip -.1*E.y pE1 =", "B.rotate_fixed_axis(N,[0,0,1],qB,system) C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin = 0*N.x+0*N.y pOcm=x*N.x+y*N.y pOA = pOcm+lO/2*O.x pOC =", "[] eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y)) c = AccelerationConstraint(eq_dd_scalar) # c.linearize(0) system.add_constraint(c) # f,ma = system.getdynamics()", "func1 = system.state_space_post_invert(f,ma,constants = system.constant_values,variable_functions = {my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE = system.get_KE() PE =", "b_ankle = Constant(1e1,'b_ankle',system) stall_torque = Constant(2e2,'stall_torque',system) k_constraint = Constant(1e4,'k_constraint',system) b_constraint = 
Constant(1e2,'b_constraint',system) tinitial", "pDtip = pCD + lD*D.x points = [pDtip,pCD,pOC,pOA,pAB,pBtip] eqs = [] eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y))", "points.calc(states,t) y = y.reshape((-1,9,2)) plt.figure() for item in y[::30]: plt.plot(*(item.T)) #points.animate(fps = 30,", "+ lB*B.x #vBtip = pBtip.time_derivative(N,system) pCD = pOC + lC*C.x pDtip = pCD", "from pynamics.variable_types import Differentiable,Constant from pynamics.system import System from pynamics.body import Body from", "g = Constant(9.81,'g',system) b = Constant(1e0,'b',system) k = Constant(1e2,'k',system) k_ankle = Constant(1e3,'k_ankle',system) b_ankle", "pEcm-lE/2*E.x vE2 = pE2.time_derivative(N,system) wOA = O.get_w_to(A) wAB = A.get_w_to(B) wOC = O.get_w_to(C)", "= Constant(2,'mO',system) mA = Constant(.1,'mA',system) mB = Constant(.1,'mB',system) mC = Constant(.1,'mC',system) mD =", "plt plt.ion() from math import pi system = System() pynamics.set_system(__name__,system) tol=1e-5 lO =", "[item.time_derivative() for item in eq] eq_dd= [item.time_derivative() for item in eq_d] eq_dd_scalar =", "item in eq] eq_dd= [item.time_derivative() for item in eq_d] eq_dd_scalar = [] eq_dd_scalar.append(eq_dd[0].dot(N.x))", "\"\"\" import pynamics from pynamics.frame import Frame from pynamics.variable_types import Differentiable,Constant from pynamics.system", "I_leg = Constant(.1,'I_leg',system) g = Constant(9.81,'g',system) b = Constant(1e0,'b',system) k = Constant(1e2,'k',system) k_ankle", "pynamics.frame import Frame from pynamics.variable_types import Differentiable,Constant from pynamics.system import System from pynamics.body", "qE: 0, qE_d: 0, } statevariables = system.get_state_variables() ini0 = [initialvalues[item] for item", "import Particle import pynamics.integration from pynamics.constraint import KinematicConstraint,AccelerationConstraint import sympy import numpy import", "energy.calc(states,t) energy.plot_time() #torque_plot = Output([torque]) 
#torque_plot.calc(states,t) #torque_plot.plot_time() points = [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] points = PointsOutput(points)", "= pBtip.time_derivative(N,system) pCD = pOC + lC*C.x pDtip = pCD + lD*D.x points", "import pynamics.time_series x = [0,5,5,7,7,9,9,10] y = [0,0,1,1,-1,-1,0,0] my_signal, ft2 = pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width =", "eq_dd= [item.time_derivative() for item in eq_d] eq_dd_scalar = [] eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y)) c =", "utf-8 -*- \"\"\" Written by <NAME> Email: danaukes<at>gmail.com Please see LICENSE for full", "system.addforce(-b_constraint*vE2*on,vE2) heelforce = k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y) import pynamics.time_series", "O = Frame('O',system) A = Frame('A',system) B = Frame('B',system) C = Frame('C',system) D", "{my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE = system.get_KE() PE = system.getPEGravity(0*N.x) - system.getPESprings() energy = Output([KE-PE,toeforce,heelforce])", "full license. 
\"\"\" import pynamics from pynamics.frame import Frame from pynamics.variable_types import Differentiable,Constant", "= Constant(1e1,'b_ankle',system) stall_torque = Constant(2e2,'stall_torque',system) k_constraint = Constant(1e4,'k_constraint',system) b_constraint = Constant(1e2,'b_constraint',system) tinitial =", "qB, qC, qD] guess = [initialvalues[item] for item in variables] result = constraint_system.solve_numeric(variables,guess,system.constant_values)", "Constant(1e4,'k_constraint',system) b_constraint = Constant(1e2,'b_constraint',system) tinitial = 0 tfinal = 10 tstep = 1/30", "= Constant(.1,'mA',system) mB = Constant(.1,'mB',system) mC = Constant(.1,'mC',system) mD = Constant(.1,'mD',system) mE =", "points = [pDtip,pCD,pOC,pOA,pAB,pBtip] eqs = [] eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y)) constraint_system=KinematicConstraint(eqs) variables = [qO, qA,", "= Constant(1,'lD',system) lE = Constant(1,'lE',system) mO = Constant(2,'mO',system) mA = Constant(.1,'mA',system) mB =", "= k_constraint*-stretch1_s stretch2 = -pE2.dot(N.y) stretch2_s = (stretch2+abs(stretch2)) on = stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2)", "= [] for item in system.get_state_variables(): if item in variables: ini.append(result[item]) else: ini.append(initialvalues[item])", "pi system = System() pynamics.set_system(__name__,system) tol=1e-5 lO = Constant(.5,'lO',system) lA = Constant(.75,'lA',system) lB", "0, x_d: .5, y: 2, y_d: 0, qO: 5*pi/180, qO_d: 0, qA: -0.89,", "pE1 = pEcm+lE/2*E.x vE1 = pE1.time_derivative(N,system) pE2 = pEcm-lE/2*E.x vE2 = pE2.time_derivative(N,system) wOA", "= system.get_KE() PE = system.getPEGravity(0*N.x) - system.getPESprings() energy = Output([KE-PE,toeforce,heelforce]) energy.calc(states,t) energy.plot_time() #torque_plot", "tstep = 1/30 t = numpy.r_[tinitial:tfinal:tstep] preload1 = Constant(0*pi/180,'preload1',system) preload2 = 
Constant(0*pi/180,'preload2',system) preload3", "Output([KE-PE,toeforce,heelforce]) energy.calc(states,t) energy.plot_time() #torque_plot = Output([torque]) #torque_plot.calc(states,t) #torque_plot.plot_time() points = [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] points =", "= pAB + lB*B.x #vBtip = pBtip.time_derivative(N,system) pCD = pOC + lC*C.x pDtip", "system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE) # stretch1 = -pE1.dot(N.y) stretch1_s = (stretch1+abs(stretch1)) on = stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1)", "math import pi system = System() pynamics.set_system(__name__,system) tol=1e-5 lO = Constant(.5,'lO',system) lA =", "Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD = Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE = Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA = Particle(pAcm,mA,'ParticleA') ParticleB = Particle(pBcm,mB,'ParticleB')", "qC, qD] guess = [initialvalues[item] for item in variables] result = constraint_system.solve_numeric(variables,guess,system.constant_values) ini", "qA_d: 0, qB: -2.64, qB_d: 0, qC: -pi+0.89, qC_d: 0, qD: -pi+2.64, qD_d:", "coding: utf-8 -*- \"\"\" Written by <NAME> Email: danaukes<at>gmail.com Please see LICENSE for", "item in statevariables] N = Frame('N',system) O = Frame('O',system) A = Frame('A',system) B", "plt.ion() from math import pi system = System() pynamics.set_system(__name__,system) tol=1e-5 lO = Constant(.5,'lO',system)", "0, qD: -pi+2.64, qD_d: 0, qE: 0, qE_d: 0, } statevariables = system.get_state_variables()", "lC*C.x pDtip = pCD + lD*D.x points = [pDtip,pCD,pOC,pOA,pAB,pBtip] eqs = [] eqs.append((pBtip-pDtip).dot(N.x))", "lB*B.x #vBtip = pBtip.time_derivative(N,system) pCD = pOC + lC*C.x pDtip = pCD +", "Constant(1e2,'k',system) k_ankle = Constant(1e3,'k_ankle',system) b_ankle = Constant(1e1,'b_ankle',system) stall_torque = Constant(2e2,'stall_torque',system) 
k_constraint = Constant(1e4,'k_constraint',system)", "PE = system.getPEGravity(0*N.x) - system.getPESprings() energy = Output([KE-PE,toeforce,heelforce]) energy.calc(states,t) energy.plot_time() #torque_plot = Output([torque])", "import System from pynamics.body import Body from pynamics.dyadic import Dyadic from pynamics.output import", "= pOA+lA*A.x pBtip = pAB + lB*B.x #vBtip = pBtip.time_derivative(N,system) pCD = pOC", "system.add_constraint(c) # f,ma = system.getdynamics() func1 = system.state_space_post_invert(f,ma,constants = system.constant_values,variable_functions = {my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol)", "pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x pDcm=pCD+lD/2*D.x pEcm=pBtip -.1*E.y pE1 = pEcm+lE/2*E.x vE1 = pE1.time_derivative(N,system) pE2 =", "= 0*N.x+0*N.y pOcm=x*N.x+y*N.y pOA = pOcm+lO/2*O.x pOC = pOcm-lO/2*O.x pAB = pOA+lA*A.x pBtip", "= [qO, qA, qB, qC, qD] guess = [initialvalues[item] for item in variables]", "k_constraint*-stretch1_s stretch2 = -pE2.dot(N.y) stretch2_s = (stretch2+abs(stretch2)) on = stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2) heelforce", "= Constant(0*pi/180,'preload1',system) preload2 = Constant(0*pi/180,'preload2',system) preload3 = Constant(-180*pi/180,'preload3',system) preload4 = Constant(0*pi/180,'preload4',system) preload5 =", "# eq = [] eq.append(pBtip-pDtip) eq_d= [item.time_derivative() for item in eq] eq_dd= [item.time_derivative()", "# f,ma = system.getdynamics() func1 = system.state_space_post_invert(f,ma,constants = system.constant_values,variable_functions = {my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE", "qB_d: 0, qC: -pi+0.89, qC_d: 0, qD: -pi+2.64, qD_d: 0, qE: 0, qE_d:", "pAB + lB*B.x #vBtip = pBtip.time_derivative(N,system) pCD = pOC + lC*C.x pDtip =", "[] eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y)) 
constraint_system=KinematicConstraint(eqs) variables = [qO, qA, qB, qC, qD] guess =", "Particle(pBcm,mB,'ParticleB') ParticleC = Particle(pCcm,mC,'ParticleC') ParticleD = Particle(pDcm,mD,'ParticleD') #ParticleE = Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC)", "x: 0, x_d: .5, y: 2, y_d: 0, qO: 5*pi/180, qO_d: 0, qA:", "lB = Constant(1,'lB',system) lC = Constant(.75,'lC',system) lD = Constant(1,'lD',system) lE = Constant(1,'lE',system) mO", "= pOcm+lO/2*O.x pOC = pOcm-lO/2*O.x pAB = pOA+lA*A.x pBtip = pAB + lB*B.x", "mD = Constant(.1,'mD',system) mE = Constant(.1,'mE',system) I_main = Constant(1,'I_main',system) I_leg = Constant(.1,'I_leg',system) g", "C.get_w_to(D) wBD = B.get_w_to(D) wOE = O.get_w_to(E) BodyO = Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA = Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system)", "= O.get_w_to(A) wAB = A.get_w_to(B) wOC = O.get_w_to(C) wCD = C.get_w_to(D) wBD =", "= O.get_w_to(E) BodyO = Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA = Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB = Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC =", "= my_signal*stall_torque system.addforce(torque*O.z,wOA) system.addforce(-torque*O.z,wOC) # eq = [] eq.append(pBtip-pDtip) eq_d= [item.time_derivative() for item", "constant_values=system.constant_values) points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time() pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x pDcm=pCD+lD/2*D.x pEcm=pBtip -.1*E.y pE1 = pEcm+lE/2*E.x vE1", "= [0,5,5,7,7,9,9,10] y = [0,0,1,1,-1,-1,0,0] my_signal, ft2 = pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width = .1) torque =", "pCD = pOC + lC*C.x pDtip = pCD + lD*D.x points = [pDtip,pCD,pOC,pOA,pAB,pBtip]", "= Frame('O',system) A = Frame('A',system) B = Frame('B',system) C = Frame('C',system) D =", 
"<NAME> Email: danaukes<at>gmail.com Please see LICENSE for full license. \"\"\" import pynamics from", "stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1) toeforce = k_constraint*-stretch1_s stretch2 = -pE2.dot(N.y) stretch2_s = (stretch2+abs(stretch2)) on", "c.linearize(0) system.add_constraint(c) # f,ma = system.getdynamics() func1 = system.state_space_post_invert(f,ma,constants = system.constant_values,variable_functions = {my_signal:ft2})", "wBD = B.get_w_to(D) wOE = O.get_w_to(E) BodyO = Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA = Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB", "= (stretch1+abs(stretch1)) on = stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1) toeforce = k_constraint*-stretch1_s stretch2 = -pE2.dot(N.y)", "pBtip.time_derivative(N,system) pCD = pOC + lC*C.x pDtip = pCD + lD*D.x points =", "for item in eq] eq_dd= [item.time_derivative() for item in eq_d] eq_dd_scalar = []", "eq_dd_scalar.append(eq_dd[0].dot(N.y)) c = AccelerationConstraint(eq_dd_scalar) # c.linearize(0) system.add_constraint(c) # f,ma = system.getdynamics() func1 =", "qD,qD_d,qD_dd = Differentiable('qD',system) qE,qE_d,qE_dd = Differentiable('qE',system) initialvalues={ x: 0, x_d: .5, y: 2,", "pAB = pOA+lA*A.x pBtip = pAB + lB*B.x #vBtip = pBtip.time_derivative(N,system) pCD =", "= Constant(.1,'mB',system) mC = Constant(.1,'mC',system) mD = Constant(.1,'mD',system) mE = Constant(.1,'mE',system) I_main =", "-0.89, qA_d: 0, qB: -2.64, qB_d: 0, qC: -pi+0.89, qC_d: 0, qD: -pi+2.64,", "b = Constant(1e0,'b',system) k = Constant(1e2,'k',system) k_ankle = Constant(1e3,'k_ankle',system) b_ankle = Constant(1e1,'b_ankle',system) stall_torque", "sympy import numpy import matplotlib.pyplot as plt plt.ion() from math import pi system", "import 
KinematicConstraint,AccelerationConstraint import sympy import numpy import matplotlib.pyplot as plt plt.ion() from math", "system.addforce(-torque*O.z,wOC) # eq = [] eq.append(pBtip-pDtip) eq_d= [item.time_derivative() for item in eq] eq_dd=", "eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y)) c = AccelerationConstraint(eq_dd_scalar) # c.linearize(0) system.add_constraint(c) # f,ma = system.getdynamics() func1", "pOA = pOcm+lO/2*O.x pOC = pOcm-lO/2*O.x pAB = pOA+lA*A.x pBtip = pAB +", "= 1/30 t = numpy.r_[tinitial:tfinal:tstep] preload1 = Constant(0*pi/180,'preload1',system) preload2 = Constant(0*pi/180,'preload2',system) preload3 =", "-pi+0.89, qC_d: 0, qD: -pi+2.64, qD_d: 0, qE: 0, qE_d: 0, } statevariables", "Frame('O',system) A = Frame('A',system) B = Frame('B',system) C = Frame('C',system) D = Frame('D',system)", "ini = [] for item in system.get_state_variables(): if item in variables: ini.append(result[item]) else:", "system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system) C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin = 0*N.x+0*N.y pOcm=x*N.x+y*N.y pOA =", "= Constant(1e3,'k_ankle',system) b_ankle = Constant(1e1,'b_ankle',system) stall_torque = Constant(2e2,'stall_torque',system) k_constraint = Constant(1e4,'k_constraint',system) b_constraint =", "= Particle(pCcm,mC,'ParticleC') ParticleD = Particle(pDcm,mD,'ParticleD') #ParticleE = Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE)", "= (stretch2+abs(stretch2)) on = stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2) heelforce = k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) 
system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC)", "PointsOutput(points, constant_values=system.constant_values) points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time() pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x pDcm=pCD+lD/2*D.x pEcm=pBtip -.1*E.y pE1 = pEcm+lE/2*E.x", "system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2) heelforce = k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y) import", "x = [0,5,5,7,7,9,9,10] y = [0,0,1,1,-1,-1,0,0] my_signal, ft2 = pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width = .1) torque", "my_signal, ft2 = pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width = .1) torque = my_signal*stall_torque system.addforce(torque*O.z,wOA) system.addforce(-torque*O.z,wOC) # eq", "KinematicConstraint,AccelerationConstraint import sympy import numpy import matplotlib.pyplot as plt plt.ion() from math import", "in statevariables] N = Frame('N',system) O = Frame('O',system) A = Frame('A',system) B =", "pynamics.set_system(__name__,system) tol=1e-5 lO = Constant(.5,'lO',system) lA = Constant(.75,'lA',system) lB = Constant(1,'lB',system) lC =", "matplotlib.pyplot as plt plt.ion() from math import pi system = System() pynamics.set_system(__name__,system) tol=1e-5", "= pOC + lC*C.x pDtip = pCD + lD*D.x points = [pDtip,pCD,pOC,pOA,pAB,pBtip] eqs", "Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA = Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB = 
Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC = Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD = Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system)", "pCcm=pOC+lC/2*C.x pDcm=pCD+lD/2*D.x pEcm=pBtip -.1*E.y pE1 = pEcm+lE/2*E.x vE1 = pE1.time_derivative(N,system) pE2 = pEcm-lE/2*E.x", "0, qE: 0, qE_d: 0, } statevariables = system.get_state_variables() ini0 = [initialvalues[item] for", "eq = [] eq.append(pBtip-pDtip) eq_d= [item.time_derivative() for item in eq] eq_dd= [item.time_derivative() for", "mO = Constant(2,'mO',system) mA = Constant(.1,'mA',system) mB = Constant(.1,'mB',system) mC = Constant(.1,'mC',system) mD", "A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system) C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin = 0*N.x+0*N.y pOcm=x*N.x+y*N.y pOA = pOcm+lO/2*O.x pOC", "stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2) heelforce = k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y)", "system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y) import pynamics.time_series x = [0,5,5,7,7,9,9,10] y = [0,0,1,1,-1,-1,0,0] my_signal, ft2 =", "= [] eq.append(pBtip-pDtip) eq_d= [item.time_derivative() for item in eq] eq_dd= [item.time_derivative() for item", "= Constant(1e2,'k',system) k_ankle = Constant(1e3,'k_ankle',system) b_ankle = Constant(1e1,'b_ankle',system) stall_torque = Constant(2e2,'stall_torque',system) k_constraint =", "item in eq_d] eq_dd_scalar = 
[] eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y)) c = AccelerationConstraint(eq_dd_scalar) # c.linearize(0)", "t = numpy.r_[tinitial:tfinal:tstep] preload1 = Constant(0*pi/180,'preload1',system) preload2 = Constant(0*pi/180,'preload2',system) preload3 = Constant(-180*pi/180,'preload3',system) preload4", "y,y_d,y_dd = Differentiable('y',system) qO,qO_d,qO_dd = Differentiable('qO',system) qA,qA_d,qA_dd = Differentiable('qA',system) qB,qB_d,qB_dd = Differentiable('qB',system) qC,qC_d,qC_dd", "= Constant(.75,'lA',system) lB = Constant(1,'lB',system) lC = Constant(.75,'lC',system) lD = Constant(1,'lD',system) lE =", "preload1 = Constant(0*pi/180,'preload1',system) preload2 = Constant(0*pi/180,'preload2',system) preload3 = Constant(-180*pi/180,'preload3',system) preload4 = Constant(0*pi/180,'preload4',system) preload5", "0*N.x+0*N.y pOcm=x*N.x+y*N.y pOA = pOcm+lO/2*O.x pOC = pOcm-lO/2*O.x pAB = pOA+lA*A.x pBtip =", "Constant(1e3,'k_ankle',system) b_ankle = Constant(1e1,'b_ankle',system) stall_torque = Constant(2e2,'stall_torque',system) k_constraint = Constant(1e4,'k_constraint',system) b_constraint = Constant(1e2,'b_constraint',system)", "pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width = .1) torque = my_signal*stall_torque system.addforce(torque*O.z,wOA) system.addforce(-torque*O.z,wOC) # eq = [] eq.append(pBtip-pDtip)", "Constant(0*pi/180,'preload1',system) preload2 = Constant(0*pi/180,'preload2',system) preload3 = Constant(-180*pi/180,'preload3',system) preload4 = Constant(0*pi/180,'preload4',system) preload5 = Constant(180*pi/180,'preload5',system)", "Differentiable('qB',system) qC,qC_d,qC_dd = Differentiable('qC',system) qD,qD_d,qD_dd = Differentiable('qD',system) qE,qE_d,qE_dd = Differentiable('qE',system) initialvalues={ x: 0,", "C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin = 0*N.x+0*N.y pOcm=x*N.x+y*N.y pOA = 
pOcm+lO/2*O.x pOC = pOcm-lO/2*O.x", "eqs = [] eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y)) constraint_system=KinematicConstraint(eqs) variables = [qO, qA, qB, qC, qD]", "0, } statevariables = system.get_state_variables() ini0 = [initialvalues[item] for item in statevariables] N", "system.addforcegravity(-g*N.y) import pynamics.time_series x = [0,5,5,7,7,9,9,10] y = [0,0,1,1,-1,-1,0,0] my_signal, ft2 = pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width", "preload5 = Constant(180*pi/180,'preload5',system) preload6 = Constant(0*pi/180,'preload6',system) x,x_d,x_dd = Differentiable('x',system) y,y_d,y_dd = Differentiable('y',system) qO,qO_d,qO_dd", "on = stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2) heelforce = k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD)", "preload2 = Constant(0*pi/180,'preload2',system) preload3 = Constant(-180*pi/180,'preload3',system) preload4 = Constant(0*pi/180,'preload4',system) preload5 = Constant(180*pi/180,'preload5',system) preload6", "System() pynamics.set_system(__name__,system) tol=1e-5 lO = Constant(.5,'lO',system) lA = Constant(.75,'lA',system) lB = Constant(1,'lB',system) lC", "= Differentiable('x',system) y,y_d,y_dd = Differentiable('y',system) qO,qO_d,qO_dd = Differentiable('qO',system) qA,qA_d,qA_dd = Differentiable('qA',system) qB,qB_d,qB_dd =", "[0,5,5,7,7,9,9,10] y = [0,0,1,1,-1,-1,0,0] my_signal, ft2 = pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width = .1) torque = my_signal*stall_torque", "points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time() pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x 
pDcm=pCD+lD/2*D.x pEcm=pBtip -.1*E.y pE1 = pEcm+lE/2*E.x vE1 =", "pynamics.output import Output,PointsOutput from pynamics.particle import Particle import pynamics.integration from pynamics.constraint import KinematicConstraint,AccelerationConstraint", "A.get_w_to(B) wOC = O.get_w_to(C) wCD = C.get_w_to(D) wBD = B.get_w_to(D) wOE = O.get_w_to(E)", "0 tfinal = 10 tstep = 1/30 t = numpy.r_[tinitial:tfinal:tstep] preload1 = Constant(0*pi/180,'preload1',system)", "import Output,PointsOutput from pynamics.particle import Particle import pynamics.integration from pynamics.constraint import KinematicConstraint,AccelerationConstraint import", "for item in variables] result = constraint_system.solve_numeric(variables,guess,system.constant_values) ini = [] for item in", "vE1 = pE1.time_derivative(N,system) pE2 = pEcm-lE/2*E.x vE2 = pE2.time_derivative(N,system) wOA = O.get_w_to(A) wAB", "from pynamics.frame import Frame from pynamics.variable_types import Differentiable,Constant from pynamics.system import System from", "system.state_space_post_invert(f,ma,constants = system.constant_values,variable_functions = {my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE = system.get_KE() PE = system.getPEGravity(0*N.x) -", "qA: -0.89, qA_d: 0, qB: -2.64, qB_d: 0, qC: -pi+0.89, qC_d: 0, qD:", "stall_torque = Constant(2e2,'stall_torque',system) k_constraint = Constant(1e4,'k_constraint',system) b_constraint = Constant(1e2,'b_constraint',system) tinitial = 0 tfinal", "1/30 t = numpy.r_[tinitial:tfinal:tstep] preload1 = Constant(0*pi/180,'preload1',system) preload2 = Constant(0*pi/180,'preload2',system) preload3 = Constant(-180*pi/180,'preload3',system)", "import pynamics from pynamics.frame import Frame from pynamics.variable_types import Differentiable,Constant from pynamics.system import", "from pynamics.constraint import KinematicConstraint,AccelerationConstraint import sympy import numpy import matplotlib.pyplot as plt plt.ion()", 
"heelforce = k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y) import pynamics.time_series x", "= Differentiable('qD',system) qE,qE_d,qE_dd = Differentiable('qE',system) initialvalues={ x: 0, x_d: .5, y: 2, y_d:", "Constant(1e0,'b',system) k = Constant(1e2,'k',system) k_ankle = Constant(1e3,'k_ankle',system) b_ankle = Constant(1e1,'b_ankle',system) stall_torque = Constant(2e2,'stall_torque',system)", "pynamics.constraint import KinematicConstraint,AccelerationConstraint import sympy import numpy import matplotlib.pyplot as plt plt.ion() from", "import Frame from pynamics.variable_types import Differentiable,Constant from pynamics.system import System from pynamics.body import", "Differentiable('qD',system) qE,qE_d,qE_dd = Differentiable('qE',system) initialvalues={ x: 0, x_d: .5, y: 2, y_d: 0,", "-pi+2.64, qD_d: 0, qE: 0, qE_d: 0, } statevariables = system.get_state_variables() ini0 =", "points.plot_time() pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x pDcm=pCD+lD/2*D.x pEcm=pBtip -.1*E.y pE1 = pEcm+lE/2*E.x vE1 = pE1.time_derivative(N,system)", "system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y) import pynamics.time_series x = [0,5,5,7,7,9,9,10] y =", "0, qE_d: 0, } statevariables = system.get_state_variables() ini0 = [initialvalues[item] for item in", "Frame('A',system) B = Frame('B',system) C = Frame('C',system) D = Frame('D',system) E = Frame('E',system)", "guess = 
[initialvalues[item] for item in variables] result = constraint_system.solve_numeric(variables,guess,system.constant_values) ini = []", "lO = Constant(.5,'lO',system) lA = Constant(.75,'lA',system) lB = Constant(1,'lB',system) lC = Constant(.75,'lC',system) lD", "wCD = C.get_w_to(D) wBD = B.get_w_to(D) wOE = O.get_w_to(E) BodyO = Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA", "= -pE2.dot(N.y) stretch2_s = (stretch2+abs(stretch2)) on = stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2) heelforce = k_constraint*-stretch2_s", "PointsOutput(points) y = points.calc(states,t) y = y.reshape((-1,9,2)) plt.figure() for item in y[::30]: plt.plot(*(item.T))", "from pynamics.dyadic import Dyadic from pynamics.output import Output,PointsOutput from pynamics.particle import Particle import", "x,x_d,x_dd = Differentiable('x',system) y,y_d,y_dd = Differentiable('y',system) qO,qO_d,qO_dd = Differentiable('qO',system) qA,qA_d,qA_dd = Differentiable('qA',system) qB,qB_d,qB_dd", "Constant(1,'lD',system) lE = Constant(1,'lE',system) mO = Constant(2,'mO',system) mA = Constant(.1,'mA',system) mB = Constant(.1,'mB',system)", "B = Frame('B',system) C = Frame('C',system) D = Frame('D',system) E = Frame('E',system) system.set_newtonian(N)", "= Output([KE-PE,toeforce,heelforce]) energy.calc(states,t) energy.plot_time() #torque_plot = Output([torque]) #torque_plot.calc(states,t) #torque_plot.plot_time() points = [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] points", "Differentiable('qE',system) initialvalues={ x: 0, x_d: .5, y: 2, y_d: 0, qO: 5*pi/180, qO_d:", "eq_dd_scalar = [] eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y)) c = AccelerationConstraint(eq_dd_scalar) # c.linearize(0) system.add_constraint(c) # f,ma", "system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1) toeforce = k_constraint*-stretch1_s 
stretch2 = -pE2.dot(N.y) stretch2_s = (stretch2+abs(stretch2)) on =", "BodyO = Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA = Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB = Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC = Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD", ".5, y: 2, y_d: 0, qO: 5*pi/180, qO_d: 0, qA: -0.89, qA_d: 0,", "#BodyB = Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC = Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD = Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE = Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA", "system.get_KE() PE = system.getPEGravity(0*N.x) - system.getPESprings() energy = Output([KE-PE,toeforce,heelforce]) energy.calc(states,t) energy.plot_time() #torque_plot =", "Constant(1,'I_main',system) I_leg = Constant(.1,'I_leg',system) g = Constant(9.81,'g',system) b = Constant(1e0,'b',system) k = Constant(1e2,'k',system)", "= Particle(pBcm,mB,'ParticleB') ParticleC = Particle(pCcm,mC,'ParticleC') ParticleD = Particle(pDcm,mD,'ParticleD') #ParticleE = Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB)", "Constant(9.81,'g',system) b = Constant(1e0,'b',system) k = Constant(1e2,'k',system) k_ankle = Constant(1e3,'k_ankle',system) b_ankle = Constant(1e1,'b_ankle',system)", "= pEcm+lE/2*E.x vE1 = pE1.time_derivative(N,system) pE2 = pEcm-lE/2*E.x vE2 = pE2.time_derivative(N,system) wOA =", "= -pE1.dot(N.y) stretch1_s = (stretch1+abs(stretch1)) on = stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1) toeforce = k_constraint*-stretch1_s", "points = [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] points = PointsOutput(points) y = points.calc(states,t) y = y.reshape((-1,9,2)) plt.figure()", "#BodyC = 
Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD = Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE = Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA = Particle(pAcm,mA,'ParticleA') ParticleB", "= Differentiable('y',system) qO,qO_d,qO_dd = Differentiable('qO',system) qA,qA_d,qA_dd = Differentiable('qA',system) qB,qB_d,qB_dd = Differentiable('qB',system) qC,qC_d,qC_dd =", "pOrigin = 0*N.x+0*N.y pOcm=x*N.x+y*N.y pOA = pOcm+lO/2*O.x pOC = pOcm-lO/2*O.x pAB = pOA+lA*A.x", "as plt plt.ion() from math import pi system = System() pynamics.set_system(__name__,system) tol=1e-5 lO", "system.get_state_variables(): if item in variables: ini.append(result[item]) else: ini.append(initialvalues[item]) points = PointsOutput(points, constant_values=system.constant_values) points.calc(numpy.array([ini0,ini]),[0,1])", "= [] eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y)) constraint_system=KinematicConstraint(eqs) variables = [qO, qA, qB, qC, qD] guess", "= Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC = Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD = Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE = Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA =", "for item in statevariables] N = Frame('N',system) O = Frame('O',system) A = Frame('A',system)", "[item.time_derivative() for item in eq_d] eq_dd_scalar = [] eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y)) c = AccelerationConstraint(eq_dd_scalar)", "Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE) # stretch1 = -pE1.dot(N.y) stretch1_s = (stretch1+abs(stretch1))", "= Output([torque]) #torque_plot.calc(states,t) #torque_plot.plot_time() points = [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] 
points = PointsOutput(points) y = points.calc(states,t)", "Constant(.75,'lA',system) lB = Constant(1,'lB',system) lC = Constant(.75,'lC',system) lD = Constant(1,'lD',system) lE = Constant(1,'lE',system)", "qB,qB_d,qB_dd = Differentiable('qB',system) qC,qC_d,qC_dd = Differentiable('qC',system) qD,qD_d,qD_dd = Differentiable('qD',system) qE,qE_d,qE_dd = Differentiable('qE',system) initialvalues={", "ini.append(initialvalues[item]) points = PointsOutput(points, constant_values=system.constant_values) points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time() pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x pDcm=pCD+lD/2*D.x pEcm=pBtip -.1*E.y", "system.addforce(torque*O.z,wOA) system.addforce(-torque*O.z,wOC) # eq = [] eq.append(pBtip-pDtip) eq_d= [item.time_derivative() for item in eq]", "A = Frame('A',system) B = Frame('B',system) C = Frame('C',system) D = Frame('D',system) E", "statevariables] N = Frame('N',system) O = Frame('O',system) A = Frame('A',system) B = Frame('B',system)", "import numpy import matplotlib.pyplot as plt plt.ion() from math import pi system =", "= [initialvalues[item] for item in statevariables] N = Frame('N',system) O = Frame('O',system) A", "pynamics.variable_types import Differentiable,Constant from pynamics.system import System from pynamics.body import Body from pynamics.dyadic", "= Constant(1e4,'k_constraint',system) b_constraint = Constant(1e2,'b_constraint',system) tinitial = 0 tfinal = 10 tstep =", "qA,qA_d,qA_dd = Differentiable('qA',system) qB,qB_d,qB_dd = Differentiable('qB',system) qC,qC_d,qC_dd = Differentiable('qC',system) qD,qD_d,qD_dd = Differentiable('qD',system) qE,qE_d,qE_dd", "= pE1.time_derivative(N,system) pE2 = pEcm-lE/2*E.x vE2 = pE2.time_derivative(N,system) wOA = O.get_w_to(A) wAB =", "= pE2.time_derivative(N,system) wOA = O.get_w_to(A) wAB = A.get_w_to(B) wOC = O.get_w_to(C) wCD =", "Email: danaukes<at>gmail.com Please see LICENSE for full license. 
\"\"\" import pynamics from pynamics.frame", "ini0 = [initialvalues[item] for item in statevariables] N = Frame('N',system) O = Frame('O',system)", "pE2 = pEcm-lE/2*E.x vE2 = pE2.time_derivative(N,system) wOA = O.get_w_to(A) wAB = A.get_w_to(B) wOC", "#torque_plot.calc(states,t) #torque_plot.plot_time() points = [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] points = PointsOutput(points) y = points.calc(states,t) y =", "qE_d: 0, } statevariables = system.get_state_variables() ini0 = [initialvalues[item] for item in statevariables]", "eqs.append((pBtip-pDtip).dot(N.y)) constraint_system=KinematicConstraint(eqs) variables = [qO, qA, qB, qC, qD] guess = [initialvalues[item] for", "from pynamics.system import System from pynamics.body import Body from pynamics.dyadic import Dyadic from", "= Differentiable('qC',system) qD,qD_d,qD_dd = Differentiable('qD',system) qE,qE_d,qE_dd = Differentiable('qE',system) initialvalues={ x: 0, x_d: .5,", "b_constraint = Constant(1e2,'b_constraint',system) tinitial = 0 tfinal = 10 tstep = 1/30 t", "= Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA = Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB = Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC = Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD =", "states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE = system.get_KE() PE = system.getPEGravity(0*N.x) - system.getPESprings() energy = Output([KE-PE,toeforce,heelforce]) energy.calc(states,t)", "Frame('E',system) system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system) C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin = 0*N.x+0*N.y pOcm=x*N.x+y*N.y pOA", "y_d: 0, qO: 5*pi/180, qO_d: 0, qA: -0.89, qA_d: 0, qB: -2.64, qB_d:", "for item in eq_d] 
eq_dd_scalar = [] eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y)) c = AccelerationConstraint(eq_dd_scalar) #", "= Constant(.1,'I_leg',system) g = Constant(9.81,'g',system) b = Constant(1e0,'b',system) k = Constant(1e2,'k',system) k_ankle =", "<filename>python/pynamics_examples/parallel_five_bar_jumper_foot.py # -*- coding: utf-8 -*- \"\"\" Written by <NAME> Email: danaukes<at>gmail.com Please", "import matplotlib.pyplot as plt plt.ion() from math import pi system = System() pynamics.set_system(__name__,system)", "#BodyD = Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE = Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA = Particle(pAcm,mA,'ParticleA') ParticleB = Particle(pBcm,mB,'ParticleB') ParticleC", "Differentiable,Constant from pynamics.system import System from pynamics.body import Body from pynamics.dyadic import Dyadic", "ParticleC = Particle(pCcm,mC,'ParticleC') ParticleD = Particle(pDcm,mD,'ParticleD') #ParticleE = Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD)", "eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y)) constraint_system=KinematicConstraint(eqs) variables = [qO, qA, qB, qC, qD] guess = [initialvalues[item]", "= points.calc(states,t) y = y.reshape((-1,9,2)) plt.figure() for item in y[::30]: plt.plot(*(item.T)) #points.animate(fps =", "pynamics.system import System from pynamics.body import Body from pynamics.dyadic import Dyadic from pynamics.output", "system.constant_values,variable_functions = {my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE = system.get_KE() PE = system.getPEGravity(0*N.x) - system.getPESprings() energy", "= Frame('E',system) system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system) 
C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin = 0*N.x+0*N.y pOcm=x*N.x+y*N.y", "ParticleB = Particle(pBcm,mB,'ParticleB') ParticleC = Particle(pCcm,mC,'ParticleC') ParticleD = Particle(pDcm,mD,'ParticleD') #ParticleE = Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA)", "variables] result = constraint_system.solve_numeric(variables,guess,system.constant_values) ini = [] for item in system.get_state_variables(): if item", "system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y) import pynamics.time_series x = [0,5,5,7,7,9,9,10] y", "system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y) import pynamics.time_series x = [0,5,5,7,7,9,9,10] y = [0,0,1,1,-1,-1,0,0] my_signal,", "Constant(.1,'mA',system) mB = Constant(.1,'mB',system) mC = Constant(.1,'mC',system) mD = Constant(.1,'mD',system) mE = Constant(.1,'mE',system)", "mE = Constant(.1,'mE',system) I_main = Constant(1,'I_main',system) I_leg = Constant(.1,'I_leg',system) g = Constant(9.81,'g',system) b", "pOC = pOcm-lO/2*O.x pAB = pOA+lA*A.x pBtip = pAB + lB*B.x #vBtip =", "I_main = Constant(1,'I_main',system) I_leg = Constant(.1,'I_leg',system) g = Constant(9.81,'g',system) b = Constant(1e0,'b',system) k", "= 0 tfinal = 10 tstep = 1/30 t = numpy.r_[tinitial:tfinal:tstep] preload1 =", "in variables: ini.append(result[item]) else: ini.append(initialvalues[item]) points = PointsOutput(points, constant_values=system.constant_values) points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time() 
pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x", "= Constant(180*pi/180,'preload5',system) preload6 = Constant(0*pi/180,'preload6',system) x,x_d,x_dd = Differentiable('x',system) y,y_d,y_dd = Differentiable('y',system) qO,qO_d,qO_dd =", "in variables] result = constraint_system.solve_numeric(variables,guess,system.constant_values) ini = [] for item in system.get_state_variables(): if", "license. \"\"\" import pynamics from pynamics.frame import Frame from pynamics.variable_types import Differentiable,Constant from", "Constant(.1,'I_leg',system) g = Constant(9.81,'g',system) b = Constant(1e0,'b',system) k = Constant(1e2,'k',system) k_ankle = Constant(1e3,'k_ankle',system)", "pEcm=pBtip -.1*E.y pE1 = pEcm+lE/2*E.x vE1 = pE1.time_derivative(N,system) pE2 = pEcm-lE/2*E.x vE2 =", "= [] eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y)) c = AccelerationConstraint(eq_dd_scalar) # c.linearize(0) system.add_constraint(c) # f,ma =", "Constant(.1,'mD',system) mE = Constant(.1,'mE',system) I_main = Constant(1,'I_main',system) I_leg = Constant(.1,'I_leg',system) g = Constant(9.81,'g',system)", "Constant(.5,'lO',system) lA = Constant(.75,'lA',system) lB = Constant(1,'lB',system) lC = Constant(.75,'lC',system) lD = Constant(1,'lD',system)", "see LICENSE for full license. 
\"\"\" import pynamics from pynamics.frame import Frame from", "- system.getPESprings() energy = Output([KE-PE,toeforce,heelforce]) energy.calc(states,t) energy.plot_time() #torque_plot = Output([torque]) #torque_plot.calc(states,t) #torque_plot.plot_time() points", "2, y_d: 0, qO: 5*pi/180, qO_d: 0, qA: -0.89, qA_d: 0, qB: -2.64,", "lD*D.x points = [pDtip,pCD,pOC,pOA,pAB,pBtip] eqs = [] eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y)) constraint_system=KinematicConstraint(eqs) variables = [qO,", "Particle(pAcm,mA,'ParticleA') ParticleB = Particle(pBcm,mB,'ParticleB') ParticleC = Particle(pCcm,mC,'ParticleC') ParticleD = Particle(pDcm,mD,'ParticleD') #ParticleE = Particle(pEcm,mE,'ParticleE')", "KE = system.get_KE() PE = system.getPEGravity(0*N.x) - system.getPESprings() energy = Output([KE-PE,toeforce,heelforce]) energy.calc(states,t) energy.plot_time()", "preload4 = Constant(0*pi/180,'preload4',system) preload5 = Constant(180*pi/180,'preload5',system) preload6 = Constant(0*pi/180,'preload6',system) x,x_d,x_dd = Differentiable('x',system) y,y_d,y_dd", "= [0,0,1,1,-1,-1,0,0] my_signal, ft2 = pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width = .1) torque = my_signal*stall_torque system.addforce(torque*O.z,wOA) system.addforce(-torque*O.z,wOC)", "points = PointsOutput(points) y = points.calc(states,t) y = y.reshape((-1,9,2)) plt.figure() for item in", "pOA+lA*A.x pBtip = pAB + lB*B.x #vBtip = pBtip.time_derivative(N,system) pCD = pOC +", "system.addforce(-b_constraint*vE1*on,vE1) toeforce = k_constraint*-stretch1_s stretch2 = -pE2.dot(N.y) stretch2_s = (stretch2+abs(stretch2)) on = stretch2_s/(2*stretch2+1e-10)", "-.1*E.y pE1 = pEcm+lE/2*E.x vE1 = pE1.time_derivative(N,system) pE2 = pEcm-lE/2*E.x vE2 = pE2.time_derivative(N,system)", "tfinal = 10 tstep = 1/30 t = numpy.r_[tinitial:tfinal:tstep] preload1 = Constant(0*pi/180,'preload1',system) preload2", "my_signal*stall_torque system.addforce(torque*O.z,wOA) 
system.addforce(-torque*O.z,wOC) # eq = [] eq.append(pBtip-pDtip) eq_d= [item.time_derivative() for item in", "= system.constant_values,variable_functions = {my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE = system.get_KE() PE = system.getPEGravity(0*N.x) - system.getPESprings()", "0, qB: -2.64, qB_d: 0, qC: -pi+0.89, qC_d: 0, qD: -pi+2.64, qD_d: 0,", "lA = Constant(.75,'lA',system) lB = Constant(1,'lB',system) lC = Constant(.75,'lC',system) lD = Constant(1,'lD',system) lE", "= pOcm-lO/2*O.x pAB = pOA+lA*A.x pBtip = pAB + lB*B.x #vBtip = pBtip.time_derivative(N,system)", "= pEcm-lE/2*E.x vE2 = pE2.time_derivative(N,system) wOA = O.get_w_to(A) wAB = A.get_w_to(B) wOC =", "Frame('C',system) D = Frame('D',system) E = Frame('E',system) system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system) C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system)", "system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y) import pynamics.time_series x = [0,5,5,7,7,9,9,10] y = [0,0,1,1,-1,-1,0,0]", "(stretch2+abs(stretch2)) on = stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2) heelforce = k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD)", "#torque_plot.plot_time() points = [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] points = PointsOutput(points) y = points.calc(states,t) y = y.reshape((-1,9,2))", "import pynamics.integration from pynamics.constraint import 
KinematicConstraint,AccelerationConstraint import sympy import numpy import matplotlib.pyplot as", "= Constant(-180*pi/180,'preload3',system) preload4 = Constant(0*pi/180,'preload4',system) preload5 = Constant(180*pi/180,'preload5',system) preload6 = Constant(0*pi/180,'preload6',system) x,x_d,x_dd =", "N = Frame('N',system) O = Frame('O',system) A = Frame('A',system) B = Frame('B',system) C", "variables: ini.append(result[item]) else: ini.append(initialvalues[item]) points = PointsOutput(points, constant_values=system.constant_values) points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time() pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x", "# stretch1 = -pE1.dot(N.y) stretch1_s = (stretch1+abs(stretch1)) on = stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1) toeforce", "Particle(pCcm,mC,'ParticleC') ParticleD = Particle(pDcm,mD,'ParticleD') #ParticleE = Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE) #", "= Frame('N',system) O = Frame('O',system) A = Frame('A',system) B = Frame('B',system) C =", "in eq] eq_dd= [item.time_derivative() for item in eq_d] eq_dd_scalar = [] eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y))", "Output([torque]) #torque_plot.calc(states,t) #torque_plot.plot_time() points = [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] points = PointsOutput(points) y = points.calc(states,t) y", "qB: -2.64, qB_d: 0, qC: -pi+0.89, qC_d: 0, qD: -pi+2.64, qD_d: 0, qE:", "= Constant(1e0,'b',system) k = Constant(1e2,'k',system) k_ankle = Constant(1e3,'k_ankle',system) b_ankle = Constant(1e1,'b_ankle',system) stall_torque =", "Constant(1,'lB',system) lC = Constant(.75,'lC',system) lD = Constant(1,'lD',system) lE = Constant(1,'lE',system) mO = Constant(2,'mO',system)", "10 tstep = 1/30 t = numpy.r_[tinitial:tfinal:tstep] 
preload1 = Constant(0*pi/180,'preload1',system) preload2 = Constant(0*pi/180,'preload2',system)", "qD: -pi+2.64, qD_d: 0, qE: 0, qE_d: 0, } statevariables = system.get_state_variables() ini0", "eq_d= [item.time_derivative() for item in eq] eq_dd= [item.time_derivative() for item in eq_d] eq_dd_scalar", "System from pynamics.body import Body from pynamics.dyadic import Dyadic from pynamics.output import Output,PointsOutput", "= B.get_w_to(D) wOE = O.get_w_to(E) BodyO = Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA = Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB =", "y = [0,0,1,1,-1,-1,0,0] my_signal, ft2 = pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width = .1) torque = my_signal*stall_torque system.addforce(torque*O.z,wOA)", "= Constant(1e2,'b_constraint',system) tinitial = 0 tfinal = 10 tstep = 1/30 t =", "= system.state_space_post_invert(f,ma,constants = system.constant_values,variable_functions = {my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE = system.get_KE() PE = system.getPEGravity(0*N.x)", "pE1.time_derivative(N,system) pE2 = pEcm-lE/2*E.x vE2 = pE2.time_derivative(N,system) wOA = O.get_w_to(A) wAB = A.get_w_to(B)", "initialvalues={ x: 0, x_d: .5, y: 2, y_d: 0, qO: 5*pi/180, qO_d: 0,", "qC_d: 0, qD: -pi+2.64, qD_d: 0, qE: 0, qE_d: 0, } statevariables =", "numpy.r_[tinitial:tfinal:tstep] preload1 = Constant(0*pi/180,'preload1',system) preload2 = Constant(0*pi/180,'preload2',system) preload3 = Constant(-180*pi/180,'preload3',system) preload4 = Constant(0*pi/180,'preload4',system)", "Constant(1e2,'b_constraint',system) tinitial = 0 tfinal = 10 tstep = 1/30 t = numpy.r_[tinitial:tfinal:tstep]", "[pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip] points = PointsOutput(points) y = points.calc(states,t) y = y.reshape((-1,9,2)) plt.figure() for item", "k = Constant(1e2,'k',system) k_ankle = Constant(1e3,'k_ankle',system) b_ankle = 
Constant(1e1,'b_ankle',system) stall_torque = Constant(2e2,'stall_torque',system) k_constraint", "Constant(-180*pi/180,'preload3',system) preload4 = Constant(0*pi/180,'preload4',system) preload5 = Constant(180*pi/180,'preload5',system) preload6 = Constant(0*pi/180,'preload6',system) x,x_d,x_dd = Differentiable('x',system)", "Constant(0*pi/180,'preload4',system) preload5 = Constant(180*pi/180,'preload5',system) preload6 = Constant(0*pi/180,'preload6',system) x,x_d,x_dd = Differentiable('x',system) y,y_d,y_dd = Differentiable('y',system)", "Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC = Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD = Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE = Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA = Particle(pAcm,mA,'ParticleA')", "in eq_d] eq_dd_scalar = [] eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y)) c = AccelerationConstraint(eq_dd_scalar) # c.linearize(0) system.add_constraint(c)", "vE2 = pE2.time_derivative(N,system) wOA = O.get_w_to(A) wAB = A.get_w_to(B) wOC = O.get_w_to(C) wCD", "-*- \"\"\" Written by <NAME> Email: danaukes<at>gmail.com Please see LICENSE for full license.", "system = System() pynamics.set_system(__name__,system) tol=1e-5 lO = Constant(.5,'lO',system) lA = Constant(.75,'lA',system) lB =", "-pE1.dot(N.y) stretch1_s = (stretch1+abs(stretch1)) on = stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1) toeforce = k_constraint*-stretch1_s stretch2", "pynamics.time_series x = [0,5,5,7,7,9,9,10] y = [0,0,1,1,-1,-1,0,0] my_signal, ft2 = pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width = .1)", "= Constant(.1,'mC',system) mD = Constant(.1,'mD',system) mE = Constant(.1,'mE',system) I_main = Constant(1,'I_main',system) I_leg =", "O.get_w_to(E) BodyO = 
Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA = Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB = Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC = Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system)", "= stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1) toeforce = k_constraint*-stretch1_s stretch2 = -pE2.dot(N.y) stretch2_s = (stretch2+abs(stretch2))", "O.get_w_to(C) wCD = C.get_w_to(D) wBD = B.get_w_to(D) wOE = O.get_w_to(E) BodyO = Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system)", "0, qO: 5*pi/180, qO_d: 0, qA: -0.89, qA_d: 0, qB: -2.64, qB_d: 0,", "E = Frame('E',system) system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system) C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin = 0*N.x+0*N.y", "wOC = O.get_w_to(C) wCD = C.get_w_to(D) wBD = B.get_w_to(D) wOE = O.get_w_to(E) BodyO", "= Constant(1,'I_main',system) I_leg = Constant(.1,'I_leg',system) g = Constant(9.81,'g',system) b = Constant(1e0,'b',system) k =", "BodyE = Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA = Particle(pAcm,mA,'ParticleA') ParticleB = Particle(pBcm,mB,'ParticleB') ParticleC = Particle(pCcm,mC,'ParticleC') ParticleD", "= {my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE = system.get_KE() PE = system.getPEGravity(0*N.x) - system.getPESprings() energy =", "# c.linearize(0) system.add_constraint(c) # f,ma = system.getdynamics() func1 = system.state_space_post_invert(f,ma,constants = system.constant_values,variable_functions =", "Constant(0*pi/180,'preload6',system) x,x_d,x_dd = Differentiable('x',system) y,y_d,y_dd = Differentiable('y',system) qO,qO_d,qO_dd = 
Differentiable('qO',system) qA,qA_d,qA_dd = Differentiable('qA',system)", "= Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB = Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system) #BodyC = Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD = Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE =", "= Constant(0*pi/180,'preload2',system) preload3 = Constant(-180*pi/180,'preload3',system) preload4 = Constant(0*pi/180,'preload4',system) preload5 = Constant(180*pi/180,'preload5',system) preload6 =", "system.getPESprings() energy = Output([KE-PE,toeforce,heelforce]) energy.calc(states,t) energy.plot_time() #torque_plot = Output([torque]) #torque_plot.calc(states,t) #torque_plot.plot_time() points =", "stretch2 = -pE2.dot(N.y) stretch2_s = (stretch2+abs(stretch2)) on = stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2) heelforce =", "qO,qO_d,qO_dd = Differentiable('qO',system) qA,qA_d,qA_dd = Differentiable('qA',system) qB,qB_d,qB_dd = Differentiable('qB',system) qC,qC_d,qC_dd = Differentiable('qC',system) qD,qD_d,qD_dd", "= [pDtip,pCD,pOC,pOA,pAB,pBtip] eqs = [] eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y)) constraint_system=KinematicConstraint(eqs) variables = [qO, qA, qB,", "= Constant(0*pi/180,'preload4',system) preload5 = Constant(180*pi/180,'preload5',system) preload6 = Constant(0*pi/180,'preload6',system) x,x_d,x_dd = Differentiable('x',system) y,y_d,y_dd =", "energy = Output([KE-PE,toeforce,heelforce]) energy.calc(states,t) energy.plot_time() #torque_plot = Output([torque]) #torque_plot.calc(states,t) #torque_plot.plot_time() points = [pDtip,pCD,pOC,pOA,pAB,pBtip,pE1,pE2,pBtip]", "= Constant(0*pi/180,'preload6',system) x,x_d,x_dd = Differentiable('x',system) y,y_d,y_dd = Differentiable('y',system) qO,qO_d,qO_dd = Differentiable('qO',system) qA,qA_d,qA_dd =", "qD] guess = 
[initialvalues[item] for item in variables] result = constraint_system.solve_numeric(variables,guess,system.constant_values) ini =", "pynamics.particle import Particle import pynamics.integration from pynamics.constraint import KinematicConstraint,AccelerationConstraint import sympy import numpy", "-2.64, qB_d: 0, qC: -pi+0.89, qC_d: 0, qD: -pi+2.64, qD_d: 0, qE: 0,", "D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin = 0*N.x+0*N.y pOcm=x*N.x+y*N.y pOA = pOcm+lO/2*O.x pOC = pOcm-lO/2*O.x pAB", "pynamics.body import Body from pynamics.dyadic import Dyadic from pynamics.output import Output,PointsOutput from pynamics.particle", "Differentiable('qA',system) qB,qB_d,qB_dd = Differentiable('qB',system) qC,qC_d,qC_dd = Differentiable('qC',system) qD,qD_d,qD_dd = Differentiable('qD',system) qE,qE_d,qE_dd = Differentiable('qE',system)", "[] for item in system.get_state_variables(): if item in variables: ini.append(result[item]) else: ini.append(initialvalues[item]) points", "Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA = Particle(pAcm,mA,'ParticleA') ParticleB = Particle(pBcm,mB,'ParticleB') ParticleC = Particle(pCcm,mC,'ParticleC') ParticleD = Particle(pDcm,mD,'ParticleD')", "lC = Constant(.75,'lC',system) lD = Constant(1,'lD',system) lE = Constant(1,'lE',system) mO = Constant(2,'mO',system) mA", "Frame from pynamics.variable_types import Differentiable,Constant from pynamics.system import System from pynamics.body import Body", "numpy import matplotlib.pyplot as plt plt.ion() from math import pi system = System()", "= k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y) import 
pynamics.time_series x =", "item in variables] result = constraint_system.solve_numeric(variables,guess,system.constant_values) ini = [] for item in system.get_state_variables():", "Frame('B',system) C = Frame('C',system) D = Frame('D',system) E = Frame('E',system) system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system)", "= 10 tstep = 1/30 t = numpy.r_[tinitial:tfinal:tstep] preload1 = Constant(0*pi/180,'preload1',system) preload2 =", "= .1) torque = my_signal*stall_torque system.addforce(torque*O.z,wOA) system.addforce(-torque*O.z,wOC) # eq = [] eq.append(pBtip-pDtip) eq_d=", "C = Frame('C',system) D = Frame('D',system) E = Frame('E',system) system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system)", "= Differentiable('qE',system) initialvalues={ x: 0, x_d: .5, y: 2, y_d: 0, qO: 5*pi/180,", "eq] eq_dd= [item.time_derivative() for item in eq_d] eq_dd_scalar = [] eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y)) c", "O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system) C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system) pOrigin = 0*N.x+0*N.y pOcm=x*N.x+y*N.y pOA = pOcm+lO/2*O.x", "= PointsOutput(points) y = points.calc(states,t) y = y.reshape((-1,9,2)) plt.figure() for item in y[::30]:", "LICENSE for full license. 
\"\"\" import pynamics from pynamics.frame import Frame from pynamics.variable_types", "item in system.get_state_variables(): if item in variables: ini.append(result[item]) else: ini.append(initialvalues[item]) points = PointsOutput(points,", "stretch1 = -pE1.dot(N.y) stretch1_s = (stretch1+abs(stretch1)) on = stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1) toeforce =", "import pi system = System() pynamics.set_system(__name__,system) tol=1e-5 lO = Constant(.5,'lO',system) lA = Constant(.75,'lA',system)", "[qO, qA, qB, qC, qD] guess = [initialvalues[item] for item in variables] result", "preload3 = Constant(-180*pi/180,'preload3',system) preload4 = Constant(0*pi/180,'preload4',system) preload5 = Constant(180*pi/180,'preload5',system) preload6 = Constant(0*pi/180,'preload6',system) x,x_d,x_dd", "= Constant(1,'lB',system) lC = Constant(.75,'lC',system) lD = Constant(1,'lD',system) lE = Constant(1,'lE',system) mO =", "= Particle(pAcm,mA,'ParticleA') ParticleB = Particle(pBcm,mB,'ParticleB') ParticleC = Particle(pCcm,mC,'ParticleC') ParticleD = Particle(pDcm,mD,'ParticleD') #ParticleE =", "= Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE = Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA = Particle(pAcm,mA,'ParticleA') ParticleB = Particle(pBcm,mB,'ParticleB') ParticleC =", "Constant(.1,'mB',system) mC = Constant(.1,'mC',system) mD = Constant(.1,'mD',system) mE = Constant(.1,'mE',system) I_main = Constant(1,'I_main',system)", "= Constant(.1,'mD',system) mE = Constant(.1,'mE',system) I_main = Constant(1,'I_main',system) I_leg = Constant(.1,'I_leg',system) g =", "stretch1_s = (stretch1+abs(stretch1)) on = stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1) toeforce = k_constraint*-stretch1_s stretch2 =", "k_constraint = Constant(1e4,'k_constraint',system) 
b_constraint = Constant(1e2,'b_constraint',system) tinitial = 0 tfinal = 10 tstep", "tinitial = 0 tfinal = 10 tstep = 1/30 t = numpy.r_[tinitial:tfinal:tstep] preload1", "Constant(.1,'mC',system) mD = Constant(.1,'mD',system) mE = Constant(.1,'mE',system) I_main = Constant(1,'I_main',system) I_leg = Constant(.1,'I_leg',system)", "pOcm-lO/2*O.x pAB = pOA+lA*A.x pBtip = pAB + lB*B.x #vBtip = pBtip.time_derivative(N,system) pCD", "y = points.calc(states,t) y = y.reshape((-1,9,2)) plt.figure() for item in y[::30]: plt.plot(*(item.T)) #points.animate(fps", "Differentiable('y',system) qO,qO_d,qO_dd = Differentiable('qO',system) qA,qA_d,qA_dd = Differentiable('qA',system) qB,qB_d,qB_dd = Differentiable('qB',system) qC,qC_d,qC_dd = Differentiable('qC',system)", "Differentiable('qC',system) qD,qD_d,qD_dd = Differentiable('qD',system) qE,qE_d,qE_dd = Differentiable('qE',system) initialvalues={ x: 0, x_d: .5, y:", "system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE) # stretch1 = -pE1.dot(N.y) stretch1_s = (stretch1+abs(stretch1)) on", "k_ankle = Constant(1e3,'k_ankle',system) b_ankle = Constant(1e1,'b_ankle',system) stall_torque = Constant(2e2,'stall_torque',system) k_constraint = Constant(1e4,'k_constraint',system) b_constraint", "system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE) # stretch1 = -pE1.dot(N.y) stretch1_s = (stretch1+abs(stretch1)) on = stretch1_s/(2*stretch1+1e-10)", "import Dyadic from pynamics.output import Output,PointsOutput from pynamics.particle import Particle import pynamics.integration from", "pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x pDcm=pCD+lD/2*D.x pEcm=pBtip -.1*E.y pE1 = pEcm+lE/2*E.x vE1 = pE1.time_derivative(N,system) pE2", "= A.get_w_to(B) wOC = O.get_w_to(C) wCD = C.get_w_to(D) wBD = B.get_w_to(D) wOE =", "points = PointsOutput(points, constant_values=system.constant_values) 
points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time() pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x pDcm=pCD+lD/2*D.x pEcm=pBtip -.1*E.y pE1", "Please see LICENSE for full license. \"\"\" import pynamics from pynamics.frame import Frame", "mC = Constant(.1,'mC',system) mD = Constant(.1,'mD',system) mE = Constant(.1,'mE',system) I_main = Constant(1,'I_main',system) I_leg", "D = Frame('D',system) E = Frame('E',system) system.set_newtonian(N) O.rotate_fixed_axis(N,[0,0,1],qO,system) A.rotate_fixed_axis(N,[0,0,1],qA,system) B.rotate_fixed_axis(N,[0,0,1],qB,system) C.rotate_fixed_axis(N,[0,0,1],qC,system) D.rotate_fixed_axis(N,[0,0,1],qD,system) E.rotate_fixed_axis(N,[0,0,1],qE,system)", "#ParticleE = Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE) # stretch1 = -pE1.dot(N.y) stretch1_s", "= system.getPEGravity(0*N.x) - system.getPESprings() energy = Output([KE-PE,toeforce,heelforce]) energy.calc(states,t) energy.plot_time() #torque_plot = Output([torque]) #torque_plot.calc(states,t)", "pOcm=x*N.x+y*N.y pOA = pOcm+lO/2*O.x pOC = pOcm-lO/2*O.x pAB = pOA+lA*A.x pBtip = pAB", "else: ini.append(initialvalues[item]) points = PointsOutput(points, constant_values=system.constant_values) points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time() pAcm=pOA+lA/2*A.x pBcm=pAB+lB/2*B.x pCcm=pOC+lC/2*C.x pDcm=pCD+lD/2*D.x pEcm=pBtip", "AccelerationConstraint(eq_dd_scalar) # c.linearize(0) system.add_constraint(c) # f,ma = system.getdynamics() func1 = system.state_space_post_invert(f,ma,constants = system.constant_values,variable_functions", "= Constant(.75,'lC',system) lD = Constant(1,'lD',system) lE = Constant(1,'lE',system) mO = Constant(2,'mO',system) mA =", "item in variables: ini.append(result[item]) else: ini.append(initialvalues[item]) points = PointsOutput(points, constant_values=system.constant_values) 
points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time() pAcm=pOA+lA/2*A.x", "= Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA = Particle(pAcm,mA,'ParticleA') ParticleB = Particle(pBcm,mB,'ParticleB') ParticleC = Particle(pCcm,mC,'ParticleC') ParticleD =", "\"\"\" Written by <NAME> Email: danaukes<at>gmail.com Please see LICENSE for full license. \"\"\"", "= pynamics.time_series.build_smoothed_time_signal(x,y,t,'my_signal',window_time_width = .1) torque = my_signal*stall_torque system.addforce(torque*O.z,wOA) system.addforce(-torque*O.z,wOC) # eq = []", "if item in variables: ini.append(result[item]) else: ini.append(initialvalues[item]) points = PointsOutput(points, constant_values=system.constant_values) points.calc(numpy.array([ini0,ini]),[0,1]) points.plot_time()", "+ lC*C.x pDtip = pCD + lD*D.x points = [pDtip,pCD,pOC,pOA,pAB,pBtip] eqs = []", "wAB = A.get_w_to(B) wOC = O.get_w_to(C) wCD = C.get_w_to(D) wBD = B.get_w_to(D) wOE", "constraint_system=KinematicConstraint(eqs) variables = [qO, qA, qB, qC, qD] guess = [initialvalues[item] for item", "(stretch1+abs(stretch1)) on = stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1) toeforce = k_constraint*-stretch1_s stretch2 = -pE2.dot(N.y) stretch2_s", "# -*- coding: utf-8 -*- \"\"\" Written by <NAME> Email: danaukes<at>gmail.com Please see", "danaukes<at>gmail.com Please see LICENSE for full license. 
\"\"\" import pynamics from pynamics.frame import", "= stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2) system.addforce(-b_constraint*vE2*on,vE2) heelforce = k_constraint*-stretch2_s system.add_spring_force1(k,(qA-qO-preload1)*N.z,wOA) system.add_spring_force1(k,(qB-qA-preload2)*N.z,wAB) system.add_spring_force1(k,(qC-qO-preload3)*N.z,wOC) system.add_spring_force1(k,(qD-qC-preload4)*N.z,wCD) system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE)", "-*- coding: utf-8 -*- \"\"\" Written by <NAME> Email: danaukes<at>gmail.com Please see LICENSE", "= Body('BodyC',C,pCcm,mC,Dyadic.build(C,I_leg,I_leg,I_leg),system) #BodyD = Body('BodyD',D,pDcm,mD,Dyadic.build(D,I_leg,I_leg,I_leg),system) BodyE = Body('BodyE',E,pEcm,mE,Dyadic.build(D,I_leg,I_leg,I_leg),system) ParticleA = Particle(pAcm,mA,'ParticleA') ParticleB =", "toeforce = k_constraint*-stretch1_s stretch2 = -pE2.dot(N.y) stretch2_s = (stretch2+abs(stretch2)) on = stretch2_s/(2*stretch2+1e-10) system.add_spring_force1(k_constraint,-stretch2_s*N.y,vE2)", "statevariables = system.get_state_variables() ini0 = [initialvalues[item] for item in statevariables] N = Frame('N',system)", "= Differentiable('qA',system) qB,qB_d,qB_dd = Differentiable('qB',system) qC,qC_d,qC_dd = Differentiable('qC',system) qD,qD_d,qD_dd = Differentiable('qD',system) qE,qE_d,qE_dd =", "lE = Constant(1,'lE',system) mO = Constant(2,'mO',system) mA = Constant(.1,'mA',system) mB = Constant(.1,'mB',system) mC", "Particle import pynamics.integration from pynamics.constraint import KinematicConstraint,AccelerationConstraint import sympy import numpy import matplotlib.pyplot", "ParticleA = Particle(pAcm,mA,'ParticleA') ParticleB = Particle(pBcm,mB,'ParticleB') ParticleC = Particle(pCcm,mC,'ParticleC') ParticleD = Particle(pDcm,mD,'ParticleD') #ParticleE", "system.addforce(-b_ankle*wOE,wOE) # stretch1 = -pE1.dot(N.y) stretch1_s = (stretch1+abs(stretch1)) on = 
stretch1_s/(2*stretch1+1e-10) system.add_spring_force1(k_constraint,-stretch1_s*N.y,vE1) system.addforce(-b_constraint*vE1*on,vE1)", "mA = Constant(.1,'mA',system) mB = Constant(.1,'mB',system) mC = Constant(.1,'mC',system) mD = Constant(.1,'mD',system) mE", "y: 2, y_d: 0, qO: 5*pi/180, qO_d: 0, qA: -0.89, qA_d: 0, qB:", "eq_d] eq_dd_scalar = [] eq_dd_scalar.append(eq_dd[0].dot(N.x)) eq_dd_scalar.append(eq_dd[0].dot(N.y)) c = AccelerationConstraint(eq_dd_scalar) # c.linearize(0) system.add_constraint(c) #", "= Differentiable('qO',system) qA,qA_d,qA_dd = Differentiable('qA',system) qB,qB_d,qB_dd = Differentiable('qB',system) qC,qC_d,qC_dd = Differentiable('qC',system) qD,qD_d,qD_dd =", "= Constant(1,'lE',system) mO = Constant(2,'mO',system) mA = Constant(.1,'mA',system) mB = Constant(.1,'mB',system) mC =", "result = constraint_system.solve_numeric(variables,guess,system.constant_values) ini = [] for item in system.get_state_variables(): if item in", "B.get_w_to(D) wOE = O.get_w_to(E) BodyO = Body('BodyO',O,pOcm,mO,Dyadic.build(O,I_main,I_main,I_main),system) #BodyA = Body('BodyA',A,pAcm,mA,Dyadic.build(A,I_leg,I_leg,I_leg),system) #BodyB = Body('BodyB',B,pBcm,mB,Dyadic.build(B,I_leg,I_leg,I_leg),system)", "= Particle(pDcm,mD,'ParticleD') #ParticleE = Particle(pEcm,mE,'ParticleE') system.addforce(-b*wOA,wOA) system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE) # stretch1 =", "0, qA: -0.89, qA_d: 0, qB: -2.64, qB_d: 0, qC: -pi+0.89, qC_d: 0,", "from pynamics.output import Output,PointsOutput from pynamics.particle import Particle import pynamics.integration from pynamics.constraint import", "= AccelerationConstraint(eq_dd_scalar) # c.linearize(0) system.add_constraint(c) # f,ma = system.getdynamics() func1 = system.state_space_post_invert(f,ma,constants =", "Written by <NAME> Email: danaukes<at>gmail.com Please see LICENSE for full license. 
\"\"\" import", "+ lD*D.x points = [pDtip,pCD,pOC,pOA,pAB,pBtip] eqs = [] eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y)) constraint_system=KinematicConstraint(eqs) variables =", "= Differentiable('qB',system) qC,qC_d,qC_dd = Differentiable('qC',system) qD,qD_d,qD_dd = Differentiable('qD',system) qE,qE_d,qE_dd = Differentiable('qE',system) initialvalues={ x:", "system.add_spring_force1(k,(qD-qB-preload5)*N.z,wBD) system.add_spring_force1(k_ankle,(qE-qO-preload6)*N.z,wOE) system.addforcegravity(-g*N.y) import pynamics.time_series x = [0,5,5,7,7,9,9,10] y = [0,0,1,1,-1,-1,0,0] my_signal, ft2", "c = AccelerationConstraint(eq_dd_scalar) # c.linearize(0) system.add_constraint(c) # f,ma = system.getdynamics() func1 = system.state_space_post_invert(f,ma,constants", "by <NAME> Email: danaukes<at>gmail.com Please see LICENSE for full license. \"\"\" import pynamics", "qC: -pi+0.89, qC_d: 0, qD: -pi+2.64, qD_d: 0, qE: 0, qE_d: 0, }", "= Constant(.1,'mE',system) I_main = Constant(1,'I_main',system) I_leg = Constant(.1,'I_leg',system) g = Constant(9.81,'g',system) b =", "y = y.reshape((-1,9,2)) plt.figure() for item in y[::30]: plt.plot(*(item.T)) #points.animate(fps = 30, movie_name='parallel_five_bar_jumper_foot.mp4',lw=2)", "tol=1e-5 lO = Constant(.5,'lO',system) lA = Constant(.75,'lA',system) lB = Constant(1,'lB',system) lC = Constant(.75,'lC',system)", "mB = Constant(.1,'mB',system) mC = Constant(.1,'mC',system) mD = Constant(.1,'mD',system) mE = Constant(.1,'mE',system) I_main", "= pCD + lD*D.x points = [pDtip,pCD,pOC,pOA,pAB,pBtip] eqs = [] eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y)) constraint_system=KinematicConstraint(eqs)", "= System() pynamics.set_system(__name__,system) tol=1e-5 lO = Constant(.5,'lO',system) lA = Constant(.75,'lA',system) lB = Constant(1,'lB',system)", "pCD + lD*D.x points = [pDtip,pCD,pOC,pOA,pAB,pBtip] eqs = [] eqs.append((pBtip-pDtip).dot(N.x)) eqs.append((pBtip-pDtip).dot(N.y)) 
constraint_system=KinematicConstraint(eqs) variables", "f,ma = system.getdynamics() func1 = system.state_space_post_invert(f,ma,constants = system.constant_values,variable_functions = {my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE =", "= system.getdynamics() func1 = system.state_space_post_invert(f,ma,constants = system.constant_values,variable_functions = {my_signal:ft2}) states=pynamics.integration.integrate_odeint(func1,ini,t,rtol=tol,atol=tol) KE = system.get_KE()", "= Frame('A',system) B = Frame('B',system) C = Frame('C',system) D = Frame('D',system) E =", "system.addforce(-b*wAB,wAB) system.addforce(-b*wOC,wOC) system.addforce(-b*wCD,wCD) system.addforce(-b_ankle*wOE,wOE) # stretch1 = -pE1.dot(N.y) stretch1_s = (stretch1+abs(stretch1)) on =", "torque = my_signal*stall_torque system.addforce(torque*O.z,wOA) system.addforce(-torque*O.z,wOC) # eq = [] eq.append(pBtip-pDtip) eq_d= [item.time_derivative() for" ]
[ "def list_file(file,loc_result): n_folder=[];names=[] for i in range(len(file)): list_file=file[i].split('/')[2] list_file=list_file.replace('.csv','') names.append(list_file) list_file=('match_'+list_file) n_folder.append(loc_result+list_file) return", "pathlib import Path #libreria para manejar opciones del sistema import os #==Funciones auxiliares========================================================", "else: loc_origen=loc_result+file[0].split('/')[1] if os.path.exists(loc_origen)==False: os.mkdir(loc_origen) if energy==2: type_energy=['H','M','L'] n_folder=[] for j in range(3):", "list_file=list_file.replace('.csv','') names.append(list_file) list_file=('match_'+list_file) n_folder.append(loc_result+list_file) return n_folder,names def select_match(file_exp,file_theo): list_match=[] for i in range(len(file_exp)):", "range(len(file_theo)): if origen in file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break return list_match def file(file_exp,file_theo): list_match=[] for", "import Path #libreria para manejar opciones del sistema import os #==Funciones auxiliares======================================================== def", "in file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break return list_match def file(file_exp,file_theo): list_match=[] for i in range(len(file_exp)):", "if type(file)==str: loc_origen=loc_result+file.split('/')[1] else: loc_origen=loc_result+file[0].split('/')[1] if os.path.exists(loc_origen)==False: os.mkdir(loc_origen) if energy==2: type_energy=['H','M','L'] n_folder=[] for", "# Manejo de datos #import pandas as pd #multiples data frames #from pathlib", "n_folder=[];names=[] for i in range(len(file)): list_file=file[i].split('/')[2] list_file=list_file.replace('.csv','') names.append(list_file) list_file=('match_'+list_file) n_folder.append(loc_result+list_file) return n_folder,names def", "datos #import pandas as pd #multiples data frames #from 
pathlib import Path #libreria", "para manejar opciones del sistema import os #==Funciones auxiliares======================================================== def match_name(file,loc_result,energy=False): if os.path.exists(loc_result)==False:", "in range(len(file)): list_file=file[i].split('/')[2] list_file=list_file.replace('.csv','') names.append(list_file) list_file=('match_'+list_file) n_folder.append(loc_result+list_file) return n_folder,names def select_match(file_exp,file_theo): list_match=[] for", "Manejo de datos #import pandas as pd #multiples data frames #from pathlib import", "os.mkdir(loc_result) if type(file)==str: loc_origen=loc_result+file.split('/')[1] else: loc_origen=loc_result+file[0].split('/')[1] if os.path.exists(loc_origen)==False: os.mkdir(loc_origen) if energy==2: type_energy=['H','M','L'] n_folder=[]", "if energy==2: type_energy=['H','M','L'] n_folder=[] for j in range(3): Loc_final=loc_origen+'/'+type_energy[j] if os.path.exists(Loc_final)==False: os.mkdir(Loc_final) if", "energy==2: type_energy=['H','M','L'] n_folder=[] for j in range(3): Loc_final=loc_origen+'/'+type_energy[j] if os.path.exists(Loc_final)==False: os.mkdir(Loc_final) if type(file)==str:", "type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') else: n_folder,names=list_file(file,loc_origen+'/') return n_folder def list_file(file,loc_result): n_folder=[];names=[] for i in", "manejar opciones del sistema import os #==Funciones auxiliares======================================================== def match_name(file,loc_result,energy=False): if os.path.exists(loc_result)==False: os.mkdir(loc_result)", "auxiliares======================================================== def match_name(file,loc_result,energy=False): if os.path.exists(loc_result)==False: os.mkdir(loc_result) if type(file)==str: loc_origen=loc_result+file.split('/')[1] else: loc_origen=loc_result+file[0].split('/')[1] if 
os.path.exists(loc_origen)==False:", "match_name(file,loc_result,energy=False): if os.path.exists(loc_result)==False: os.mkdir(loc_result) if type(file)==str: loc_origen=loc_result+file.split('/')[1] else: loc_origen=loc_result+file[0].split('/')[1] if os.path.exists(loc_origen)==False: os.mkdir(loc_origen) if", "#import pandas as pd #multiples data frames #from pathlib import Path #libreria para", "os #==Funciones auxiliares======================================================== def match_name(file,loc_result,energy=False): if os.path.exists(loc_result)==False: os.mkdir(loc_result) if type(file)==str: loc_origen=loc_result+file.split('/')[1] else: loc_origen=loc_result+file[0].split('/')[1]", "#==Funciones auxiliares======================================================== def match_name(file,loc_result,energy=False): if os.path.exists(loc_result)==False: os.mkdir(loc_result) if type(file)==str: loc_origen=loc_result+file.split('/')[1] else: loc_origen=loc_result+file[0].split('/')[1] if", "names=file.split('/')[2].replace('.csv','') n_folder.append(Folder) else: Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder else: if type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') else: n_folder,names=list_file(file,loc_origen+'/') return", "k in range(len(file_theo)): if origen in file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break return list_match def file(file_exp,file_theo):", "return n_folder,names def select_match(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for", "file(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k in range(len(file_theo)):", "type(file)==str: loc_origen=loc_result+file.split('/')[1] else: 
loc_origen=loc_result+file[0].split('/')[1] if os.path.exists(loc_origen)==False: os.mkdir(loc_origen) if energy==2: type_energy=['H','M','L'] n_folder=[] for j", "in range(3): Loc_final=loc_origen+'/'+type_energy[j] if os.path.exists(Loc_final)==False: os.mkdir(Loc_final) if type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') n_folder.append(Folder) else: Folder,names=list_file(file,Loc_final+'/')", "sistema import os #==Funciones auxiliares======================================================== def match_name(file,loc_result,energy=False): if os.path.exists(loc_result)==False: os.mkdir(loc_result) if type(file)==str: loc_origen=loc_result+file.split('/')[1]", "range(3): Loc_final=loc_origen+'/'+type_energy[j] if os.path.exists(Loc_final)==False: os.mkdir(Loc_final) if type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') n_folder.append(Folder) else: Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder", "loc_origen=loc_result+file.split('/')[1] else: loc_origen=loc_result+file[0].split('/')[1] if os.path.exists(loc_origen)==False: os.mkdir(loc_origen) if energy==2: type_energy=['H','M','L'] n_folder=[] for j in", "n_folder=[] for j in range(3): Loc_final=loc_origen+'/'+type_energy[j] if os.path.exists(Loc_final)==False: os.mkdir(Loc_final) if type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','')", "os.path.exists(loc_origen)==False: os.mkdir(loc_origen) if energy==2: type_energy=['H','M','L'] n_folder=[] for j in range(3): Loc_final=loc_origen+'/'+type_energy[j] if os.path.exists(Loc_final)==False:", "origen=origen.replace('.csv','') for k in range(len(file_theo)): if origen in file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break return list_match", "os.path.exists(Loc_final)==False: 
os.mkdir(Loc_final) if type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') n_folder.append(Folder) else: Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder else: if type(file)==str:", "list_match def file(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k", "Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder else: if type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') else: n_folder,names=list_file(file,loc_origen+'/') return n_folder def list_file(file,loc_result):", "#====Librerias usadas========================================================== # Manejo de datos #import pandas as pd #multiples data frames", "name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k in range(len(file_theo)): if origen in file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break", "os.path.exists(loc_result)==False: os.mkdir(loc_result) if type(file)==str: loc_origen=loc_result+file.split('/')[1] else: loc_origen=loc_result+file[0].split('/')[1] if os.path.exists(loc_origen)==False: os.mkdir(loc_origen) if energy==2: type_energy=['H','M','L']", "list_file=('match_'+list_file) n_folder.append(loc_result+list_file) return n_folder,names def select_match(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1]", "select_match(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k in range(len(file_theo)):", "if origen in file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break return list_match def 
file(file_exp,file_theo): list_match=[] for i", "else: n_folder,names=list_file(file,loc_origen+'/') return n_folder def list_file(file,loc_result): n_folder=[];names=[] for i in range(len(file)): list_file=file[i].split('/')[2] list_file=list_file.replace('.csv','')", "list_file=file[i].split('/')[2] list_file=list_file.replace('.csv','') names.append(list_file) list_file=('match_'+list_file) n_folder.append(loc_result+list_file) return n_folder,names def select_match(file_exp,file_theo): list_match=[] for i in", "list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k in range(len(file_theo)): if", "if os.path.exists(Loc_final)==False: os.mkdir(Loc_final) if type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') n_folder.append(Folder) else: Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder else: if", "def match_name(file,loc_result,energy=False): if os.path.exists(loc_result)==False: os.mkdir(loc_result) if type(file)==str: loc_origen=loc_result+file.split('/')[1] else: loc_origen=loc_result+file[0].split('/')[1] if os.path.exists(loc_origen)==False: os.mkdir(loc_origen)", "in range(len(file_theo)): if origen in file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break return list_match def file(file_exp,file_theo): list_match=[]", "def file(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k in", "origen=name.split('_')[1] origen=origen.replace('.csv','') for k in range(len(file_theo)): if origen in file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break return", "#libreria para manejar opciones del sistema import os #==Funciones auxiliares======================================================== def 
match_name(file,loc_result,energy=False): if", "usadas========================================================== # Manejo de datos #import pandas as pd #multiples data frames #from", "return n_folder def list_file(file,loc_result): n_folder=[];names=[] for i in range(len(file)): list_file=file[i].split('/')[2] list_file=list_file.replace('.csv','') names.append(list_file) list_file=('match_'+list_file)", "origen in file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break return list_match def file(file_exp,file_theo): list_match=[] for i in", "#from pathlib import Path #libreria para manejar opciones del sistema import os #==Funciones", "import os #==Funciones auxiliares======================================================== def match_name(file,loc_result,energy=False): if os.path.exists(loc_result)==False: os.mkdir(loc_result) if type(file)==str: loc_origen=loc_result+file.split('/')[1] else:", "Loc_final=loc_origen+'/'+type_energy[j] if os.path.exists(Loc_final)==False: os.mkdir(Loc_final) if type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') n_folder.append(Folder) else: Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder else:", "type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') n_folder.append(Folder) else: Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder else: if type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') else:", "break return list_match def file(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','')", "<filename>Comparison_fun_2.py #====Librerias usadas========================================================== # Manejo de datos #import pandas as pd 
#multiples data", "#multiples data frames #from pathlib import Path #libreria para manejar opciones del sistema", "return list_match def file(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for", "for i in range(len(file)): list_file=file[i].split('/')[2] list_file=list_file.replace('.csv','') names.append(list_file) list_file=('match_'+list_file) n_folder.append(loc_result+list_file) return n_folder,names def select_match(file_exp,file_theo):", "pandas as pd #multiples data frames #from pathlib import Path #libreria para manejar", "j in range(3): Loc_final=loc_origen+'/'+type_energy[j] if os.path.exists(Loc_final)==False: os.mkdir(Loc_final) if type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') n_folder.append(Folder) else:", "Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') n_folder.append(Folder) else: Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder else: if type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') else: n_folder,names=list_file(file,loc_origen+'/')", "os.mkdir(Loc_final) if type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') n_folder.append(Folder) else: Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder else: if type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','')", "in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k in range(len(file_theo)): if origen in file_theo[k]:", "for k in range(len(file_theo)): if origen in file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break return 
list_match def", "else: Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder else: if type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') else: n_folder,names=list_file(file,loc_origen+'/') return n_folder def", "i in range(len(file)): list_file=file[i].split('/')[2] list_file=list_file.replace('.csv','') names.append(list_file) list_file=('match_'+list_file) n_folder.append(loc_result+list_file) return n_folder,names def select_match(file_exp,file_theo): list_match=[]", "n_folder.append(loc_result+list_file) return n_folder,names def select_match(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','')", "de datos #import pandas as pd #multiples data frames #from pathlib import Path", "if os.path.exists(loc_result)==False: os.mkdir(loc_result) if type(file)==str: loc_origen=loc_result+file.split('/')[1] else: loc_origen=loc_result+file[0].split('/')[1] if os.path.exists(loc_origen)==False: os.mkdir(loc_origen) if energy==2:", "else: if type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') else: n_folder,names=list_file(file,loc_origen+'/') return n_folder def list_file(file,loc_result): n_folder=[];names=[] for", "opciones del sistema import os #==Funciones auxiliares======================================================== def match_name(file,loc_result,energy=False): if os.path.exists(loc_result)==False: os.mkdir(loc_result) if", "file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break return list_match def file(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1]", "frames #from pathlib import Path #libreria para manejar opciones del sistema import os", "n_folder.append(Folder) else: 
Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder else: if type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') else: n_folder,names=list_file(file,loc_origen+'/') return n_folder", "pd #multiples data frames #from pathlib import Path #libreria para manejar opciones del", "data frames #from pathlib import Path #libreria para manejar opciones del sistema import", "def select_match(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k in", "for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k in range(len(file_theo)): if origen", "if type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') n_folder.append(Folder) else: Folder,names=list_file(file,Loc_final+'/') n_folder=n_folder+Folder else: if type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','')", "del sistema import os #==Funciones auxiliares======================================================== def match_name(file,loc_result,energy=False): if os.path.exists(loc_result)==False: os.mkdir(loc_result) if type(file)==str:", "range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k in range(len(file_theo)): if origen in file_theo[k]: list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen])", "os.mkdir(loc_origen) if energy==2: type_energy=['H','M','L'] n_folder=[] for j in range(3): Loc_final=loc_origen+'/'+type_energy[j] if os.path.exists(Loc_final)==False: os.mkdir(Loc_final)", "n_folder def list_file(file,loc_result): n_folder=[];names=[] for i in range(len(file)): list_file=file[i].split('/')[2] 
list_file=list_file.replace('.csv','') names.append(list_file) list_file=('match_'+list_file) n_folder.append(loc_result+list_file)", "list_match.append([file_theo[k],file_exp[i],name.replace('.csv',''),origen]) break return list_match def file(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1]", "i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k in range(len(file_theo)): if origen in", "range(len(file)): list_file=file[i].split('/')[2] list_file=list_file.replace('.csv','') names.append(list_file) list_file=('match_'+list_file) n_folder.append(loc_result+list_file) return n_folder,names def select_match(file_exp,file_theo): list_match=[] for i", "if os.path.exists(loc_origen)==False: os.mkdir(loc_origen) if energy==2: type_energy=['H','M','L'] n_folder=[] for j in range(3): Loc_final=loc_origen+'/'+type_energy[j] if", "n_folder,names def select_match(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1] origen=name.split('_')[1] origen=origen.replace('.csv','') for k", "if type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') else: n_folder,names=list_file(file,loc_origen+'/') return n_folder def list_file(file,loc_result): n_folder=[];names=[] for i", "Path #libreria para manejar opciones del sistema import os #==Funciones auxiliares======================================================== def match_name(file,loc_result,energy=False):", "type_energy=['H','M','L'] n_folder=[] for j in range(3): Loc_final=loc_origen+'/'+type_energy[j] if os.path.exists(Loc_final)==False: os.mkdir(Loc_final) if type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','')", "names.append(list_file) list_file=('match_'+list_file) n_folder.append(loc_result+list_file) return n_folder,names def 
select_match(file_exp,file_theo): list_match=[] for i in range(len(file_exp)): name=file_exp[i].split('/')[1]", "n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') else: n_folder,names=list_file(file,loc_origen+'/') return n_folder def list_file(file,loc_result): n_folder=[];names=[] for i in range(len(file)):", "for j in range(3): Loc_final=loc_origen+'/'+type_energy[j] if os.path.exists(Loc_final)==False: os.mkdir(Loc_final) if type(file)==str: Folder=Loc_final+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') n_folder.append(Folder)", "list_file(file,loc_result): n_folder=[];names=[] for i in range(len(file)): list_file=file[i].split('/')[2] list_file=list_file.replace('.csv','') names.append(list_file) list_file=('match_'+list_file) n_folder.append(loc_result+list_file) return n_folder,names", "names=file.split('/')[2].replace('.csv','') else: n_folder,names=list_file(file,loc_origen+'/') return n_folder def list_file(file,loc_result): n_folder=[];names=[] for i in range(len(file)): list_file=file[i].split('/')[2]", "loc_origen=loc_result+file[0].split('/')[1] if os.path.exists(loc_origen)==False: os.mkdir(loc_origen) if energy==2: type_energy=['H','M','L'] n_folder=[] for j in range(3): Loc_final=loc_origen+'/'+type_energy[j]", "n_folder,names=list_file(file,loc_origen+'/') return n_folder def list_file(file,loc_result): n_folder=[];names=[] for i in range(len(file)): list_file=file[i].split('/')[2] list_file=list_file.replace('.csv','') names.append(list_file)", "as pd #multiples data frames #from pathlib import Path #libreria para manejar opciones", "n_folder=n_folder+Folder else: if type(file)==str: n_folder=loc_origen+'/match_'+file.split('/')[2].replace('.csv','') names=file.split('/')[2].replace('.csv','') else: n_folder,names=list_file(file,loc_origen+'/') return n_folder def list_file(file,loc_result): n_folder=[];names=[]" ]
[ "5, 6, 7, 8, 9, 10, 11, 12] ROOT = Path(\"wind_data\") ROOT.mkdir(exist_ok=True) c", "{ \"product_type\": \"reanalysis\", \"format\": \"netcdf\", \"variable\": [ \"10m_u_component_of_wind\", \"10m_v_component_of_wind\", ], \"year\": str(year), \"month\":", "\"19\", \"20\", \"21\", \"22\", \"23\", \"24\", \"25\", \"26\", \"27\", \"28\", \"29\", \"30\", \"31\",", "Path(\"wind_data\") ROOT.mkdir(exist_ok=True) c = cdsapi.Client(key=\"YOUR_API_KEY\") for year in YEARS: for month in MONTHS:", "import Path import cdsapi YEARS = [2019] MONTHS = [1, 2, 3, 4,", "import cdsapi YEARS = [2019] MONTHS = [1, 2, 3, 4, 5, 6,", "\"30\", \"31\", ], \"time\": [ \"00:00\", \"06:00\", \"12:00\", \"18:00\", ], }, str(ROOT /", "\"17\", \"18\", \"19\", \"20\", \"21\", \"22\", \"23\", \"24\", \"25\", \"26\", \"27\", \"28\", \"29\",", "\"20\", \"21\", \"22\", \"23\", \"24\", \"25\", \"26\", \"27\", \"28\", \"29\", \"30\", \"31\", ],", "\"24\", \"25\", \"26\", \"27\", \"28\", \"29\", \"30\", \"31\", ], \"time\": [ \"00:00\", \"06:00\",", "from pathlib import Path import cdsapi YEARS = [2019] MONTHS = [1, 2,", "MONTHS = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11,", "[ \"10m_u_component_of_wind\", \"10m_v_component_of_wind\", ], \"year\": str(year), \"month\": month, \"day\": [ \"01\", \"02\", \"03\",", "\"05\", \"06\", \"07\", \"08\", \"09\", \"10\", \"11\", \"12\", \"13\", \"14\", \"15\", \"16\", \"17\",", "\"04\", \"05\", \"06\", \"07\", \"08\", \"09\", \"10\", \"11\", \"12\", \"13\", \"14\", \"15\", \"16\",", "4, 5, 6, 7, 8, 9, 10, 11, 12] ROOT = Path(\"wind_data\") ROOT.mkdir(exist_ok=True)", "\"29\", \"30\", \"31\", ], \"time\": [ \"00:00\", \"06:00\", \"12:00\", \"18:00\", ], }, str(ROOT", "YEARS = [2019] MONTHS = [1, 2, 3, 4, 5, 6, 7, 8,", "year in YEARS: for month in MONTHS: month = str(month).zfill(2) c.retrieve( \"reanalysis-era5-single-levels\", {", "in YEARS: for month in MONTHS: month = str(month).zfill(2) c.retrieve( \"reanalysis-era5-single-levels\", { \"product_type\":", 
"str(month).zfill(2) c.retrieve( \"reanalysis-era5-single-levels\", { \"product_type\": \"reanalysis\", \"format\": \"netcdf\", \"variable\": [ \"10m_u_component_of_wind\", \"10m_v_component_of_wind\", ],", "c.retrieve( \"reanalysis-era5-single-levels\", { \"product_type\": \"reanalysis\", \"format\": \"netcdf\", \"variable\": [ \"10m_u_component_of_wind\", \"10m_v_component_of_wind\", ], \"year\":", "3, 4, 5, 6, 7, 8, 9, 10, 11, 12] ROOT = Path(\"wind_data\")", "\"18\", \"19\", \"20\", \"21\", \"22\", \"23\", \"24\", \"25\", \"26\", \"27\", \"28\", \"29\", \"30\",", "YEARS: for month in MONTHS: month = str(month).zfill(2) c.retrieve( \"reanalysis-era5-single-levels\", { \"product_type\": \"reanalysis\",", "month = str(month).zfill(2) c.retrieve( \"reanalysis-era5-single-levels\", { \"product_type\": \"reanalysis\", \"format\": \"netcdf\", \"variable\": [ \"10m_u_component_of_wind\",", "\"09\", \"10\", \"11\", \"12\", \"13\", \"14\", \"15\", \"16\", \"17\", \"18\", \"19\", \"20\", \"21\",", "in MONTHS: month = str(month).zfill(2) c.retrieve( \"reanalysis-era5-single-levels\", { \"product_type\": \"reanalysis\", \"format\": \"netcdf\", \"variable\":", "], \"time\": [ \"00:00\", \"06:00\", \"12:00\", \"18:00\", ], }, str(ROOT / f\"CDS_wind_{year}_{month}.nc\"), )", "= cdsapi.Client(key=\"YOUR_API_KEY\") for year in YEARS: for month in MONTHS: month = str(month).zfill(2)", "\"15\", \"16\", \"17\", \"18\", \"19\", \"20\", \"21\", \"22\", \"23\", \"24\", \"25\", \"26\", \"27\",", "c = cdsapi.Client(key=\"YOUR_API_KEY\") for year in YEARS: for month in MONTHS: month =", "\"year\": str(year), \"month\": month, \"day\": [ \"01\", \"02\", \"03\", \"04\", \"05\", \"06\", \"07\",", "pathlib import Path import cdsapi YEARS = [2019] MONTHS = [1, 2, 3,", "12] ROOT = Path(\"wind_data\") ROOT.mkdir(exist_ok=True) c = cdsapi.Client(key=\"YOUR_API_KEY\") for year in YEARS: for", "\"10\", \"11\", \"12\", \"13\", \"14\", \"15\", \"16\", \"17\", \"18\", \"19\", \"20\", \"21\", 
\"22\",", "\"10m_v_component_of_wind\", ], \"year\": str(year), \"month\": month, \"day\": [ \"01\", \"02\", \"03\", \"04\", \"05\",", "\"07\", \"08\", \"09\", \"10\", \"11\", \"12\", \"13\", \"14\", \"15\", \"16\", \"17\", \"18\", \"19\",", "\"12\", \"13\", \"14\", \"15\", \"16\", \"17\", \"18\", \"19\", \"20\", \"21\", \"22\", \"23\", \"24\",", "\"variable\": [ \"10m_u_component_of_wind\", \"10m_v_component_of_wind\", ], \"year\": str(year), \"month\": month, \"day\": [ \"01\", \"02\",", "\"08\", \"09\", \"10\", \"11\", \"12\", \"13\", \"14\", \"15\", \"16\", \"17\", \"18\", \"19\", \"20\",", "\"21\", \"22\", \"23\", \"24\", \"25\", \"26\", \"27\", \"28\", \"29\", \"30\", \"31\", ], \"time\":", "\"format\": \"netcdf\", \"variable\": [ \"10m_u_component_of_wind\", \"10m_v_component_of_wind\", ], \"year\": str(year), \"month\": month, \"day\": [", "\"reanalysis\", \"format\": \"netcdf\", \"variable\": [ \"10m_u_component_of_wind\", \"10m_v_component_of_wind\", ], \"year\": str(year), \"month\": month, \"day\":", "= Path(\"wind_data\") ROOT.mkdir(exist_ok=True) c = cdsapi.Client(key=\"YOUR_API_KEY\") for year in YEARS: for month in", "\"13\", \"14\", \"15\", \"16\", \"17\", \"18\", \"19\", \"20\", \"21\", \"22\", \"23\", \"24\", \"25\",", "= [2019] MONTHS = [1, 2, 3, 4, 5, 6, 7, 8, 9,", "ROOT.mkdir(exist_ok=True) c = cdsapi.Client(key=\"YOUR_API_KEY\") for year in YEARS: for month in MONTHS: month", "9, 10, 11, 12] ROOT = Path(\"wind_data\") ROOT.mkdir(exist_ok=True) c = cdsapi.Client(key=\"YOUR_API_KEY\") for year", "6, 7, 8, 9, 10, 11, 12] ROOT = Path(\"wind_data\") ROOT.mkdir(exist_ok=True) c =", "= str(month).zfill(2) c.retrieve( \"reanalysis-era5-single-levels\", { \"product_type\": \"reanalysis\", \"format\": \"netcdf\", \"variable\": [ \"10m_u_component_of_wind\", \"10m_v_component_of_wind\",", "\"23\", \"24\", \"25\", \"26\", \"27\", \"28\", \"29\", \"30\", \"31\", ], \"time\": [ \"00:00\",", "\"reanalysis-era5-single-levels\", { \"product_type\": 
\"reanalysis\", \"format\": \"netcdf\", \"variable\": [ \"10m_u_component_of_wind\", \"10m_v_component_of_wind\", ], \"year\": str(year),", "\"25\", \"26\", \"27\", \"28\", \"29\", \"30\", \"31\", ], \"time\": [ \"00:00\", \"06:00\", \"12:00\",", "], \"year\": str(year), \"month\": month, \"day\": [ \"01\", \"02\", \"03\", \"04\", \"05\", \"06\",", "\"01\", \"02\", \"03\", \"04\", \"05\", \"06\", \"07\", \"08\", \"09\", \"10\", \"11\", \"12\", \"13\",", "month in MONTHS: month = str(month).zfill(2) c.retrieve( \"reanalysis-era5-single-levels\", { \"product_type\": \"reanalysis\", \"format\": \"netcdf\",", "cdsapi YEARS = [2019] MONTHS = [1, 2, 3, 4, 5, 6, 7,", "\"27\", \"28\", \"29\", \"30\", \"31\", ], \"time\": [ \"00:00\", \"06:00\", \"12:00\", \"18:00\", ],", "[2019] MONTHS = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10,", "8, 9, 10, 11, 12] ROOT = Path(\"wind_data\") ROOT.mkdir(exist_ok=True) c = cdsapi.Client(key=\"YOUR_API_KEY\") for", "\"06\", \"07\", \"08\", \"09\", \"10\", \"11\", \"12\", \"13\", \"14\", \"15\", \"16\", \"17\", \"18\",", "\"31\", ], \"time\": [ \"00:00\", \"06:00\", \"12:00\", \"18:00\", ], }, str(ROOT / f\"CDS_wind_{year}_{month}.nc\"),", "\"10m_u_component_of_wind\", \"10m_v_component_of_wind\", ], \"year\": str(year), \"month\": month, \"day\": [ \"01\", \"02\", \"03\", \"04\",", "month, \"day\": [ \"01\", \"02\", \"03\", \"04\", \"05\", \"06\", \"07\", \"08\", \"09\", \"10\",", "10, 11, 12] ROOT = Path(\"wind_data\") ROOT.mkdir(exist_ok=True) c = cdsapi.Client(key=\"YOUR_API_KEY\") for year in", "str(year), \"month\": month, \"day\": [ \"01\", \"02\", \"03\", \"04\", \"05\", \"06\", \"07\", \"08\",", "\"22\", \"23\", \"24\", \"25\", \"26\", \"27\", \"28\", \"29\", \"30\", \"31\", ], \"time\": [", "\"03\", \"04\", \"05\", \"06\", \"07\", \"08\", \"09\", \"10\", \"11\", \"12\", \"13\", \"14\", \"15\",", "\"02\", \"03\", \"04\", \"05\", \"06\", \"07\", \"08\", \"09\", \"10\", \"11\", \"12\", \"13\", \"14\",", "cdsapi.Client(key=\"YOUR_API_KEY\") for 
year in YEARS: for month in MONTHS: month = str(month).zfill(2) c.retrieve(", "\"product_type\": \"reanalysis\", \"format\": \"netcdf\", \"variable\": [ \"10m_u_component_of_wind\", \"10m_v_component_of_wind\", ], \"year\": str(year), \"month\": month,", "\"28\", \"29\", \"30\", \"31\", ], \"time\": [ \"00:00\", \"06:00\", \"12:00\", \"18:00\", ], },", "2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] ROOT =", "for month in MONTHS: month = str(month).zfill(2) c.retrieve( \"reanalysis-era5-single-levels\", { \"product_type\": \"reanalysis\", \"format\":", "Path import cdsapi YEARS = [2019] MONTHS = [1, 2, 3, 4, 5,", "7, 8, 9, 10, 11, 12] ROOT = Path(\"wind_data\") ROOT.mkdir(exist_ok=True) c = cdsapi.Client(key=\"YOUR_API_KEY\")", "MONTHS: month = str(month).zfill(2) c.retrieve( \"reanalysis-era5-single-levels\", { \"product_type\": \"reanalysis\", \"format\": \"netcdf\", \"variable\": [", "= [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]", "\"26\", \"27\", \"28\", \"29\", \"30\", \"31\", ], \"time\": [ \"00:00\", \"06:00\", \"12:00\", \"18:00\",", "for year in YEARS: for month in MONTHS: month = str(month).zfill(2) c.retrieve( \"reanalysis-era5-single-levels\",", "\"11\", \"12\", \"13\", \"14\", \"15\", \"16\", \"17\", \"18\", \"19\", \"20\", \"21\", \"22\", \"23\",", "\"14\", \"15\", \"16\", \"17\", \"18\", \"19\", \"20\", \"21\", \"22\", \"23\", \"24\", \"25\", \"26\",", "\"month\": month, \"day\": [ \"01\", \"02\", \"03\", \"04\", \"05\", \"06\", \"07\", \"08\", \"09\",", "\"16\", \"17\", \"18\", \"19\", \"20\", \"21\", \"22\", \"23\", \"24\", \"25\", \"26\", \"27\", \"28\",", "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] ROOT", "[ \"01\", \"02\", \"03\", \"04\", \"05\", \"06\", \"07\", \"08\", \"09\", \"10\", \"11\", \"12\",", "ROOT = Path(\"wind_data\") ROOT.mkdir(exist_ok=True) c = cdsapi.Client(key=\"YOUR_API_KEY\") for year in YEARS: for month", "\"day\": [ \"01\", \"02\", \"03\", \"04\", \"05\", \"06\", \"07\", \"08\", \"09\", \"10\", \"11\",", "\"netcdf\", \"variable\": [ 
\"10m_u_component_of_wind\", \"10m_v_component_of_wind\", ], \"year\": str(year), \"month\": month, \"day\": [ \"01\",", "11, 12] ROOT = Path(\"wind_data\") ROOT.mkdir(exist_ok=True) c = cdsapi.Client(key=\"YOUR_API_KEY\") for year in YEARS:" ]
[ "username already exists.\"}, 400 user = UserModel(**data) try: user.save_to_db() except Exception as e:", "-> Tuple[Dict[str, str], int]: data = UserRegister.parser.parse_args() if UserModel.find_by_username(data[\"username\"]): return {\"message\": \"A user", "UserModel(**data) try: user.save_to_db() except Exception as e: return {\"message\", \"An error ocurred creating", "type=str, required=True, help=\"This field cannot be left blank!\" ) def post(self) -> Tuple[Dict[str,", "already exists.\"}, 400 user = UserModel(**data) try: user.save_to_db() except Exception as e: return", "Dict, Tuple from flask_restful import Resource, reqparse from models.user import UserModel class UserRegister(Resource):", "400 user = UserModel(**data) try: user.save_to_db() except Exception as e: return {\"message\", \"An", "UserModel class UserRegister(Resource): parser = reqparse.RequestParser() parser.add_argument( \"username\", type=str, required=True, help=\"This field cannot", "from flask_restful import Resource, reqparse from models.user import UserModel class UserRegister(Resource): parser =", "required=True, help=\"This field cannot be left blank!\" ) def post(self) -> Tuple[Dict[str, str],", "left blank!\" ) parser.add_argument( \"password\", type=str, required=True, help=\"This field cannot be left blank!\"", "parser = reqparse.RequestParser() parser.add_argument( \"username\", type=str, required=True, help=\"This field cannot be left blank!\"", "help=\"This field cannot be left blank!\" ) def post(self) -> Tuple[Dict[str, str], int]:", "required=True, help=\"This field cannot be left blank!\" ) parser.add_argument( \"password\", type=str, required=True, help=\"This", "cannot be left blank!\" ) def post(self) -> Tuple[Dict[str, str], int]: data =", "UserRegister.parser.parse_args() if UserModel.find_by_username(data[\"username\"]): return {\"message\": \"A user with that username already exists.\"}, 400", "except Exception as e: return {\"message\", \"An error ocurred 
creating the user.\"}, 500", "\"A user with that username already exists.\"}, 400 user = UserModel(**data) try: user.save_to_db()", "field cannot be left blank!\" ) def post(self) -> Tuple[Dict[str, str], int]: data", "post(self) -> Tuple[Dict[str, str], int]: data = UserRegister.parser.parse_args() if UserModel.find_by_username(data[\"username\"]): return {\"message\": \"A", "return {\"message\", \"An error ocurred creating the user.\"}, 500 return {\"message\": \"User created", "left blank!\" ) def post(self) -> Tuple[Dict[str, str], int]: data = UserRegister.parser.parse_args() if", "parser.add_argument( \"username\", type=str, required=True, help=\"This field cannot be left blank!\" ) parser.add_argument( \"password\",", "help=\"This field cannot be left blank!\" ) parser.add_argument( \"password\", type=str, required=True, help=\"This field", "type=str, required=True, help=\"This field cannot be left blank!\" ) parser.add_argument( \"password\", type=str, required=True,", "user with that username already exists.\"}, 400 user = UserModel(**data) try: user.save_to_db() except", "from models.user import UserModel class UserRegister(Resource): parser = reqparse.RequestParser() parser.add_argument( \"username\", type=str, required=True,", "reqparse from models.user import UserModel class UserRegister(Resource): parser = reqparse.RequestParser() parser.add_argument( \"username\", type=str,", "parser.add_argument( \"password\", type=str, required=True, help=\"This field cannot be left blank!\" ) def post(self)", ") parser.add_argument( \"password\", type=str, required=True, help=\"This field cannot be left blank!\" ) def", "try: user.save_to_db() except Exception as e: return {\"message\", \"An error ocurred creating the", "models.user import UserModel class UserRegister(Resource): parser = reqparse.RequestParser() parser.add_argument( \"username\", type=str, required=True, help=\"This", "= UserRegister.parser.parse_args() if 
UserModel.find_by_username(data[\"username\"]): return {\"message\": \"A user with that username already exists.\"},", "that username already exists.\"}, 400 user = UserModel(**data) try: user.save_to_db() except Exception as", "\"username\", type=str, required=True, help=\"This field cannot be left blank!\" ) parser.add_argument( \"password\", type=str,", "<reponame>jacoboviii/flask-rest-api from typing import Dict, Tuple from flask_restful import Resource, reqparse from models.user", "reqparse.RequestParser() parser.add_argument( \"username\", type=str, required=True, help=\"This field cannot be left blank!\" ) parser.add_argument(", "UserRegister(Resource): parser = reqparse.RequestParser() parser.add_argument( \"username\", type=str, required=True, help=\"This field cannot be left", "class UserRegister(Resource): parser = reqparse.RequestParser() parser.add_argument( \"username\", type=str, required=True, help=\"This field cannot be", "\"password\", type=str, required=True, help=\"This field cannot be left blank!\" ) def post(self) ->", "Tuple[Dict[str, str], int]: data = UserRegister.parser.parse_args() if UserModel.find_by_username(data[\"username\"]): return {\"message\": \"A user with", "e: return {\"message\", \"An error ocurred creating the user.\"}, 500 return {\"message\": \"User", "def post(self) -> Tuple[Dict[str, str], int]: data = UserRegister.parser.parse_args() if UserModel.find_by_username(data[\"username\"]): return {\"message\":", "import Resource, reqparse from models.user import UserModel class UserRegister(Resource): parser = reqparse.RequestParser() parser.add_argument(", "field cannot be left blank!\" ) parser.add_argument( \"password\", type=str, required=True, help=\"This field cannot", "import Dict, Tuple from flask_restful import Resource, reqparse from models.user import UserModel class", "typing import Dict, Tuple from flask_restful import Resource, reqparse from models.user import UserModel", "int]: data = 
UserRegister.parser.parse_args() if UserModel.find_by_username(data[\"username\"]): return {\"message\": \"A user with that username", "if UserModel.find_by_username(data[\"username\"]): return {\"message\": \"A user with that username already exists.\"}, 400 user", "= UserModel(**data) try: user.save_to_db() except Exception as e: return {\"message\", \"An error ocurred", "Exception as e: return {\"message\", \"An error ocurred creating the user.\"}, 500 return", "\"An error ocurred creating the user.\"}, 500 return {\"message\": \"User created successfully\"}, 201", "Tuple from flask_restful import Resource, reqparse from models.user import UserModel class UserRegister(Resource): parser", ") def post(self) -> Tuple[Dict[str, str], int]: data = UserRegister.parser.parse_args() if UserModel.find_by_username(data[\"username\"]): return", "from typing import Dict, Tuple from flask_restful import Resource, reqparse from models.user import", "be left blank!\" ) def post(self) -> Tuple[Dict[str, str], int]: data = UserRegister.parser.parse_args()", "{\"message\", \"An error ocurred creating the user.\"}, 500 return {\"message\": \"User created successfully\"},", "UserModel.find_by_username(data[\"username\"]): return {\"message\": \"A user with that username already exists.\"}, 400 user =", "user.save_to_db() except Exception as e: return {\"message\", \"An error ocurred creating the user.\"},", "flask_restful import Resource, reqparse from models.user import UserModel class UserRegister(Resource): parser = reqparse.RequestParser()", "import UserModel class UserRegister(Resource): parser = reqparse.RequestParser() parser.add_argument( \"username\", type=str, required=True, help=\"This field", "data = UserRegister.parser.parse_args() if UserModel.find_by_username(data[\"username\"]): return {\"message\": \"A user with that username already", "return {\"message\": \"A user with that username already exists.\"}, 400 user = UserModel(**data)", "Resource, reqparse from 
models.user import UserModel class UserRegister(Resource): parser = reqparse.RequestParser() parser.add_argument( \"username\",", "blank!\" ) def post(self) -> Tuple[Dict[str, str], int]: data = UserRegister.parser.parse_args() if UserModel.find_by_username(data[\"username\"]):", "str], int]: data = UserRegister.parser.parse_args() if UserModel.find_by_username(data[\"username\"]): return {\"message\": \"A user with that", "be left blank!\" ) parser.add_argument( \"password\", type=str, required=True, help=\"This field cannot be left", "cannot be left blank!\" ) parser.add_argument( \"password\", type=str, required=True, help=\"This field cannot be", "blank!\" ) parser.add_argument( \"password\", type=str, required=True, help=\"This field cannot be left blank!\" )", "= reqparse.RequestParser() parser.add_argument( \"username\", type=str, required=True, help=\"This field cannot be left blank!\" )", "with that username already exists.\"}, 400 user = UserModel(**data) try: user.save_to_db() except Exception", "user = UserModel(**data) try: user.save_to_db() except Exception as e: return {\"message\", \"An error", "as e: return {\"message\", \"An error ocurred creating the user.\"}, 500 return {\"message\":", "{\"message\": \"A user with that username already exists.\"}, 400 user = UserModel(**data) try:", "exists.\"}, 400 user = UserModel(**data) try: user.save_to_db() except Exception as e: return {\"message\"," ]
[ "bounds Test.True(bounds.Contains(bound1), \"varify bounds enclose individual elements\") Test.True(bounds.Contains(bound2), \"varify bounds enclose individual elements\")", "LLC. See License.txt. import System.Xml import Scea.Dom import Test #create new document print", "fsmDocument = atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid = False #create 2 states namespace = r'http://www.scea.com/FSM/1_0' stateTypeName=", "bound2 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) # varify bounds Test.True(bounds.Contains(bound1), \"varify bounds enclose individual elements\") Test.True(bounds.Contains(bound2),", "2 states namespace = r'http://www.scea.com/FSM/1_0' stateTypeName= System.Xml.XmlQualifiedName( 'stateType', namespace) stateType = DomSchemaRegistry.GetComplexType(stateTypeName) state1", "editor.Insert(list) Test.Equal(2, fsmDocument.Circuit.Elements.Count, \"verify 2 elements inserted\") #place two elements apart from each", "state2]) editor.Insert(list) Test.Equal(2, fsmDocument.Circuit.Elements.Count, \"verify 2 elements inserted\") #place two elements apart from", "fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) # varify bounds Test.True(bounds.Contains(bound1), \"varify bounds", "other, note Element.Position only acccepts integers fsmDocument.Circuit.Elements[0].Position = Point(96, 128) fsmDocument.Circuit.Elements[1].Position = Point(192,", "state1 = DomObject(stateType) state2 = DomObject(stateType) list = List[DomObject]() list.AddRange([state1, state2]) editor.Insert(list) Test.Equal(2,", "2014 Sony Computer Entertainment America LLC. See License.txt. 
import System.Xml import Scea.Dom import", "r'http://www.scea.com/FSM/1_0' stateTypeName= System.Xml.XmlQualifiedName( 'stateType', namespace) stateType = DomSchemaRegistry.GetComplexType(stateTypeName) state1 = DomObject(stateType) state2 =", "List[DomObject]() list.AddRange([state1, state2]) editor.Insert(list) Test.Equal(2, fsmDocument.Circuit.Elements.Count, \"verify 2 elements inserted\") #place two elements", "= False #create 2 states namespace = r'http://www.scea.com/FSM/1_0' stateTypeName= System.Xml.XmlQualifiedName( 'stateType', namespace) stateType", "DomSchemaRegistry.GetComplexType(stateTypeName) state1 = DomObject(stateType) state2 = DomObject(stateType) list = List[DomObject]() list.AddRange([state1, state2]) editor.Insert(list)", "License.txt. import System.Xml import Scea.Dom import Test #create new document print editor fsmDocument", "#Copyright (c) 2014 Sony Computer Entertainment America LLC. See License.txt. import System.Xml import", "Scea.Dom import Test #create new document print editor fsmDocument = atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid =", "= fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) # varify bounds Test.True(bounds.Contains(bound1), \"varify", "document print editor fsmDocument = atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid = False #create 2 states namespace", "= Point(96, 128) fsmDocument.Circuit.Elements[1].Position = Point(192, 228) bounds = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0])", "Test.True(bounds.Contains(bound1), \"varify bounds enclose individual elements\") Test.True(bounds.Contains(bound2), \"varify bounds enclose individual elements\") print", "bounds = 
fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) # varify bounds Test.True(bounds.Contains(bound1),", "Test.Equal(2, fsmDocument.Circuit.Elements.Count, \"verify 2 elements inserted\") #place two elements apart from each other,", "import Test #create new document print editor fsmDocument = atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid = False", "varify bounds Test.True(bounds.Contains(bound1), \"varify bounds enclose individual elements\") Test.True(bounds.Contains(bound2), \"varify bounds enclose individual", "namespace = r'http://www.scea.com/FSM/1_0' stateTypeName= System.Xml.XmlQualifiedName( 'stateType', namespace) stateType = DomSchemaRegistry.GetComplexType(stateTypeName) state1 = DomObject(stateType)", "False #create 2 states namespace = r'http://www.scea.com/FSM/1_0' stateTypeName= System.Xml.XmlQualifiedName( 'stateType', namespace) stateType =", "'stateType', namespace) stateType = DomSchemaRegistry.GetComplexType(stateTypeName) state1 = DomObject(stateType) state2 = DomObject(stateType) list =", "inserted\") #place two elements apart from each other, note Element.Position only acccepts integers", "states namespace = r'http://www.scea.com/FSM/1_0' stateTypeName= System.Xml.XmlQualifiedName( 'stateType', namespace) stateType = DomSchemaRegistry.GetComplexType(stateTypeName) state1 =", "Point(192, 228) bounds = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) # varify", "<reponame>migueldeicaza/ATF #Copyright (c) 2014 Sony Computer Entertainment America LLC. See License.txt. 
import System.Xml", "fsmDocument.Circuit.Elements.Count, \"verify 2 elements inserted\") #place two elements apart from each other, note", "integers fsmDocument.Circuit.Elements[0].Position = Point(96, 128) fsmDocument.Circuit.Elements[1].Position = Point(192, 228) bounds = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1", "fsmDocument.CircuitControl.Style.SnapToGrid = False #create 2 states namespace = r'http://www.scea.com/FSM/1_0' stateTypeName= System.Xml.XmlQualifiedName( 'stateType', namespace)", "list.AddRange([state1, state2]) editor.Insert(list) Test.Equal(2, fsmDocument.Circuit.Elements.Count, \"verify 2 elements inserted\") #place two elements apart", "state2 = DomObject(stateType) list = List[DomObject]() list.AddRange([state1, state2]) editor.Insert(list) Test.Equal(2, fsmDocument.Circuit.Elements.Count, \"verify 2", "apart from each other, note Element.Position only acccepts integers fsmDocument.Circuit.Elements[0].Position = Point(96, 128)", "Element.Position only acccepts integers fsmDocument.Circuit.Elements[0].Position = Point(96, 128) fsmDocument.Circuit.Elements[1].Position = Point(192, 228) bounds", "elements apart from each other, note Element.Position only acccepts integers fsmDocument.Circuit.Elements[0].Position = Point(96,", "128) fsmDocument.Circuit.Elements[1].Position = Point(192, 228) bounds = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2 =", "Sony Computer Entertainment America LLC. See License.txt. 
import System.Xml import Scea.Dom import Test", "stateTypeName= System.Xml.XmlQualifiedName( 'stateType', namespace) stateType = DomSchemaRegistry.GetComplexType(stateTypeName) state1 = DomObject(stateType) state2 = DomObject(stateType)", "= DomObject(stateType) state2 = DomObject(stateType) list = List[DomObject]() list.AddRange([state1, state2]) editor.Insert(list) Test.Equal(2, fsmDocument.Circuit.Elements.Count,", "= fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) # varify bounds Test.True(bounds.Contains(bound1), \"varify bounds enclose individual elements\") Test.True(bounds.Contains(bound2), \"varify", "#place two elements apart from each other, note Element.Position only acccepts integers fsmDocument.Circuit.Elements[0].Position", "America LLC. See License.txt. import System.Xml import Scea.Dom import Test #create new document", "fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) # varify bounds Test.True(bounds.Contains(bound1), \"varify bounds enclose individual elements\")", "atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid = False #create 2 states namespace = r'http://www.scea.com/FSM/1_0' stateTypeName= System.Xml.XmlQualifiedName( 'stateType',", "note Element.Position only acccepts integers fsmDocument.Circuit.Elements[0].Position = Point(96, 128) fsmDocument.Circuit.Elements[1].Position = Point(192, 228)", "DomObject(stateType) list = List[DomObject]() list.AddRange([state1, state2]) editor.Insert(list) Test.Equal(2, fsmDocument.Circuit.Elements.Count, \"verify 2 elements inserted\")", "acccepts integers fsmDocument.Circuit.Elements[0].Position = Point(96, 128) fsmDocument.Circuit.Elements[1].Position = Point(192, 228) bounds = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements)", "stateType = DomSchemaRegistry.GetComplexType(stateTypeName) state1 = DomObject(stateType) state2 = 
DomObject(stateType) list = List[DomObject]() list.AddRange([state1,", "# varify bounds Test.True(bounds.Contains(bound1), \"varify bounds enclose individual elements\") Test.True(bounds.Contains(bound2), \"varify bounds enclose", "= Point(192, 228) bounds = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) #", "each other, note Element.Position only acccepts integers fsmDocument.Circuit.Elements[0].Position = Point(96, 128) fsmDocument.Circuit.Elements[1].Position =", "228) bounds = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) # varify bounds", "(c) 2014 Sony Computer Entertainment America LLC. See License.txt. import System.Xml import Scea.Dom", "fsmDocument.Circuit.Elements[1].Position = Point(192, 228) bounds = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1])", "System.Xml.XmlQualifiedName( 'stateType', namespace) stateType = DomSchemaRegistry.GetComplexType(stateTypeName) state1 = DomObject(stateType) state2 = DomObject(stateType) list", "namespace) stateType = DomSchemaRegistry.GetComplexType(stateTypeName) state1 = DomObject(stateType) state2 = DomObject(stateType) list = List[DomObject]()", "list = List[DomObject]() list.AddRange([state1, state2]) editor.Insert(list) Test.Equal(2, fsmDocument.Circuit.Elements.Count, \"verify 2 elements inserted\") #place", "\"varify bounds enclose individual elements\") Test.True(bounds.Contains(bound2), \"varify bounds enclose individual elements\") print Test.SUCCESS", "print editor 
fsmDocument = atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid = False #create 2 states namespace =", "= atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid = False #create 2 states namespace = r'http://www.scea.com/FSM/1_0' stateTypeName= System.Xml.XmlQualifiedName(", "two elements apart from each other, note Element.Position only acccepts integers fsmDocument.Circuit.Elements[0].Position =", "= DomSchemaRegistry.GetComplexType(stateTypeName) state1 = DomObject(stateType) state2 = DomObject(stateType) list = List[DomObject]() list.AddRange([state1, state2])", "= List[DomObject]() list.AddRange([state1, state2]) editor.Insert(list) Test.Equal(2, fsmDocument.Circuit.Elements.Count, \"verify 2 elements inserted\") #place two", "new document print editor fsmDocument = atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid = False #create 2 states", "Entertainment America LLC. See License.txt. import System.Xml import Scea.Dom import Test #create new", "elements inserted\") #place two elements apart from each other, note Element.Position only acccepts", "2 elements inserted\") #place two elements apart from each other, note Element.Position only", "Computer Entertainment America LLC. See License.txt. import System.Xml import Scea.Dom import Test #create", "fsmDocument.Circuit.Elements[0].Position = Point(96, 128) fsmDocument.Circuit.Elements[1].Position = Point(192, 228) bounds = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1 =", "= fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) # varify bounds Test.True(bounds.Contains(bound1), \"varify bounds enclose individual", "See License.txt. 
import System.Xml import Scea.Dom import Test #create new document print editor", "Test #create new document print editor fsmDocument = atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid = False #create", "= r'http://www.scea.com/FSM/1_0' stateTypeName= System.Xml.XmlQualifiedName( 'stateType', namespace) stateType = DomSchemaRegistry.GetComplexType(stateTypeName) state1 = DomObject(stateType) state2", "\"verify 2 elements inserted\") #place two elements apart from each other, note Element.Position", "bound1 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) # varify bounds Test.True(bounds.Contains(bound1), \"varify bounds enclose", "#create new document print editor fsmDocument = atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid = False #create 2", "editor fsmDocument = atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid = False #create 2 states namespace = r'http://www.scea.com/FSM/1_0'", "#create 2 states namespace = r'http://www.scea.com/FSM/1_0' stateTypeName= System.Xml.XmlQualifiedName( 'stateType', namespace) stateType = DomSchemaRegistry.GetComplexType(stateTypeName)", "only acccepts integers fsmDocument.Circuit.Elements[0].Position = Point(96, 128) fsmDocument.Circuit.Elements[1].Position = Point(192, 228) bounds =", "= DomObject(stateType) list = List[DomObject]() list.AddRange([state1, state2]) editor.Insert(list) Test.Equal(2, fsmDocument.Circuit.Elements.Count, \"verify 2 elements", "Point(96, 128) fsmDocument.Circuit.Elements[1].Position = Point(192, 228) bounds = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements) bound1 = fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[0]) bound2", "fsmDocument.CircuitControl.GetBoundsF(fsmDocument.Circuit.Elements[1]) # varify bounds Test.True(bounds.Contains(bound1), \"varify bounds enclose individual 
elements\") Test.True(bounds.Contains(bound2), \"varify bounds", "from each other, note Element.Position only acccepts integers fsmDocument.Circuit.Elements[0].Position = Point(96, 128) fsmDocument.Circuit.Elements[1].Position", "System.Xml import Scea.Dom import Test #create new document print editor fsmDocument = atfFile.OpenNewDocument(editor)", "DomObject(stateType) state2 = DomObject(stateType) list = List[DomObject]() list.AddRange([state1, state2]) editor.Insert(list) Test.Equal(2, fsmDocument.Circuit.Elements.Count, \"verify", "import Scea.Dom import Test #create new document print editor fsmDocument = atfFile.OpenNewDocument(editor) fsmDocument.CircuitControl.Style.SnapToGrid", "import System.Xml import Scea.Dom import Test #create new document print editor fsmDocument =" ]
[ "chat['time'] < now - chat['delay']: handler.write_message(choice(idle_phrases)) chat['time'] = now chat['delay'] = chat['delay'] *", "be called periodically to send idle messages. # The handler parameter we get", "bot that will converse with the user 'bot': Chat(pairs, reflections), # The time", "a few minutes.\", \"I'll take a short break. Ping me when you're back.\",", "messages. # The handler parameter we get here is stored to send future", "Then double the delay so that we don't keep sending idle messages. '''", "handler.session['id'] chat_info[session]['callback'].stop() chat_info.pop(session) def idler(handler): # Return a method that can be called", "minutes.\", \"I'll take a short break. Ping me when you're back.\", ] def", "idle messages. ''' now = time.time() chat = chat_info[handler.session['id']] if chat['time'] < now", "handler.write_message(chat['bot'].respond(message)) # Note the time of the last message. Reset the idle delay", "about something else. When did you last travel?\", \"Let's meditate for a few", "last message, send an idle message. Then double the delay so that we", "Return a method that can be called periodically to send idle messages. #", "This is doubled after every idle message, and reset when the user responds", "we get here is stored to send future messages. def method(): ''' If", "method(): ''' If delay seconds have elapsed since last message, send an idle", "} chat['callback'].start() def on_message(handler, message): # When we receive a message, respond with", "don't keep sending idle messages. ''' now = time.time() chat = chat_info[handler.session['id']] if", "Stop periodic callback on session = handler.session['id'] chat_info[session]['callback'].stop() chat_info.pop(session) def idler(handler): # Return", "travel?\", \"Let's meditate for a few minutes.\", \"I'll take a short break. 
Ping", "delay=10) def on_close(handler): # Stop periodic callback on session = handler.session['id'] chat_info[session]['callback'].stop() chat_info.pop(session)", "How are you feeling today?') # Set up chat configuration in the session", "reflections from nltk.chat.eliza import pairs chat_info = {} idle_phrases = [ \"Are you", "we don't keep sending idle messages. ''' now = time.time() chat = chat_info[handler.session['id']]", "# Send the next idle message after this many seconds. # This is", "''' If delay seconds have elapsed since last message, send an idle message.", "like to say something?\", \"If you're busy, we can talk later.\", \"What are", "idle_phrases = [ \"Are you still there?\", \"Would you like to say something?\",", "talk later.\", \"What are you thinking?\", \"Got distracted, did you?\", \"Let's change the", "What makes you happy?\", \"Let's talk about something else. When did you last", "PeriodicCallback from nltk.chat.util import Chat, reflections from nltk.chat.eliza import pairs chat_info = {}", "] def open(handler): # Send an introductory message handler.write_message('Hello. How are you feeling", "# Schedule a periodic check 'callback': PeriodicCallback(idler(handler), callback_time=5000), # Send the next idle", "message, and reset when the user responds 'delay': 10, } chat['callback'].start() def on_message(handler,", "user responds 'delay': 10, } chat['callback'].start() def on_message(handler, message): # When we receive", "from nltk.chat.util import Chat, reflections from nltk.chat.eliza import pairs chat_info = {} idle_phrases", "check 'callback': PeriodicCallback(idler(handler), callback_time=5000), # Send the next idle message after this many", "we receive a message, respond with the chatbot response chat = chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message))", "idle message. 
Then double the delay so that we don't keep sending idle", "you're busy, we can talk later.\", \"What are you thinking?\", \"Got distracted, did", "import Chat, reflections from nltk.chat.eliza import pairs chat_info = {} idle_phrases = [", "in the session chat = chat_info[handler.session['id']] = { # This is the Eliza", "The handler parameter we get here is stored to send future messages. def", "something else. When did you last travel?\", \"Let's meditate for a few minutes.\",", "which the user last sent a message. Used for idle messages 'time': time.time(),", "idle message after this many seconds. # This is doubled after every idle", "time at which the user last sent a message. Used for idle messages", "Chat, reflections from nltk.chat.eliza import pairs chat_info = {} idle_phrases = [ \"Are", "last message. Reset the idle delay time chat.update(time=time.time(), delay=10) def on_close(handler): # Stop", "\"Let's talk about something else. When did you last travel?\", \"Let's meditate for", "Send the next idle message after this many seconds. # This is doubled", "chat_info.pop(session) def idler(handler): # Return a method that can be called periodically to", "seconds have elapsed since last message, send an idle message. Then double the", "= {} idle_phrases = [ \"Are you still there?\", \"Would you like to", "# The time at which the user last sent a message. Used for", "is doubled after every idle message, and reset when the user responds 'delay':", "introductory message handler.write_message('Hello. How are you feeling today?') # Set up chat configuration", "future messages. def method(): ''' If delay seconds have elapsed since last message,", "the time of the last message. Reset the idle delay time chat.update(time=time.time(), delay=10)", "If delay seconds have elapsed since last message, send an idle message. Then", "'time': time.time(), # Schedule a periodic check 'callback': PeriodicCallback(idler(handler), callback_time=5000), # Send the", "else. 
When did you last travel?\", \"Let's meditate for a few minutes.\", \"I'll", "you're back.\", ] def open(handler): # Send an introductory message handler.write_message('Hello. How are", "the idle delay time chat.update(time=time.time(), delay=10) def on_close(handler): # Stop periodic callback on", "back.\", ] def open(handler): # Send an introductory message handler.write_message('Hello. How are you", "When did you last travel?\", \"Let's meditate for a few minutes.\", \"I'll take", "# Set up chat configuration in the session chat = chat_info[handler.session['id']] = {", "can be called periodically to send idle messages. # The handler parameter we", "time.time() chat = chat_info[handler.session['id']] if chat['time'] < now - chat['delay']: handler.write_message(choice(idle_phrases)) chat['time'] =", "topic. What makes you happy?\", \"Let's talk about something else. When did you", "message. Reset the idle delay time chat.update(time=time.time(), delay=10) def on_close(handler): # Stop periodic", "callback on session = handler.session['id'] chat_info[session]['callback'].stop() chat_info.pop(session) def idler(handler): # Return a method", "an idle message. Then double the delay so that we don't keep sending", "\"What are you thinking?\", \"Got distracted, did you?\", \"Let's change the topic. What", "is the Eliza bot that will converse with the user 'bot': Chat(pairs, reflections),", "the delay so that we don't keep sending idle messages. ''' now =", "you?\", \"Let's change the topic. What makes you happy?\", \"Let's talk about something", "< now - chat['delay']: handler.write_message(choice(idle_phrases)) chat['time'] = now chat['delay'] = chat['delay'] * 2", "user last sent a message. Used for idle messages 'time': time.time(), # Schedule", "happy?\", \"Let's talk about something else. 
When did you last travel?\", \"Let's meditate", "will converse with the user 'bot': Chat(pairs, reflections), # The time at which", "you like to say something?\", \"If you're busy, we can talk later.\", \"What", "after this many seconds. # This is doubled after every idle message, and", "take a short break. Ping me when you're back.\", ] def open(handler): #", "break. Ping me when you're back.\", ] def open(handler): # Send an introductory", "you still there?\", \"Would you like to say something?\", \"If you're busy, we", "on session = handler.session['id'] chat_info[session]['callback'].stop() chat_info.pop(session) def idler(handler): # Return a method that", "''' now = time.time() chat = chat_info[handler.session['id']] if chat['time'] < now - chat['delay']:", "did you last travel?\", \"Let's meditate for a few minutes.\", \"I'll take a", "send an idle message. Then double the delay so that we don't keep", "me when you're back.\", ] def open(handler): # Send an introductory message handler.write_message('Hello.", "message after this many seconds. # This is doubled after every idle message,", "after every idle message, and reset when the user responds 'delay': 10, }", "chatbot response chat = chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message)) # Note the time of the last", "# This is doubled after every idle message, and reset when the user", "the next idle message after this many seconds. # This is doubled after", "time of the last message. Reset the idle delay time chat.update(time=time.time(), delay=10) def", "= time.time() chat = chat_info[handler.session['id']] if chat['time'] < now - chat['delay']: handler.write_message(choice(idle_phrases)) chat['time']", "sending idle messages. ''' now = time.time() chat = chat_info[handler.session['id']] if chat['time'] <", "on_message(handler, message): # When we receive a message, respond with the chatbot response", "the user last sent a message. 
Used for idle messages 'time': time.time(), #", "import time from random import choice from tornado.ioloop import PeriodicCallback from nltk.chat.util import", "busy, we can talk later.\", \"What are you thinking?\", \"Got distracted, did you?\",", "Chat(pairs, reflections), # The time at which the user last sent a message.", "Send an introductory message handler.write_message('Hello. How are you feeling today?') # Set up", "message): # When we receive a message, respond with the chatbot response chat", "method that can be called periodically to send idle messages. # The handler", "messages. ''' now = time.time() chat = chat_info[handler.session['id']] if chat['time'] < now -", "the session chat = chat_info[handler.session['id']] = { # This is the Eliza bot", "double the delay so that we don't keep sending idle messages. ''' now", "from tornado.ioloop import PeriodicCallback from nltk.chat.util import Chat, reflections from nltk.chat.eliza import pairs", "a periodic check 'callback': PeriodicCallback(idler(handler), callback_time=5000), # Send the next idle message after", "import pairs chat_info = {} idle_phrases = [ \"Are you still there?\", \"Would", "and reset when the user responds 'delay': 10, } chat['callback'].start() def on_message(handler, message):", "response chat = chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message)) # Note the time of the last message.", "Note the time of the last message. Reset the idle delay time chat.update(time=time.time(),", "is stored to send future messages. def method(): ''' If delay seconds have", "the last message. Reset the idle delay time chat.update(time=time.time(), delay=10) def on_close(handler): #", "thinking?\", \"Got distracted, did you?\", \"Let's change the topic. What makes you happy?\",", "you feeling today?') # Set up chat configuration in the session chat =", "session chat = chat_info[handler.session['id']] = { # This is the Eliza bot that", "# Note the time of the last message. 
Reset the idle delay time", "you happy?\", \"Let's talk about something else. When did you last travel?\", \"Let's", "sent a message. Used for idle messages 'time': time.time(), # Schedule a periodic", "\"If you're busy, we can talk later.\", \"What are you thinking?\", \"Got distracted,", "have elapsed since last message, send an idle message. Then double the delay", "here is stored to send future messages. def method(): ''' If delay seconds", "open(handler): # Send an introductory message handler.write_message('Hello. How are you feeling today?') #", "chat_info[handler.session['id']] = { # This is the Eliza bot that will converse with", "to send idle messages. # The handler parameter we get here is stored", "= chat_info[handler.session['id']] if chat['time'] < now - chat['delay']: handler.write_message(choice(idle_phrases)) chat['time'] = now chat['delay']", "# This is the Eliza bot that will converse with the user 'bot':", "- chat['delay']: handler.write_message(choice(idle_phrases)) chat['time'] = now chat['delay'] = chat['delay'] * 2 return method", "the chatbot response chat = chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message)) # Note the time of the", "= [ \"Are you still there?\", \"Would you like to say something?\", \"If", "\"Let's change the topic. What makes you happy?\", \"Let's talk about something else.", "chat['callback'].start() def on_message(handler, message): # When we receive a message, respond with the", "respond with the chatbot response chat = chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message)) # Note the time", "that will converse with the user 'bot': Chat(pairs, reflections), # The time at", "# Send an introductory message handler.write_message('Hello. 
How are you feeling today?') # Set", "can talk later.\", \"What are you thinking?\", \"Got distracted, did you?\", \"Let's change", "chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message)) # Note the time of the last message. Reset the idle", "session = handler.session['id'] chat_info[session]['callback'].stop() chat_info.pop(session) def idler(handler): # Return a method that can", "'delay': 10, } chat['callback'].start() def on_message(handler, message): # When we receive a message,", "now = time.time() chat = chat_info[handler.session['id']] if chat['time'] < now - chat['delay']: handler.write_message(choice(idle_phrases))", "= handler.session['id'] chat_info[session]['callback'].stop() chat_info.pop(session) def idler(handler): # Return a method that can be", "for a few minutes.\", \"I'll take a short break. Ping me when you're", "change the topic. What makes you happy?\", \"Let's talk about something else. When", "few minutes.\", \"I'll take a short break. Ping me when you're back.\", ]", "the topic. What makes you happy?\", \"Let's talk about something else. When did", "reset when the user responds 'delay': 10, } chat['callback'].start() def on_message(handler, message): #", "= chat_info[handler.session['id']] = { # This is the Eliza bot that will converse", "Ping me when you're back.\", ] def open(handler): # Send an introductory message", "import choice from tornado.ioloop import PeriodicCallback from nltk.chat.util import Chat, reflections from nltk.chat.eliza", "next idle message after this many seconds. # This is doubled after every", "meditate for a few minutes.\", \"I'll take a short break. Ping me when", "this many seconds. # This is doubled after every idle message, and reset", "chat = chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message)) # Note the time of the last message. Reset", "def open(handler): # Send an introductory message handler.write_message('Hello. 
How are you feeling today?')", "seconds. # This is doubled after every idle message, and reset when the", "later.\", \"What are you thinking?\", \"Got distracted, did you?\", \"Let's change the topic.", "\"Got distracted, did you?\", \"Let's change the topic. What makes you happy?\", \"Let's", "we can talk later.\", \"What are you thinking?\", \"Got distracted, did you?\", \"Let's", "from random import choice from tornado.ioloop import PeriodicCallback from nltk.chat.util import Chat, reflections", "message. Then double the delay so that we don't keep sending idle messages.", "# When we receive a message, respond with the chatbot response chat =", "def on_close(handler): # Stop periodic callback on session = handler.session['id'] chat_info[session]['callback'].stop() chat_info.pop(session) def", "\"I'll take a short break. Ping me when you're back.\", ] def open(handler):", "handler.write_message('Hello. How are you feeling today?') # Set up chat configuration in the", "choice from tornado.ioloop import PeriodicCallback from nltk.chat.util import Chat, reflections from nltk.chat.eliza import", "from nltk.chat.eliza import pairs chat_info = {} idle_phrases = [ \"Are you still", "user 'bot': Chat(pairs, reflections), # The time at which the user last sent", "last sent a message. Used for idle messages 'time': time.time(), # Schedule a", "receive a message, respond with the chatbot response chat = chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message)) #", "Reset the idle delay time chat.update(time=time.time(), delay=10) def on_close(handler): # Stop periodic callback", "that we don't keep sending idle messages. ''' now = time.time() chat =", "message, send an idle message. 
Then double the delay so that we don't", "nltk.chat.util import Chat, reflections from nltk.chat.eliza import pairs chat_info = {} idle_phrases =", "This is the Eliza bot that will converse with the user 'bot': Chat(pairs,", "= { # This is the Eliza bot that will converse with the", "for idle messages 'time': time.time(), # Schedule a periodic check 'callback': PeriodicCallback(idler(handler), callback_time=5000),", "chat_info[session]['callback'].stop() chat_info.pop(session) def idler(handler): # Return a method that can be called periodically", "\"Would you like to say something?\", \"If you're busy, we can talk later.\",", "When we receive a message, respond with the chatbot response chat = chat_info[handler.session['id']]", "with the user 'bot': Chat(pairs, reflections), # The time at which the user", "the user 'bot': Chat(pairs, reflections), # The time at which the user last", "short break. Ping me when you're back.\", ] def open(handler): # Send an", "periodic check 'callback': PeriodicCallback(idler(handler), callback_time=5000), # Send the next idle message after this", "idle delay time chat.update(time=time.time(), delay=10) def on_close(handler): # Stop periodic callback on session", "did you?\", \"Let's change the topic. What makes you happy?\", \"Let's talk about", "PeriodicCallback(idler(handler), callback_time=5000), # Send the next idle message after this many seconds. #", "converse with the user 'bot': Chat(pairs, reflections), # The time at which the", "time.time(), # Schedule a periodic check 'callback': PeriodicCallback(idler(handler), callback_time=5000), # Send the next", "time chat.update(time=time.time(), delay=10) def on_close(handler): # Stop periodic callback on session = handler.session['id']", "chat = chat_info[handler.session['id']] = { # This is the Eliza bot that will", "delay seconds have elapsed since last message, send an idle message. 
Then double", "delay time chat.update(time=time.time(), delay=10) def on_close(handler): # Stop periodic callback on session =", "last travel?\", \"Let's meditate for a few minutes.\", \"I'll take a short break.", "message handler.write_message('Hello. How are you feeling today?') # Set up chat configuration in", "time from random import choice from tornado.ioloop import PeriodicCallback from nltk.chat.util import Chat,", "today?') # Set up chat configuration in the session chat = chat_info[handler.session['id']] =", "reflections), # The time at which the user last sent a message. Used", "'bot': Chat(pairs, reflections), # The time at which the user last sent a", "callback_time=5000), # Send the next idle message after this many seconds. # This", "on_close(handler): # Stop periodic callback on session = handler.session['id'] chat_info[session]['callback'].stop() chat_info.pop(session) def idler(handler):", "handler parameter we get here is stored to send future messages. def method():", "when the user responds 'delay': 10, } chat['callback'].start() def on_message(handler, message): # When", "every idle message, and reset when the user responds 'delay': 10, } chat['callback'].start()", "since last message, send an idle message. Then double the delay so that", "feeling today?') # Set up chat configuration in the session chat = chat_info[handler.session['id']]", "at which the user last sent a message. Used for idle messages 'time':", "a short break. Ping me when you're back.\", ] def open(handler): # Send", "'callback': PeriodicCallback(idler(handler), callback_time=5000), # Send the next idle message after this many seconds.", "idle messages. 
# The handler parameter we get here is stored to send", "to say something?\", \"If you're busy, we can talk later.\", \"What are you", "def idler(handler): # Return a method that can be called periodically to send", "def method(): ''' If delay seconds have elapsed since last message, send an", "import PeriodicCallback from nltk.chat.util import Chat, reflections from nltk.chat.eliza import pairs chat_info =", "messages 'time': time.time(), # Schedule a periodic check 'callback': PeriodicCallback(idler(handler), callback_time=5000), # Send", "that can be called periodically to send idle messages. # The handler parameter", "the Eliza bot that will converse with the user 'bot': Chat(pairs, reflections), #", "now - chat['delay']: handler.write_message(choice(idle_phrases)) chat['time'] = now chat['delay'] = chat['delay'] * 2 return", "10, } chat['callback'].start() def on_message(handler, message): # When we receive a message, respond", "still there?\", \"Would you like to say something?\", \"If you're busy, we can", "something?\", \"If you're busy, we can talk later.\", \"What are you thinking?\", \"Got", "an introductory message handler.write_message('Hello. How are you feeling today?') # Set up chat", "# Stop periodic callback on session = handler.session['id'] chat_info[session]['callback'].stop() chat_info.pop(session) def idler(handler): #", "periodically to send idle messages. 
# The handler parameter we get here is", "chat = chat_info[handler.session['id']] if chat['time'] < now - chat['delay']: handler.write_message(choice(idle_phrases)) chat['time'] = now", "the user responds 'delay': 10, } chat['callback'].start() def on_message(handler, message): # When we", "if chat['time'] < now - chat['delay']: handler.write_message(choice(idle_phrases)) chat['time'] = now chat['delay'] = chat['delay']", "there?\", \"Would you like to say something?\", \"If you're busy, we can talk", "chat_info[handler.session['id']] if chat['time'] < now - chat['delay']: handler.write_message(choice(idle_phrases)) chat['time'] = now chat['delay'] =", "Set up chat configuration in the session chat = chat_info[handler.session['id']] = { #", "many seconds. # This is doubled after every idle message, and reset when", "send future messages. def method(): ''' If delay seconds have elapsed since last", "pairs chat_info = {} idle_phrases = [ \"Are you still there?\", \"Would you", "The time at which the user last sent a message. Used for idle", "a method that can be called periodically to send idle messages. # The", "get here is stored to send future messages. def method(): ''' If delay", "keep sending idle messages. ''' now = time.time() chat = chat_info[handler.session['id']] if chat['time']", "idler(handler): # Return a method that can be called periodically to send idle", "# The handler parameter we get here is stored to send future messages.", "tornado.ioloop import PeriodicCallback from nltk.chat.util import Chat, reflections from nltk.chat.eliza import pairs chat_info", "you thinking?\", \"Got distracted, did you?\", \"Let's change the topic. What makes you", "makes you happy?\", \"Let's talk about something else. When did you last travel?\",", "message, respond with the chatbot response chat = chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message)) # Note the", "elapsed since last message, send an idle message. 
Then double the delay so", "a message, respond with the chatbot response chat = chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message)) # Note", "parameter we get here is stored to send future messages. def method(): '''", "\"Let's meditate for a few minutes.\", \"I'll take a short break. Ping me", "responds 'delay': 10, } chat['callback'].start() def on_message(handler, message): # When we receive a", "Eliza bot that will converse with the user 'bot': Chat(pairs, reflections), # The", "you last travel?\", \"Let's meditate for a few minutes.\", \"I'll take a short", "configuration in the session chat = chat_info[handler.session['id']] = { # This is the", "doubled after every idle message, and reset when the user responds 'delay': 10,", "so that we don't keep sending idle messages. ''' now = time.time() chat", "= chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message)) # Note the time of the last message. Reset the", "distracted, did you?\", \"Let's change the topic. What makes you happy?\", \"Let's talk", "stored to send future messages. def method(): ''' If delay seconds have elapsed", "messages. def method(): ''' If delay seconds have elapsed since last message, send", "nltk.chat.eliza import pairs chat_info = {} idle_phrases = [ \"Are you still there?\",", "\"Are you still there?\", \"Would you like to say something?\", \"If you're busy,", "of the last message. Reset the idle delay time chat.update(time=time.time(), delay=10) def on_close(handler):", "delay so that we don't keep sending idle messages. ''' now = time.time()", "{ # This is the Eliza bot that will converse with the user", "to send future messages. def method(): ''' If delay seconds have elapsed since", "talk about something else. 
When did you last travel?\", \"Let's meditate for a", "idle messages 'time': time.time(), # Schedule a periodic check 'callback': PeriodicCallback(idler(handler), callback_time=5000), #", "def on_message(handler, message): # When we receive a message, respond with the chatbot", "random import choice from tornado.ioloop import PeriodicCallback from nltk.chat.util import Chat, reflections from", "[ \"Are you still there?\", \"Would you like to say something?\", \"If you're", "are you feeling today?') # Set up chat configuration in the session chat", "# Return a method that can be called periodically to send idle messages.", "Used for idle messages 'time': time.time(), # Schedule a periodic check 'callback': PeriodicCallback(idler(handler),", "when you're back.\", ] def open(handler): # Send an introductory message handler.write_message('Hello. How", "periodic callback on session = handler.session['id'] chat_info[session]['callback'].stop() chat_info.pop(session) def idler(handler): # Return a", "{} idle_phrases = [ \"Are you still there?\", \"Would you like to say", "are you thinking?\", \"Got distracted, did you?\", \"Let's change the topic. What makes", "message. Used for idle messages 'time': time.time(), # Schedule a periodic check 'callback':", "chat.update(time=time.time(), delay=10) def on_close(handler): # Stop periodic callback on session = handler.session['id'] chat_info[session]['callback'].stop()", "chat_info = {} idle_phrases = [ \"Are you still there?\", \"Would you like", "Schedule a periodic check 'callback': PeriodicCallback(idler(handler), callback_time=5000), # Send the next idle message", "with the chatbot response chat = chat_info[handler.session['id']] handler.write_message(chat['bot'].respond(message)) # Note the time of", "called periodically to send idle messages. # The handler parameter we get here", "send idle messages. 
# The handler parameter we get here is stored to", "say something?\", \"If you're busy, we can talk later.\", \"What are you thinking?\",", "idle message, and reset when the user responds 'delay': 10, } chat['callback'].start() def", "chat configuration in the session chat = chat_info[handler.session['id']] = { # This is", "a message. Used for idle messages 'time': time.time(), # Schedule a periodic check", "up chat configuration in the session chat = chat_info[handler.session['id']] = { # This" ]
[ "# load checkpoint and delete head url = \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint = torch.hub.load_state_dict_from_url( url,", "progress=False, map_location=\"cpu\" ) del checkpoint[\"model\"][\"class_embed.weight\"] del checkpoint[\"model\"][\"class_embed.bias\"] save_path = os.path.join(torch.hub._get_torch_home(), \"detr-r50_no-class-head.pth\") torch.save(checkpoint, save_path)", "mantisshrimp.imports import * def detr_pretrained_checkpoint_base(): # load checkpoint and delete head url =", ") del checkpoint[\"model\"][\"class_embed.weight\"] del checkpoint[\"model\"][\"class_embed.bias\"] save_path = os.path.join(torch.hub._get_torch_home(), \"detr-r50_no-class-head.pth\") torch.save(checkpoint, save_path) return save_path", "delete head url = \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint = torch.hub.load_state_dict_from_url( url, progress=False, map_location=\"cpu\" ) del", "url, progress=False, map_location=\"cpu\" ) del checkpoint[\"model\"][\"class_embed.weight\"] del checkpoint[\"model\"][\"class_embed.bias\"] save_path = os.path.join(torch.hub._get_torch_home(), \"detr-r50_no-class-head.pth\") torch.save(checkpoint,", "= \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint = torch.hub.load_state_dict_from_url( url, progress=False, map_location=\"cpu\" ) del checkpoint[\"model\"][\"class_embed.weight\"] del checkpoint[\"model\"][\"class_embed.bias\"]", "[\"detr_pretrained_checkpoint_base\"] from mantisshrimp.imports import * def detr_pretrained_checkpoint_base(): # load checkpoint and delete head", "def detr_pretrained_checkpoint_base(): # load checkpoint and delete head url = \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint =", "torch.hub.load_state_dict_from_url( url, progress=False, map_location=\"cpu\" ) del checkpoint[\"model\"][\"class_embed.weight\"] del checkpoint[\"model\"][\"class_embed.bias\"] save_path = 
os.path.join(torch.hub._get_torch_home(), \"detr-r50_no-class-head.pth\")", "checkpoint and delete head url = \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint = torch.hub.load_state_dict_from_url( url, progress=False, map_location=\"cpu\"", "load checkpoint and delete head url = \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint = torch.hub.load_state_dict_from_url( url, progress=False,", "detr_pretrained_checkpoint_base(): # load checkpoint and delete head url = \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint = torch.hub.load_state_dict_from_url(", "= torch.hub.load_state_dict_from_url( url, progress=False, map_location=\"cpu\" ) del checkpoint[\"model\"][\"class_embed.weight\"] del checkpoint[\"model\"][\"class_embed.bias\"] save_path = os.path.join(torch.hub._get_torch_home(),", "\"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint = torch.hub.load_state_dict_from_url( url, progress=False, map_location=\"cpu\" ) del checkpoint[\"model\"][\"class_embed.weight\"] del checkpoint[\"model\"][\"class_embed.bias\"] save_path", "from mantisshrimp.imports import * def detr_pretrained_checkpoint_base(): # load checkpoint and delete head url", "url = \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint = torch.hub.load_state_dict_from_url( url, progress=False, map_location=\"cpu\" ) del checkpoint[\"model\"][\"class_embed.weight\"] del", "map_location=\"cpu\" ) del checkpoint[\"model\"][\"class_embed.weight\"] del checkpoint[\"model\"][\"class_embed.bias\"] save_path = os.path.join(torch.hub._get_torch_home(), \"detr-r50_no-class-head.pth\") torch.save(checkpoint, save_path) return", "* def detr_pretrained_checkpoint_base(): # load checkpoint and delete head url = \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint", "checkpoint = torch.hub.load_state_dict_from_url( url, progress=False, map_location=\"cpu\" ) del 
checkpoint[\"model\"][\"class_embed.weight\"] del checkpoint[\"model\"][\"class_embed.bias\"] save_path =", "= [\"detr_pretrained_checkpoint_base\"] from mantisshrimp.imports import * def detr_pretrained_checkpoint_base(): # load checkpoint and delete", "<reponame>ramaneswaran/mantisshrimp __all__ = [\"detr_pretrained_checkpoint_base\"] from mantisshrimp.imports import * def detr_pretrained_checkpoint_base(): # load checkpoint", "__all__ = [\"detr_pretrained_checkpoint_base\"] from mantisshrimp.imports import * def detr_pretrained_checkpoint_base(): # load checkpoint and", "head url = \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint = torch.hub.load_state_dict_from_url( url, progress=False, map_location=\"cpu\" ) del checkpoint[\"model\"][\"class_embed.weight\"]", "and delete head url = \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\" checkpoint = torch.hub.load_state_dict_from_url( url, progress=False, map_location=\"cpu\" )", "import * def detr_pretrained_checkpoint_base(): # load checkpoint and delete head url = \"https://dl.fbaipublicfiles.com/detr/detr-r50-e632da11.pth\"" ]
[ "device !!\") return onTime = 2 offTime = 2 if len(argv) is 2:", "format='%(levelname)s: %(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', handlers=[ logging.FileHandler(\"test.log\"), logging.StreamHandler() ]) PID_FILE = \"pro.pid\"", "stopping the device !!\") return onTime = 2 offTime = 2 if len(argv)", "pid: previous_pid = pid.readline() if not len(previous_pid) is 0: os.kill(int(previous_pid), signal.SIGTERM) if not", "= \"pro.pid\" def refreshPID(killOnly=False): current_pid = os.getpid() with open(PID_FILE, 'w+') as pid: previous_pid", "with open(PID_FILE, 'w+') as pid: previous_pid = pid.readline() if not len(previous_pid) is 0:", "import sys from .device import Device class Fan(Device): @staticmethod def logTemperature(): process =", "\"pro.pid\" def refreshPID(killOnly=False): current_pid = os.getpid() with open(PID_FILE, 'w+') as pid: previous_pid =", "offTime = float(argv[1]) refreshPID() try: while True: Fan.logTemperature() fan.turnOn(onTime) Fan.logTemperature() fan.turnOff(offTime) except KeyboardInterrupt", "== \"stop\": refreshPID(True) cleanup(fan) logging.warning( \"Killed existing stale process and stopping the device", "----> {} C\".format(zones[temperature_index], int(c_temp)/1000)) logging.basicConfig( level=logging.DEBUG, format='%(levelname)s: %(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', handlers=[", "cleanup(device): device.shutdown() logging.shutdown() os.remove(PID_FILE) def main(argv): fan = Fan(\"Normal Fan\", 11) if len(argv)", "logging.StreamHandler() ]) PID_FILE = \"pro.pid\" def refreshPID(killOnly=False): current_pid = os.getpid() with open(PID_FILE, 'w+')", "True: Fan.logTemperature() fan.turnOn(onTime) Fan.logTemperature() fan.turnOff(offTime) except KeyboardInterrupt as identifier: logging.error(\"Keyboard interrupt occurred, Gracefully", "fan = Fan(\"Normal Fan\", 11) if len(argv) is 1 and argv[0] == \"stop\":", "%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', handlers=[ 
logging.FileHandler(\"test.log\"), logging.StreamHandler() ]) PID_FILE = \"pro.pid\" def", "killOnly: logging.info( \"Starting A/C controller in PID {}\".format(current_pid)) pid.write(str(current_pid)) def cleanup(device): device.shutdown() logging.shutdown()", "process and stopping the device !!\") return onTime = 2 offTime = 2", "\"thermal-fan-est\" ] temperatures = stdout.split(\"\\n\") for temperature_index in range(len(temperatures)): c_temp = temperatures[temperature_index] if", "\"GPU-therm\", \"PLL-therm\", \"PMIC-Die (Not real)\", \"thermal-fan-est\" ] temperatures = stdout.split(\"\\n\") for temperature_index in", "fan.turnOff(offTime) except KeyboardInterrupt as identifier: logging.error(\"Keyboard interrupt occurred, Gracefully closing . . .\")", "[ \"AO-therm\", \"CPU-therm\", \"GPU-therm\", \"PLL-therm\", \"PMIC-Die (Not real)\", \"thermal-fan-est\" ] temperatures = stdout.split(\"\\n\")", "not '': logging.info( \"{} ----> {} C\".format(zones[temperature_index], int(c_temp)/1000)) logging.basicConfig( level=logging.DEBUG, format='%(levelname)s: %(asctime)s %(message)s',", "{} C\".format(zones[temperature_index], int(c_temp)/1000)) logging.basicConfig( level=logging.DEBUG, format='%(levelname)s: %(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', handlers=[ logging.FileHandler(\"test.log\"),", "pid.write(str(current_pid)) def cleanup(device): device.shutdown() logging.shutdown() os.remove(PID_FILE) def main(argv): fan = Fan(\"Normal Fan\", 11)", "Fan\", 11) if len(argv) is 1 and argv[0] == \"stop\": refreshPID(True) cleanup(fan) logging.warning(", "try: while True: Fan.logTemperature() fan.turnOn(onTime) Fan.logTemperature() fan.turnOff(offTime) except KeyboardInterrupt as identifier: logging.error(\"Keyboard interrupt", "refreshPID() try: while True: Fan.logTemperature() fan.turnOn(onTime) Fan.logTemperature() fan.turnOff(offTime) except KeyboardInterrupt as identifier: logging.error(\"Keyboard", "device.shutdown() logging.shutdown() 
os.remove(PID_FILE) def main(argv): fan = Fan(\"Normal Fan\", 11) if len(argv) is", "not killOnly: logging.info( \"Starting A/C controller in PID {}\".format(current_pid)) pid.write(str(current_pid)) def cleanup(device): device.shutdown()", "C\".format(zones[temperature_index], int(c_temp)/1000)) logging.basicConfig( level=logging.DEBUG, format='%(levelname)s: %(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', handlers=[ logging.FileHandler(\"test.log\"), logging.StreamHandler()", "signal import sys from .device import Device class Fan(Device): @staticmethod def logTemperature(): process", "\"stop\": refreshPID(True) cleanup(fan) logging.warning( \"Killed existing stale process and stopping the device !!\")", "in range(len(temperatures)): c_temp = temperatures[temperature_index] if c_temp is not '': logging.info( \"{} ---->", "c_temp is not '': logging.info( \"{} ----> {} C\".format(zones[temperature_index], int(c_temp)/1000)) logging.basicConfig( level=logging.DEBUG, format='%(levelname)s:", "stale process and stopping the device !!\") return onTime = 2 offTime =", "/sys/devices/virtual/thermal/thermal_zone*/temp\") stdout = process.read() zones = [ \"AO-therm\", \"CPU-therm\", \"GPU-therm\", \"PLL-therm\", \"PMIC-Die (Not", "open(PID_FILE, 'w+') as pid: previous_pid = pid.readline() if not len(previous_pid) is 0: os.kill(int(previous_pid),", "if len(argv) is 2: onTime = float(argv[0]) offTime = float(argv[1]) refreshPID() try: while", "occurred, Gracefully closing . . 
.\") finally: cleanup(fan) if __name__ == \"__main__\": main(sys.argv[1:])", "\"Starting A/C controller in PID {}\".format(current_pid)) pid.write(str(current_pid)) def cleanup(device): device.shutdown() logging.shutdown() os.remove(PID_FILE) def", "from .device import Device class Fan(Device): @staticmethod def logTemperature(): process = os.popen( \"cat", "= os.popen( \"cat /sys/devices/virtual/thermal/thermal_zone*/temp\") stdout = process.read() zones = [ \"AO-therm\", \"CPU-therm\", \"GPU-therm\",", "os.kill(int(previous_pid), signal.SIGTERM) if not killOnly: logging.info( \"Starting A/C controller in PID {}\".format(current_pid)) pid.write(str(current_pid))", "= pid.readline() if not len(previous_pid) is 0: os.kill(int(previous_pid), signal.SIGTERM) if not killOnly: logging.info(", "import os import signal import sys from .device import Device class Fan(Device): @staticmethod", "= [ \"AO-therm\", \"CPU-therm\", \"GPU-therm\", \"PLL-therm\", \"PMIC-Die (Not real)\", \"thermal-fan-est\" ] temperatures =", "is 2: onTime = float(argv[0]) offTime = float(argv[1]) refreshPID() try: while True: Fan.logTemperature()", "2 if len(argv) is 2: onTime = float(argv[0]) offTime = float(argv[1]) refreshPID() try:", "is 1 and argv[0] == \"stop\": refreshPID(True) cleanup(fan) logging.warning( \"Killed existing stale process", "Fan(\"Normal Fan\", 11) if len(argv) is 1 and argv[0] == \"stop\": refreshPID(True) cleanup(fan)", "11) if len(argv) is 1 and argv[0] == \"stop\": refreshPID(True) cleanup(fan) logging.warning( \"Killed", "\"Killed existing stale process and stopping the device !!\") return onTime = 2", "2: onTime = float(argv[0]) offTime = float(argv[1]) refreshPID() try: while True: Fan.logTemperature() fan.turnOn(onTime)", "Fan.logTemperature() fan.turnOn(onTime) Fan.logTemperature() fan.turnOff(offTime) except KeyboardInterrupt as identifier: logging.error(\"Keyboard interrupt occurred, Gracefully closing", "PID_FILE = \"pro.pid\" def refreshPID(killOnly=False): 
current_pid = os.getpid() with open(PID_FILE, 'w+') as pid:", "] temperatures = stdout.split(\"\\n\") for temperature_index in range(len(temperatures)): c_temp = temperatures[temperature_index] if c_temp", "2 offTime = 2 if len(argv) is 2: onTime = float(argv[0]) offTime =", "= stdout.split(\"\\n\") for temperature_index in range(len(temperatures)): c_temp = temperatures[temperature_index] if c_temp is not", "os.remove(PID_FILE) def main(argv): fan = Fan(\"Normal Fan\", 11) if len(argv) is 1 and", "import logging import os import signal import sys from .device import Device class", "def refreshPID(killOnly=False): current_pid = os.getpid() with open(PID_FILE, 'w+') as pid: previous_pid = pid.readline()", "if not len(previous_pid) is 0: os.kill(int(previous_pid), signal.SIGTERM) if not killOnly: logging.info( \"Starting A/C", "\"{} ----> {} C\".format(zones[temperature_index], int(c_temp)/1000)) logging.basicConfig( level=logging.DEBUG, format='%(levelname)s: %(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p',", "os.getpid() with open(PID_FILE, 'w+') as pid: previous_pid = pid.readline() if not len(previous_pid) is", "\"PLL-therm\", \"PMIC-Die (Not real)\", \"thermal-fan-est\" ] temperatures = stdout.split(\"\\n\") for temperature_index in range(len(temperatures)):", "interrupt occurred, Gracefully closing . . 
.\") finally: cleanup(fan) if __name__ == \"__main__\":", "\"PMIC-Die (Not real)\", \"thermal-fan-est\" ] temperatures = stdout.split(\"\\n\") for temperature_index in range(len(temperatures)): c_temp", "logging.info( \"Starting A/C controller in PID {}\".format(current_pid)) pid.write(str(current_pid)) def cleanup(device): device.shutdown() logging.shutdown() os.remove(PID_FILE)", "%(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', handlers=[ logging.FileHandler(\"test.log\"), logging.StreamHandler() ]) PID_FILE = \"pro.pid\" def refreshPID(killOnly=False):", "zones = [ \"AO-therm\", \"CPU-therm\", \"GPU-therm\", \"PLL-therm\", \"PMIC-Die (Not real)\", \"thermal-fan-est\" ] temperatures", "onTime = 2 offTime = 2 if len(argv) is 2: onTime = float(argv[0])", "if c_temp is not '': logging.info( \"{} ----> {} C\".format(zones[temperature_index], int(c_temp)/1000)) logging.basicConfig( level=logging.DEBUG,", "real)\", \"thermal-fan-est\" ] temperatures = stdout.split(\"\\n\") for temperature_index in range(len(temperatures)): c_temp = temperatures[temperature_index]", "\"CPU-therm\", \"GPU-therm\", \"PLL-therm\", \"PMIC-Die (Not real)\", \"thermal-fan-est\" ] temperatures = stdout.split(\"\\n\") for temperature_index", "while True: Fan.logTemperature() fan.turnOn(onTime) Fan.logTemperature() fan.turnOff(offTime) except KeyboardInterrupt as identifier: logging.error(\"Keyboard interrupt occurred,", "controller in PID {}\".format(current_pid)) pid.write(str(current_pid)) def cleanup(device): device.shutdown() logging.shutdown() os.remove(PID_FILE) def main(argv): fan", "logging.info( \"{} ----> {} C\".format(zones[temperature_index], int(c_temp)/1000)) logging.basicConfig( level=logging.DEBUG, format='%(levelname)s: %(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S", "Fan(Device): @staticmethod def logTemperature(): process = os.popen( \"cat /sys/devices/virtual/thermal/thermal_zone*/temp\") stdout = process.read() zones", "= temperatures[temperature_index] if c_temp is 
not '': logging.info( \"{} ----> {} C\".format(zones[temperature_index], int(c_temp)/1000))", "temperatures[temperature_index] if c_temp is not '': logging.info( \"{} ----> {} C\".format(zones[temperature_index], int(c_temp)/1000)) logging.basicConfig(", "%p', handlers=[ logging.FileHandler(\"test.log\"), logging.StreamHandler() ]) PID_FILE = \"pro.pid\" def refreshPID(killOnly=False): current_pid = os.getpid()", "previous_pid = pid.readline() if not len(previous_pid) is 0: os.kill(int(previous_pid), signal.SIGTERM) if not killOnly:", "current_pid = os.getpid() with open(PID_FILE, 'w+') as pid: previous_pid = pid.readline() if not", "offTime = 2 if len(argv) is 2: onTime = float(argv[0]) offTime = float(argv[1])", "if len(argv) is 1 and argv[0] == \"stop\": refreshPID(True) cleanup(fan) logging.warning( \"Killed existing", "]) PID_FILE = \"pro.pid\" def refreshPID(killOnly=False): current_pid = os.getpid() with open(PID_FILE, 'w+') as", "logging.warning( \"Killed existing stale process and stopping the device !!\") return onTime =", "is not '': logging.info( \"{} ----> {} C\".format(zones[temperature_index], int(c_temp)/1000)) logging.basicConfig( level=logging.DEBUG, format='%(levelname)s: %(asctime)s", "existing stale process and stopping the device !!\") return onTime = 2 offTime", "%I:%M:%S %p', handlers=[ logging.FileHandler(\"test.log\"), logging.StreamHandler() ]) PID_FILE = \"pro.pid\" def refreshPID(killOnly=False): current_pid =", "@staticmethod def logTemperature(): process = os.popen( \"cat /sys/devices/virtual/thermal/thermal_zone*/temp\") stdout = process.read() zones =", "float(argv[0]) offTime = float(argv[1]) refreshPID() try: while True: Fan.logTemperature() fan.turnOn(onTime) Fan.logTemperature() fan.turnOff(offTime) except", "datefmt='%m/%d/%Y %I:%M:%S %p', handlers=[ logging.FileHandler(\"test.log\"), logging.StreamHandler() ]) PID_FILE = \"pro.pid\" def refreshPID(killOnly=False): current_pid", "temperatures = stdout.split(\"\\n\") for 
temperature_index in range(len(temperatures)): c_temp = temperatures[temperature_index] if c_temp is", "signal.SIGTERM) if not killOnly: logging.info( \"Starting A/C controller in PID {}\".format(current_pid)) pid.write(str(current_pid)) def", "#!/usr/bin/env python3 import logging import os import signal import sys from .device import", "logging.FileHandler(\"test.log\"), logging.StreamHandler() ]) PID_FILE = \"pro.pid\" def refreshPID(killOnly=False): current_pid = os.getpid() with open(PID_FILE,", "= float(argv[1]) refreshPID() try: while True: Fan.logTemperature() fan.turnOn(onTime) Fan.logTemperature() fan.turnOff(offTime) except KeyboardInterrupt as", "not len(previous_pid) is 0: os.kill(int(previous_pid), signal.SIGTERM) if not killOnly: logging.info( \"Starting A/C controller", "Device class Fan(Device): @staticmethod def logTemperature(): process = os.popen( \"cat /sys/devices/virtual/thermal/thermal_zone*/temp\") stdout =", "logTemperature(): process = os.popen( \"cat /sys/devices/virtual/thermal/thermal_zone*/temp\") stdout = process.read() zones = [ \"AO-therm\",", "= 2 if len(argv) is 2: onTime = float(argv[0]) offTime = float(argv[1]) refreshPID()", "len(argv) is 2: onTime = float(argv[0]) offTime = float(argv[1]) refreshPID() try: while True:", ".device import Device class Fan(Device): @staticmethod def logTemperature(): process = os.popen( \"cat /sys/devices/virtual/thermal/thermal_zone*/temp\")", "'w+') as pid: previous_pid = pid.readline() if not len(previous_pid) is 0: os.kill(int(previous_pid), signal.SIGTERM)", "c_temp = temperatures[temperature_index] if c_temp is not '': logging.info( \"{} ----> {} C\".format(zones[temperature_index],", "and stopping the device !!\") return onTime = 2 offTime = 2 if", "os import signal import sys from .device import Device class Fan(Device): @staticmethod def", "= os.getpid() with open(PID_FILE, 'w+') as pid: previous_pid = pid.readline() if not len(previous_pid)", "temperature_index in 
range(len(temperatures)): c_temp = temperatures[temperature_index] if c_temp is not '': logging.info( \"{}", "if not killOnly: logging.info( \"Starting A/C controller in PID {}\".format(current_pid)) pid.write(str(current_pid)) def cleanup(device):", "A/C controller in PID {}\".format(current_pid)) pid.write(str(current_pid)) def cleanup(device): device.shutdown() logging.shutdown() os.remove(PID_FILE) def main(argv):", "return onTime = 2 offTime = 2 if len(argv) is 2: onTime =", "as identifier: logging.error(\"Keyboard interrupt occurred, Gracefully closing . . .\") finally: cleanup(fan) if", "stdout.split(\"\\n\") for temperature_index in range(len(temperatures)): c_temp = temperatures[temperature_index] if c_temp is not '':", "identifier: logging.error(\"Keyboard interrupt occurred, Gracefully closing . . .\") finally: cleanup(fan) if __name__", "stdout = process.read() zones = [ \"AO-therm\", \"CPU-therm\", \"GPU-therm\", \"PLL-therm\", \"PMIC-Die (Not real)\",", "= float(argv[0]) offTime = float(argv[1]) refreshPID() try: while True: Fan.logTemperature() fan.turnOn(onTime) Fan.logTemperature() fan.turnOff(offTime)", "(Not real)\", \"thermal-fan-est\" ] temperatures = stdout.split(\"\\n\") for temperature_index in range(len(temperatures)): c_temp =", "\"AO-therm\", \"CPU-therm\", \"GPU-therm\", \"PLL-therm\", \"PMIC-Die (Not real)\", \"thermal-fan-est\" ] temperatures = stdout.split(\"\\n\") for", "as pid: previous_pid = pid.readline() if not len(previous_pid) is 0: os.kill(int(previous_pid), signal.SIGTERM) if", "onTime = float(argv[0]) offTime = float(argv[1]) refreshPID() try: while True: Fan.logTemperature() fan.turnOn(onTime) Fan.logTemperature()", "= process.read() zones = [ \"AO-therm\", \"CPU-therm\", \"GPU-therm\", \"PLL-therm\", \"PMIC-Die (Not real)\", \"thermal-fan-est\"", "python3 import logging import os import signal import sys from .device import Device", "is 0: os.kill(int(previous_pid), signal.SIGTERM) if not killOnly: logging.info( \"Starting 
A/C controller in PID", "class Fan(Device): @staticmethod def logTemperature(): process = os.popen( \"cat /sys/devices/virtual/thermal/thermal_zone*/temp\") stdout = process.read()", "main(argv): fan = Fan(\"Normal Fan\", 11) if len(argv) is 1 and argv[0] ==", "fan.turnOn(onTime) Fan.logTemperature() fan.turnOff(offTime) except KeyboardInterrupt as identifier: logging.error(\"Keyboard interrupt occurred, Gracefully closing .", "cleanup(fan) logging.warning( \"Killed existing stale process and stopping the device !!\") return onTime", "def cleanup(device): device.shutdown() logging.shutdown() os.remove(PID_FILE) def main(argv): fan = Fan(\"Normal Fan\", 11) if", "= 2 offTime = 2 if len(argv) is 2: onTime = float(argv[0]) offTime", "1 and argv[0] == \"stop\": refreshPID(True) cleanup(fan) logging.warning( \"Killed existing stale process and", "except KeyboardInterrupt as identifier: logging.error(\"Keyboard interrupt occurred, Gracefully closing . . .\") finally:", "the device !!\") return onTime = 2 offTime = 2 if len(argv) is", "logging import os import signal import sys from .device import Device class Fan(Device):", "'': logging.info( \"{} ----> {} C\".format(zones[temperature_index], int(c_temp)/1000)) logging.basicConfig( level=logging.DEBUG, format='%(levelname)s: %(asctime)s %(message)s', datefmt='%m/%d/%Y", "len(previous_pid) is 0: os.kill(int(previous_pid), signal.SIGTERM) if not killOnly: logging.info( \"Starting A/C controller in", "sys from .device import Device class Fan(Device): @staticmethod def logTemperature(): process = os.popen(", "= Fan(\"Normal Fan\", 11) if len(argv) is 1 and argv[0] == \"stop\": refreshPID(True)", "int(c_temp)/1000)) logging.basicConfig( level=logging.DEBUG, format='%(levelname)s: %(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', handlers=[ logging.FileHandler(\"test.log\"), logging.StreamHandler() ])", "logging.error(\"Keyboard interrupt occurred, Gracefully closing . . 
.\") finally: cleanup(fan) if __name__ ==", "pid.readline() if not len(previous_pid) is 0: os.kill(int(previous_pid), signal.SIGTERM) if not killOnly: logging.info( \"Starting", "refreshPID(killOnly=False): current_pid = os.getpid() with open(PID_FILE, 'w+') as pid: previous_pid = pid.readline() if", "def logTemperature(): process = os.popen( \"cat /sys/devices/virtual/thermal/thermal_zone*/temp\") stdout = process.read() zones = [", "os.popen( \"cat /sys/devices/virtual/thermal/thermal_zone*/temp\") stdout = process.read() zones = [ \"AO-therm\", \"CPU-therm\", \"GPU-therm\", \"PLL-therm\",", "for temperature_index in range(len(temperatures)): c_temp = temperatures[temperature_index] if c_temp is not '': logging.info(", "import signal import sys from .device import Device class Fan(Device): @staticmethod def logTemperature():", "!!\") return onTime = 2 offTime = 2 if len(argv) is 2: onTime", "logging.basicConfig( level=logging.DEBUG, format='%(levelname)s: %(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', handlers=[ logging.FileHandler(\"test.log\"), logging.StreamHandler() ]) PID_FILE", "def main(argv): fan = Fan(\"Normal Fan\", 11) if len(argv) is 1 and argv[0]", "Fan.logTemperature() fan.turnOff(offTime) except KeyboardInterrupt as identifier: logging.error(\"Keyboard interrupt occurred, Gracefully closing . 
.", "\"cat /sys/devices/virtual/thermal/thermal_zone*/temp\") stdout = process.read() zones = [ \"AO-therm\", \"CPU-therm\", \"GPU-therm\", \"PLL-therm\", \"PMIC-Die", "level=logging.DEBUG, format='%(levelname)s: %(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p', handlers=[ logging.FileHandler(\"test.log\"), logging.StreamHandler() ]) PID_FILE =", "handlers=[ logging.FileHandler(\"test.log\"), logging.StreamHandler() ]) PID_FILE = \"pro.pid\" def refreshPID(killOnly=False): current_pid = os.getpid() with", "{}\".format(current_pid)) pid.write(str(current_pid)) def cleanup(device): device.shutdown() logging.shutdown() os.remove(PID_FILE) def main(argv): fan = Fan(\"Normal Fan\",", "len(argv) is 1 and argv[0] == \"stop\": refreshPID(True) cleanup(fan) logging.warning( \"Killed existing stale", "process = os.popen( \"cat /sys/devices/virtual/thermal/thermal_zone*/temp\") stdout = process.read() zones = [ \"AO-therm\", \"CPU-therm\",", "0: os.kill(int(previous_pid), signal.SIGTERM) if not killOnly: logging.info( \"Starting A/C controller in PID {}\".format(current_pid))", "logging.shutdown() os.remove(PID_FILE) def main(argv): fan = Fan(\"Normal Fan\", 11) if len(argv) is 1", "float(argv[1]) refreshPID() try: while True: Fan.logTemperature() fan.turnOn(onTime) Fan.logTemperature() fan.turnOff(offTime) except KeyboardInterrupt as identifier:", "and argv[0] == \"stop\": refreshPID(True) cleanup(fan) logging.warning( \"Killed existing stale process and stopping", "in PID {}\".format(current_pid)) pid.write(str(current_pid)) def cleanup(device): device.shutdown() logging.shutdown() os.remove(PID_FILE) def main(argv): fan =", "PID {}\".format(current_pid)) pid.write(str(current_pid)) def cleanup(device): device.shutdown() logging.shutdown() os.remove(PID_FILE) def main(argv): fan = Fan(\"Normal", "KeyboardInterrupt as identifier: logging.error(\"Keyboard interrupt occurred, Gracefully closing . . 
.\") finally: cleanup(fan)", "refreshPID(True) cleanup(fan) logging.warning( \"Killed existing stale process and stopping the device !!\") return", "range(len(temperatures)): c_temp = temperatures[temperature_index] if c_temp is not '': logging.info( \"{} ----> {}", "import Device class Fan(Device): @staticmethod def logTemperature(): process = os.popen( \"cat /sys/devices/virtual/thermal/thermal_zone*/temp\") stdout", "process.read() zones = [ \"AO-therm\", \"CPU-therm\", \"GPU-therm\", \"PLL-therm\", \"PMIC-Die (Not real)\", \"thermal-fan-est\" ]", "argv[0] == \"stop\": refreshPID(True) cleanup(fan) logging.warning( \"Killed existing stale process and stopping the" ]
[ "key=lambda x: x[-1]) # sort test_set by predicted performance return test_set[predicted_sorted[0][0]] # the", "\"\"\" for each configuration, we create a config_node object to save its informations", "optimal configuration in unevaluated set, then remove it from unevaluated set to training", "step1: read from csv file pdcontent = pd.read_csv(csv_file) attr_list = pdcontent.columns # all", "i in attr_list if \"$<\" not in i] perfs = [i for i", "= range(len(configs)) train_index = indexes[:int(fraction*len(configs))] dataset = [configs[i] for i in train_index] #", "print(\"\\n-------------\") for i in uneval_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") ####################################################################################### lowest_rank = find_lowest_rank(train_set,", "train_set = dataset[:size] unevaluated_set = dataset for config in train_set: unevaluated_set = remove_by_index(unevaluated_set,", "split\") train_set = data[0] uneval_set = data[1] for i in train_set: print(str(i.index), \",\",", "# Update: 07/16/2018 \"\"\" Flash, proposed by Nair et al. (arXiv '18), which", "\"Finding Faster Configurations using FLASH\". 
\"\"\" import pandas as pd import random as", "predicted value:\", sf[1], \" predicted rank:\", sf[2]) # print(\"-------------\") return np.min([sf[0] for sf", "for i,p in enumerate(predicted)] predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # sort test_set", "to 50 optimal_config = predict_by_cart(train_set, unevaluated_set) # print(\"[add]:\", optimal_config.index) unevaluated_set = remove_by_index(unevaluated_set, optimal_config.index)", "in config_pool: if config.index == index: config_pool.remove(config) break return config_pool def find_lowest_rank(train_set, test_set):", "for i in train_index] # print(len(dataset)) return dataset def predict_by_flash(dataset, size=30, budget=50): \"\"\"", "in train_index] # print(len(dataset)) return dataset def predict_by_flash(dataset, size=30, budget=50): \"\"\" use the", "dataset STEP 2: split the dataset into training set (30 configs) and unevaluated", "train_set] # test data test_perfs = [t.features for t in sorted_test] cart_model =", "a best model, return the train_set and unevaluated_set \"\"\" #initilize the train set", "__init__(self, index, features, perfs, predicted): self.index = index self.features = features self.perfs =", "performance \"\"\" def __init__(self, index, features, perfs, predicted): self.index = index self.features =", "set, then remove it from unevaluated set to training set. STEP 4: repeat", "save its informations index : actual rank features : feature list perfs :", "(remaining configs) STEP 3: predict the optimal configuration in unevaluated set, then remove", "from sklearn.tree import DecisionTreeRegressor class config_node: \"\"\" for each configuration, we create a", "\"\"\" split data set and return the 80% data \"\"\" # step1: read", "is loss out. 
The details of Progressive are introduced in paper \"Finding Faster", "budget=50): \"\"\" use the budget in dataset to train a best model, return", "print(\"actual rank:\", sf[0], \" actual value:\", sorted_test[sf[0]].perfs[-1], \" predicted value:\", sf[1], \" predicted", "STEP 4: repeat the STEP 4 until the budget (50 configs) is loss", "which aims to find the (near) optimal configuration in unevaluated set. STEP 1:", "configuration, we create a config_node object to save its informations index : actual", "p in enumerate(predicted)] # i-> actual rank, p -> predicted value predicted_sorted =", "\"$<\" not in i] perfs = [i for i in attr_list if \"$<\"", "if __name__ == \"__main__\": ####################################################################################### # select 80% data dataset = split_data_by_fraction(\"data/Apache_AllMeasurements.csv\", 0.8)", "optimal_config.index) train_set.append(optimal_config) budget = budget - 1 return [train_set, unevaluated_set] if __name__ ==", "split # fraction = 0.4 # split fraction # rd.seed(seed) # random seed", "# print(\"actual rank:\", sf[0], \" actual value:\", sorted_test[sf[0]].perfs[-1], \" predicted value:\", sf[1], \"", "perfs self.predicted = predicted def remove_by_index(config_pool, index): \"\"\" remove the selected configuration \"\"\"", "predicted def remove_by_index(config_pool, index): \"\"\" remove the selected configuration \"\"\" for config in", "for t in train_set] # test data test_perfs = [t.features for t in", "assigning predicted ranks predicted_rank_sorted = [[p[0], p[-1], i] for i,p in enumerate(predicted_sorted)] #", "print(len(dataset)) return dataset def predict_by_flash(dataset, size=30, budget=50): \"\"\" use the budget in dataset", "p -> predicted value predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # print(predicted_sorted) #", "print(\"### initialzation\") for i in dataset: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") data = 
predict_by_flash(dataset)", "actual performance \"\"\" def __init__(self, index, features, perfs, predicted): self.index = index self.features", "in select_few]) def predict_by_cart(train_set, test_set): \"\"\" return the predicted optimal condiguration \"\"\" train_features", "test_set): \"\"\" return the lowest rank in top 10 \"\"\" sorted_test = sorted(test_set,", "Update: 07/16/2018 \"\"\" Flash, proposed by Nair et al. (arXiv '18), which aims", "1: select 80%% of original data as dataset STEP 2: split the dataset", "dataset[:size] unevaluated_set = dataset for config in train_set: unevaluated_set = remove_by_index(unevaluated_set, config.index) #", "# print(\"[add]:\", optimal_config.index) unevaluated_set = remove_by_index(unevaluated_set, optimal_config.index) train_set.append(optimal_config) budget = budget - 1", "= [t.features for t in sorted_test] cart_model = DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted =", "\"-\", config.predicted, \"-\", config.rank) # step4: data split # fraction = 0.4 #", "predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # print(predicted_sorted) # assigning predicted ranks predicted_rank_sorted", "30 configurations rd.shuffle(dataset) train_set = dataset[:size] unevaluated_set = dataset for config in train_set:", "2: split the dataset into training set (30 configs) and unevaluated set (remaining", "enumerate(predicted)] predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # sort test_set by predicted performance", "= [[i, p] for i, p in enumerate(predicted)] # i-> actual rank, p", "rd.seed(seed) # random seed rd.shuffle(configs) # shuffle the configs indexes = range(len(configs)) train_index", "the predicted optimal condiguration \"\"\" train_features = [config.features for config in train_set] train_perfs", "STEP 3: predict the optimal configuration in unevaluated set, then remove it from", "= predict_by_flash(dataset) print(\"### finally split\") train_set = data[0] uneval_set = 
data[1] for i", "test_set by predicted performance return test_set[predicted_sorted[0][0]] # the optimal configuration def split_data_by_fraction(csv_file, fraction):", "[configs[i] for i in train_index] # print(len(dataset)) return dataset def predict_by_flash(dataset, size=30, budget=50):", "in enumerate(predicted)] predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # sort test_set by predicted", "STEP 1: select 80%% of original data as dataset STEP 2: split the", "sorted_test] cart_model = DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted = cart_model.predict(test_perfs) predicted_id = [[i, p]", "test_set[predicted_sorted[0][0]] # the optimal configuration def split_data_by_fraction(csv_file, fraction): \"\"\" split data set and", "best model, return the train_set and unevaluated_set \"\"\" #initilize the train set with", "rank features : feature list perfs : actual performance \"\"\" def __init__(self, index,", "return the train_set and unevaluated_set \"\"\" #initilize the train set with 30 configurations", "in unevaluated set, then remove it from unevaluated set to training set. 
STEP", "# rd.seed(seed) # random seed rd.shuffle(configs) # shuffle the configs indexes = range(len(configs))", "cart_model.predict(test_perfs) predicted_id = [[i, p] for i, p in enumerate(predicted)] # i-> actual", "random seed rd.shuffle(configs) # shuffle the configs indexes = range(len(configs)) train_index = indexes[:int(fraction*len(configs))]", "find_lowest_rank(train_set, test_set): \"\"\" return the lowest rank in top 10 \"\"\" sorted_test =", "in dataset: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") data = predict_by_flash(dataset) print(\"### finally split\") train_set", "# train data train_features = [t.features for t in train_set] train_perfs = [t.perfs[-1]", "train_features = [config.features for config in train_set] train_perfs = [config.perfs[-1] for config in", "configs.append(config_node(c, # actual rank sortedcontent.iloc[c][features].tolist(), # feature list sortedcontent.iloc[c][perfs].tolist(), # performance list sortedcontent.iloc[c][perfs].tolist(),", "to train a best model, return the train_set and unevaluated_set \"\"\" #initilize the", "predicted ranks predicted_rank_sorted = [[p[0], p[-1], i] for i,p in enumerate(predicted_sorted)] # p[0]", "# p[0] -> actual rank, p[-1] -> perdicted value, i -> predicted rank", "def split_data_by_fraction(csv_file, fraction): \"\"\" split data set and return the 80% data \"\"\"", "it from unevaluated set to training set. 
STEP 4: repeat the STEP 4", "i] for i,p in enumerate(predicted_sorted)] # p[0] -> actual rank, p[-1] -> perdicted", "\"__main__\": ####################################################################################### # select 80% data dataset = split_data_by_fraction(\"data/Apache_AllMeasurements.csv\", 0.8) print(\"### initialzation\") for", "if config.index == index: config_pool.remove(config) break return config_pool def find_lowest_rank(train_set, test_set): \"\"\" return", "i -> predicted rank select_few = predicted_rank_sorted[:10] # print the predcited top-10 configuration", "train_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") for i in uneval_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\")", "\"\"\" train_features = [config.features for config in train_set] train_perfs = [config.perfs[-1] for config", "print(\"-------------\") return np.min([sf[0] for sf in select_few]) def predict_by_cart(train_set, test_set): \"\"\" return the", "config.predicted, \"-\", config.rank) # step4: data split # fraction = 0.4 # split", "set. STEP 1: select 80%% of original data as dataset STEP 2: split", "repeat the STEP 4 until the budget (50 configs) is loss out. 
The", "import random as rd import numpy as np from sklearn.tree import DecisionTreeRegressor class", "value, i -> predicted rank select_few = predicted_rank_sorted[:10] # print the predcited top-10", "train_perfs) predicted = cart_model.predict(test_perfs) predicted_id = [[i, p] for i, p in enumerate(predicted)]", "#initilize the train set with 30 configurations rd.shuffle(dataset) train_set = dataset[:size] unevaluated_set =", "configs: # print(config.index, \"-\", config.perfs, \"-\", config.predicted, \"-\", config.rank) # step4: data split", "= predict_by_cart(train_set, unevaluated_set) # print(\"[add]:\", optimal_config.index) unevaluated_set = remove_by_index(unevaluated_set, optimal_config.index) train_set.append(optimal_config) budget =", "finally split\") train_set = data[0] uneval_set = data[1] for i in train_set: print(str(i.index),", "train_perfs = [t.perfs[-1] for t in train_set] # test data test_perfs = [t.features", "train_set: unevaluated_set = remove_by_index(unevaluated_set, config.index) # remove train_set while budget >= 0: #", "unevaluated_set = remove_by_index(unevaluated_set, optimal_config.index) train_set.append(optimal_config) budget = budget - 1 return [train_set, unevaluated_set]", "data test_perfs = [t.features for t in sorted_test] cart_model = DecisionTreeRegressor() cart_model.fit(train_features, train_perfs)", "actual rank, p -> predicted value predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) #", "= 0.4 # split fraction # rd.seed(seed) # random seed rd.shuffle(configs) # shuffle", "test data test_perfs = [t.features for t in sorted_test] cart_model = DecisionTreeRegressor() cart_model.fit(train_features,", "print(\"### finally split\") train_set = data[0] uneval_set = data[1] for i in train_set:", "if \"$<\" in i] sortedcontent = pdcontent.sort_values(perfs[-1]) # from small to big #", "3: predict the optimal configuration in unevaluated set, then remove it from unevaluated", "key=lambda x: x[-1]) # 
print(predicted_sorted) # assigning predicted ranks predicted_rank_sorted = [[p[0], p[-1],", "for sf in select_few: # print(\"actual rank:\", sf[0], \" actual value:\", sorted_test[sf[0]].perfs[-1], \"", "optimal_config = predict_by_cart(train_set, unevaluated_set) # print(\"[add]:\", optimal_config.index) unevaluated_set = remove_by_index(unevaluated_set, optimal_config.index) train_set.append(optimal_config) budget", "config.index == index: config_pool.remove(config) break return config_pool def find_lowest_rank(train_set, test_set): \"\"\" return the", "end=\"\") print(\"\\n-------------\") data = predict_by_flash(dataset) print(\"### finally split\") train_set = data[0] uneval_set =", "train_perfs = [config.perfs[-1] for config in train_set] test_features = [config.features for config in", "dataset = split_data_by_fraction(\"data/Apache_AllMeasurements.csv\", 0.8) print(\"### initialzation\") for i in dataset: print(str(i.index), \",\", end=\"\")", "\"\"\" #initilize the train set with 30 configurations rd.shuffle(dataset) train_set = dataset[:size] unevaluated_set", "cart_model.fit(train_features, train_perfs) predicted = cart_model.predict(test_features) predicted_id = [[i,p] for i,p in enumerate(predicted)] predicted_sorted", "(30 configs) and unevaluated set (remaining configs) STEP 3: predict the optimal configuration", "x: x.perfs[-1]) # train data train_features = [t.features for t in train_set] train_perfs", "in paper \"Finding Faster Configurations using FLASH\". 
\"\"\" import pandas as pd import", "\"-\", config.perfs, \"-\", config.predicted, \"-\", config.rank) # step4: data split # fraction =", "def find_lowest_rank(train_set, test_set): \"\"\" return the lowest rank in top 10 \"\"\" sorted_test", "list )) # for config in configs: # print(config.index, \"-\", config.perfs, \"-\", config.predicted,", "object to save its informations index : actual rank features : feature list", "self.predicted = predicted def remove_by_index(config_pool, index): \"\"\" remove the selected configuration \"\"\" for", "i in train_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") for i in uneval_set: print(str(i.index), \",\",", "predcited top-10 configuration # for sf in select_few: # print(\"actual rank:\", sf[0], \"", "shuffle the configs indexes = range(len(configs)) train_index = indexes[:int(fraction*len(configs))] dataset = [configs[i] for", "-> perdicted value, i -> predicted rank select_few = predicted_rank_sorted[:10] # print the", "return config_pool def find_lowest_rank(train_set, test_set): \"\"\" return the lowest rank in top 10", "\"\"\" remove the selected configuration \"\"\" for config in config_pool: if config.index ==", "1 features = [i for i in attr_list if \"$<\" not in i]", "split the dataset into training set (30 configs) and unevaluated set (remaining configs)", "informations index : actual rank features : feature list perfs : actual performance", "def predict_by_cart(train_set, test_set): \"\"\" return the predicted optimal condiguration \"\"\" train_features = [config.features", "10 \"\"\" sorted_test = sorted(test_set, key=lambda x: x.perfs[-1]) # train data train_features =", "in enumerate(predicted)] # i-> actual rank, p -> predicted value predicted_sorted = sorted(predicted_id,", "rank:\", sf[2]) # print(\"-------------\") return np.min([sf[0] for sf in select_few]) def predict_by_cart(train_set, test_set):", "Faster Configurations using FLASH\". 
\"\"\" import pandas as pd import random as rd", "predict_by_cart(train_set, unevaluated_set) # print(\"[add]:\", optimal_config.index) unevaluated_set = remove_by_index(unevaluated_set, optimal_config.index) train_set.append(optimal_config) budget = budget", "= cart_model.predict(test_perfs) predicted_id = [[i, p] for i, p in enumerate(predicted)] # i->", "training set. STEP 4: repeat the STEP 4 until the budget (50 configs)", "split_data_by_fraction(csv_file, fraction): \"\"\" split data set and return the 80% data \"\"\" #", "the train_set and unevaluated_set \"\"\" #initilize the train set with 30 configurations rd.shuffle(dataset)", "\" predicted value:\", sf[1], \" predicted rank:\", sf[2]) # print(\"-------------\") return np.min([sf[0] for", "predicted rank:\", sf[2]) # print(\"-------------\") return np.min([sf[0] for sf in select_few]) def predict_by_cart(train_set,", "print the predcited top-10 configuration # for sf in select_few: # print(\"actual rank:\",", "= [[i,p] for i,p in enumerate(predicted)] predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) #", "# budget equals to 50 optimal_config = predict_by_cart(train_set, unevaluated_set) # print(\"[add]:\", optimal_config.index) unevaluated_set", "for i in train_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") for i in uneval_set: print(str(i.index),", "i] perfs = [i for i in attr_list if \"$<\" in i] sortedcontent", "i,p in enumerate(predicted_sorted)] # p[0] -> actual rank, p[-1] -> perdicted value, i", "config_pool.remove(config) break return config_pool def find_lowest_rank(train_set, test_set): \"\"\" return the lowest rank in", "using FLASH\". \"\"\" import pandas as pd import random as rd import numpy", "the STEP 4 until the budget (50 configs) is loss out. 
The details", "for i in attr_list if \"$<\" not in i] perfs = [i for", "# actual rank sortedcontent.iloc[c][features].tolist(), # feature list sortedcontent.iloc[c][perfs].tolist(), # performance list sortedcontent.iloc[c][perfs].tolist(), #", "in top 10 \"\"\" sorted_test = sorted(test_set, key=lambda x: x.perfs[-1]) # train data", "feature list # step2: split attribute - method 1 features = [i for", "# print the predcited top-10 configuration # for sf in select_few: # print(\"actual", "train_features = [t.features for t in train_set] train_perfs = [t.perfs[-1] for t in", "x[-1]) # print(predicted_sorted) # assigning predicted ranks predicted_rank_sorted = [[p[0], p[-1], i] for", "al. (arXiv '18), which aims to find the (near) optimal configuration in unevaluated", "from unevaluated set to training set. STEP 4: repeat the STEP 4 until", "\"\"\" Flash, proposed by Nair et al. (arXiv '18), which aims to find", "sorted(predicted_id, key=lambda x: x[-1]) # sort test_set by predicted performance return test_set[predicted_sorted[0][0]] #", "# step3: collect configuration configs = list() for c in range(len(pdcontent)): configs.append(config_node(c, #", "# print(len(dataset)) return dataset def predict_by_flash(dataset, size=30, budget=50): \"\"\" use the budget in", "in configs: # print(config.index, \"-\", config.perfs, \"-\", config.predicted, \"-\", config.rank) # step4: data", "file pdcontent = pd.read_csv(csv_file) attr_list = pdcontent.columns # all feature list # step2:", "config.index) # remove train_set while budget >= 0: # budget equals to 50", "50 optimal_config = predict_by_cart(train_set, unevaluated_set) # print(\"[add]:\", optimal_config.index) unevaluated_set = remove_by_index(unevaluated_set, optimal_config.index) train_set.append(optimal_config)", "configuration in unevaluated set. 
STEP 1: select 80%% of original data as dataset", "= data[1] for i in train_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") for i in", "budget in dataset to train a best model, return the train_set and unevaluated_set", "index, features, perfs, predicted): self.index = index self.features = features self.perfs = perfs", "test_features = [config.features for config in test_set] cart_model = DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted", "cart_model = DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted = cart_model.predict(test_perfs) predicted_id = [[i, p] for", "predicted value predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # print(predicted_sorted) # assigning predicted", "\"\"\" # step1: read from csv file pdcontent = pd.read_csv(csv_file) attr_list = pdcontent.columns", "enumerate(predicted_sorted)] # p[0] -> actual rank, p[-1] -> perdicted value, i -> predicted", "index : actual rank features : feature list perfs : actual performance \"\"\"", "DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted = cart_model.predict(test_features) predicted_id = [[i,p] for i,p in enumerate(predicted)]", "= [i for i in attr_list if \"$<\" not in i] perfs =", "performance return test_set[predicted_sorted[0][0]] # the optimal configuration def split_data_by_fraction(csv_file, fraction): \"\"\" split data", "config_node object to save its informations index : actual rank features : feature", "config.perfs, \"-\", config.predicted, \"-\", config.rank) # step4: data split # fraction = 0.4", "key=lambda x: x.perfs[-1]) # train data train_features = [t.features for t in train_set]", "for t in sorted_test] cart_model = DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted = cart_model.predict(test_perfs) predicted_id", "to training set. 
STEP 4: repeat the STEP 4 until the budget (50", "rank:\", sf[0], \" actual value:\", sorted_test[sf[0]].perfs[-1], \" predicted value:\", sf[1], \" predicted rank:\",", "unevaluated_set \"\"\" #initilize the train set with 30 configurations rd.shuffle(dataset) train_set = dataset[:size]", "configuration # for sf in select_few: # print(\"actual rank:\", sf[0], \" actual value:\",", "return test_set[predicted_sorted[0][0]] # the optimal configuration def split_data_by_fraction(csv_file, fraction): \"\"\" split data set", "np.min([sf[0] for sf in select_few]) def predict_by_cart(train_set, test_set): \"\"\" return the predicted optimal", "model, return the train_set and unevaluated_set \"\"\" #initilize the train set with 30", "the selected configuration \"\"\" for config in config_pool: if config.index == index: config_pool.remove(config)", "[config.perfs[-1] for config in train_set] test_features = [config.features for config in test_set] cart_model", "condiguration \"\"\" train_features = [config.features for config in train_set] train_perfs = [config.perfs[-1] for", "in dataset to train a best model, return the train_set and unevaluated_set \"\"\"", "-> predicted value predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # print(predicted_sorted) # assigning", "attribute - method 1 features = [i for i in attr_list if \"$<\"", "for i in attr_list if \"$<\" in i] sortedcontent = pdcontent.sort_values(perfs[-1]) # from", "the lowest rank in top 10 \"\"\" sorted_test = sorted(test_set, key=lambda x: x.perfs[-1])", "# from small to big # print(len(sortedcontent)) # step3: collect configuration configs =", "= dataset[:size] unevaluated_set = dataset for config in train_set: unevaluated_set = remove_by_index(unevaluated_set, config.index)", "split fraction # rd.seed(seed) # random seed rd.shuffle(configs) # shuffle the configs indexes", "original data as dataset STEP 2: split the dataset into training set (30", "The details of Progressive are introduced in paper 
\"Finding Faster Configurations using FLASH\".", ": feature list perfs : actual performance \"\"\" def __init__(self, index, features, perfs,", "optimal configuration in unevaluated set. STEP 1: select 80%% of original data as", "t in train_set] # test data test_perfs = [t.features for t in sorted_test]", "ranks predicted_rank_sorted = [[p[0], p[-1], i] for i,p in enumerate(predicted_sorted)] # p[0] ->", "i, p in enumerate(predicted)] # i-> actual rank, p -> predicted value predicted_sorted", "unevaluated_set] if __name__ == \"__main__\": ####################################################################################### # select 80% data dataset = split_data_by_fraction(\"data/Apache_AllMeasurements.csv\",", "of Progressive are introduced in paper \"Finding Faster Configurations using FLASH\". \"\"\" import", "remove train_set while budget >= 0: # budget equals to 50 optimal_config =", "the budget (50 configs) is loss out. The details of Progressive are introduced", "for sf in select_few]) def predict_by_cart(train_set, test_set): \"\"\" return the predicted optimal condiguration", "its informations index : actual rank features : feature list perfs : actual", "configuration in unevaluated set, then remove it from unevaluated set to training set.", "= [config.features for config in test_set] cart_model = DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted =", "\"\"\" sorted_test = sorted(test_set, key=lambda x: x.perfs[-1]) # train data train_features = [t.features", "paper \"Finding Faster Configurations using FLASH\". 
\"\"\" import pandas as pd import random", "predicted performance return test_set[predicted_sorted[0][0]] # the optimal configuration def split_data_by_fraction(csv_file, fraction): \"\"\" split", "and unevaluated_set \"\"\" #initilize the train set with 30 configurations rd.shuffle(dataset) train_set =", "data set and return the 80% data \"\"\" # step1: read from csv", "return the lowest rank in top 10 \"\"\" sorted_test = sorted(test_set, key=lambda x:", "= predicted_rank_sorted[:10] # print the predcited top-10 configuration # for sf in select_few:", "= dataset for config in train_set: unevaluated_set = remove_by_index(unevaluated_set, config.index) # remove train_set", "rank, p[-1] -> perdicted value, i -> predicted rank select_few = predicted_rank_sorted[:10] #", "return the 80% data \"\"\" # step1: read from csv file pdcontent =", "print(predicted_sorted) # assigning predicted ranks predicted_rank_sorted = [[p[0], p[-1], i] for i,p in", "split attribute - method 1 features = [i for i in attr_list if", "pd import random as rd import numpy as np from sklearn.tree import DecisionTreeRegressor", "import numpy as np from sklearn.tree import DecisionTreeRegressor class config_node: \"\"\" for each", "for i in dataset: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") data = predict_by_flash(dataset) print(\"### finally", "create a config_node object to save its informations index : actual rank features", "unevaluated_set = remove_by_index(unevaluated_set, config.index) # remove train_set while budget >= 0: # budget", "\"\"\" for config in config_pool: if config.index == index: config_pool.remove(config) break return config_pool", "= list() for c in range(len(pdcontent)): configs.append(config_node(c, # actual rank sortedcontent.iloc[c][features].tolist(), # feature", "def remove_by_index(config_pool, index): \"\"\" remove the selected configuration \"\"\" for config in config_pool:", "config_node: \"\"\" for each configuration, we create a 
class config_node:
    """
    Container holding everything known about one configuration.

    index     : the configuration's actual rank in the performance-sorted data
    features  : list of feature (option) values
    perfs     : list of actual performance measurements
    predicted : list of predicted performance measurements
    """

    def __init__(self, index, features, perfs, predicted):
        self.index = index
        self.features = features
        self.perfs = perfs
        self.predicted = predicted
\"\"\" import pandas as pd import random as rd import numpy as", "predicted optimal condiguration \"\"\" train_features = [config.features for config in train_set] train_perfs =", "for i in uneval_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") ####################################################################################### lowest_rank = find_lowest_rank(train_set, uneval_set)", "# i-> actual rank, p -> predicted value predicted_sorted = sorted(predicted_id, key=lambda x:", "by Nair et al. (arXiv '18), which aims to find the (near) optimal", "in test_set] cart_model = DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted = cart_model.predict(test_features) predicted_id = [[i,p]", "# feature list sortedcontent.iloc[c][perfs].tolist(), # performance list sortedcontent.iloc[c][perfs].tolist(), # predicted performance list ))", "80% data \"\"\" # step1: read from csv file pdcontent = pd.read_csv(csv_file) attr_list", "-> predicted rank select_few = predicted_rank_sorted[:10] # print the predcited top-10 configuration #", "fraction): \"\"\" split data set and return the 80% data \"\"\" # step1:", "predicted = cart_model.predict(test_perfs) predicted_id = [[i, p] for i, p in enumerate(predicted)] #", "config.rank) # step4: data split # fraction = 0.4 # split fraction #", "predicted_id = [[i, p] for i, p in enumerate(predicted)] # i-> actual rank,", "step3: collect configuration configs = list() for c in range(len(pdcontent)): configs.append(config_node(c, # actual", "list perfs : actual performance \"\"\" def __init__(self, index, features, perfs, predicted): self.index", "= pdcontent.sort_values(perfs[-1]) # from small to big # print(len(sortedcontent)) # step3: collect configuration", "# sort test_set by predicted performance return test_set[predicted_sorted[0][0]] # the optimal configuration def", "sortedcontent.iloc[c][perfs].tolist(), # predicted performance list )) # for config in configs: # print(config.index,", 
"index: config_pool.remove(config) break return config_pool def find_lowest_rank(train_set, test_set): \"\"\" return the lowest rank", "unevaluated_set = dataset for config in train_set: unevaluated_set = remove_by_index(unevaluated_set, config.index) # remove", "budget >= 0: # budget equals to 50 optimal_config = predict_by_cart(train_set, unevaluated_set) #", "Progressive are introduced in paper \"Finding Faster Configurations using FLASH\". \"\"\" import pandas", ")) # for config in configs: # print(config.index, \"-\", config.perfs, \"-\", config.predicted, \"-\",", "config in train_set: unevaluated_set = remove_by_index(unevaluated_set, config.index) # remove train_set while budget >=", "t in train_set] train_perfs = [t.perfs[-1] for t in train_set] # test data", "np from sklearn.tree import DecisionTreeRegressor class config_node: \"\"\" for each configuration, we create", "range(len(configs)) train_index = indexes[:int(fraction*len(configs))] dataset = [configs[i] for i in train_index] # print(len(dataset))", "\" actual value:\", sorted_test[sf[0]].perfs[-1], \" predicted value:\", sf[1], \" predicted rank:\", sf[2]) #", "= indexes[:int(fraction*len(configs))] dataset = [configs[i] for i in train_index] # print(len(dataset)) return dataset", "= [config.features for config in train_set] train_perfs = [config.perfs[-1] for config in train_set]", "for i,p in enumerate(predicted_sorted)] # p[0] -> actual rank, p[-1] -> perdicted value,", "budget equals to 50 optimal_config = predict_by_cart(train_set, unevaluated_set) # print(\"[add]:\", optimal_config.index) unevaluated_set =", "remove_by_index(config_pool, index): \"\"\" remove the selected configuration \"\"\" for config in config_pool: if", "= [[p[0], p[-1], i] for i,p in enumerate(predicted_sorted)] # p[0] -> actual rank,", "i-> actual rank, p -> predicted value predicted_sorted = sorted(predicted_id, key=lambda x: x[-1])", "- 1 return [train_set, unevaluated_set] if __name__ == \"__main__\": 
####################################################################################### # select 80%", "print(\"[add]:\", optimal_config.index) unevaluated_set = remove_by_index(unevaluated_set, optimal_config.index) train_set.append(optimal_config) budget = budget - 1 return", "as pd import random as rd import numpy as np from sklearn.tree import", "80%% of original data as dataset STEP 2: split the dataset into training", "predict the optimal configuration in unevaluated set, then remove it from unevaluated set", "unevaluated set. STEP 1: select 80%% of original data as dataset STEP 2:", "features, perfs, predicted): self.index = index self.features = features self.perfs = perfs self.predicted", "optimal_config.index) unevaluated_set = remove_by_index(unevaluated_set, optimal_config.index) train_set.append(optimal_config) budget = budget - 1 return [train_set,", "while budget >= 0: # budget equals to 50 optimal_config = predict_by_cart(train_set, unevaluated_set)", "features self.perfs = perfs self.predicted = predicted def remove_by_index(config_pool, index): \"\"\" remove the", "equals to 50 optimal_config = predict_by_cart(train_set, unevaluated_set) # print(\"[add]:\", optimal_config.index) unevaluated_set = remove_by_index(unevaluated_set,", "# step2: split attribute - method 1 features = [i for i in", "dataset for config in train_set: unevaluated_set = remove_by_index(unevaluated_set, config.index) # remove train_set while", "data[0] uneval_set = data[1] for i in train_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") for", "select_few]) def predict_by_cart(train_set, test_set): \"\"\" return the predicted optimal condiguration \"\"\" train_features =", "dataset to train a best model, return the train_set and unevaluated_set \"\"\" #initilize", "in range(len(pdcontent)): configs.append(config_node(c, # actual rank sortedcontent.iloc[c][features].tolist(), # feature list sortedcontent.iloc[c][perfs].tolist(), # performance", "i in 
def predict_by_flash(dataset, size=30, budget=50):
    """
    Run the FLASH active-learning loop.

    Starting from *size* randomly chosen configurations, repeatedly train a
    CART model, move the predicted-optimal configuration from the
    unevaluated pool into the training set, and stop when the evaluation
    *budget* is used up.

    Returns [train_set, unevaluated_set].
    """
    # initialize the training set with `size` random configurations
    rd.shuffle(dataset)
    train_set = dataset[:size]
    # FIX: work on a copy instead of aliasing `dataset`, so the caller's
    # list is not emptied by the remove_by_index() calls below
    unevaluated_set = list(dataset)
    for config in train_set:
        unevaluated_set = remove_by_index(unevaluated_set, config.index)
    # NOTE(review): `>= 0` performs budget+1 iterations (51 for the default
    # of 50) — kept as-is to preserve the original behaviour; confirm intent
    while budget >= 0:
        optimal_config = predict_by_cart(train_set, unevaluated_set)
        unevaluated_set = remove_by_index(unevaluated_set, optimal_config.index)
        train_set.append(optimal_config)
        budget = budget - 1
    return [train_set, unevaluated_set]
def split_data_by_fraction(csv_file, fraction):
    """
    Read a configuration data set from *csv_file* and return a random
    sample containing `fraction` (e.g. 0.8 -> 80%) of the configurations.

    Columns whose name contains "$<" are treated as performance
    measurements; all other columns are features.
    """
    # step1: read the csv file
    pdcontent = pd.read_csv(csv_file)
    attr_list = pdcontent.columns  # all column names
    # step2: split the columns into features and performances
    features = [i for i in attr_list if "$<" not in i]
    perfs = [i for i in attr_list if "$<" in i]
    # sort by the last performance column, from small to big
    sortedcontent = pdcontent.sort_values(perfs[-1])
    # step3: wrap each row in a config_node; the row position after
    # sorting is the configuration's actual rank
    configs = list()
    for c in range(len(pdcontent)):
        configs.append(config_node(c,  # actual rank
            sortedcontent.iloc[c][features].tolist(),  # feature list
            sortedcontent.iloc[c][perfs].tolist(),  # performance list
            sortedcontent.iloc[c][perfs].tolist(),  # predicted performance list
            ))
    # step4: shuffle, then keep the first `fraction` of the configurations
    # (direct slice replaces the old range()/index-list indirection)
    rd.shuffle(configs)
    dataset = configs[:int(fraction * len(configs))]
    return dataset
The details of Progressive are introduced in paper \"Finding Faster Configurations using", "config in train_set] test_features = [config.features for config in test_set] cart_model = DecisionTreeRegressor()", "= data[0] uneval_set = data[1] for i in train_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\")", "Configurations using FLASH\". \"\"\" import pandas as pd import random as rd import", "in train_set] # test data test_perfs = [t.features for t in sorted_test] cart_model", "as np from sklearn.tree import DecisionTreeRegressor class config_node: \"\"\" for each configuration, we", "for config in train_set: unevaluated_set = remove_by_index(unevaluated_set, config.index) # remove train_set while budget", "# assigning predicted ranks predicted_rank_sorted = [[p[0], p[-1], i] for i,p in enumerate(predicted_sorted)]", "sf in select_few]) def predict_by_cart(train_set, test_set): \"\"\" return the predicted optimal condiguration \"\"\"", "# print(len(sortedcontent)) # step3: collect configuration configs = list() for c in range(len(pdcontent)):", "in i] sortedcontent = pdcontent.sort_values(perfs[-1]) # from small to big # print(len(sortedcontent)) #", "fraction = 0.4 # split fraction # rd.seed(seed) # random seed rd.shuffle(configs) #", "sorted_test = sorted(test_set, key=lambda x: x.perfs[-1]) # train data train_features = [t.features for", "\"\"\" import pandas as pd import random as rd import numpy as np", "# for config in configs: # print(config.index, \"-\", config.perfs, \"-\", config.predicted, \"-\", config.rank)", "data \"\"\" # step1: read from csv file pdcontent = pd.read_csv(csv_file) attr_list =", "# performance list sortedcontent.iloc[c][perfs].tolist(), # predicted performance list )) # for config in", "Nair et al. 
(arXiv '18), which aims to find the (near) optimal configuration", "train_set] train_perfs = [config.perfs[-1] for config in train_set] test_features = [config.features for config", "rank, p -> predicted value predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # print(predicted_sorted)", "config in train_set] train_perfs = [config.perfs[-1] for config in train_set] test_features = [config.features", "the train set with 30 configurations rd.shuffle(dataset) train_set = dataset[:size] unevaluated_set = dataset", "et al. (arXiv '18), which aims to find the (near) optimal configuration in", "import pandas as pd import random as rd import numpy as np from", "train_set] train_perfs = [t.perfs[-1] for t in train_set] # test data test_perfs =", "we create a config_node object to save its informations index : actual rank", "split data set and return the 80% data \"\"\" # step1: read from", "actual value:\", sorted_test[sf[0]].perfs[-1], \" predicted value:\", sf[1], \" predicted rank:\", sf[2]) # print(\"-------------\")", "predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # sort test_set by predicted performance return", "data as dataset STEP 2: split the dataset into training set (30 configs)", "= sorted(test_set, key=lambda x: x.perfs[-1]) # train data train_features = [t.features for t", "rank in top 10 \"\"\" sorted_test = sorted(test_set, key=lambda x: x.perfs[-1]) # train", "set and return the 80% data \"\"\" # step1: read from csv file", "features : feature list perfs : actual performance \"\"\" def __init__(self, index, features,", "introduced in paper \"Finding Faster Configurations using FLASH\". \"\"\" import pandas as pd", "for c in range(len(pdcontent)): configs.append(config_node(c, # actual rank sortedcontent.iloc[c][features].tolist(), # feature list sortedcontent.iloc[c][perfs].tolist(),", "training set (30 configs) and unevaluated set (remaining configs) STEP 3: predict the", "(50 configs) is loss out. 
The details of Progressive are introduced in paper", "# random seed rd.shuffle(configs) # shuffle the configs indexes = range(len(configs)) train_index =", "indexes = range(len(configs)) train_index = indexes[:int(fraction*len(configs))] dataset = [configs[i] for i in train_index]", "size=30, budget=50): \"\"\" use the budget in dataset to train a best model,", "__name__ == \"__main__\": ####################################################################################### # select 80% data dataset = split_data_by_fraction(\"data/Apache_AllMeasurements.csv\", 0.8) print(\"###", "value predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # print(predicted_sorted) # assigning predicted ranks", "until the budget (50 configs) is loss out. The details of Progressive are", "uneval_set = data[1] for i in train_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") for i", "big # print(len(sortedcontent)) # step3: collect configuration configs = list() for c in", "i in attr_list if \"$<\" in i] sortedcontent = pdcontent.sort_values(perfs[-1]) # from small", "for each configuration, we create a config_node object to save its informations index", "lowest rank in top 10 \"\"\" sorted_test = sorted(test_set, key=lambda x: x.perfs[-1]) #", "dataset = [configs[i] for i in train_index] # print(len(dataset)) return dataset def predict_by_flash(dataset,", "top 10 \"\"\" sorted_test = sorted(test_set, key=lambda x: x.perfs[-1]) # train data train_features", "cart_model.predict(test_features) predicted_id = [[i,p] for i,p in enumerate(predicted)] predicted_sorted = sorted(predicted_id, key=lambda x:", "proposed by Nair et al. 
(arXiv '18), which aims to find the (near)", "the configs indexes = range(len(configs)) train_index = indexes[:int(fraction*len(configs))] dataset = [configs[i] for i", "i] sortedcontent = pdcontent.sort_values(perfs[-1]) # from small to big # print(len(sortedcontent)) # step3:", "select 80% data dataset = split_data_by_fraction(\"data/Apache_AllMeasurements.csv\", 0.8) print(\"### initialzation\") for i in dataset:", "value:\", sf[1], \" predicted rank:\", sf[2]) # print(\"-------------\") return np.min([sf[0] for sf in", "for config in train_set] test_features = [config.features for config in test_set] cart_model =", "configuration \"\"\" for config in config_pool: if config.index == index: config_pool.remove(config) break return", "p[0] -> actual rank, p[-1] -> perdicted value, i -> predicted rank select_few", "\"\"\" return the lowest rank in top 10 \"\"\" sorted_test = sorted(test_set, key=lambda", "in train_set] test_features = [config.features for config in test_set] cart_model = DecisionTreeRegressor() cart_model.fit(train_features,", "\",\", end=\"\") print(\"\\n-------------\") for i in uneval_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") ####################################################################################### lowest_rank", ": actual performance \"\"\" def __init__(self, index, features, perfs, predicted): self.index = index", "= perfs self.predicted = predicted def remove_by_index(config_pool, index): \"\"\" remove the selected configuration", "- method 1 features = [i for i in attr_list if \"$<\" not", "the (near) optimal configuration in unevaluated set. 
STEP 1: select 80%% of original", "dataset into training set (30 configs) and unevaluated set (remaining configs) STEP 3:", "== index: config_pool.remove(config) break return config_pool def find_lowest_rank(train_set, test_set): \"\"\" return the lowest", "in select_few: # print(\"actual rank:\", sf[0], \" actual value:\", sorted_test[sf[0]].perfs[-1], \" predicted value:\",", "self.index = index self.features = features self.perfs = perfs self.predicted = predicted def", "by predicted performance return test_set[predicted_sorted[0][0]] # the optimal configuration def split_data_by_fraction(csv_file, fraction): \"\"\"", "indexes[:int(fraction*len(configs))] dataset = [configs[i] for i in train_index] # print(len(dataset)) return dataset def", "dataset def predict_by_flash(dataset, size=30, budget=50): \"\"\" use the budget in dataset to train", "use the budget in dataset to train a best model, return the train_set", "[t.perfs[-1] for t in train_set] # test data test_perfs = [t.features for t", "remove_by_index(unevaluated_set, config.index) # remove train_set while budget >= 0: # budget equals to", "sf in select_few: # print(\"actual rank:\", sf[0], \" actual value:\", sorted_test[sf[0]].perfs[-1], \" predicted", "for config in train_set] train_perfs = [config.perfs[-1] for config in train_set] test_features =", "train data train_features = [t.features for t in train_set] train_perfs = [t.perfs[-1] for", "list sortedcontent.iloc[c][perfs].tolist(), # performance list sortedcontent.iloc[c][perfs].tolist(), # predicted performance list )) # for", "features = [i for i in attr_list if \"$<\" not in i] perfs", "feature list perfs : actual performance \"\"\" def __init__(self, index, features, perfs, predicted):", "configuration configs = list() for c in range(len(pdcontent)): configs.append(config_node(c, # actual rank sortedcontent.iloc[c][features].tolist(),", "budget = budget - 1 return [train_set, unevaluated_set] if __name__ == \"__main__\": 
#######################################################################################", "enumerate(predicted)] # i-> actual rank, p -> predicted value predicted_sorted = sorted(predicted_id, key=lambda", "set (30 configs) and unevaluated set (remaining configs) STEP 3: predict the optimal", "c in range(len(pdcontent)): configs.append(config_node(c, # actual rank sortedcontent.iloc[c][features].tolist(), # feature list sortedcontent.iloc[c][perfs].tolist(), #", "random as rd import numpy as np from sklearn.tree import DecisionTreeRegressor class config_node:", "class config_node: \"\"\" for each configuration, we create a config_node object to save", "configs) STEP 3: predict the optimal configuration in unevaluated set, then remove it", "80% data dataset = split_data_by_fraction(\"data/Apache_AllMeasurements.csv\", 0.8) print(\"### initialzation\") for i in dataset: print(str(i.index),", "list sortedcontent.iloc[c][perfs].tolist(), # predicted performance list )) # for config in configs: #", "# all feature list # step2: split attribute - method 1 features =", "end=\"\") print(\"\\n-------------\") for i in uneval_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") ####################################################################################### lowest_rank =", "[[i, p] for i, p in enumerate(predicted)] # i-> actual rank, p ->", "= budget - 1 return [train_set, unevaluated_set] if __name__ == \"__main__\": ####################################################################################### #", "def predict_by_flash(dataset, size=30, budget=50): \"\"\" use the budget in dataset to train a", "coding=utf-8 # Author: youngfeng # Update: 07/16/2018 \"\"\" Flash, proposed by Nair et", "selected configuration \"\"\" for config in config_pool: if config.index == index: config_pool.remove(config) break", "unevaluated set, then remove it from unevaluated set to training set. 
STEP 4:", "unevaluated set (remaining configs) STEP 3: predict the optimal configuration in unevaluated set,", "sf[1], \" predicted rank:\", sf[2]) # print(\"-------------\") return np.min([sf[0] for sf in select_few])", "DecisionTreeRegressor class config_node: \"\"\" for each configuration, we create a config_node object to", "= sorted(predicted_id, key=lambda x: x[-1]) # sort test_set by predicted performance return test_set[predicted_sorted[0][0]]", "1 return [train_set, unevaluated_set] if __name__ == \"__main__\": ####################################################################################### # select 80% data", "predict_by_flash(dataset) print(\"### finally split\") train_set = data[0] uneval_set = data[1] for i in", "optimal condiguration \"\"\" train_features = [config.features for config in train_set] train_perfs = [config.perfs[-1]", "config in config_pool: if config.index == index: config_pool.remove(config) break return config_pool def find_lowest_rank(train_set,", "configs indexes = range(len(configs)) train_index = indexes[:int(fraction*len(configs))] dataset = [configs[i] for i in", "# the optimal configuration def split_data_by_fraction(csv_file, fraction): \"\"\" split data set and return", "actual rank features : feature list perfs : actual performance \"\"\" def __init__(self,", "[i for i in attr_list if \"$<\" not in i] perfs = [i", "== \"__main__\": ####################################################################################### # select 80% data dataset = split_data_by_fraction(\"data/Apache_AllMeasurements.csv\", 0.8) print(\"### initialzation\")", "# step4: data split # fraction = 0.4 # split fraction # rd.seed(seed)", "\"$<\" in i] sortedcontent = pdcontent.sort_values(perfs[-1]) # from small to big # print(len(sortedcontent))", "= index self.features = features self.perfs = perfs self.predicted = predicted def remove_by_index(config_pool,", "feature list sortedcontent.iloc[c][perfs].tolist(), # performance list 
sortedcontent.iloc[c][perfs].tolist(), # predicted performance list )) #", "config_pool: if config.index == index: config_pool.remove(config) break return config_pool def find_lowest_rank(train_set, test_set): \"\"\"", "rank sortedcontent.iloc[c][features].tolist(), # feature list sortedcontent.iloc[c][perfs].tolist(), # performance list sortedcontent.iloc[c][perfs].tolist(), # predicted performance", "select 80%% of original data as dataset STEP 2: split the dataset into", "perdicted value, i -> predicted rank select_few = predicted_rank_sorted[:10] # print the predcited", "in enumerate(predicted_sorted)] # p[0] -> actual rank, p[-1] -> perdicted value, i ->", "in sorted_test] cart_model = DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted = cart_model.predict(test_perfs) predicted_id = [[i,", "# fraction = 0.4 # split fraction # rd.seed(seed) # random seed rd.shuffle(configs)", "the budget in dataset to train a best model, return the train_set and", "print(\"\\n-------------\") data = predict_by_flash(dataset) print(\"### finally split\") train_set = data[0] uneval_set = data[1]", "from csv file pdcontent = pd.read_csv(csv_file) attr_list = pdcontent.columns # all feature list", "i in uneval_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") ####################################################################################### lowest_rank = find_lowest_rank(train_set, uneval_set) print(lowest_rank)", "predicted performance list )) # for config in configs: # print(config.index, \"-\", config.perfs,", "rank select_few = predicted_rank_sorted[:10] # print the predcited top-10 configuration # for sf", "predict_by_flash(dataset, size=30, budget=50): \"\"\" use the budget in dataset to train a best", "small to big # print(len(sortedcontent)) # step3: collect configuration configs = list() for", "the predcited top-10 configuration # for sf in select_few: # print(\"actual rank:\", sf[0],", "attr_list = pdcontent.columns # 
all feature list # step2: split attribute - method", "return the predicted optimal condiguration \"\"\" train_features = [config.features for config in train_set]", "to find the (near) optimal configuration in unevaluated set. STEP 1: select 80%%", "train_perfs) predicted = cart_model.predict(test_features) predicted_id = [[i,p] for i,p in enumerate(predicted)] predicted_sorted =", "sort test_set by predicted performance return test_set[predicted_sorted[0][0]] # the optimal configuration def split_data_by_fraction(csv_file,", "predicted_rank_sorted[:10] # print the predcited top-10 configuration # for sf in select_few: #", "to save its informations index : actual rank features : feature list perfs", "predicted): self.index = index self.features = features self.perfs = perfs self.predicted = predicted", "value:\", sorted_test[sf[0]].perfs[-1], \" predicted value:\", sf[1], \" predicted rank:\", sf[2]) # print(\"-------------\") return", "# step1: read from csv file pdcontent = pd.read_csv(csv_file) attr_list = pdcontent.columns #", "return [train_set, unevaluated_set] if __name__ == \"__main__\": ####################################################################################### # select 80% data dataset", "i,p in enumerate(predicted)] predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # sort test_set by", "[train_set, unevaluated_set] if __name__ == \"__main__\": ####################################################################################### # select 80% data dataset =", "# test data test_perfs = [t.features for t in sorted_test] cart_model = DecisionTreeRegressor()", "with 30 configurations rd.shuffle(dataset) train_set = dataset[:size] unevaluated_set = dataset for config in", "not in i] perfs = [i for i in attr_list if \"$<\" in", "[[i,p] for i,p in enumerate(predicted)] predicted_sorted = sorted(predicted_id, key=lambda x: x[-1]) # sort", "= [configs[i] for i in train_index] # print(len(dataset)) return dataset def 
predict_by_flash(dataset, size=30,", "[config.features for config in train_set] train_perfs = [config.perfs[-1] for config in train_set] test_features", "(near) optimal configuration in unevaluated set. STEP 1: select 80%% of original data", "= cart_model.predict(test_features) predicted_id = [[i,p] for i,p in enumerate(predicted)] predicted_sorted = sorted(predicted_id, key=lambda", "select_few: # print(\"actual rank:\", sf[0], \" actual value:\", sorted_test[sf[0]].perfs[-1], \" predicted value:\", sf[1],", "[t.features for t in train_set] train_perfs = [t.perfs[-1] for t in train_set] #", "in train_set] train_perfs = [t.perfs[-1] for t in train_set] # test data test_perfs", "predict_by_cart(train_set, test_set): \"\"\" return the predicted optimal condiguration \"\"\" train_features = [config.features for", "are introduced in paper \"Finding Faster Configurations using FLASH\". \"\"\" import pandas as", "select_few = predicted_rank_sorted[:10] # print the predcited top-10 configuration # for sf in", "in attr_list if \"$<\" not in i] perfs = [i for i in", "train_index] # print(len(dataset)) return dataset def predict_by_flash(dataset, size=30, budget=50): \"\"\" use the budget", "unevaluated set to training set. 
STEP 4: repeat the STEP 4 until the", "train a best model, return the train_set and unevaluated_set \"\"\" #initilize the train", "sf[2]) # print(\"-------------\") return np.min([sf[0] for sf in select_few]) def predict_by_cart(train_set, test_set): \"\"\"", "collect configuration configs = list() for c in range(len(pdcontent)): configs.append(config_node(c, # actual rank", "= pd.read_csv(csv_file) attr_list = pdcontent.columns # all feature list # step2: split attribute", "= pdcontent.columns # all feature list # step2: split attribute - method 1", "a config_node object to save its informations index : actual rank features :", "fraction # rd.seed(seed) # random seed rd.shuffle(configs) # shuffle the configs indexes =", "for config in test_set] cart_model = DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted = cart_model.predict(test_features) predicted_id", "\"\"\" use the budget in dataset to train a best model, return the", "0: # budget equals to 50 optimal_config = predict_by_cart(train_set, unevaluated_set) # print(\"[add]:\", optimal_config.index)", "#!\\usr\\bin\\python # coding=utf-8 # Author: youngfeng # Update: 07/16/2018 \"\"\" Flash, proposed by", "(arXiv '18), which aims to find the (near) optimal configuration in unevaluated set.", "index): \"\"\" remove the selected configuration \"\"\" for config in config_pool: if config.index", "= DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted = cart_model.predict(test_features) predicted_id = [[i,p] for i,p in", "# print(predicted_sorted) # assigning predicted ranks predicted_rank_sorted = [[p[0], p[-1], i] for i,p", "from small to big # print(len(sortedcontent)) # step3: collect configuration configs = list()", "= DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted = cart_model.predict(test_perfs) predicted_id = [[i, p] for i,", "in attr_list if \"$<\" in i] sortedcontent = pdcontent.sort_values(perfs[-1]) # from small to", 
"budget - 1 return [train_set, unevaluated_set] if __name__ == \"__main__\": ####################################################################################### # select", "youngfeng # Update: 07/16/2018 \"\"\" Flash, proposed by Nair et al. (arXiv '18),", "pdcontent.columns # all feature list # step2: split attribute - method 1 features", "= remove_by_index(unevaluated_set, optimal_config.index) train_set.append(optimal_config) budget = budget - 1 return [train_set, unevaluated_set] if", "to big # print(len(sortedcontent)) # step3: collect configuration configs = list() for c", "for config in config_pool: if config.index == index: config_pool.remove(config) break return config_pool def", "perfs : actual performance \"\"\" def __init__(self, index, features, perfs, predicted): self.index =", "DecisionTreeRegressor() cart_model.fit(train_features, train_perfs) predicted = cart_model.predict(test_perfs) predicted_id = [[i, p] for i, p", "the optimal configuration def split_data_by_fraction(csv_file, fraction): \"\"\" split data set and return the", "Author: youngfeng # Update: 07/16/2018 \"\"\" Flash, proposed by Nair et al. 
(arXiv", "csv file pdcontent = pd.read_csv(csv_file) attr_list = pdcontent.columns # all feature list #", ": actual rank features : feature list perfs : actual performance \"\"\" def", "configuration def split_data_by_fraction(csv_file, fraction): \"\"\" split data set and return the 80% data", "config in configs: # print(config.index, \"-\", config.perfs, \"-\", config.predicted, \"-\", config.rank) # step4:", "train_set = data[0] uneval_set = data[1] for i in train_set: print(str(i.index), \",\", end=\"\")", "set (remaining configs) STEP 3: predict the optimal configuration in unevaluated set, then", "and unevaluated set (remaining configs) STEP 3: predict the optimal configuration in unevaluated", "return np.min([sf[0] for sf in select_few]) def predict_by_cart(train_set, test_set): \"\"\" return the predicted", "sortedcontent = pdcontent.sort_values(perfs[-1]) # from small to big # print(len(sortedcontent)) # step3: collect", "####################################################################################### # select 80% data dataset = split_data_by_fraction(\"data/Apache_AllMeasurements.csv\", 0.8) print(\"### initialzation\") for i", "into training set (30 configs) and unevaluated set (remaining configs) STEP 3: predict", "'18), which aims to find the (near) optimal configuration in unevaluated set. 
STEP", "sorted_test[sf[0]].perfs[-1], \" predicted value:\", sf[1], \" predicted rank:\", sf[2]) # print(\"-------------\") return np.min([sf[0]", "optimal configuration def split_data_by_fraction(csv_file, fraction): \"\"\" split data set and return the 80%", "print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") for i in uneval_set: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") #######################################################################################", "initialzation\") for i in dataset: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\") data = predict_by_flash(dataset) print(\"###", "= split_data_by_fraction(\"data/Apache_AllMeasurements.csv\", 0.8) print(\"### initialzation\") for i in dataset: print(str(i.index), \",\", end=\"\") print(\"\\n-------------\")", "perfs, predicted): self.index = index self.features = features self.perfs = perfs self.predicted =", "= remove_by_index(unevaluated_set, config.index) # remove train_set while budget >= 0: # budget equals", "actual rank, p[-1] -> perdicted value, i -> predicted rank select_few = predicted_rank_sorted[:10]", "= predicted def remove_by_index(config_pool, index): \"\"\" remove the selected configuration \"\"\" for config" ]
[ "import sys ath = boto3.client('athena') try: response = ath.get_database( CatalogName='AwsDataCatalog', DatabaseName='scifimovies' ) print(\"Database", "boto3.client('athena') try: response = ath.get_database( CatalogName='AwsDataCatalog', DatabaseName='scifimovies' ) print(\"Database found\") except: print(\"No Database", "DatabaseName='scifimovies' ) print(\"Database found\") except: print(\"No Database Found\") try: response = ath.get_table_metadata( CatalogName='AwsDataCatalog',", "try: response = ath.get_database( CatalogName='AwsDataCatalog', DatabaseName='scifimovies' ) print(\"Database found\") except: print(\"No Database Found\")", "boto3 import sys ath = boto3.client('athena') try: response = ath.get_database( CatalogName='AwsDataCatalog', DatabaseName='scifimovies' )", "except: print(\"No Database Found\") try: response = ath.get_table_metadata( CatalogName='AwsDataCatalog', DatabaseName='scifimovies', TableName='scifix' ) print(\"Table", "import boto3 import sys ath = boto3.client('athena') try: response = ath.get_database( CatalogName='AwsDataCatalog', DatabaseName='scifimovies'", "CatalogName='AwsDataCatalog', DatabaseName='scifimovies' ) print(\"Database found\") except: print(\"No Database Found\") try: response = ath.get_table_metadata(", "print(\"No Database Found\") try: response = ath.get_table_metadata( CatalogName='AwsDataCatalog', DatabaseName='scifimovies', TableName='scifix' ) print(\"Table Exists\")", "print(\"Database found\") except: print(\"No Database Found\") try: response = ath.get_table_metadata( CatalogName='AwsDataCatalog', DatabaseName='scifimovies', TableName='scifix'", "found\") except: print(\"No Database Found\") try: response = ath.get_table_metadata( CatalogName='AwsDataCatalog', DatabaseName='scifimovies', TableName='scifix' )", "ath.get_database( CatalogName='AwsDataCatalog', DatabaseName='scifimovies' ) print(\"Database found\") except: print(\"No Database Found\") try: response =", "= ath.get_database( 
CatalogName='AwsDataCatalog', DatabaseName='scifimovies' ) print(\"Database found\") except: print(\"No Database Found\") try: response", "ath = boto3.client('athena') try: response = ath.get_database( CatalogName='AwsDataCatalog', DatabaseName='scifimovies' ) print(\"Database found\") except:", "import os import boto3 import sys ath = boto3.client('athena') try: response = ath.get_database(", "response = ath.get_database( CatalogName='AwsDataCatalog', DatabaseName='scifimovies' ) print(\"Database found\") except: print(\"No Database Found\") try:", "Found\") try: response = ath.get_table_metadata( CatalogName='AwsDataCatalog', DatabaseName='scifimovies', TableName='scifix' ) print(\"Table Exists\") except: print(\"No", "<filename>debugging/check_athena.py import os import boto3 import sys ath = boto3.client('athena') try: response =", "= boto3.client('athena') try: response = ath.get_database( CatalogName='AwsDataCatalog', DatabaseName='scifimovies' ) print(\"Database found\") except: print(\"No", "os import boto3 import sys ath = boto3.client('athena') try: response = ath.get_database( CatalogName='AwsDataCatalog',", "try: response = ath.get_table_metadata( CatalogName='AwsDataCatalog', DatabaseName='scifimovies', TableName='scifix' ) print(\"Table Exists\") except: print(\"No Table", "Database Found\") try: response = ath.get_table_metadata( CatalogName='AwsDataCatalog', DatabaseName='scifimovies', TableName='scifix' ) print(\"Table Exists\") except:", "sys ath = boto3.client('athena') try: response = ath.get_database( CatalogName='AwsDataCatalog', DatabaseName='scifimovies' ) print(\"Database found\")", "response = ath.get_table_metadata( CatalogName='AwsDataCatalog', DatabaseName='scifimovies', TableName='scifix' ) print(\"Table Exists\") except: print(\"No Table Found\")", ") print(\"Database found\") except: print(\"No Database Found\") try: response = ath.get_table_metadata( CatalogName='AwsDataCatalog', DatabaseName='scifimovies'," ]
[ "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_packed_depth_stencil.txt', }, 'GL_NV_packed_float'", "}, 'WGL_EXT_depth_float' : { 'number' : 177, 'flags' : { 'public' }, 'supporters'", "'esnumber' : 76, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "}, 'url' : 'extensions/APPLE/APPLE_texture_packed_float.txt', }, 'GL_APPLE_texture_range' : { 'number' : 367, 'flags' :", "'esnumber' : 86, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_frag_depth.txt', }, 'GL_EXT_fragment_lighting'", ": 'extensions/EXT/EXT_stencil_clear_tag.txt', }, 'GL_EXT_stencil_two_side' : { 'number' : 268, 'flags' : { 'public'", "'arbnumber' : 110, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', }, 'GL_ARB_compute_shader'", ": { 'GOOGLE', 'NVIDIA', 'VMware' }, 'url' : 'extensions/EXT/EXT_window_rectangles.txt', }, 'GL_EXT_x11_sync_object' : {", ": 'extensions/AMD/AMD_compressed_3DC_texture.txt', }, 'GL_AMD_compressed_ATC_texture' : { 'esnumber' : 40, 'flags' : { 'public'", "}, 'url' : 'extensions/EXT/EXT_texture_filter_anisotropic.txt', }, 'GL_EXT_texture_filter_minmax' : { 'number' : 464, 'esnumber' :", "}, 'url' : 'extensions/NVX/NVX_linked_gpu_multicast.txt', }, 'GL_NV_3dvision_settings' : { 'esnumber' : 129, 'flags' :", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_tile_raster_order.txt', }, 'GL_MESA_window_pos' :", "'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', }, 'GL_EXT_texture_view' : { 'esnumber' : 185, 'flags' : { 'public' },", ": 'extensions/EXT/EXT_shared_texture_palette.txt', }, 'GL_EXT_sparse_texture' : { 'esnumber' : 240, 'flags' : { 'public'", "194, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 'extensions/ARB/ARB_texture_compression.txt', }, 'GL_ARB_texture_compression_bptc' : { 'arbnumber' : 77, 'flags' : { 'public'", "78, 'flags' : { 'public' }, 'supporters' : { 'MS', 'SGI' }, 'url'", ": { 'obsolete' }, 'supporters' : { 'SGI' }, 'url' : 
'extensions/SGIS/SGIS_multitexture.txt', },", "{ 'SGI' }, 'url' : 'extensions/SGI/SGI_complex_type.txt', }, 'GLX_SGI_cushion' : { 'number' : 62,", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_env_combine3.txt', }, 'GL_ATI_texture_float'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias' : { 'GL_EXT_shader_framebuffer_fetch_non_coherent' },", "}, 'url' : 'extensions/OES/OES_framebuffer_object.txt', }, 'GL_OES_geometry_shader' : { 'esnumber' : 210, 'flags' :", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_line_quality_hint.txt', }, 'GL_SGIX_list_priority' : { 'number'", ": { 'number' : 9, 'flags' : { 'public' }, 'supporters' : {", "'GL_SGIX_vertex_array_object' : { 'flags' : { 'obsolete' }, 'url' : 'extensions/SGIX/SGIX_vertex_array_object.txt', }, 'GL_SGIX_vertex_preclip'", "}, 'url' : 'extensions/ARB/ARB_texture_storage.txt', }, 'GL_ARB_texture_storage_multisample' : { 'arbnumber' : 141, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'ES', 'HP', 'SGI' }, 'url'", "'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_direct_state_access.txt', }, 'GL_EXT_discard_framebuffer' : { 'esnumber' : 64,", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_point_sprite.txt', }, 'GL_NV_polygon_mode' :", "65, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC',", "{ 'number' : 411, 'flags' : { 'public' }, 'supporters' : { 'AMD'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader2.txt', }, 'GL_NV_texture_shader3' : { 'number' :", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg1.txt', }, 'GL_SGIX_mpeg2' : { 'flags' : { 'incomplete'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_binding.txt', }, 'GL_ARB_vertex_blend' : { 'arbnumber' :", ": { 'esnumber' : 147, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_cube.txt',", ": 'extensions/SGIX/SGIX_color_matrix_accuracy.txt', }, 'GL_SGIX_color_table_index_mode' : { 
'flags' : { 'incomplete' }, 'url' :", ": { 'esnumber' : 27, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fbo_render_mipmap.txt',", ": 'extensions/ARB/ARB_map_buffer_range.txt', }, 'GL_ARB_matrix_palette' : { 'arbnumber' : 16, 'flags' : { 'public'", "{ 'MESA' }, 'url' : 'extensions/MESA/MESA_shader_integer_functions.txt', }, 'GLX_MESA_swap_control' : { 'number' : 514,", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_packed_6bytes.txt', }, 'GLX_SGIX_pbuffer' : { 'number'", ": 16, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'number' : 131, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'GL_ARB_clear_texture' : { 'arbnumber' : 145, 'flags' : { 'public' }, 'url' :", "'GL_ARB_fragment_program_shadow' : { 'arbnumber' : 36, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/EXT/EXT_texture_env_dot3.txt', }, 'GL_EXT_texture_filter_anisotropic' : { 'number' : 187, 'esnumber' : 41, 'flags'", "'public' }, 'url' : 'extensions/ARB/ARB_multi_draw_indirect.txt', }, 'GL_ARB_multisample' : { 'arbnumber' : 5, 'flags'", "'GL_SGIS_fog_function' : { 'number' : 64, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/NV/NV_conservative_raster.txt', }, 'GL_NV_conservative_raster_dilate' : { 'number' : 480, 'flags' : {", "}, 'url' : 'extensions/SUN/SUN_slice_accum.txt', }, 'GL_SUN_triangle_list' : { 'number' : 165, 'flags' :", ": 'extensions/I3D/WGL_I3D_digital_video_control.txt', }, 'WGL_I3D_gamma' : { 'number' : 251, 'flags' : { 'public'", "'url' : 'extensions/NV/NV_query_resource.txt', }, 'GL_NV_query_resource_tag' : { 'number' : 512, 'flags' : {", "'GL_EXT_occlusion_query_boolean' : { 'esnumber' : 100, 'flags' : { 'public' }, 'url' :", ": 180, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_post_depth_coverage.txt', }, 'GL_ARB_program_interface_query' :", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_pn_triangles.txt', }, 'GL_ATI_separate_stencil' :", ": 'extensions/ARB/ARB_vertex_buffer_object.txt', 
'alias' : { 'GLX_ARB_vertex_buffer_object' }, }, 'GL_ARB_vertex_program' : { 'arbnumber' :", ": { 'number' : 470, 'esnumber' : 233, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Included with arbnumber", ": { 'esnumber' : 20, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map.txt',", "}, 'url' : 'extensions/ARB/ARB_depth_texture.txt', }, 'GL_ARB_derivative_control' : { 'arbnumber' : 163, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range.txt',", "}, }, 'GL_EXT_geometry_shader' : { 'esnumber' : 177, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisample_compatibility.txt', }, 'GL_EXT_multisampled_render_to_texture' : { 'esnumber'", "'GL_NV_present_video' : { 'number' : 347, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_stencil8.txt', }, 'GL_OES_texture_storage_multisample_2d_array' : { 'esnumber' : 174,", ": 60, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "{ 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', }, 'GL_IMG_bindless_texture' : { 'esnumber' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_path_rendering.txt', },", "{ 'number' : 410, 'esnumber' : 199, 'flags' : { 'public' }, 'supporters'", "{ 'arbnumber' : 117, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage.txt', },", ": 304, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'GLX_SGIX_swap_group' : { 'number' : 91, 'flags' : { 'public' }, 'supporters'", "'GL_ARM_rgba8' : { 'esnumber' : 82, 'flags' : { 'public' }, 'url' :", ": 61, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_writeonly_rendering.txt', }, 'GL_QCOM_framebuffer_foveated' :", "'extensions/ANGLE/ANGLE_program_binary.txt', }, 'GL_ANGLE_texture_compression_dxt3' : { 'esnumber' : 111, 'flags' : { 
'public' },", "344, 'flags' : { 'public' }, 'supporters' : { 'MESA', 'NVIDIA' }, 'url'", "'GL_NV_primitive_restart' : { 'number' : 285, 'flags' : { 'public' }, 'supporters' :", ": { 'arbnumber' : 74, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt',", "359, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", ": { 'number' : 344, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 480, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_dilate.txt',", "{ 'number' : 429, 'flags' : { 'public' }, 'supporters' : { 'INTEL'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clip_control.txt', }, 'GL_ARB_color_buffer_float' : { 'arbnumber'", ": 161, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "'arbnumber' : 177, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_shader_interlock.txt', }, 'GL_ARB_framebuffer_no_attachments'", ": 'extensions/EXT/EXT_blend_color.txt', }, 'GL_EXT_blend_equation_separate' : { 'number' : 299, 'flags' : { 'public'", "'GL_ARB_sparse_texture_clamp' : { 'arbnumber' : 187, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context_robustness.txt', }, 'GL_ARB_cull_distance' : { 'arbnumber'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query.txt', }, 'GL_ARB_internalformat_query2' : { 'arbnumber' : 131,", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_latc.txt', }, 'GL_EXT_texture_compression_rgtc' : { 'number' : 332, 'esnumber'", "'private' }, 'url' : 'drafts/MTK/MTK_shader_binary.txt', }, 'GL_NVX_blend_equation_advanced_multi_draw_buffers' : { 'number' : 492, 'esnumber'", ": 101, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "{ 'arbnumber' : 28, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_blit.txt', }, 'GL_EXT_framebuffer_multisample' : { 'number' : 317,", "'GL_ARB_separate_shader_objects' : { 'arbnumber' : 97, 
'flags' : { 'public' }, 'url' :", "'url' : 'extensions/OES/OES_rgb8_rgba8.txt', }, 'GL_OES_sample_shading' : { 'esnumber' : 169, 'flags' : {", ": 'extensions/OES/OES_copy_image.txt', }, 'GL_OES_depth24' : { 'esnumber' : 24, 'flags' : { 'public'", "'url' : 'extensions/ARB/ARB_shader_image_load_store.txt', }, 'GL_ARB_shader_image_size' : { 'arbnumber' : 136, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_int64.txt', }, 'GL_NV_shader_buffer_load' : { 'number' : 379, 'flags'", "'GL_EXT_blend_func_separate' : { 'number' : 173, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'url' : 'extensions/ARB/ARB_ES3_compatibility.txt', }, 'GL_ARB_arrays_of_arrays' : { 'arbnumber' : 120, 'flags'", ": 'extensions/EXT/EXT_texture_mirror_clamp.txt', }, 'GL_EXT_texture_mirror_clamp_to_edge' : { 'esnumber' : 291, 'flags' : { 'public'", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_timer_instruments.txt', }, 'GL_SGIX_blend_alpha_minmax' : { 'number' : 119, 'flags'", "{ 'public' }, 'url' : 'extensions/OES/OES_point_size_array.txt', }, 'GL_OES_point_sprite' : { 'esnumber' : 15,", ": { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', }, 'GL_EXT_EGL_image_storage' : { 'number' :", ": { 'number' : 33, 'flags' : { 'public' }, 'supporters' : {", "'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_color_table.txt', 'comments' : 'Partial HP support.', }, 'GL_SGI_complex'", ": 73, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_lod.txt', }, 'GL_ARB_texture_rectangle' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_program4.txt', },", "}, 'GL_ARB_viewport_array' : { 'arbnumber' : 100, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program.txt', }, 'GL_NV_fragment_program2'", "{ 'arbnumber' : 67, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_multisample.txt', },", "48, 'flags' : { 'public' }, 'url' : 
'extensions/AMD/AMD_program_binary_Z400.txt', }, 'GL_AMD_query_buffer_object' : {", "'public' }, 'url' : 'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', }, 'GL_IMG_texture_filter_cubic' : { 'esnumber' : 251, 'flags'", "}, 'url' : 'extensions/NV/NV_path_rendering_shared_edge.txt', }, 'GL_NV_pixel_buffer_object' : { 'esnumber' : 134, 'flags' :", "'GL_ARB_sparse_texture' : { 'arbnumber' : 158, 'flags' : { 'public' }, 'url' :", "'GL_EXT_shader_integer_mix' : { 'number' : 437, 'esnumber' : 161, 'flags' : { 'public'", "'number' : 67, 'flags' : { 'public' }, 'supporters' : { 'HP' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect.txt', }, 'GL_NV_bindless_multi_draw_indirect_count' : { 'number'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_linked_gpu_multicast.txt',", "}, 'url' : 'extensions/SGIX/SGIX_shadow_ambient.txt', }, 'GL_SGIX_slim' : { 'flags' : { 'incomplete' },", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_spotlight_cutoff.txt', }, 'GL_SGIX_sprite'", "'esnumber' : 110, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', }, 'GL_ANGLE_program_binary'", "'public' }, 'url' : 'extensions/APPLE/APPLE_clip_distance.txt', }, 'GL_APPLE_color_buffer_packed_float' : { 'esnumber' : 194, 'flags'", "'extensions/ARB/ARB_ES2_compatibility.txt', }, 'GL_ARB_ES3_1_compatibility' : { 'arbnumber' : 159, 'flags' : { 'public' },", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_tag_sample_buffer.txt', }, 'GL_SGIX_texture_add_env' : { 'number'", "'public' }, 'url' : 'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', }, 'GL_REND_screen_coordinates' : { 'number' : 155, 'flags'", "'esnumber' : 252, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_polygon_offset_clamp.txt', }, 'GL_EXT_post_depth_coverage'", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/WGL_AMD_gpu_association.txt',", 
"{ 'number' : 493, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_cushion.txt', }, 'GL_SGI_fft' : { 'number' : 99, 'flags'", "}, 'GL_EXT_misc_attribute' : { 'number' : 31, 'flags' : { 'public' }, 'url'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_spotlight_cutoff.txt', }, 'GL_SGIX_sprite' : { 'number' :", "}, 'GL_NV_non_square_matrices' : { 'esnumber' : 160, 'flags' : { 'public' }, 'url'", ": 'extensions/EXT/EXT_framebuffer_multisample.txt', }, 'GL_EXT_framebuffer_multisample_blit_scaled' : { 'number' : 409, 'flags' : { 'public'", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_agp_offset.txt', }, 'GLX_MESA_copy_sub_buffer' : { 'number'", "117, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage.txt', }, 'GL_ARB_texture_storage_multisample' : {", "'extensions/SGIX/SGIX_fog_offset.txt', }, 'GL_SGIX_fog_patchy' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_patchy.txt',", ": { 'number' : 188, 'flags' : { 'public' }, 'supporters' : {", "'GL_NV_texgen_emboss' : { 'number' : 193, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 245, 'flags' : { 'public' }, 'supporters' : {", "414, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'GL_EXT_float_blend' : { 'esnumber' : 224, 'flags' : { 'public' }, 'url'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_cube_map.txt', }, 'GL_SGIX_cylinder_texgen' : { 'number' : 140, 'flags'", "}, 'url' : 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', }, 'GL_SGIX_texture_lod_bias' : { 'number' : 84, 'flags' :", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_subdiv_patch.txt', }, 'GL_SGIX_subsample' : {", "'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control_tear.txt', }, 'GL_EXT_clip_control' : { 'esnumber' : 290, 'flags'", ": 'extensions/NV/NV_geometry_shader4.txt', }, 'GL_NV_geometry_shader_passthrough' : { 'number' : 470, 'esnumber' : 233, 'flags'", "}, 'url' : 
'extensions/SGIX/SGIX_scalebias_hint.txt', }, 'GL_SGIX_shadow' : { 'number' : 34, 'flags' :", "}, 'GLX_EXT_libglvnd' : { 'number' : 482, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/NV/NV_compute_program5.txt', }, 'GL_NV_conditional_render' : { 'number' : 346, 'esnumber' : 198,", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_aux_depth_stencil.txt',", "'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_combine.txt', }, 'GL_EXT_texture_env_dot3' : { 'number' : 220,", "'url' : 'extensions/IMG/IMG_read_format.txt', }, 'GL_IMG_shader_binary' : { 'esnumber' : 68, 'flags' : {", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/EXT/EXT_texture_env_dot3.txt', }, 'GL_EXT_texture_filter_anisotropic' :", "'extensions/SGIX/SGIX_bali_g_instruments.txt', }, 'GL_SGIX_bali_r_instruments' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_r_instruments.txt',", "}, 'url' : 'extensions/SGIX/SGIX_texture_range.txt', }, 'GL_SGIX_texture_scale_bias' : { 'number' : 56, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_subtract.txt', }, 'GL_OES_byte_coordinates' : { 'number'", "{ 'number' : 183, 'flags' : { 'public' }, 'supporters' : { 'SUN'", "'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', }, 'GL_ARB_texture_mirrored_repeat' : { 'arbnumber' : 21, 'flags' : { 'public' },", "}, 'GL_ARB_vertex_attrib_binding' : { 'arbnumber' : 125, 'flags' : { 'public' }, 'url'", "}, 'GL_EXT_multi_draw_indirect' : { 'esnumber' : 205, 'flags' : { 'public' }, 'url'", "'GL_SGIX_blend_alpha_minmax' : { 'number' : 119, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias' : { 'GL_KHR_texture_compression_astc_ldr' }, }, 'GL_KHR_texture_compression_astc_sliced_3d'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_crossbar.txt', }, 'GL_ARB_texture_env_dot3' : { 'arbnumber' : 19,", "{ 'esnumber' : 257, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_clip_cull_distance.txt', },", "'GL_OES_fixed_point' : { 'number' : 292, 'esnumber' : 9, 'flags' : { 'public'", "}, 'url' : 'extensions/NV/NV_point_sprite.txt', }, 'GL_NV_polygon_mode' : { 'esnumber' : 238, 'flags' :", "'esnumber' : 263, 'flags' : { 'public' }, 'supporters' : { 'GOOGLE', 'NVIDIA',", ": 'extensions/EXT/EXT_buffer_storage.txt', }, 'GL_EXT_clear_texture' : { 'esnumber' : 269, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'MESA', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt',", ": 'extensions/NV/NV_texture_expand_normal.txt', }, 'GL_NV_texture_multisample' : { 'number' : 403, 'flags' : { 'public'", "}, 'GL_EXT_clip_cull_distance' : { 'esnumber' : 257, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/ARB/ARB_shader_draw_parameters.txt', }, 'GL_ARB_shader_group_vote' : { 'arbnumber' : 157, 'flags' : {", "{ 'public' }, 'url' : 'extensions/ANGLE/ANGLE_depth_texture.txt', }, 'GL_ANGLE_framebuffer_blit' : { 'esnumber' : 83,", ": 283, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_win32_keyed_mutex.txt', }, 'GL_EXT_window_rectangles' :", "'url' : 'extensions/SGIX/SGIX_depth_pass_instrument.txt', }, 'GL_SGIX_depth_texture' : { 'number' : 63, 'flags' : {", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object.txt', }, 'GL_NV_parameter_buffer_object2' : { 'number' :", "'public' }, 'url' : 'extensions/ARB/ARB_derivative_control.txt', }, 'GL_ARB_direct_state_access' : { 'arbnumber' : 164, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_add_env.txt',", "'public' }, 'url' : 'extensions/OES/OES_extended_matrix_palette.txt', }, 'GL_OES_fbo_render_mipmap' : { 'esnumber' : 27, 'flags'", "'esnumber' : 249, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', }, 'GL_MESAX_texture_stack'", ": { 'public' }, 'supporters' : { '3DFX', '3DL', 'SGI' }, 'url' :", "'GL_SGIX_instrument_error' : { 'flags' : { 
'incomplete' }, 'url' : 'extensions/SGIX/SGIX_instrument_error.txt', }, 'GL_SGIX_instruments'", "'GL_EXT_conservative_depth' : { 'esnumber' : 268, 'flags' : { 'public' }, 'url' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5.txt', }, 'GL_NV_gpu_program5_mem_extended' : { 'number'", "}, 'GL_SGIS_texture_color_mask' : { 'number' : 214, 'flags' : { 'incomplete', 'public' },", "{ 'number' : 184, 'flags' : { 'incomplete', 'public' }, 'supporters' : {", "}, 'GL_EXT_shared_texture_palette' : { 'number' : 141, 'flags' : { 'public' }, 'supporters'", "'extensions/OES/OES_blend_subtract.txt', }, 'GL_OES_byte_coordinates' : { 'number' : 291, 'esnumber' : 4, 'flags' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_primitive_restart.txt', },", "'arbnumber' : 41, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "148, 'esnumber' : 69, 'flags' : { 'public' }, 'supporters' : { 'IBM',", "}, 'GL_NV_primitive_restart' : { 'number' : 285, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/OES/OES_draw_buffers_indexed.txt', }, 'GL_OES_draw_elements_base_vertex' : { 'esnumber' : 219, 'flags' :", "'arbnumber' : 65, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cube_map.txt', }, 'GL_ARB_seamless_cubemap_per_texture'", ": 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', }, 'GL_EXT_EGL_image_storage' : { 'number' : 522, 'esnumber' : 301, 'flags'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_gpu_shader5.txt', }, 'GL_EXT_histogram' : { 'number' : 11,", "}, 'url' : 'extensions/EXT/GLX_EXT_visual_rating.txt', }, 'GL_EXT_win32_keyed_mutex' : { 'number' : 506, 'esnumber' :", "}, 'url' : 'extensions/OML/OML_interlace.txt', }, 'GL_OML_resample' : { 'number' : 241, 'flags' :", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_element_array.txt', }, 'GL_APPLE_fence' : { 'number' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_square.txt', }, 'GL_NV_clip_space_w_scaling' : { 'number'", "{ 'SGI' }, 
'url' : 'extensions/SGIX/SGIX_fragment_lighting_space.txt', }, 'GL_SGIX_fragment_specular_lighting' : { 'flags' : {", ": { 'public' }, 'url' : 'extensions/OES/OES_sample_shading.txt', }, 'GL_OES_sample_variables' : { 'esnumber' :", "'extensions/HP/HP_texture_lighting.txt', }, 'GL_IBM_cull_vertex' : { 'number' : 199, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fragment_precision_high.txt', }, 'GL_OES_framebuffer_object' : { 'esnumber'", ": { 'public' }, 'url' : 'extensions/OES/OES_blend_func_separate.txt', }, 'GL_OES_blend_subtract' : { 'esnumber' :", ": 233, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "97, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_separate_shader_objects.txt', }, 'GL_ARB_shader_atomic_counter_ops' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb.txt', }, 'GL_SGIX_ycrcb_subsample' : { 'number'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_colorspace.txt', },", "}, 'GLX_ARB_create_context_no_error' : { 'arbnumber' : 191, 'flags' : { 'public' }, 'url'", ": { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_snorm.txt', }, 'GL_EXT_texture_storage' : { 'esnumber'", "}, 'supporters' : { 'IBM', 'IMG', 'SUN' }, 'url' : 'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias' :", ": 'extensions/AMD/AMD_query_buffer_object.txt', }, 'GL_AMD_sample_positions' : { 'number' : 405, 'flags' : { 'public'", "}, 'url' : 'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias' : { 'GL_SGIS_color_range' }, }, 'GL_SGIS_detail_texture' : {", "'arbnumber' : 76, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_include.txt', }, 'GL_ARB_shading_language_packing'", ": { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_cull_vertex.txt', }, 'GL_EXT_debug_label' : { 'number'", ": { 'esnumber' : 175, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_copy_image.txt',", "'public' }, 'url' : 'extensions/OES/OES_gpu_shader5.txt', }, 
'GL_OES_mapbuffer' : { 'esnumber' : 29, 'flags'", "}, 'url' : 'extensions/NV/NV_fbo_color_attachments.txt', }, 'GL_NV_fence' : { 'number' : 222, 'esnumber' :", "{ 'arbnumber' : 144, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_buffer_storage.txt', },", "{ 'flags' : { 'incomplete', 'public' }, 'url' : 'extensions/SGIX/SGIX_fragment_specular_lighting.txt', }, 'GL_SGIX_fragments_instrument' :", ": { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_blackhole_render.txt', },", "'public' }, 'url' : 'extensions/OES/OES_texture_mirrored_repeat.txt', }, 'GL_OES_texture_npot' : { 'esnumber' : 37, 'flags'", "'arbnumber' : 170, 'esnumber' : 190, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/NV/NV_present_video.txt', 'alias' : { 'GLX_NV_present_video', 'WGL_NV_present_video' }, }, 'GL_NV_primitive_restart' :", "'public' }, 'url' : 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', }, 'GL_OES_compressed_paletted_texture' : { 'number' : 294, 'esnumber'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_coordinate_frame.txt', }, 'GL_EXT_copy_image' : { 'esnumber' : 175,", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_bits.txt', }, 'GL_SGIX_pixel_texture_lod'", "'number' : 317, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample.txt', }, 'GL_EXT_framebuffer_multisample_blit_scaled'", ": { 'esnumber' : 106, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture.txt',", ": 477, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_command_list.txt', }, 'GL_NV_compute_program5' :", ": { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_cube.txt', }, 'GL_NV_stereo_view_rendering' : { 'number' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_weighting.txt', },", ": 'extensions/OES/OES_rgb8_rgba8.txt', }, 'GL_OES_sample_shading' : { 'esnumber' : 169, 'flags' : { 'public'", "'supporters' : { 'SGI' }, 'url' : 
'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments' : 'Supported on Visual", "'url' : 'extensions/EXT/EXT_framebuffer_blit.txt', }, 'GL_EXT_framebuffer_multisample' : { 'number' : 317, 'flags' : {", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_r_instruments.txt', }, 'GL_SGIX_bali_timer_instruments' : {", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_layer.txt', }, 'GL_AMD_vertex_shader_tessellator' :", "}, 'GL_ARB_transpose_matrix' : { 'arbnumber' : 3, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_multisample.txt', }, 'GL_ARB_texture_non_power_of_two' : { 'arbnumber' :", "'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_buffer_object.txt', 'alias' : { 'GLX_ARB_vertex_buffer_object' }, }, 'GL_ARB_vertex_program' :", "'number' : 244, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", "'url' : 'extensions/SGIS/SGIS_clip_band_hint.txt', }, 'GLX_SGIS_color_range' : { 'number' : 115, 'flags' : {", ": { 'GLX_ARB_vertex_buffer_object' }, }, 'GL_ARB_vertex_program' : { 'arbnumber' : 26, 'flags' :", "'url' : 'extensions/EXT/EXT_texture_rg.txt', }, 'GL_EXT_texture_sRGB' : { 'number' : 315, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_stencil_export.txt',", "'extensions/SGIX/GLX_SGIX_wait_group.txt', }, 'GL_SGIX_ycrcb' : { 'number' : 101, 'flags' : { 'public' },", "'GL_EXT_blend_color' : { 'number' : 2, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 225, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'url' : 'extensions/NV/NV_framebuffer_blit.txt', }, 'GL_NV_framebuffer_mixed_samples' : { 'number' : 469, 'esnumber' : 231,", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_point_sprite.txt', }, 'GL_NV_polygon_mode' : {", "{ 'number' : 360, 'esnumber' : 50, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/ANGLE/ANGLE_instanced_arrays.txt', }, 'GL_ANGLE_pack_reverse_row_order' : { 
'esnumber' : 110, 'flags' :", ": 'extensions/ARB/ARB_framebuffer_object.txt', }, 'GL_ARB_framebuffer_sRGB' : { 'arbnumber' : 46, 'flags' : { 'public'", ": { 'arbnumber' : 101, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context_robustness.txt',", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/EXT/EXT_texture_cube_map.txt', 'comments' : 'Extension", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_float.txt', },", "'GL_ARB_shader_viewport_layer_array' : { 'arbnumber' : 185, 'flags' : { 'public' }, 'url' :", "{ 'arbnumber' : 110, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', },", "'public' }, 'url' : 'extensions/EXT/EXT_draw_elements_base_vertex.txt', }, 'GL_EXT_draw_instanced' : { 'number' : 327, 'esnumber'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_libglvnd.txt', }, 'GL_EXT_light_texture' : { 'number'", ": { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster.txt', }, 'GL_NV_conservative_raster_dilate' : { 'number' :", "}, }, 'GL_ARB_vertex_program' : { 'arbnumber' : 26, 'flags' : { 'public' },", "'url' : 'extensions/MESA/MESA_shader_integer_functions.txt', }, 'GLX_MESA_swap_control' : { 'number' : 514, 'flags' : {", ": 61, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "{ 'esnumber' : 298, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', },", ": 'extensions/ARB/ARB_map_buffer_alignment.txt', }, 'GL_ARB_map_buffer_range' : { 'arbnumber' : 50, 'flags' : { 'public'", ": 'extensions/NV/NV_alpha_to_coverage_dither_control.txt', }, 'GL_NV_bgr' : { 'esnumber' : 135, 'flags' : { 'public'", "'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pbuffer.txt', }, 'WGL_EXT_pixel_format' : { 'number' : 170,", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_polygon_mode.txt', }, 'GL_NV_present_video' : { 'number'", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 
'extensions/APPLE/APPLE_vertex_array_object.txt', }, 'GL_APPLE_vertex_array_range' :", "128, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_uniform_location.txt', }, 'GL_ARB_fragment_coord_conventions' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_multicast.txt', }, 'GL_NV_gpu_program4' : { 'number' : 322,", ": 'extensions/NV/NV_register_combiners.txt', }, 'GL_NV_register_combiners2' : { 'number' : 227, 'flags' : { 'public'", "97, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_half_float.txt', }, 'GL_EXT_color_subtable' : {", "'url' : 'extensions/EXT/EXT_render_snorm.txt', }, 'GL_EXT_rescale_normal' : { 'number' : 27, 'flags' : {", "272, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_framebuffer_multisample_coverage.txt',", "'extensions/SGIX/SGIX_texture_lod_bias.txt', }, 'GL_SGIX_texture_mipmap_anisotropic' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt',", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_shader4.txt', }, 'GL_NV_geometry_shader_passthrough' : { 'number'", ": 155, 'flags' : { 'public' }, 'supporters' : { 'REND' }, 'url'", "268, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_barrier.txt', }, 'GLX_SGIX_swap_group' :", "'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', }, 'GL_EXT_framebuffer_object' : { 'number' : 310,", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_counters.txt', }, 'GL_NV_shader_atomic_float' :", "'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow.txt', }, 'GL_SGIX_shadow_ambient' : { 'number' : 90,", "{ 'arbnumber' : 54, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": { 'number' : 505, 'esnumber' : 282, 'flags' : { 'public' },", ": 
'extensions/INTEL/INTEL_performance_query.txt', }, 'GLX_INTEL_swap_event' : { 'number' : 384, 'flags' : { 'public'", "'GL_ATI_pn_triangles' : { 'number' : 246, 'flags' : { 'public' }, 'supporters' :", "}, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/HP/HP_image_transform.txt', }, 'GL_HP_occlusion_test' :", "}, 'url' : 'extensions/ARB/ARB_texture_compression.txt', }, 'GL_ARB_texture_compression_bptc' : { 'arbnumber' : 77, 'flags' :", ": 'extensions/NV/NV_compute_program5.txt', }, 'GL_NV_conditional_render' : { 'number' : 346, 'esnumber' : 198, 'flags'", "{ 'public' }, 'url' : 'extensions/APPLE/APPLE_framebuffer_multisample.txt', }, 'GL_APPLE_object_purgeable' : { 'number' : 371,", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_trinary_minmax.txt', },", ": 229, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_coverage_to_color.txt', }, 'GL_NV_fragment_program' :", ": { 'public' }, 'url' : 'extensions/APPLE/APPLE_color_buffer_packed_float.txt', }, 'GL_APPLE_copy_texture_levels' : { 'esnumber' :", "}, 'GL_NV_vertex_program2' : { 'number' : 287, 'flags' : { 'public' }, 'supporters'", "24, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth24.txt', }, 'GL_OES_depth32' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_render_texture.txt',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_expand_normal.txt',", ": { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_array.txt', }, 'GL_EXT_EGL_image_external_wrap_modes' : { 'esnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_buffers_indexed.txt', }, 'GL_OES_draw_elements_base_vertex' : { 'esnumber'", "{ 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_generate_mipmap.txt', },", "'public' }, 'url' : 'extensions/ARB/ARB_post_depth_coverage.txt', }, 'GL_ARB_program_interface_query' : { 'arbnumber' : 134, 'flags'", "{ 'public' }, 'supporters' : { 'KHR' 
}, 'url' : 'extensions/OML/GLX_OML_swap_method.txt', }, 'GLX_OML_sync_control'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_matrix_palette.txt', }, 'GL_ARB_multi_bind' : {", "}, 'url' : 'extensions/AMD/AMD_stencil_operation_extended.txt', }, 'GL_AMD_texture_gather_bias_lod' : { 'number' : 502, 'flags' :", "255, 'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_pixel_buffer_object.txt', }, 'GL_ARB_point_parameters' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_reflection.txt',", ": { 'number' : 515, 'esnumber' : 292, 'flags' : { 'public' },", "{ 'number' : 165, 'flags' : { 'public' }, 'supporters' : { 'SUN'", "'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI',", ": 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', }, 'GL_ANGLE_depth_texture' : { 'esnumber' : 138, 'flags' : { 'public'", ": 194, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_color_buffer_packed_float.txt', }, 'GL_APPLE_copy_texture_levels' :", "'arbnumber' : 27, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias'", "'number' : 135, 'flags' : { 'public' }, 'supporters' : { 'INTEL' },", "}, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl' : 'extensions/EXT/EXT_separate_shader_objects.gles.txt',", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_bit_encoding.txt', }, 'GL_ARB_shader_clock' : { 'arbnumber'", "'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_combine.txt', }, 'GL_EXT_texture_env_dot3'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression.txt', }, 'GL_ARB_texture_compression_bptc' :", "'GL_EXT_primitive_bounding_box' : { 'esnumber' : 186, 'flags' : { 'public' }, 'url' 
:", ": 'extensions/AMD/AMD_gcn_shader.txt', }, 'GLX_AMD_gpu_association' : { 'number' : 398, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control_tear.txt', }, 'GL_EXT_clip_control'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_dot3.txt', }, 'GL_ARB_texture_filter_anisotropic' : { 'arbnumber' : 195,", ": 'extensions/MESA/MESA_shader_integer_functions.txt', }, 'GLX_MESA_swap_control' : { 'number' : 514, 'flags' : { 'public'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_storage_buffer_object.txt', }, 'GL_NV_shader_thread_group' : { 'number' :", "'public' }, 'url' : 'extensions/ARB/ARB_shading_language_420pack.txt', }, 'GL_ARB_shading_language_include' : { 'arbnumber' : 76, 'flags'", ": { 'arbnumber' : 153, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compute_variable_group_size.txt',", ": 115, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_load_store.txt', }, 'GL_ARB_shader_image_size' :", ": { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_debug_output.txt', }, 'GL_AMD_depth_clamp_separate' : { 'number' :", "}, 'url' : 'extensions/ARB/ARB_base_instance.txt', }, 'GL_ARB_bindless_texture' : { 'arbnumber' : 152, 'flags' :", ": 'extensions/ARB/GLX_ARB_get_proc_address.txt', }, 'GL_ARB_get_program_binary' : { 'arbnumber' : 96, 'flags' : { 'public'", "{ 'number' : 470, 'esnumber' : 233, 'flags' : { 'public' }, 'url'", "'GL_SUN_triangle_list' : { 'number' : 165, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 201, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 369, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/WIN/WIN_specular_fog.txt', }, 'WGL_NV_DX_interop' : { 'number' : 407, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_draw_elements_base_vertex.txt', }, 'GL_ARB_draw_indirect' : { 'arbnumber' :", ": 406, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'url' : 'extensions/NV/NV_read_depth_stencil.txt', }, 
'GL_NV_register_combiners' : { 'number' : 191, 'flags' :", "192, 'esnumber' : 288, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_parallel_shader_compile.txt', },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2.txt', }, 'GL_NV_vertex_program2_option' : { 'number' : 305, 'flags'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_patchy.txt', }, 'GL_SGIX_fog_scale' : {", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_image_compression.txt', }, 'GL_SGIX_impact_pixel_texture' : { 'number' : 126, 'flags'", ": { 'SGI' }, 'url' : 'extensions/EXT/EXT_light_texture.txt', }, 'GL_EXT_map_buffer_range' : { 'esnumber' :", "}, 'url' : 'extensions/EXT/EXT_histogram.txt', }, 'GLX_EXT_import_context' : { 'number' : 47, 'flags' :", "}, 'url' : 'extensions/OES/OES_query_matrix.txt', }, 'GL_OES_read_format' : { 'number' : 295, 'esnumber' :", "}, 'GL_SGIX_dvc' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_dvc.txt', },", "}, 'GL_AMD_vertex_shader_tessellator' : { 'number' : 363, 'flags' : { 'public' }, 'supporters'", "133, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_array.txt', }, 'GL_NV_texture_barrier' : {", "{ 'number' : 445, 'flags' : { 'public' }, 'url' : 'extensions/NV/GLX_NV_delay_before_swap.txt', },", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_list_priority.txt', }, 'GL_SGIX_mpeg1' : { 'flags'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_texture_rectangle.txt', }, 'WGL_NV_swap_group' : { 'number' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_shading_language_100.txt', }, 'GL_ARB_shading_language_420pack' : { 'arbnumber' :", "}, 'GL_ATI_map_object_buffer' : { 'number' : 288, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_pixel_texture.txt',", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_scalebias_hint.txt', }, 'GL_SGIX_shadow' :", "'flags' : { 'public' }, 'url' : 
'extensions/ARB/ARB_robust_buffer_access_behavior.txt', }, 'GL_ARB_robustness' : { 'arbnumber'", ": 204, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_elements_base_vertex.txt', }, 'GL_EXT_draw_instanced' :", "'IBM', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_info.txt', }, 'GLX_EXT_visual_rating' : { 'number' :", "178, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_int64.txt', }, 'GL_ARB_half_float_pixel' : {", "}, 'GL_GREMEDY_string_marker' : { 'number' : 311, 'flags' : { 'public' }, 'supporters'", "'GL_NV_draw_instanced' : { 'esnumber' : 141, 'flags' : { 'public' }, 'url' :", ": { 'esnumber' : 11, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_get.txt',", "'MESA' }, 'url' : 'extensions/SGIX/SGIX_igloo_interface.txt', }, 'GL_SGIX_image_compression' : { 'flags' : { 'incomplete'", "'extensions/EXT/EXT_compiled_vertex_array.txt', }, 'GL_EXT_compressed_ETC1_RGB8_sub_texture' : { 'esnumber' : 188, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_conservative_depth.txt', }, 'GL_ARB_copy_buffer' : { 'arbnumber' : 59,", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_primitive_bounding_box.txt', }, 'GL_EXT_protected_textures' : { 'esnumber'", "'supporters' : { 'IBM', 'SUN' }, 'url' : 'extensions/EXT/EXT_rescale_normal.txt', }, 'GL_EXT_robustness' : {", "'url' : 'extensions/EXT/EXT_framebuffer_object.txt', }, 'GL_EXT_framebuffer_sRGB' : { 'number' : 337, 'flags' : {", "'extensions/INTEL/INTEL_framebuffer_CMAA.txt', }, 'GL_INTEL_map_texture' : { 'number' : 429, 'flags' : { 'public' },", "386, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'KHR' }, 'url' : 'extensions/OES/OES_read_format.txt', }, 'GL_OES_required_internalformat' : { 'esnumber' : 115,", ": 'extensions/ARB/ARB_multi_bind.txt', }, 'GL_ARB_multi_draw_indirect' : { 'arbnumber' : 133, 'flags' : { 'public'", "'GL_NV_vdpau_interop' : { 'number' : 396, 'flags' : { 'public' }, 'supporters' :", "'number' : 256, 'flags' : { 
'public' }, 'supporters' : { 'ATI' },", ": 463, 'esnumber' : 259, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture2.txt',", "{ 'esnumber' : 114, 'flags' : { 'public' }, 'url' : 'extensions/FJ/FJ_shader_binary_GCCSO.txt', },", "'supporters' : { '3DL' }, 'url' : 'extensions/3DL/WGL_3DL_stereo_control.txt', }, 'WGL_AMD_gpu_association' : { 'number'", "'public' }, 'url' : 'extensions/ARB/ARB_shader_group_vote.txt', }, 'GL_ARB_shader_image_load_store' : { 'arbnumber' : 115, 'flags'", "'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_object.txt', }, 'GL_EXT_framebuffer_sRGB' : { 'number' : 337,", "'esnumber' : 281, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_fd.txt', 'alias' :", ": { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_texture.txt', }, 'GLX_SGIX_dm_buffer' : { 'number'", "'extensions/AMD/AMD_occlusion_query_event.txt', }, 'GL_AMD_performance_monitor' : { 'number' : 360, 'esnumber' : 50, 'flags' :", "'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_hyperpipe.txt', }, 'GLU_SGIX_icc_compress' : { 'flags' : { 'incomplete'", "'3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_secondary_color.txt', }, 'GL_EXT_separate_shader_objects' : { 'number' :", "'DEC', 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_vertex_array.txt', }, 'GL_EXT_vertex_array_bgra' :", ": { 'number' : 252, 'flags' : { 'public' }, 'supporters' : {", "'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop2.txt', }, 'WGL_NV_delay_before_swap' : { 'number' : 436, 'flags'", "200, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_image_formats.txt', }, 'GL_NV_instanced_arrays' : {", ": 'extensions/EXT/EXT_stencil_wrap.txt', }, 'GL_EXT_subtexture' : { 'number' : 9, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'IBM', 'IMG', 'SUN' }, 'url' : 'extensions/EXT/EXT_multi_draw_arrays.txt',", ": 181, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_tessellation_shader.txt', 'alias' : {", "'url' : 
'extensions/EXT/EXT_texture_format_BGRA8888.txt', }, 'GL_EXT_texture_format_sRGB_override' : { 'esnumber' : 299, 'flags' : {", "'extensions/NV/NV_texture_shader3.txt', }, 'GL_NV_transform_feedback' : { 'number' : 341, 'flags' : { 'public' },", "'url' : 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias' : { 'GL_ANGLE_texture_compression_dxt1', 'GL_ANGLE_texture_compression_dxt5' }, }, 'GL_ANGLE_texture_usage' : {", "}, 'url' : 'extensions/EXT/EXT_debug_label.txt', }, 'GL_EXT_debug_marker' : { 'number' : 440, 'esnumber' :", "'url' : 'extensions/NV/NV_texture_rectangle.txt', }, 'GL_NV_texture_rectangle_compressed' : { 'number' : 509, 'flags' : {", "'number' : 290, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", ": { 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_border_clamp.txt', }, 'GL_SGIS_texture_color_mask' : {", ": 'extensions/AMD/AMD_shader_trinary_minmax.txt', }, 'GL_AMD_sparse_texture' : { 'number' : 426, 'flags' : { 'public'", "'url' : 'extensions/EXT/WGL_EXT_make_current_read.txt', }, 'WGL_EXT_multisample' : { 'number' : 209, 'flags' : {", "}, 'url' : 'extensions/APPLE/APPLE_flush_buffer_range.txt', }, 'GL_APPLE_framebuffer_multisample' : { 'esnumber' : 78, 'flags' :", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture4D.txt', }, 'GL_SGIS_texture_border_clamp' : { 'number' : 36, 'flags'", "'url' : 'extensions/EXT/EXT_timer_query.txt', }, 'GL_EXT_transform_feedback' : { 'number' : 352, 'flags' : {", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_swap_control.txt', }, 'GL_MESA_tile_raster_order' : {", "'arbnumber' : 155, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', }, 'GL_ARB_separate_shader_objects'", "'arbnumber' : 188, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_filter_minmax.txt', }, 'GL_ARB_texture_float'", "}, 'GL_SGIS_texture_select' : { 'number' : 51, 'flags' : { 'public' }, 'supporters'", "{ 'esnumber' : 248, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_multisample_compatibility.txt', },", "'url' : 'extensions/SGI/SGI_fft.txt', }, 'GLU_SGI_filter4_parameters' : { 'number' : 85, 'flags' : {", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shader_objects.txt', }, 'GL_ARB_shader_precision' :", "location unknown.', }, 'GL_OES_point_size_array' : { 'esnumber' : 14, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/AMD/AMD_shader_ballot.txt', },", "}, 'GL_ARB_texture_buffer_object' : { 'arbnumber' : 51, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_pvrtc_sRGB.txt', }, 'GL_EXT_raster_multisample' : { 'number'", "'url' : 'extensions/EXT/WGL_EXT_pbuffer.txt', }, 'WGL_EXT_pixel_format' : { 'number' : 170, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_bindable_uniform.txt',", ": 397, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_shader_implicit_conversions.txt', }, 'GL_EXT_shader_integer_mix' : { 'number' :", "'GL_EXT_422_pixels' : { 'number' : 178, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/EXT/GLX_EXT_stereo_tree.txt', }, 'GL_EXT_gpu_program_parameters' : { 'number' : 320, 'flags' :", "}, 'url' : 'extensions/ARB/ARB_texture_env_dot3.txt', }, 'GL_ARB_texture_filter_anisotropic' : { 'arbnumber' : 195, 'flags' :", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_array_object.txt', }, 'GL_ATI_vertex_attrib_array_object' : { 'number' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_sprite.txt', }, 'GL_SGIX_subdiv_patch' : { 'flags' :", "}, 'url' : 'extensions/OES/OES_texture_cube_map.txt', }, 'GL_OES_texture_cube_map_array' : { 'esnumber' : 217, 'flags' :", "'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_border_clamp.txt', }, 'GL_SGIS_texture_color_mask' : { 'number' : 214,", "'extensions/SGIX/SGIX_texture_multi_buffer.txt', }, 'GL_SGIX_texture_phase' : { 'flags' : { 
'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_phase.txt',", ": { 'arbnumber' : 67, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_multisample.txt',", "}, 'GL_EXT_stencil_two_side' : { 'number' : 268, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/SGIX/SGIX_icc_texture.txt', }, 'GL_SGIX_igloo_interface' : { 'number' : 219, 'flags' :", "'extensions/NV/WGL_NV_render_texture_rectangle.txt', }, 'WGL_NV_swap_group' : { 'number' : 351, 'flags' : { 'public' },", ": 'Evolved into EXT_texture_env_combine.', }, 'GL_EXT_texture' : { 'number' : 4, 'flags' :", "'GL_ARB_multi_bind' : { 'arbnumber' : 147, 'flags' : { 'public' }, 'url' :", ": 'extensions/ARB/ARB_window_pos.txt', }, 'GL_ARM_mali_program_binary' : { 'esnumber' : 120, 'flags' : { 'public'", "'url' : 'extensions/EXT/EXT_texture_compression_s3tc.txt', }, 'GL_EXT_texture_compression_s3tc_srgb' : { 'esnumber' : 289, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array2.txt', }, 'GL_NV_viewport_swizzle' : { 'number'", "'extensions/EXT/EXT_shader_image_load_formatted.txt', }, 'GL_EXT_shader_image_load_store' : { 'number' : 386, 'flags' : { 'public' },", "'extensions/ARB/ARB_robust_buffer_access_behavior.txt', }, 'GL_ARB_robustness' : { 'arbnumber' : 105, 'flags' : { 'public' },", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 'alias' : { 'GLX_EXT_create_context_es_profile' }, }, 'GL_EXT_cull_vertex'", ": { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_mesh_array.txt', },", "43, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture.txt', }, 'GL_OES_depth_texture_cube_map' : {", "72, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_gather.txt', }, 'GL_ARB_texture_mirror_clamp_to_edge' : {", "'number' : 195, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 280, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects.txt', 'alias' : {", ": 25, 'flags' : { 'public' }, 
'supporters' : { 'ARB' }, 'url'", "{ 'esnumber' : 200, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_image_formats.txt', },", "}, 'url' : 'extensions/SGIX/SGIX_fog_blend.txt', }, 'GL_SGIX_fog_factor_to_alpha' : { 'flags' : { 'incomplete' },", "57, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_element_array.txt', }, 'GL_APPLE_fence' : { 'number' : 272, 'flags'", ": 158, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture.txt', }, 'GL_ARB_sparse_texture2' :", "'arbnumber' : 38, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "}, 'url' : 'extensions/GREMEDY/GREMEDY_string_marker.txt', }, 'GL_HP_convolution_border_modes' : { 'number' : 67, 'flags' :", ": 231, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'url' : 'extensions/OES/OES_viewport_array.txt', }, 'GL_OML_interlace' : { 'number' : 239, 'flags' :", "451, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int64.txt', }, 'GL_AMD_interleaved_elements' : {", "'extensions/EXT/EXT_422_pixels.txt', }, 'GL_EXT_YUV_target' : { 'esnumber' : 222, 'flags' : { 'public' },", "'url' : 'extensions/ARB/ARB_transform_feedback3.txt', }, 'GL_ARB_transform_feedback_instanced' : { 'arbnumber' : 109, 'flags' : {", "'comments' : 'Evolved into EXT_texture_env_combine.', }, 'GL_EXT_texture' : { 'number' : 4, 'flags'", ": { 'number' : 425, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_dm_buffer.txt', }, 'GL_SGIX_dvc' :", "}, 'GL_NV_texture_barrier' : { 'number' : 381, 'esnumber' : 271, 'flags' : {", ": { 'public' }, 'url' : 'extensions/IMG/IMG_framebuffer_downsample.txt', }, 'GL_IMG_multisampled_render_to_texture' : { 'esnumber' :", "'number' : 278, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", "}, 'GL_ARB_polygon_offset_clamp' : { 'arbnumber' : 193, 'flags' : { 'public' }, 'supporters'", "'INGR', 'KGC', 'SGI' }, 'url' : 
'extensions/EXT/EXT_vertex_array.txt', }, 'GL_EXT_vertex_array_bgra' : { 'number' :", ": 'extensions/ARB/ARB_robust_buffer_access_behavior.txt', }, 'GL_ARB_robustness' : { 'arbnumber' : 105, 'flags' : { 'public'", "'extensions/NV/NV_shader_buffer_load.txt', }, 'GL_NV_shader_buffer_store' : { 'number' : 390, 'flags' : { 'public' },", "'extensions/ARB/ARB_arrays_of_arrays.txt', }, 'GL_ARB_base_instance' : { 'arbnumber' : 107, 'flags' : { 'public' },", "'arbnumber' : 130, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_framebuffer_no_attachments.txt', }, 'GL_ARB_framebuffer_object'", ": 'extensions/SGIX/SGIX_igloo_interface.txt', }, 'GL_SGIX_image_compression' : { 'flags' : { 'incomplete' }, 'url' :", "'url' : 'extensions/OES/OES_shader_image_atomic.txt', }, 'GL_OES_shader_io_blocks' : { 'esnumber' : 213, 'flags' : {", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_interlace.txt', }, 'GL_SGIX_ir_instrument1' : { 'number' :", "'ARB' }, 'url' : 'extensions/ARB/ARB_draw_buffers.txt', }, 'GL_ARB_draw_buffers_blend' : { 'arbnumber' : 69, 'flags'", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_supersample.txt', }, 'GL_SGIX_vector_ops' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_buffer_object.txt', }, 'GL_EXT_texture_compression_astc_decode_mode'", "'GL_ARB_texture_buffer_object' : { 'arbnumber' : 51, 'flags' : { 'public' }, 'supporters' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_transpose_matrix.txt', }, 'GL_ARB_uniform_buffer_object' : { 'arbnumber' :", "'number' : 397, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "'ARB' }, 'url' : 'extensions/ARB/ARB_point_sprite.txt', }, 'GL_ARB_polygon_offset_clamp' : { 'arbnumber' : 193, 'flags'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_frag_depth.txt', }, 'GL_EXT_fragment_lighting' : { 'number' :", "'GL_SGIX_color_matrix_accuracy' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_matrix_accuracy.txt', }, 
'GL_SGIX_color_table_index_mode'", "}, 'GL_AMD_gpu_shader_half_float' : { 'number' : 496, 'flags' : { 'public' }, 'supporters'", ": 'extensions/NV/NV_path_rendering.txt', }, 'GL_NV_path_rendering_shared_edge' : { 'number' : 471, 'esnumber' : 234, 'flags'", "'WGL_OML_sync_control' : { 'number' : 242, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/SGIX/SGIX_list_priority.txt', }, 'GL_SGIX_mpeg1' : { 'flags' : { 'incomplete' }, 'url'", "'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage.txt', }, 'GL_EXT_shader_pixel_local_storage2' : { 'esnumber' : 253, 'flags'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_BGRA8888.txt', }, 'GL_EXT_texture_format_sRGB_override' : { 'esnumber' :", "}, 'GL_NV_texture_rectangle_compressed' : { 'number' : 509, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/ARB/ARB_derivative_control.txt', }, 'GL_ARB_direct_state_access' : { 'arbnumber' : 164, 'flags' :", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_supersample.txt', }, 'GL_SGIX_vector_ops' : { 'flags'", ": { 'flags' : { 'incomplete', 'public' }, 'url' : 'extensions/SGIX/SGIX_fragment_specular_lighting.txt', }, 'GL_SGIX_fragments_instrument'", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_fft.txt', },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias' : { 'GLX_EXT_framebuffer_sRGB',", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_buffers.txt', }, 'GL_ARB_draw_buffers_blend' : { 'arbnumber' : 69,", "'public' }, 'url' : 'extensions/NV/NV_command_list.txt', }, 'GL_NV_compute_program5' : { 'number' : 421, 'flags'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2.txt', }, 'GL_NV_vertex_program2_option' : { 'number' : 305,", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program.txt', }, 'GL_NV_vertex_program1_1' : { 'number' :", "{ 'public' }, 'supporters' : { 'REND' }, 'url' : 'extensions/REND/REND_screen_coordinates.txt', }, 
'GL_S3_s3tc'", "'GL_ARB_compressed_texture_pixel_storage' : { 'arbnumber' : 110, 'flags' : { 'public' }, 'url' :", ": 329, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'esnumber' : 184, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_cube_map_array.txt',", ": 'extensions/OES/OES_vertex_array_object.txt', }, 'GL_OES_vertex_half_float' : { 'esnumber' : 38, 'flags' : { 'public'", "}, 'url' : 'extensions/SUNX/SUNX_constant_data.txt', }, 'GL_SUN_convolution_border_modes' : { 'number' : 182, 'flags' :", "'extensions/VIV/VIV_shader_binary.txt', }, 'WGL_3DL_stereo_control' : { 'number' : 313, 'flags' : { 'public' },", ": { 'esnumber' : 95, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc_update.txt',", ": { 'esnumber' : 34, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_3D.txt',", "'url' : 'extensions/EXT/EXT_abgr.txt', }, 'GL_EXT_base_instance' : { 'esnumber' : 203, 'flags' : {", "'supporters' : { 'INGR', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_wrap.txt', }, 'GL_EXT_subtexture' : {", "94, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback3.txt', }, 'GL_ARB_transform_feedback_instanced' : {", "'arbnumber' : 14, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2_option.txt', }, 'GL_NV_vertex_program3' : { 'number' :", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_integer.txt', }, 'GL_EXT_texture_lod_bias' : { 'number' :", "}, 'url' : 'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', }, 'GL_ARB_viewport_array' : { 'arbnumber' : 100, 'flags' :", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_conservative_depth.txt', }, 'GL_AMD_debug_output' : { 'number' : 395,", "{ 'number' : 289, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "}, 'GL_EXT_texture_norm16' : { 'esnumber' : 207, 'flags' : { 'public' }, 'url'", "'number' : 234, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_framebuffer_multisample.txt', }, 'GL_EXT_framebuffer_multisample_blit_scaled' : { 'number'", ": { 'number' : 120, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_scene_marker.txt',", "'extensions/EXT/EXT_texture_swizzle.txt', }, 'GL_EXT_texture_type_2_10_10_10_REV' : { 'esnumber' : 42, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_evaluators.txt', }, 'GL_NV_explicit_attrib_location'", ": 'extensions/ARB/ARB_clear_buffer_object.txt', }, 'GL_ARB_clear_texture' : { 'arbnumber' : 145, 'flags' : { 'public'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_rgtc.txt', }, 'GL_EXT_texture_compression_s3tc' : { 'number' : 198, 'esnumber'", "'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_sync_control.txt', }, 'GL_OVR_multiview' :", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', }, 'GL_SGIX_fog_layers' : { 'flags' : { 'incomplete'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_depth_texture.txt', }, 'WGL_NV_render_texture_rectangle' : {", ": { 'arbnumber' : 169, 'esnumber' : 189, 'flags' : { 'public' },", "'HP', 'SUN' }, 'url' : 'extensions/HP/HP_image_transform.txt', }, 'GL_HP_occlusion_test' : { 'number' : 137,", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_buffer_object.txt', 'alias' :", ": { 'number' : 200, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_texture.txt', }, 'GL_NV_blend_equation_advanced'", "{ 'number' : 336, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'GL_SGIX_async_pixel' : { 'number' : 133, 'flags' : { 'incomplete', 'public' },", "{ 'number' : 265, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "305, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'url' : 'extensions/NV/NV_texture_compression_latc.txt', }, 'GL_NV_texture_compression_s3tc' : { 'esnumber' : 128, 
'flags' : {", ": 337, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'url' : 'extensions/ARB/ARB_multitexture.txt', }, 'GL_ARB_occlusion_query' : { 'arbnumber' : 29, 'flags' :", "{ 'public' }, 'supporters' : { '3DL' }, 'url' : 'extensions/3DL/WGL_3DL_stereo_control.txt', }, 'WGL_AMD_gpu_association'", "}, 'url' : 'extensions/ARB/ARB_timer_query.txt', }, 'GL_ARB_transform_feedback2' : { 'arbnumber' : 93, 'flags' :", "}, 'url' : 'extensions/ARB/ARB_ES3_2_compatibility.txt', }, 'GL_ARB_ES3_compatibility' : { 'arbnumber' : 127, 'flags' :", "'GL_EXT_pvrtc_sRGB' : { 'esnumber' : 155, 'flags' : { 'public' }, 'url' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_int64.txt', }, 'GL_NV_shader_buffer_load' : { 'number' : 379,", "'number' : 45, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "{ 'arbnumber' : 20, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": { 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_texture.txt',", "'extensions/ARB/ARB_clear_texture.txt', }, 'GL_ARB_clip_control' : { 'arbnumber' : 160, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program2.txt', }, 'GL_NV_fragment_program4' :", "{ 'public' }, 'url' : 'extensions/OES/OES_depth24.txt', }, 'GL_OES_depth32' : { 'esnumber' : 25,", "{ 'arbnumber' : 91, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_tessellation_shader.txt', },", "257, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' :", "{ 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_wait_group.txt', }, 'GL_SGIX_ycrcb' : { 'number' : 101,", "{ 'number' : 313, 'flags' : { 'public' }, 'supporters' : { '3DL'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program_shadow.txt', }, 'GL_ARB_fragment_shader' : { 'arbnumber' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_gpu_affinity.txt', }, 'WGL_NV_render_depth_texture' : { 'number'", "specified. 
Similar to ARB_texture_cube_map.', }, 'GL_EXT_texture_cube_map_array' : { 'esnumber' : 184, 'flags' :", ": { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex.txt', }, 'GL_SGI_complex_type' : { 'number' :", ": { 'esnumber' : 166, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt',", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 'alias' : { 'GLX_EXT_create_context_es_profile' }, },", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/WGL_ATI_pixel_format_float.txt', }, 'WGL_EXT_colorspace' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program3.txt', }, 'GL_NV_vertex_program4' : { 'number' : 325,", "{ 'public' }, 'url' : 'extensions/NV/NV_geometry_shader_passthrough.txt', }, 'GL_NV_gpu_multicast' : { 'number' : 494,", "'GL_ARB_shader_texture_image_samples' : { 'arbnumber' : 166, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_mirror_once.txt', }, 'GL_ATI_vertex_array_object' : {", "{ 'public' }, 'url' : 'extensions/ARB/ARB_multi_draw_indirect.txt', }, 'GL_ARB_multisample' : { 'arbnumber' : 5,", "{ 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', }, 'GL_MESA_shader_integer_functions' : { 'number' : 495,", "'number' : 418, 'esnumber' : 197, 'flags' : { 'public' }, 'supporters' :", "'extensions/NV/NV_multisample_coverage.txt', }, 'GL_NV_multisample_filter_hint' : { 'number' : 259, 'flags' : { 'public' },", "183, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_ballot.txt', }, 'GL_ARB_shader_bit_encoding' : {", "'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', },", "'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', }, 'GL_SGIX_texture_lod_bias' : { 'number' : 84, 'flags' : { 'public' },", ": 'extensions/ARB/ARB_ES3_1_compatibility.txt', }, 'GL_ARB_ES3_2_compatibility' : { 'arbnumber' : 
176, 'flags' : { 'public'", "{ 'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_subtract.txt', }, 'GLX_EXT_buffer_age'", "}, 'GL_SGIS_sharpen_texture' : { 'number' : 22, 'flags' : { 'public' }, 'supporters'", "63, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_coord_conventions.txt', }, 'GL_ARB_fragment_layer_viewport' : {", ": { 'public' }, 'url' : 'extensions/NV/NV_robustness_video_memory_purge.txt', }, 'GL_NV_sRGB_formats' : { 'esnumber' :", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_gather_bias_lod.txt', }, 'GL_AMD_texture_texture4' : { 'number' :", "'extensions/QCOM/QCOM_framebuffer_foveated.txt', }, 'GL_QCOM_texture_foveated' : { 'esnumber' : 293, 'flags' : { 'public' },", ": { 'number' : 235, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/APPLE/APPLE_flush_buffer_range.txt', }, 'GL_APPLE_framebuffer_multisample' : { 'esnumber' : 78, 'flags' : { 'public'", "'url' : 'extensions/EXT/EXT_primitive_bounding_box.txt', }, 'GL_EXT_protected_textures' : { 'esnumber' : 256, 'flags' : {", ": { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize.txt', }, 'GLX_SGIX_video_resize_float' : { 'number' :", "176, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers_indexed.txt', }, 'GL_EXT_draw_elements_base_vertex' : {", "{ 'number' : 230, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'number' : 172, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI'", "'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Included with arbnumber 56, GLX_ARB_create_context.', }, 'GLX_ARB_create_context_robustness' : { 'arbnumber'", ": 'extensions/NV/GLX_NV_video_out.txt', }, 'GL_NV_viewport_array' : { 'esnumber' : 202, 'flags' : { 'public'", ": { 'esnumber' : 31, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil1.txt',", "'extensions/EXT/EXT_multi_draw_indirect.txt', }, 'GL_EXT_multiple_textures' : { 'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_multiple_textures.txt',", "'public' }, 
'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5.txt', }, 'GL_NV_gpu_program5_mem_extended' :", "}, 'GL_OES_texture_buffer' : { 'esnumber' : 216, 'flags' : { 'public' }, 'url'", ": 16, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_clip_band_hint.txt', }, 'GLX_SGIS_color_range' : {", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_border_clamp.txt', }, 'GL_ARB_texture_buffer_object' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_tessellation_program5.txt', }, 'GL_NV_texgen_emboss' : { 'number' : 193,", "127, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', }, 'GL_ARB_viewport_array' : { 'arbnumber' : 100, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_tbuffer.txt', }, 'GL_3DFX_texture_compression_FXT1'", ": 'extensions/EXT/EXT_draw_buffers_indexed.txt', }, 'GL_EXT_draw_elements_base_vertex' : { 'esnumber' : 204, 'flags' : { 'public'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize_float.txt', }, 'GLX_SGIX_video_source' : {", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_row_bytes.txt', }, 'GL_APPLE_specular_vector' : { 'number' :", ": 'extensions/APPLE/APPLE_row_bytes.txt', }, 'GL_APPLE_specular_vector' : { 'number' : 159, 'flags' : { 'public'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object.txt', }, 'GL_ARB_texture_buffer_object_rgb32' : {", "'url' : 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', }, 'GL_ARB_texture_buffer_range' : { 'arbnumber' : 139, 'flags' : {", "'arbnumber' : 49, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_latc.txt', }, 'GL_EXT_texture_compression_rgtc' : { 'number' : 332,", "{ 'esnumber' : 291, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', 
},", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_reflection.txt', }, 'GL_NV_texture_array' : {", "'flags' : { 'public' }, 'supporters' : { 'ES', 'INGR', 'SGI' }, 'url'", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_compressed_paletted_texture.txt', },", "'arbnumber' : 147, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_bind.txt', }, 'GL_ARB_multi_draw_indirect'", ": { 'number' : 132, 'flags' : { 'incomplete', 'public' }, 'supporters' :", ": 'extensions/SGIX/SGIX_subdiv_patch.txt', }, 'GL_SGIX_subsample' : { 'number' : 202, 'flags' : { 'incomplete'", "'url' : 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', }, 'GL_IMG_bindless_texture' : { 'esnumber' : 270, 'flags' : {", "'GL_ANGLE_texture_compression_dxt1', 'GL_ANGLE_texture_compression_dxt5' }, }, 'GL_ANGLE_texture_usage' : { 'esnumber' : 112, 'flags' : {", "'GL_EXT_texture_object' : { 'number' : 20, 'flags' : { 'public' }, 'supporters' :", "275, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "'url' : 'extensions/NV/NV_framebuffer_multisample.txt', }, 'GL_NV_framebuffer_multisample_coverage' : { 'number' : 336, 'flags' : {", "{ 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_transform_feedback.txt', },", "'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_draw_range_elements.txt', }, 'GL_EXT_draw_transform_feedback' :", "{ 'SGI' }, 'url' : 'extensions/SGIS/SGIS_sharpen_texture.txt', }, 'GL_SGIS_texture4D' : { 'number' : 16,", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_clipmap.txt', }, 'GL_SGIX_color_matrix_accuracy' : { 'flags' : {", ": { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_texture.txt', }, 'GL_EXT_instanced_arrays' : { 'esnumber'", "}, 'GL_EXT_pixel_transform_color_table' : { 'number' : 139, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 243, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_depth_to_color.txt', },", "}, 'GL_EXT_shadow_samplers' : { 'esnumber' : 102, 
'flags' : { 'public' }, 'url'", "'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_blackhole_render.txt', }, 'GL_INTEL_parallel_arrays' : { 'number'", ": 110, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', }, 'GL_ARB_compute_shader' :", "65, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_layers.txt', }, 'GL_SGIX_fog_offset' : { 'number' : 65,", "}, 'url' : 'extensions/NV/NV_texgen_emboss.txt', }, 'GL_NV_texgen_reflection' : { 'number' : 179, 'flags' :", ": { 'number' : 467, 'esnumber' : 229, 'flags' : { 'public' },", "{ 'arbnumber' : 33, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'url' : 'extensions/NV/NV_blend_equation_advanced.txt', 'alias' : { 'GL_NV_blend_equation_advanced_coherent' }, }, 'GL_NV_blend_minmax_factor' : { 'number'", "'url' : 'extensions/NV/NV_vertex_program3.txt', }, 'GL_NV_vertex_program4' : { 'number' : 325, 'flags' : {", "'supporters' : { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_sync_control.txt', }, 'GL_OVR_multiview' : { 'number'", "}, 'url' : 'extensions/EXT/EXT_shader_io_blocks.txt', }, 'GL_EXT_shader_non_constant_global_initializers' : { 'esnumber' : 264, 'flags' :", "'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_pixmap_colormap.txt', }, 'GL_MESA_program_binary_formats' : { 'number' : 516, 'esnumber'", "'url' : 'extensions/MESAX/MESAX_texture_stack.txt', }, 'GLX_MESA_agp_offset' : { 'number' : 308, 'flags' : {", "467, 'esnumber' : 229, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_coverage_to_color.txt', },", "'GL_APPLE_texture_packed_float' : { 'esnumber' : 195, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_stencil_export.txt', },", "'GL_ARB_matrix_palette' : { 'arbnumber' : 16, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 299, 'flags' : { 'public' }, 'supporters' : {", "{ 'number' : 315, 'flags' : { 'public' }, 'supporters' 
: { 'NVIDIA'", "}, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform_color_table.txt', }, 'GL_EXT_point_parameters' :", ": 491, 'esnumber' : 265, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/GLX_ARB_get_proc_address.txt',", "'flags' : { 'obsolete' }, 'url' : 'extensions/SGI/GLX_SGI_transparent_pixel.txt', }, 'GLX_SGI_video_sync' : { 'number'", ": { 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/WIN/WIN_specular_fog.txt', },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_timer_query.txt', }, 'GL_EXT_transform_feedback' : {", "'number' : 75, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', }, 'GL_MESAX_texture_stack' : { 'number' : 318, 'flags'", "}, 'url' : 'extensions/OES/OES_texture_stencil8.txt', }, 'GL_OES_texture_storage_multisample_2d_array' : { 'esnumber' : 174, 'flags' :", ": 514, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", "'public' }, 'url' : 'extensions/QCOM/QCOM_binning_control.txt', }, 'GL_QCOM_driver_control' : { 'esnumber' : 55, 'flags'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop.txt', }, 'WGL_NV_DX_interop2' :", "333, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'number' : 223, 'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_static_data.txt', }, 'GL_IBM_texture_mirrored_repeat'", ": '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', }, 'GL_OES_blend_equation_separate' : { 'esnumber' : 1, 'flags' : { 'public'", ": { 'number' : 415, 'flags' : { 'public' }, 'supporters' : {", "135, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' :", "}, 'url' : 'extensions/NV/NV_draw_texture.txt', }, 'GL_NV_draw_vulkan_image' : { 'number' : 501, 'esnumber' :", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_program_evaluators.txt', }, 'GL_APPLE_ycbcr_422' : 
{", ": { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_delay_before_swap.txt', }, 'WGL_NV_gpu_affinity' : { 'number' :", "'GLX_NV_copy_buffer' : { 'number' : 457, 'flags' : { 'public' }, 'supporters' :", ": 80, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_occlusion_query2.txt', }, 'GL_ARB_parallel_shader_compile' :", ": 'extensions/IBM/IBM_cull_vertex.txt', }, 'GL_IBM_multimode_draw_arrays' : { 'number' : 200, 'flags' : { 'public'", "'url' : 'extensions/AMD/AMD_draw_buffers_blend.txt', }, 'GL_AMD_framebuffer_sample_positions' : { 'number' : 454, 'flags' : {", "}, 'GL_NV_fragment_shader_interlock' : { 'number' : 468, 'esnumber' : 230, 'flags' : {", "}, 'GL_NV_texture_multisample' : { 'number' : 403, 'flags' : { 'public' }, 'supporters'", "'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_color.txt', }, 'GL_EXT_blend_equation_separate' : { 'number' :", "{ 'arbnumber' : 138, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_stencil_texturing.txt', },", ": { 'number' : 63, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN' },", ": { 'esnumber' : 181, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_tessellation_shader.txt',", "'GL_OES_sample_shading' : { 'esnumber' : 169, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture.txt', }, 'GL_ARB_sparse_texture2' : { 'arbnumber' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage2.txt', }, 'GL_EXT_shader_texture_lod' : { 'esnumber' :", "{ 'number' : 134, 'flags' : { 'incomplete', 'public' }, 'supporters' : {", ": 20, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map.txt', }, 'GL_OES_texture_cube_map_array' :", "}, 'url' : 'extensions/SGIX/SGIX_sprite.txt', }, 'GL_SGIX_subdiv_patch' : { 'flags' : { 'incomplete' },", ": { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_buffer_region.txt', }, 'WGL_ARB_create_context' : { 'arbnumber' :", "'url' : 
'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Alias to GLX_ARB_create_context_profile not needed - see arbnumber", ": 436, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_shader_implicit_conversions.txt', }, 'GL_EXT_shader_integer_mix' : { 'number' : 437,", "{ 'esnumber' : 15, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_sprite.txt', },", "'alias' : { 'WGL_ARB_robustness_share_group_isolation' }, }, 'WGL_ATI_pixel_format_float' : { 'number' : 278, 'flags'", "{ 'number' : 135, 'flags' : { 'public' }, 'supporters' : { 'INTEL'", "}, 'url' : 'extensions/NV/NV_framebuffer_multisample_coverage.txt', }, 'GL_NV_generate_mipmap_sRGB' : { 'esnumber' : 144, 'flags' :", "{ 'number' : 85, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_window_pos.txt',", "'GL_OES_texture_view' : { 'esnumber' : 218, 'flags' : { 'public' }, 'url' :", ": 'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', }, 'GL_AMD_shader_atomic_counter_ops' : { 'number' : 435, 'flags' : { 'public'", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_tessellator.txt', }, 'GL_AMD_vertex_shader_viewport_index' : { 'number'", "'flags' : { 'public' }, 'supporters' : { '3DFX', '3DL', 'SGI' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', }, 'GL_EXT_shader_pixel_local_storage' : { 'esnumber'", "'GL_MESA_pack_invert' : { 'number' : 300, 'flags' : { 'public' }, 'supporters' :", ": 516, 'esnumber' : 294, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/SGIS/SGIS_sharpen_texture.txt', }, 'GL_SGIS_texture4D' : { 'number' : 16, 'flags' :", "'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader5.txt', }, 'GL_ARB_gpu_shader_fp64' : { 'arbnumber' : 89, 'flags'", "'HP', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_convolution.txt', }, 'GL_EXT_coordinate_frame' : { 
'number'", ": { 'esnumber' : 71, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_array_object.txt',", "}, 'GL_SGIX_texture_lod_bias' : { 'number' : 84, 'flags' : { 'public' }, 'supporters'", "'extensions/APPLE/APPLE_sync.txt', }, 'GL_APPLE_texture_2D_limited_npot' : { 'esnumber' : 59, 'flags' : { 'public' },", "'GL_ARB_depth_clamp' : { 'arbnumber' : 61, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_layer.txt', }, 'GL_AMD_vertex_shader_tessellator'", "'extensions/NV/NV_shader_storage_buffer_object.txt', }, 'GL_NV_shader_thread_group' : { 'number' : 447, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback2.txt',", "}, 'url' : 'extensions/NV/NV_transform_feedback2.txt', }, 'GL_NV_uniform_buffer_unified_memory' : { 'number' : 459, 'flags' :", "}, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Alias to GLX_ARB_create_context_profile not needed - see", "}, 'url' : 'extensions/EXT/WGL_EXT_depth_float.txt', }, 'WGL_EXT_display_color_table' : { 'number' : 167, 'flags' :", "'SGI' }, 'url' : 'extensions/EXT/EXT_paletted_texture.txt', }, 'GL_EXT_pixel_buffer_object' : { 'number' : 302, 'flags'", "{ 'number' : 341, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'esnumber' : 255, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_framebuffer_downsample.txt', }, 'GL_IMG_multisampled_render_to_texture'", ": 83, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_blit.txt', }, 'GL_ANGLE_framebuffer_multisample' :", "'url' : 'extensions/SGI/SGI_color_matrix.txt', }, 'GL_SGI_color_table' : { 'number' : 14, 'flags' : {", "'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_dxt1.txt', }, 'GL_EXT_texture_compression_latc'", ": { 'esnumber' : 289, 'flags' : { 'public' }, 'supporters' : {", ": 400, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' 
}, 'url'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_spirv_extensions.txt', }, 'GL_ARB_stencil_texturing' :", "'GL_EXT_draw_transform_feedback' : { 'esnumber' : 272, 'flags' : { 'public' }, 'url' :", "450, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_transform_feedback4.txt', }, 'GL_AMD_vertex_shader_layer' : {", "'extensions/INTEL/INTEL_map_texture.txt', }, 'GL_INTEL_blackhole_render' : { 'number' : 521, 'esnumber' : 300, 'flags' :", "132, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_pack_subimage.txt', }, 'GL_NV_packed_depth_stencil' : {", "'extensions/ARB/ARB_texture_rgb10_a2ui.txt', }, 'GL_ARB_texture_stencil8' : { 'arbnumber' : 150, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_buffers2.txt',", "'extensions/SGIX/SGIX_interlace.txt', }, 'GL_SGIX_ir_instrument1' : { 'number' : 81, 'flags' : { 'public' },", "}, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_query_matrix.txt', }, 'GL_OES_read_format' : {", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/WGL_ATI_pixel_format_float.txt',", "'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_usage.txt', }, 'GL_WIN_phong_shading' : { 'number' : 113, 'flags'", "'GL_EXT_polygon_offset_clamp' : { 'number' : 460, 'esnumber' : 252, 'flags' : { 'public'", "{ 'arbnumber' : 39, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_compute_variable_group_size.txt', }, 'GL_ARB_conditional_render_inverted' : { 'arbnumber' :", ": { 'public' }, 'supporters' : { 'IBM', 'INGR', 'KGC', 'SGI' }, 'url'", "'GL_ARM_shader_framebuffer_fetch' : { 'esnumber' : 165, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_clear_texture.txt', }, 'GL_ARB_clip_control' : { 'arbnumber' :", "}, 'url' : 'extensions/APPLE/APPLE_texture_range.txt', }, 'GL_APPLE_transform_hint' : { 'number' : 160, 'flags' :", "{ 'number' : 309, 'esnumber' : 49, 'flags' 
: { 'public' }, 'supporters'", "66, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SUN' }, 'url'", ": 'extensions/EXT/EXT_texture_object.txt', }, 'GL_EXT_texture_perturb_normal' : { 'number' : 147, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_filter_minmax.txt', }, 'GL_ARB_texture_float' : { 'arbnumber' :", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_tag_sample_buffer.txt',", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_env.txt', }, 'GL_EXT_texture_env_add' : { 'number'", "'GL_SGIX_line_quality_hint' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_line_quality_hint.txt', }, 'GL_SGIX_list_priority'", ": { 'public' }, 'url' : 'extensions/OES/OES_shader_io_blocks.txt', }, 'GL_OES_shader_multisample_interpolation' : { 'esnumber' :", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_gather_bias_lod.txt', }, 'GL_AMD_texture_texture4' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_window_pos.txt', }, 'GL_ARM_mali_program_binary' : { 'esnumber'", "'GL_EXT_shared_texture_palette' : { 'number' : 141, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/EXT/EXT_texture_perturb_normal.txt', }, 'GL_EXT_texture_rg' : { 'esnumber' : 103, 'flags' : { 'public'", ": 'extensions/NV/NV_non_square_matrices.txt', }, 'GL_NV_occlusion_query' : { 'number' : 261, 'flags' : { 'public'", ": 'extensions/OES/OES_draw_texture.txt', }, 'GL_OES_element_index_uint' : { 'esnumber' : 26, 'flags' : { 'public'", ": 'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', }, 'GL_ARB_texture_mirrored_repeat' : { 'arbnumber' : 21, 'flags' : { 'public'", ": 'extensions/ARB/ARB_gpu_shader5.txt', }, 'GL_ARB_gpu_shader_fp64' : { 'arbnumber' : 89, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_clamp.txt',", ": 'extensions/ATI/ATI_element_array.txt', }, 'GL_ATI_envmap_bumpmap' : { 'number' : 244, 'flags' : { 
'public'", ": { 'incomplete', 'public' }, 'url' : 'extensions/SGIS/SGIS_texture_color_mask.txt', }, 'GL_SGIS_texture_edge_clamp' : { 'number'", "}, 'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/WGL_EXT_depth_float.txt', }, 'WGL_EXT_display_color_table' : {", "'public' }, 'url' : 'extensions/DMP/DMP_program_binary.txt', }, 'GL_DMP_shader_binary' : { 'esnumber' : 88, 'flags'", "'SUN' }, 'url' : 'extensions/EXT/EXT_rescale_normal.txt', }, 'GL_EXT_robustness' : { 'esnumber' : 107, 'flags'", "64, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_discard_framebuffer.txt', }, 'GL_EXT_disjoint_timer_query' : {", ": 466, 'esnumber' : 232, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fill_rectangle.txt',", ": { 'public' }, 'url' : 'extensions/ARB/ARB_timer_query.txt', }, 'GL_ARB_transform_feedback2' : { 'arbnumber' :", "'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_material.txt', }, 'GL_EXT_index_texture'", "'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/AMD/AMD_draw_buffers_blend.txt', }, 'GL_AMD_framebuffer_sample_positions' : {", "'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_vertex_array_lists.txt', }, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' : { 'flags'", "'extensions/EXT/EXT_primitive_bounding_box.txt', }, 'GL_EXT_protected_textures' : { 'esnumber' : 256, 'flags' : { 'public' },", "93, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture2.txt', }, 'GL_EXT_static_vertex_array' : { 'flags'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int16.txt', }, 'GL_AMD_gpu_shader_int64' : { 'number' :", "'alias' : { 'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float' }, }, 'GL_EXT_packed_pixels' : { 'number' : 23,", "}, 'GL_OES_blend_func_separate' : { 'esnumber' : 2, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/NV/NV_framebuffer_multisample_coverage.txt', }, 'GL_NV_generate_mipmap_sRGB' : 
{ 'esnumber' : 144, 'flags' : {", "'number' : 380, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_pn_triangles.txt', }, 'GL_ATI_separate_stencil' : { 'number' :", "'supporters' : { 'MS', 'SGI' }, 'url' : 'extensions/EXT/EXT_paletted_texture.txt', }, 'GL_EXT_pixel_buffer_object' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_bindless_texture.txt', }, 'GL_ARB_blend_func_extended' : { 'arbnumber' :", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_datapipe.txt', }, 'GL_SGIX_decimation' : { 'number' : 125,", "}, 'url' : 'extensions/SGIX/SGIX_fog_scale.txt', }, 'GL_SGIX_fog_texture' : { 'flags' : { 'public' },", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_framezoom.txt', }, 'GLX_SGIX_hyperpipe' :", "'number' : 455, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_QCOM_writeonly_rendering' : { 'esnumber' : 61, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/GLX_AMD_gpu_association.txt', }, 'GL_AMD_gpu_shader_half_float' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_explicit_multisample.txt', }, 'GL_NV_fbo_color_attachments' : { 'esnumber'", "{ 'number' : 186, 'esnumber' : 60, 'flags' : { 'public' }, 'supporters'", "'GL_ARB_vertex_buffer_object' : { 'arbnumber' : 28, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/NV/WGL_NV_delay_before_swap.txt', }, 'WGL_NV_gpu_affinity' : { 'number' : 355, 'flags' : {", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_row_bytes.txt', }, 'GL_APPLE_specular_vector' : {", ": 'extensions/PGI/PGI_misc_hints.txt', }, 'GL_PGI_vertex_hints' : { 'number' : 76, 'flags' : { 'public'", "521, 'esnumber' : 300, 'flags' : { 'public' }, 'supporters' : { 'INTEL'", "'public' }, 'url' : 'extensions/QCOM/QCOM_framebuffer_foveated.txt', }, 'GL_QCOM_texture_foveated' : { 'esnumber' : 293, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_half_float.txt', }, 
'GL_EXT_color_subtable' : { 'number'", "'extensions/EXT/EXT_geometry_shader4.txt', }, 'GLX_EXT_stereo_tree' : { 'number' : 452, 'flags' : { 'public' },", ": 'extensions/INTEL/INTEL_conservative_rasterization.txt', }, 'GL_INTEL_fragment_shader_ordering' : { 'number' : 441, 'flags' : { 'public'", "'url' : 'extensions/EXT/EXT_color_buffer_float.txt', }, 'GL_EXT_color_buffer_half_float' : { 'esnumber' : 97, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', }, 'GL_REND_screen_coordinates' : { 'number'", ": { 'esnumber' : 207, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_norm16.txt',", "{ 'public' }, 'url' : 'extensions/EXT/EXT_multi_draw_indirect.txt', }, 'GL_EXT_multiple_textures' : { 'flags' : {", "{ 'arbnumber' : 37, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'extensions/EXT/EXT_render_snorm.txt', }, 'GL_EXT_rescale_normal' : { 'number' : 27, 'flags' : { 'public' },", "'GLX_EXT_libglvnd' : { 'number' : 482, 'flags' : { 'public' }, 'url' :", ": { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_parallel_arrays.txt', }, 'GL_INTEL_performance_query' : { 'number' :", ": { 'MESA' }, 'url' : 'extensions/MESA/MESA_shader_integer_functions.txt', }, 'GLX_MESA_swap_control' : { 'number' :", "'arbnumber' : 88, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader5.txt', }, 'GL_ARB_gpu_shader_fp64'", ": { 'arbnumber' : 42, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 359, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', }, 'GL_IGLOO_toggle_color_and_lightXXX' : { 'flags' : { 'incomplete', 'obsolete' },", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_conservative_depth.txt',", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_vulkan_image.txt', }, 'GL_NV_evaluators' : { 'number'", "'number' : 134, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI'", "'url' : 
'extensions/EXT/EXT_separate_specular_color.txt', }, 'GL_EXT_shader_framebuffer_fetch' : { 'number' : 520, 'esnumber' : 122,", "{ 'number' : 245, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "'comments' : 'Extension shipped but was not fully specified. Similar to ARB_texture_cube_map.', },", "{ 'number' : 146, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_env.txt', },", "'number' : 340, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'esnumber' : 53, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_read_format.txt',", "}, 'GL_QCOM_tiled_rendering' : { 'esnumber' : 70, 'flags' : { 'public' }, 'supporters'", "}, 'GL_ARB_texture_storage_multisample' : { 'arbnumber' : 141, 'flags' : { 'public' }, 'url'", "{ 'number' : 403, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "415, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', }, 'GL_ARB_compute_shader' : { 'arbnumber' : 122, 'flags' : { 'public' },", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nurbs_eval.txt', }, 'GL_SGIX_occlusion_instrument' : { 'number' : 151, 'flags'", "'extensions/ARB/ARB_point_parameters.txt', }, 'GL_ARB_point_sprite' : { 'arbnumber' : 35, 'flags' : { 'public' },", "}, 'url' : 'extensions/NV/NV_image_formats.txt', }, 'GL_NV_instanced_arrays' : { 'esnumber' : 145, 'flags' :", "'number' : 209, 'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_multisample.txt', 'alias' :", ": { 'number' : 129, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/NV/NV_alpha_to_coverage_dither_control.txt', }, 'GL_NV_bgr' : { 'esnumber' : 135, 'flags'", ": 214, 'flags' : { 'incomplete', 'public' }, 'url' : 'extensions/SGIS/SGIS_texture_color_mask.txt', }, 'GL_SGIS_texture_edge_clamp'", "}, 'GL_EXT_shader_image_load_store' : { 'number' : 386, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/ATI/ATI_texture_env_combine3.txt', }, 'GL_ATI_texture_float' : { 'number' : 
280, 'flags' :", "'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_shader_binary.txt', }, 'GL_NVX_blend_equation_advanced_multi_draw_buffers' : { 'number' : 492,", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_decimation.txt', }, 'GL_SGIX_depth_pass_instrument' : { 'number'", ": 164, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url'", ": 'extensions/SGIX/SGIX_bali_r_instruments.txt', }, 'GL_SGIX_bali_timer_instruments' : { 'flags' : { 'incomplete' }, 'url' :", "}, 'GL_ARB_sparse_texture' : { 'arbnumber' : 158, 'flags' : { 'public' }, 'url'", "'GL_EXT_framebuffer_multisample' : { 'number' : 317, 'flags' : { 'public' }, 'url' :", "{ 'number' : 337, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "ARB_transform_feedback2.', }, 'GL_EXT_unpack_subimage' : { 'esnumber' : 90, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shadow_funcs.txt', },", "'alias' : { 'GLX_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_robustness_isolation' : { 'arbnumber' : 126, 'flags'", ": { 'esnumber' : 155, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_pvrtc_sRGB.txt',", "'flags' : { 'public' }, 'supporters' : { 'ES', 'HP', 'IBM', 'SGI', 'SUN'", ": 76, 'flags' : { 'public' }, 'supporters' : { 'TGS' }, 'url'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_float.txt', }, 'GL_NV_shader_atomic_float64' : { 'number' : 488, 'flags'", ": { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_instanced_arrays.txt', }, 'GL_ANGLE_pack_reverse_row_order' : { 'esnumber' :", "'url' : 'extensions/ARB/ARB_shader_atomic_counters.txt', }, 'GL_ARB_shader_ballot' : { 'arbnumber' : 183, 'flags' : {", "'GL_EXT_draw_elements_base_vertex' : { 'esnumber' : 204, 'flags' : { 'public' }, 'url' :", "197, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "}, 'GL_EXT_blend_logic_op' : { 'number' : 39, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/NV/WGL_NV_DX_interop2.txt', },", "}, 'url' : 'extensions/EXT/EXT_shader_image_load_formatted.txt', }, 'GL_EXT_shader_image_load_store' : { 'number' : 386, 'flags' :", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_lod.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program2.txt', }, 'GL_NV_fragment_program4'", ": { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context_robustness.txt', }, 'WGL_ARB_extensions_string' : { 'arbnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_float_linear.txt', 'alias' : { 'GL_OES_texture_half_float_linear' },", "'NVIDIA' }, 'url' : 'extensions/AMD/AMD_name_gen_delete.txt', }, 'GL_AMD_occlusion_query_event' : { 'number' : 442, 'flags'", ": 418, 'esnumber' : 197, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'url' : 'extensions/OES/OES_fbo_render_mipmap.txt', }, 'GL_OES_fixed_point' : { 'number' : 292,", "'GL_SGIX_framezoom' : { 'number' : 57, 'flags' : { 'public' }, 'supporters' :", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_image_load_store_lod.txt', }, 'GL_AMD_shader_stencil_export' : { 'number' : 382,", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_enhanced_layouts.txt', }, 'GL_ARB_explicit_attrib_location' : { 'arbnumber'", "{ 'arbnumber' : 142, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias'", "'flags' : { 'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_texture_compression_FXT1.txt',", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_program_evaluators.txt', }, 'GL_APPLE_ycbcr_422' : { 'number' :", "'GL_EXT_draw_instanced' : { 'number' : 327, 'esnumber' : 157, 'flags' : { 'public'", "'GL_EXT_subtexture' : { 'number' : 9, 'flags' : { 'public' }, 'supporters' :", "{ 'arbnumber' : 101, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context_robustness.txt', },", "}, 'url' : 
'extensions/ARB/ARB_texture_float.txt', }, 'GL_ARB_texture_gather' : { 'arbnumber' : 72, 'flags' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_fragment_lighting.txt', }, 'GL_EXT_framebuffer_blit' : { 'number'", ": { 'public' }, 'url' : 'extensions/OES/OES_required_internalformat.txt', }, 'GL_OES_rgb8_rgba8' : { 'esnumber' :", "'3DL' }, 'url' : 'extensions/3DL/WGL_3DL_stereo_control.txt', }, 'WGL_AMD_gpu_association' : { 'number' : 361, 'flags'", "{ 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_make_current_read.txt', }, 'WGL_EXT_multisample' : { 'number' :", ": 'extensions/NV/WGL_NV_gpu_affinity.txt', }, 'WGL_NV_render_depth_texture' : { 'number' : 263, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_framebuffer_multisample_coverage.txt', },", "'GL_NV_shader_thread_shuffle' : { 'number' : 448, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/AMD/AMD_shader_atomic_counter_ops.txt', }, 'GL_AMD_shader_ballot' : { 'number' : 497, 'flags' :", "'url' : 'extensions/ARB/ARB_viewport_array.txt', }, 'GL_ARB_window_pos' : { 'arbnumber' : 25, 'flags' : {", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_convolution_accuracy.txt', }, 'GL_SGIX_cube_map' : {", ": 'extensions/ARB/ARB_shadow_ambient.txt', }, 'GL_ARB_sparse_buffer' : { 'arbnumber' : 172, 'flags' : { 'public'", "'arbnumber' : 152, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_bindless_texture.txt', }, 'GL_ARB_blend_func_extended'", "'number' : 326, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'extensions/SGIX/SGIX_pixel_tiles.txt', }, 'GL_SGIX_polynomial_ffd' : { 'number' : 59, 'flags' : { 'incomplete' },", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_variables.txt', }, 'GL_OES_shader_image_atomic' : { 'esnumber'", "'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_depth_texture.txt', }, 'WGL_NV_render_texture_rectangle' : { 'number' : 264, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 
'url' : 'extensions/NV/NV_primitive_restart.txt',", "{ 'esnumber' : 42, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', },", "'esnumber' : 205, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multi_draw_indirect.txt', }, 'GL_EXT_multiple_textures'", "}, 'url' : 'extensions/EXT/EXT_bgra.txt', }, 'GL_EXT_bindable_uniform' : { 'number' : 342, 'flags' :", ": { 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_generate_mipmap.txt',", "{ 'public' }, 'url' : 'extensions/ARB/ARB_ES2_compatibility.txt', }, 'GL_ARB_ES3_1_compatibility' : { 'arbnumber' : 159,", "135, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_bgr.txt', }, 'GL_NV_bindless_multi_draw_indirect' : {", "'arbnumber' : 10, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": 78, 'flags' : { 'public' }, 'supporters' : { 'MS', 'SGI' },", ": 67, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_multisample.txt', }, 'GL_ARB_texture_non_power_of_two' :", "'GL_ARB_polygon_offset_clamp' : { 'arbnumber' : 193, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_buffer.txt', }, 'GL_ARB_sparse_texture' : { 'arbnumber'", "'ATI' }, 'url' : 'extensions/ATI/ATI_texture_mirror_once.txt', }, 'GL_ATI_vertex_array_object' : { 'number' : 247, 'flags'", "'SUN' }, 'url' : 'extensions/EXT/EXT_histogram.txt', }, 'GLX_EXT_import_context' : { 'number' : 47, 'flags'", "{ 'number' : 430, 'esnumber' : 126, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_win32_keyed_mutex.txt', }, 'GL_EXT_window_rectangles' : { 'number' :", "6, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": { 'number' : 97, 'flags' : { 'public' }, 'supporters' : {", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_blend_cmultiply.txt', }, 'GL_SGIX_calligraphic_fragment' :", "}, 'GL_APPLE_fence' : { 'number' : 272, 'flags' : { 'public' }, 'supporters'", ": { 'SGI' }, 'url' : 
'extensions/SGIX/SGIX_tag_sample_buffer.txt', }, 'GL_SGIX_texture_add_env' : { 'number' :", "}, 'GL_ATI_separate_stencil' : { 'number' : 289, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_half_float.txt', }, 'GL_NV_image_formats'", "'arbnumber' : 192, 'esnumber' : 288, 'flags' : { 'public' }, 'url' :", ": 64, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_discard_framebuffer.txt', }, 'GL_EXT_disjoint_timer_query' :", "'IBM' }, 'url' : 'extensions/IBM/IBM_rasterpos_clip.txt', }, 'GL_IBM_static_data' : { 'number' : 223, 'flags'", "'extensions/OES/OES_byte_coordinates.txt', }, 'GL_OES_compressed_ETC1_RGB8_texture' : { 'esnumber' : 5, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', }, 'GL_EXT_texture_view' : { 'esnumber'", "'url' : 'extensions/ARB/ARB_shadow_ambient.txt', }, 'GL_ARB_sparse_buffer' : { 'arbnumber' : 172, 'flags' : {", "'url' : 'extensions/NV/NV_shadow_samplers_array.txt', }, 'GL_NV_shadow_samplers_cube' : { 'esnumber' : 147, 'flags' : {", ": { 'arbnumber' : 75, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt',", "'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_interlace.txt',", "}, 'url' : 'extensions/ARB/ARB_seamless_cube_map.txt', }, 'GL_ARB_seamless_cubemap_per_texture' : { 'arbnumber' : 155, 'flags' :", ": { 'number' : 315, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Included with arbnumber 56,", "'number' : 299, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'url' : 'extensions/ARB/ARB_ES3_compatibility.txt', }, 'GL_ARB_arrays_of_arrays' : { 'arbnumber' : 120, 'flags' : {", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_color_buffer_float.txt', 'alias' : { 'GLX_ARB_fbconfig_float',", "'public' }, 'url' : 'extensions/ARB/ARB_compute_variable_group_size.txt', }, 
'GL_ARB_conditional_render_inverted' : { 'arbnumber' : 161, 'flags'", "}, 'url' : 'extensions/APPLE/APPLE_object_purgeable.txt', }, 'GL_APPLE_rgb_422' : { 'number' : 373, 'esnumber' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_attrib_integer_64bit.txt',", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_image_compression.txt', }, 'GL_SGIX_impact_pixel_texture' :", ": { 'obsolete' }, 'url' : 'extensions/SGI/GLX_SGI_transparent_pixel.txt', }, 'GLX_SGI_video_sync' : { 'number' :", ": 167, 'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_display_color_table.txt', }, 'WGL_EXT_extensions_string' :", ": 'extensions/ARB/ARB_shader_texture_image_samples.txt', }, 'GL_ARB_shader_texture_lod' : { 'arbnumber' : 60, 'flags' : { 'public'", "{ 'arbnumber' : 126, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias'", ": 'extensions/EXT/EXT_frag_depth.txt', }, 'GL_EXT_fragment_lighting' : { 'number' : 102, 'flags' : { 'public'", ": 456, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_colorspace.txt', }, 'WGL_EXT_create_context_es2_profile' : { 'number' :", "}, 'url' : 'extensions/OML/GLX_OML_swap_method.txt', }, 'GLX_OML_sync_control' : { 'number' : 238, 'flags' :", "'url' : 'extensions/EXT/EXT_YUV_target.txt', }, 'GL_EXT_abgr' : { 'number' : 1, 'flags' : {", "'GL_ARB_compute_variable_group_size' : { 'arbnumber' : 153, 'flags' : { 'public' }, 'url' :", "{ 'arbnumber' : 188, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_filter_minmax.txt', },", "{ 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', }, 'GL_IMG_bindless_texture' :", "}, 'url' : 'extensions/IMG/IMG_framebuffer_downsample.txt', }, 'GL_IMG_multisampled_render_to_texture' : { 'esnumber' : 74, 'flags' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 
'extensions/ARB/ARB_texture_env_crossbar.txt', }, 'GL_ARB_texture_env_dot3'", "504, 'esnumber' : 281, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_fd.txt', 'alias'", ": 158, 'flags' : { 'public' }, 'supporters' : { 'ATI', 'NVIDIA' },", "}, 'url' : 'extensions/NV/NV_fog_distance.txt', }, 'GL_NV_fragment_coverage_to_color' : { 'number' : 467, 'esnumber' :", "'public' }, 'url' : 'extensions/OES/OES_surfaceless_context.txt', }, 'GL_OES_tessellation_shader' : { 'esnumber' : 214, 'flags'", "'GL_S3_s3tc' : { 'number' : 276, 'flags' : { 'public' }, 'supporters' :", "'extensions/ARB/ARB_cull_distance.txt', }, 'GL_ARB_debug_output' : { 'arbnumber' : 104, 'flags' : { 'public' },", "51, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_BGRA8888.txt', }, 'GL_EXT_texture_format_sRGB_override' : {", "{ 'esnumber' : 184, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_cube_map_array.txt', },", "77, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_texture_lod.txt', }, 'GL_EXT_shadow_funcs' : {", ": 'extensions/EXT/EXT_clip_control.txt', }, 'WGL_I3D_digital_video_control' : { 'number' : 250, 'flags' : { 'public'", "'number' : 309, 'esnumber' : 49, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_env.txt', }, 'GL_EXT_texture_env_add' : { 'number' : 185,", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_ir_instrument1.txt', }, 'GL_SGIX_line_quality_hint' : { 'flags' : { 'incomplete'", ": 34, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'ARB' }, 'url' : 'extensions/ARB/ARB_map_buffer_range.txt', }, 'GL_ARB_matrix_palette' : { 'arbnumber' : 16, 'flags'", "'supporters' : { 'ANGLE' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', }, 'GL_EXT_texture_cube_map' : { 'flags'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias' : { 'GL_SGIX_vertex_preclip_hint' }, },", "112, 'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url' :", "'public' }, 
'url' : 'extensions/KHR/KHR_robustness.txt', }, 'GL_KHR_texture_compression_astc_hdr' : { 'arbnumber' : 118, 'esnumber'", "'extensions/SGIX/SGIX_texture_add_env.txt', }, 'GL_SGIX_texture_coordinate_clamp' : { 'number' : 235, 'flags' : { 'public' },", ": { 'esnumber' : 137, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_float.txt',", "'esnumber' : 280, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects.txt', 'alias' :", ": 'extensions/OES/OES_EGL_image_external_essl3.txt', }, 'GL_OES_EGL_sync' : { 'esnumber' : 75, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUNX/SUNX_constant_data.txt', }, 'GL_SUN_convolution_border_modes'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_viewport_layer_array.txt', }, 'GL_ARB_shading_language_100' : { 'arbnumber'", "'public' }, 'url' : 'extensions/NV/NV_instanced_arrays.txt', }, 'GL_NV_internalformat_sample_query' : { 'number' : 475, 'esnumber'", "'url' : 'extensions/ARB/ARB_timer_query.txt', }, 'GL_ARB_transform_feedback2' : { 'arbnumber' : 93, 'flags' : {", "'alias' : { 'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB' }, }, 'GL_ARB_geometry_shader4' : { 'arbnumber' : 47,", "382, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'url' : 'extensions/SGIX/SGIX_pixel_tiles.txt', }, 'GL_SGIX_polynomial_ffd' : { 'number' : 59, 'flags' : {", ": 150, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_stencil8.txt', }, 'GL_ARB_texture_storage' :", ": { 'number' : 517, 'esnumber' : 297, 'flags' : { 'public' },", "'number' : 269, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_external_buffer.txt', }, 'GL_EXT_EGL_image_array' : { 'esnumber' :", ": { 'public' }, 'url' : 'extensions/AMD/AMD_framebuffer_sample_positions.txt', }, 'GL_AMD_gcn_shader' : { 'number' :", ": 'extensions/AMD/AMD_shader_stencil_export.txt', }, 'GL_AMD_shader_stencil_value_export' : { 'number' : 444, 
'flags' : { 'public'", "{ 'arbnumber' : 112, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query.txt', },", "{ 'public' }, 'url' : 'extensions/AMD/AMD_compressed_ATC_texture.txt', }, 'GL_AMD_conservative_depth' : { 'number' : 385,", ": { 'public' }, 'url' : 'extensions/KHR/KHR_context_flush_control.txt', 'alias' : { 'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control' },", ": 'extensions/OES/OES_depth32.txt', }, 'GL_OES_depth_texture' : { 'esnumber' : 43, 'flags' : { 'public'", "'number' : 229, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_rgb_422.txt', }, 'GL_APPLE_row_bytes' :", "'public' }, 'url' : 'extensions/NV/NV_shader_thread_shuffle.txt', }, 'GL_NV_shadow_samplers_array' : { 'esnumber' : 146, 'flags'", "'GL_OES_paletted_texture' : { 'esnumber' : 13, 'flags' : { 'incomplete', 'private' }, 'comments'", "'url' : 'extensions/ARB/WGL_ARB_render_texture.txt', }, 'WGL_ARB_robustness_application_isolation' : { 'arbnumber' : 143, 'flags' : {", ": { 'MESA' }, 'url' : 'extensions/EXT/EXT_clip_control.txt', }, 'WGL_I3D_digital_video_control' : { 'number' :", "'GL_EXT_EGL_image_storage' : { 'number' : 522, 'esnumber' : 301, 'flags' : { 'public'", "494, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_env_combine3.txt',", ": { 'number' : 213, 'flags' : { 'public' }, 'supporters' : {", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_pixel.txt', }, 'GL_ARB_half_float_vertex' : { 'arbnumber' : 48,", "'public' }, 'url' : 'extensions/NV/NV_fragment_coverage_to_color.txt', }, 'GL_NV_fragment_program' : { 'number' : 282, 'flags'", "'extensions/AMD/AMD_gpu_shader_half_float.txt', }, 'GL_AMD_gpu_shader_half_float_fetch' : { 'number' : 519, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_latc.txt', }, 
'GL_NV_texture_compression_s3tc' : { 'esnumber'", "{ 'public' }, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI' }, 'url' :", ": 387, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "185, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_viewport_layer_array.txt', }, 'GL_ARB_shading_language_100' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_pixel_data_range.txt', }, 'GL_NV_platform_binary' :", "'number' : 352, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA'", "'GL_EXT_texture_filter_minmax' : { 'number' : 464, 'esnumber' : 227, 'flags' : { 'public'", "481, 'esnumber' : 246, 'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_framebuffer_CMAA.txt', },", "{ 'number' : 82, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", ": { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pbuffer.txt', }, 'WGL_EXT_pixel_format' : { 'number'", "}, 'GL_ARB_shader_precision' : { 'arbnumber' : 98, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Alias to GLX_ARB_create_context_profile not", "158, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture.txt', }, 'GL_ARB_sparse_texture2' : {", "'url' : 'extensions/ARB/ARB_compute_shader.txt', }, 'GL_ARB_compute_variable_group_size' : { 'arbnumber' : 153, 'flags' : {", "{ 'ES', 'INGR', 'SGI' }, 'url' : 'extensions/EXT/EXT_packed_pixels.txt', }, 'GL_EXT_paletted_texture' : { 'number'", "'comments' : 'Partial HP support.', }, 'GL_SGI_complex' : { 'number' : 87, 'flags'", "128, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc.txt', }, 'GL_NV_texture_compression_s3tc_update' : {", "'esnumber' : 23, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image.txt', }, 'GL_OES_EGL_image_external'", "'GL_ARB_transform_feedback_instanced' : { 'arbnumber' : 109, 'flags' : { 'public' }, 'url' :", "'number' : 18, 'flags' : { 'public' }, 'supporters' : { 'ES', 'SGI'", 
"'GL_SGIS_multisample' : { 'number' : 25, 'flags' : { 'public' }, 'supporters' :", "'GL_EXT_packed_depth_stencil' : { 'number' : 312, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/KHR/KHR_no_error.txt', }, 'GL_KHR_parallel_shader_compile' : { 'arbnumber' : 192, 'esnumber' : 288,", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_image_load_store_lod.txt', }, 'GL_AMD_shader_stencil_export'", "'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_color_buffer_packed_float.txt', }, 'GL_APPLE_copy_texture_levels' : { 'esnumber'", ": { 'esnumber' : 127, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_packed_float.txt',", "}, 'GL_EXT_geometry_shader' : { 'esnumber' : 177, 'flags' : { 'public' }, 'url'", "'alias' : { 'GL_OES_texture_half_float' }, }, 'GL_OES_texture_float_linear' : { 'esnumber' : 35, 'flags'", "}, 'url' : 'extensions/NV/NV_path_rendering.txt', }, 'GL_NV_path_rendering_shared_edge' : { 'number' : 471, 'esnumber' :", "'GL_IGLOO_viewport_offsetXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', },", "'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop.txt', }, 'WGL_NV_DX_interop2' : { 'number' : 412, 'flags'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_light_max_exponent.txt', }, 'GL_NV_multisample_coverage' : { 'number'", ": 'extensions/ARB/ARB_transform_feedback2.txt', }, 'GL_ARB_transform_feedback3' : { 'arbnumber' : 94, 'flags' : { 'public'", "}, 'GL_EXT_gpu_shader5' : { 'esnumber' : 178, 'flags' : { 'public' }, 'url'", "{ 'arbnumber' : 89, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_fp64.txt', },", ": { 'number' : 422, 'flags' : { 'public' }, 'supporters' : {", "291, 'esnumber' : 4, 'flags' : { 'public' }, 'supporters' : { 'KHR'", "}, 'url' : 'extensions/MESA/MESA_ycbcr_texture.txt', }, 'GL_MTK_program_binary' : { 'esnumber' : 245, 'flags' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader.txt', }, 
'GL_NV_texture_shader2' : { 'number'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_combine.txt', }, 'GL_ARB_texture_env_crossbar' : {", "'url' : 'extensions/SGIX/SGIX_depth_texture.txt', }, 'GLX_SGIX_dm_buffer' : { 'number' : 86, 'flags' : {", "{ 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_global_alpha.txt', }, 'GL_SUN_mesh_array'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_coverage.txt', }, 'GL_NV_multisample_filter_hint' : {", ": 435, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", ": { 'arbnumber' : 35, 'flags' : { 'public' }, 'supporters' : {", "'esnumber' : 176, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers_indexed.txt', }, 'GL_EXT_draw_elements_base_vertex'", "'url' : 'extensions/NV/NV_point_sprite.txt', }, 'GL_NV_polygon_mode' : { 'esnumber' : 238, 'flags' : {", ": { 'number' : 237, 'flags' : { 'public' }, 'supporters' : {", "225, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_post_depth_coverage.txt', }, 'GL_EXT_primitive_bounding_box' : {", ": 'extensions/SGIX/SGIX_cube_map.txt', }, 'GL_SGIX_cylinder_texgen' : { 'number' : 140, 'flags' : { 'incomplete'", ": 2, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR', 'KGC',", ": { 'public' }, 'url' : 'extensions/OES/OES_draw_elements_base_vertex.txt', }, 'GL_OES_draw_texture' : { 'esnumber' :", "}, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_scale_bias.txt', }, 'GL_SGIX_texture_supersample' :", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_rgb_422.txt',", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_explicit_multisample.txt', }, 'GL_NV_fbo_color_attachments' : { 'esnumber' : 92,", "'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', }, 'GL_MESAX_texture_stack' : { 'number' : 318, 'flags' : { 'public' },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback2.txt', }, 'GL_NV_uniform_buffer_unified_memory' : 
{", "{ 'number' : 267, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'public' }, 'url' : 'extensions/AMD/AMD_shader_stencil_value_export.txt', }, 'GL_AMD_shader_trinary_minmax' : { 'number' : 428,", "'url' : 'extensions/EXT/EXT_robustness.txt', }, 'GL_EXT_sRGB' : { 'esnumber' : 105, 'flags' : {", "'GL_APPLE_specular_vector' : { 'number' : 159, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program3.txt', }, 'GL_NV_vertex_program4' :", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_filter_anisotropic.txt', }, 'GL_ARB_texture_filter_minmax' : { 'arbnumber' : 188, 'flags'", "'GL_ARB_shading_language_packing' : { 'arbnumber' : 116, 'flags' : { 'public' }, 'url' :", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_blend_equation_separate.txt', }, 'GL_EXT_blend_func_extended' : { 'esnumber' : 247,", "115, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_load_store.txt', }, 'GL_ARB_shader_image_size' : {", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLU_SGI_filter4_parameters.txt', },", ": { 'public' }, 'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_frame_terminator.txt', },", "422, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'url' : 'extensions/ARB/ARB_sparse_buffer.txt', }, 'GL_ARB_sparse_texture' : { 'arbnumber' : 158, 'flags' : {", "72, 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', }, 'GL_SGIX_complex_polar' : {", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_instanced_arrays.txt', }, 'GL_ARB_internalformat_query'", "'url' : 'extensions/ARB/ARB_point_sprite.txt', }, 'GL_ARB_polygon_offset_clamp' : { 'arbnumber' : 193, 'flags' : {", "'GLX_EXT_visual_rating' : { 'number' : 44, 'flags' : { 'public' }, 'supporters' :", "171, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_pipeline_statistics_query.txt', }, 'GL_ARB_pixel_buffer_object' : {", "'supporters' : { 'ARB' 
}, 'url' : 'extensions/ARB/ARB_vertex_shader.txt', }, 'GL_ARB_vertex_type_10f_11f_11f_rev' : { 'arbnumber'", ": 163, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_derivative_control.txt', }, 'GL_ARB_direct_state_access' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_video_output.txt', }, 'WGL_OML_sync_control' : { 'number' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_query_buffer_object.txt', }, 'GL_ARB_robust_buffer_access_behavior' : { 'arbnumber' : 135,", "'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async.txt', }, 'GL_SGIX_async_histogram'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_decimation.txt', }, 'GL_SGIX_depth_pass_instrument' : { 'number' : 205,", ": { 'number' : 340, 'flags' : { 'public' }, 'supporters' : {", "256, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", ": { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_fragment_shader_ordering.txt', }, 'GL_INTEL_framebuffer_CMAA' : { 'number' :", "{ 'public' }, 'url' : 'extensions/FJ/FJ_shader_binary_GCCSO.txt', }, 'GL_GREMEDY_frame_terminator' : { 'number' : 345,", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_border_clamp.txt', }, 'GL_ARB_texture_buffer_object' : { 'arbnumber'", "{ 'number' : 385, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "{ 'esnumber' : 87, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external.txt', },", "}, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_make_current_read.txt', }, 'WGL_EXT_multisample' :", "'flags' : { 'public' }, 'url' : 'extensions/MESA/GLX_MESA_query_renderer.txt', }, 'GLX_MESA_release_buffers' : { 'number'", "'number' : 280, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", "{ 'esnumber' : 68, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_shader_binary.txt', },", ": { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_shared_multisample.txt', 'alias' : { 'GLX_SGIS_shared_multisample' }, },", ": { 'number' : 186, 'esnumber' : 60, 'flags' : { 'public' },", ": 
'extensions/SGIS/SGIS_detail_texture.txt', }, 'GL_SGIS_fog_function' : { 'number' : 64, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_EGL_stream_consumer_external.txt', }, 'GL_NV_alpha_to_coverage_dither_control' : { 'number'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_color_buffer_float.txt', 'alias' : { 'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float' }, },", "'extensions/EXT/EXT_rescale_normal.txt', }, 'GL_EXT_robustness' : { 'esnumber' : 107, 'flags' : { 'public' },", "'number' : 266, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_cube.txt', }, 'GL_NV_stereo_view_rendering' : { 'number'", ": 204, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "'public' }, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture.txt',", "'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_object.txt', }, 'GL_ARB_framebuffer_sRGB' : { 'arbnumber' : 46, 'flags'", ": 23, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image.txt', }, 'GL_OES_EGL_image_external' :", "'number' : 301, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "}, 'url' : 'extensions/EXT/EXT_index_array_formats.txt', }, 'GL_EXT_index_func' : { 'number' : 95, 'flags' :", "'extensions/SGI/SGI_texture_color_table.txt', }, 'GLX_SGI_transparent_pixel' : { 'number' : 153, 'flags' : { 'obsolete' },", ": { 'number' : 333, 'flags' : { 'public' }, 'supporters' : {", "'GL_AMD_blend_minmax_factor' : { 'number' : 404, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/ARB/ARB_texture_cube_map.txt', }, 'GL_ARB_texture_cube_map_array' : { 'arbnumber' : 71, 'flags' : {", ": { 'number' : 295, 'esnumber' : 17, 'flags' : { 'public' },", ": 'extensions/KHR/KHR_debug.txt', }, 'GL_KHR_no_error' : { 'arbnumber' : 175, 'esnumber' : 243, 'flags'", "15 with SGIS_pixel_texture.', }, 'GL_SGIX_pixel_texture_bits' : { 'number' : 127, 'flags' : {", "327, 'esnumber' : 157, 
'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'extensions/EXT/EXT_fog_coord.txt', }, 'GL_EXT_frag_depth' : { 'esnumber' : 86, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/EXT/EXT_shader_integer_mix.txt',", "329, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_texture_mirrored_repeat.txt', }, 'GL_IBM_vertex_array_lists' : { 'number'", ": { 'esnumber' : 210, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_geometry_shader.txt',", "'url' : 'extensions/ARB/ARB_tessellation_shader.txt', }, 'GL_ARB_texture_barrier' : { 'arbnumber' : 167, 'flags' : {", "'esnumber' : 204, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_elements_base_vertex.txt', }, 'GL_EXT_draw_instanced'", "499, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", ": 'extensions/ARB/ARB_texture_rectangle.txt', }, 'GL_ARB_texture_rg' : { 'arbnumber' : 53, 'flags' : { 'public'", "{ 'MESA' }, 'url' : 'extensions/AMD/AMD_shader_ballot.txt', }, 'GL_AMD_shader_explicit_vertex_parameter' : { 'number' : 485,", "'GL_OES_texture_float' : { 'esnumber' : 36, 'flags' : { 'public' }, 'url' :", ": 'extensions/SUN/SUN_vertex.txt', }, 'GL_VIV_shader_binary' : { 'esnumber' : 85, 'flags' : { 'public'", "}, 'GL_ARM_rgba8' : { 'esnumber' : 82, 'flags' : { 'public' }, 'url'", "'number' : 308, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_equation_advanced.txt',", "}, 'GL_NV_vertex_array_range2' : { 'number' : 232, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_global_alpha.txt', }, 'GL_SUN_mesh_array' : { 'number'", ": 470, 'esnumber' : 233, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_geometry_shader_passthrough.txt',", "{ 'public' }, 'url' : 'extensions/NV/NV_texture_npot_2D_mipmap.txt', }, 'GL_NV_texture_rectangle' : { 'number' : 229,", 
": 'extensions/ATI/ATI_draw_buffers.txt', }, 'GL_ATI_element_array' : { 'number' : 256, 'flags' : { 'public'", "{ 'number' : 188, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, }, 'GL_EXT_shader_group_vote' : { 'esnumber' : 254, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_program4.txt', }, 'GL_NV_geometry_shader4'", "}, 'GL_SGIX_scalebias_hint' : { 'number' : 236, 'flags' : { 'incomplete' }, 'supporters'", "'number' : 358, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_byte_coordinates.txt', }, 'GL_OES_compressed_ETC1_RGB8_texture'", "{ 'arbnumber' : 78, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_blend_func_extended.txt', },", "{ 'esnumber' : 151, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers.txt', },", ": { 'WGL_ARB_create_context_no_error' }, }, 'GLX_ARB_create_context_profile' : { 'arbnumber' : 75, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_stencil_operation_extended.txt',", "'GL_SGIX_nonlinear_lighting_pervertex' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', }, 'GL_SGIX_nurbs_eval'", "{ 'number' : 96, 'flags' : { 'public' }, 'supporters' : { 'INTEL',", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB.txt', }, 'GL_EXT_sRGB_write_control' : { 'esnumber'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_YUV_target.txt', }, 'GL_EXT_abgr' : { 'number'", "}, 'url' : 'extensions/EXT/EXT_polygon_offset.txt', }, 'GL_EXT_polygon_offset_clamp' : { 'number' : 460, 'esnumber' :", "'url' : 'extensions/EXT/EXT_instanced_arrays.txt', }, 'GLX_EXT_libglvnd' : { 'number' : 482, 'flags' : {", "'arbnumber' : 195, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "}, 'url' : 'extensions/NV/NV_sample_locations.txt', }, 'GL_NV_sample_mask_override_coverage' : { 'number' : 473, 'esnumber' :", 
"'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', }, 'GL_ARB_vertex_type_2_10_10_10_rev' : { 'arbnumber' : 86, 'flags'", "'esnumber' : 95, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc_update.txt', }, 'GL_NV_texture_compression_vtc'", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_complex_polar.txt', }, 'GL_SGIX_convolution_accuracy' : { 'number' : 211,", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_instruments.txt', }, 'GL_SGIX_interlace' : {", ": 17, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'extensions/OES/OES_texture_float_linear.txt', 'alias' : { 'GL_OES_texture_half_float_linear' }, }, 'GL_OES_texture_mirrored_repeat' : { 'esnumber' : 22,", "}, 'url' : 'extensions/SGIX/SGIX_async_histogram.txt', }, 'GL_SGIX_async_pixel' : { 'number' : 133, 'flags' :", "'extensions/ARB/ARB_texture_swizzle.txt', }, 'GL_ARB_texture_view' : { 'arbnumber' : 124, 'flags' : { 'public' },", "}, 'url' : 'extensions/SGIS/SGIS_point_line_texgen.txt', }, 'GL_SGIS_shared_multisample' : { 'number' : 143, 'flags' :", "'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/WGL_EXT_depth_float.txt', }, 'WGL_EXT_display_color_table' :", "'public' }, 'url' : 'extensions/NV/NV_texture_npot_2D_mipmap.txt', }, 'GL_NV_texture_rectangle' : { 'number' : 229, 'flags'", "'public' }, 'url' : 'extensions/EXT/EXT_external_objects_win32.txt', 'alias' : { 'GL_EXT_semaphore_win32' }, }, 'GL_EXT_float_blend' :", "'WGL_ARB_pbuffer' : { 'arbnumber' : 11, 'flags' : { 'public' }, 'supporters' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_program4.txt', }, 'GL_NV_geometry_shader4' : { 'number' : 338, 'flags'", "'public' }, 'url' : 'extensions/NV/NV_fbo_color_attachments.txt', }, 'GL_NV_fence' : { 'number' : 222, 'esnumber'", "'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_texture_scissor.txt',", ": 'extensions/ARB/ARB_texture_view.txt', }, 'GL_ARB_timer_query' : { 'arbnumber' : 
85, 'flags' : { 'public'", ": { 'number' : 6, 'flags' : { 'public' }, 'supporters' : {", "475, 'esnumber' : 196, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_internalformat_sample_query.txt', },", ": { 'number' : 379, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 476, 'esnumber' : 237, 'flags' : { 'public' },", "'url' : 'extensions/EXT/EXT_fragment_lighting.txt', }, 'GL_EXT_framebuffer_blit' : { 'number' : 316, 'flags' : {", "{ 'public' }, 'url' : 'extensions/NV/NV_viewport_swizzle.txt', }, 'GL_OES_EGL_image' : { 'esnumber' : 23,", "'number' : 152, 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_datapipe.txt', }, 'GL_SGIX_decimation'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_lod_bias.txt', },", ": { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_func.txt', }, 'GL_EXT_index_material' : { 'number'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_instanced.txt', },", "}, 'GL_NV_video_capture' : { 'number' : 374, 'flags' : { 'public' }, 'supporters'", "}, 'GL_ARB_shading_language_420pack' : { 'arbnumber' : 108, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_int64.txt', }, 'GL_ARB_half_float_pixel' : { 'arbnumber' : 40, 'flags'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_program.txt', }, 'GL_ARB_vertex_shader' : { 'arbnumber' : 31,", ": 'extensions/EXT/EXT_draw_transform_feedback.txt', }, 'GL_EXT_external_buffer' : { 'number' : 508, 'esnumber' : 284, 'flags'", "'AMD' }, 'url' : 'extensions/AMD/AMD_stencil_operation_extended.txt', }, 'GL_AMD_texture_gather_bias_lod' : { 'number' : 502, 'flags'", "'WGL_ARB_context_flush_control' }, }, 'GL_KHR_debug' : { 'arbnumber' : 119, 'esnumber' : 118, 'flags'", ": 'extensions/OVR/OVR_multiview.txt', }, 'GL_OVR_multiview2' : { 'number' : 479, 'esnumber' : 242, 'flags'", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_shader4.txt', }, 'GL_EXT_gpu_shader5' : { 'esnumber' : 178,", "{ 'esnumber' 
: 142, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_blit.txt', },", "'extensions/ARB/ARB_parallel_shader_compile.txt', }, 'GL_ARB_pipeline_statistics_query' : { 'arbnumber' : 171, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_filter_minmax.txt', }, 'GL_ARB_texture_float' : { 'arbnumber'", "'number' : 183, 'flags' : { 'public' }, 'supporters' : { 'SUN' },", "{ 'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/EXT_422_pixels.txt', }, 'GL_EXT_YUV_target'", "}, }, 'GL_NV_blend_minmax_factor' : { 'number' : 510, 'esnumber' : 285, 'flags' :", "}, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt',", "340, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/GLX_ARB_get_proc_address.txt', }, 'GL_ARB_get_program_binary' : { 'arbnumber'", "'GL_EXT_texture_lod_bias' : { 'number' : 186, 'esnumber' : 60, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/NV/NV_framebuffer_blit.txt', }, 'GL_NV_framebuffer_mixed_samples' : { 'number' : 469,", ": { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader5.txt', }, 'GL_ARB_gpu_shader_fp64' : { 'arbnumber' :", "'extensions/OES/OES_texture_npot.txt', }, 'GL_OES_texture_stencil8' : { 'esnumber' : 173, 'flags' : { 'public' },", "{ 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pbuffer.txt', }, 'WGL_EXT_pixel_format' : { 'number' :", ": { 'arbnumber' : 58, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compatibility.txt',", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_barrier.txt', }, 'GLX_SGIX_swap_group' : {", "500, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_alpha_to_coverage_dither_control.txt', }, 'GL_NV_bgr' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_attrib_64bit.txt', }, 'GL_EXT_vertex_shader' : { 'number'", "'public' 
}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_texture.txt', }, 'GL_NV_blend_equation_advanced' :", ": { 'public' }, 'url' : 'extensions/OES/OES_depth_texture.txt', }, 'GL_OES_depth_texture_cube_map' : { 'esnumber' :", "'number' : 264, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 442, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "}, 'url' : 'extensions/ARB/ARB_texture_buffer_object.txt', }, 'GL_ARB_texture_buffer_object_rgb32' : { 'arbnumber' : 92, 'flags' :", "'extensions/NV/NV_primitive_restart.txt', }, 'GL_NV_query_resource' : { 'number' : 511, 'flags' : { 'public' },", ": 'extensions/NV/NV_sample_mask_override_coverage.txt', }, 'GL_NV_shader_atomic_counters' : { 'number' : 423, 'flags' : { 'public'", "'extensions/SGIS/SGIS_line_texgen.txt', }, 'GL_SGIS_multisample' : { 'number' : 25, 'flags' : { 'public' },", "'esnumber' : 214, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_tessellation_shader.txt', }, 'GL_OES_texture_3D'", "'GL_NV_conservative_raster_underestimation' : { 'number' : 518, 'flags' : { 'public' }, 'url' :", "'public' }, 'url' : 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias' : { 'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' }, }, 'GL_EXT_texture_compression_bptc' :", ": { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', }, 'GL_MESAX_texture_stack' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil8.txt', }, 'GL_OES_stencil_wrap' : { 'esnumber'", "9, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_evaluators.txt', }, 'GL_NV_explicit_attrib_location' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource_tag.txt', }, 'GL_NV_read_buffer' : { 'esnumber' : 93, 'flags'", ": { 'esnumber' : 248, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisample_compatibility.txt',", "'AMD' }, 'url' : 
'extensions/AMD/AMD_texture_gather_bias_lod.txt', }, 'GL_AMD_texture_texture4' : { 'number' : 362, 'flags'", "'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch.txt', }, 'GL_ARM_shader_framebuffer_fetch_depth_stencil' : { 'esnumber' : 166, 'flags'", "'GL_MTK_shader_binary' : { 'esnumber' : 244, 'flags' : { 'incomplete', 'private' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'SGI' }, 'url'", "82, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_bit_encoding.txt', }, 'GL_ARB_shader_clock' : {", "'extensions/EXT/EXT_shadow_funcs.txt', }, 'GL_EXT_shadow_samplers' : { 'esnumber' : 102, 'flags' : { 'public' },", ": 113, 'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url'", "}, 'GL_QCOM_writeonly_rendering' : { 'esnumber' : 61, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/ARB/ARB_fragment_coord_conventions.txt', }, 'GL_ARB_fragment_layer_viewport' : { 'arbnumber' : 129, 'flags' : {", ": 227, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'esnumber' : 137, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_float.txt', },", "'extensions/AMD/AMD_gpu_shader_int16.txt', }, 'GL_AMD_gpu_shader_int64' : { 'number' : 451, 'flags' : { 'public' },", ": 'extensions/ARB/ARB_color_buffer_float.txt', 'alias' : { 'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float' }, }, 'GL_ARB_compatibility' : { 'arbnumber'", "'url' : 'extensions/HP/HP_occlusion_test.txt', }, 'GL_HP_texture_lighting' : { 'number' : 111, 'flags' : {", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_resize_buffers.txt', }, 'GLX_MESA_set_3dfx_mode'", "'url' : 'extensions/NV/NV_explicit_multisample.txt', }, 'GL_NV_fbo_color_attachments' : { 'esnumber' : 92, 'flags' : {", "'extensions/EXT/EXT_shader_group_vote.txt', }, 'GL_EXT_shader_image_load_formatted' : { 'number' : 449, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/ARB/ARB_sparse_buffer.txt', }, 'GL_ARB_sparse_texture' : { 'arbnumber' : 158, 
'flags'", "'GLX_EXT_create_context_es_profile' }, }, 'GL_EXT_cull_vertex' : { 'number' : 98, 'flags' : { 'public'", ": 348, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_float.txt', }, 'GL_ATI_texture_mirror_once' : { 'number' : 221,", "{ 'number' : 62, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'AMD' }, 'url' : 'extensions/AMD/AMD_blend_minmax_factor.txt', }, 'GL_AMD_compressed_3DC_texture' : { 'esnumber' : 39, 'flags'", "{ 'public' }, 'url' : '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', }, 'GL_OES_blend_equation_separate' : { 'esnumber' : 1,", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5.txt', }, 'GL_NV_gpu_program5_mem_extended' : { 'number' :", ": 45, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_standard_derivatives.txt', }, 'GL_OES_stencil1' :", ": 118, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", ": 162, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cull_distance.txt', }, 'GL_ARB_debug_output' :", "'esnumber' : 69, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'IMG',", "}, 'url' : 'extensions/EXT/EXT_discard_framebuffer.txt', }, 'GL_EXT_disjoint_timer_query' : { 'esnumber' : 150, 'flags' :", ": { 'number' : 414, 'flags' : { 'public' }, 'supporters' : {", "{ 'esnumber' : 12, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_palette.txt', },", "{ 'number' : 16, 'flags' : { 'public' }, 'supporters' : { 'SGI'", ": { 'number' : 518, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_underestimation.txt',", ": 210, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'GL_NV_path_rendering_shared_edge' : { 'number' : 471, 'esnumber' : 234, 'flags' : { 'public'", ": { 'number' : 233, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/ARB/ARB_texture_query_lod.txt', }, 'GL_ARB_texture_rectangle' : { 'arbnumber' : 38, 'flags' : { 'public'", "}, 'url' : 'extensions/OES/OES_depth_texture_cube_map.txt', }, 
'GL_OES_draw_buffers_indexed' : { 'esnumber' : 209, 'flags' :", "'url' : 'extensions/OES/OES_geometry_shader.txt', }, 'GL_OES_get_program_binary' : { 'esnumber' : 47, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_compression_bptc.txt', }, 'GL_ARB_texture_compression_rgtc' : { 'arbnumber'", "{ 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_gpu_memory_info.txt', }, 'GL_NVX_linked_gpu_multicast' : { 'number' : 493,", ": { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_lod.txt', }, 'GL_SGIS_texture_select' : { 'number' :", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_decimation.txt',", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_instruments.txt', }, 'GL_SGIX_interlace' :", "}, 'url' : 'extensions/NV/NV_depth_buffer_float.txt', }, 'GL_NV_depth_clamp' : { 'number' : 260, 'flags' :", "{ 'number' : 139, 'flags' : { 'public' }, 'supporters' : { 'HP',", ": 'extensions/ARB/ARB_vertex_array_object.txt', }, 'GL_ARB_vertex_attrib_64bit' : { 'arbnumber' : 99, 'flags' : { 'public'", "{ 'arbnumber' : 61, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_depth_clamp.txt', },", "281, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_fd.txt', 'alias' : { 'GL_EXT_semaphore_fd'", "'GL_ARB_transform_feedback_overflow_query' : { 'arbnumber' : 173, 'flags' : { 'public' }, 'url' :", "'alias' : { 'GL_OES_texture_half_float_linear' }, }, 'GL_OES_texture_mirrored_repeat' : { 'esnumber' : 22, 'flags'", "70, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_shading.txt', }, 'GL_ARB_sampler_objects' : {", "406, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'url' : 'extensions/NV/NV_conservative_raster.txt', }, 'GL_NV_conservative_raster_dilate' : { 'number' : 480, 'flags' :", "476, 'esnumber' : 237, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array2.txt', },", "{ 'SGI' }, 'url' : 'extensions/EXT/EXT_light_texture.txt', }, 
'GL_EXT_map_buffer_range' : { 'esnumber' : 121,", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shadow_funcs.txt', }, 'GL_EXT_shadow_samplers' : { 'esnumber'", "{ 'esnumber' : 208, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_copy_image.txt', },", "15, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_sprite.txt', }, 'GL_OES_primitive_bounding_box' : {", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_buffers.txt', },", ": 149, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_border_clamp.txt', }, 'GL_NV_texture_compression_latc' :", ": { 'number' : 236, 'flags' : { 'incomplete' }, 'supporters' : {", "127, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_packed_float.txt', }, 'GL_NV_parameter_buffer_object' : {", ": 'extensions/EXT/WGL_EXT_make_current_read.txt', }, 'WGL_EXT_multisample' : { 'number' : 209, 'flags' : { 'public'", "}, 'GL_APPLE_texture_max_level' : { 'esnumber' : 80, 'flags' : { 'public' }, 'url'", ": 'extensions/EXT/EXT_sparse_texture.txt', }, 'GL_EXT_sparse_texture2' : { 'number' : 463, 'esnumber' : 259, 'flags'", ": 174, 'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url'", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_pixmap_colormap.txt', }, 'GL_MESA_program_binary_formats' : { 'number'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multisample.txt',", ": 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', }, 'GL_SGIX_texture_multi_buffer' : { 'number' : 53, 'flags' : { 'public'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_video_out.txt', }, 'GL_NV_viewport_array' : { 'esnumber' :", "'esnumber' : 298, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', }, 'GL_EXT_EGL_image_storage'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments' : 'Supported on Visual Workstation 320 /", ": { 'number' : 326, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 
'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_attrib_array_object.txt', }, 'GL_ATI_vertex_streams'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_conditional_render.txt', }, 'GL_NVX_gpu_memory_info' : { 'number'", "'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_blend_logic_op.txt', }, 'GL_EXT_blend_minmax' : {", "'GL_EXT_pixel_buffer_object' : { 'number' : 302, 'flags' : { 'public' }, 'supporters' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource_tag.txt', }, 'GL_NV_read_buffer' : { 'esnumber' : 93,", "'GL_EXT_framebuffer_multisample_blit_scaled' : { 'number' : 409, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'supporters' : { 'HP', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_convolution.txt',", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_attrib_array_object.txt', }, 'GL_ATI_vertex_streams' : { 'number' :", "{ 'esnumber' : 206, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_render_snorm.txt', },", "'GL_NV_texture_shader' : { 'number' : 230, 'flags' : { 'public' }, 'supporters' :", ": 57, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "{ 'number' : 521, 'esnumber' : 300, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/ARB/ARB_derivative_control.txt', }, 'GL_ARB_direct_state_access' : { 'arbnumber' : 164, 'flags' : {", ": 'extensions/EXT/EXT_index_func.txt', }, 'GL_EXT_index_material' : { 'number' : 94, 'flags' : { 'public'", "91, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_float_buffer.txt', 'alias' : { 'WGL_NV_float_buffer' }, }, 'GL_NV_fog_distance'", ": { 'esnumber' : 5, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt',", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/GLX_AMD_gpu_association.txt', },", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_element_array.txt', }, 'GL_APPLE_fence' : { 'number'", "'supporters' : { 'NVIDIA' }, 
'url' : 'extensions/NV/GLX_NV_copy_buffer.txt', }, 'GL_NV_copy_buffer' : { 'esnumber'", ": { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_compiled_vertex_array.txt',", "}, 'GL_ARB_shading_language_include' : { 'arbnumber' : 76, 'flags' : { 'public' }, 'url'", "'extensions/SGIX/SGIX_convolution_accuracy.txt', }, 'GL_SGIX_cube_map' : { 'number' : 130, 'flags' : { 'incomplete' },", "{ 'number' : 357, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', }, 'GL_MESAX_texture_stack' : { 'number' : 318, 'flags' : { 'public'", "'arbnumber' : 189, 'esnumber' : 249, 'flags' : { 'public' }, 'url' :", ": 'extensions/OES/OES_surfaceless_context.txt', }, 'GL_OES_tessellation_shader' : { 'esnumber' : 214, 'flags' : { 'public'", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_icc_texture.txt', }, 'GL_SGIX_igloo_interface' :", "'number' : 113, 'flags' : { 'public' }, 'supporters' : { 'MS' },", "'url' : 'extensions/EXT/EXT_shared_texture_palette.txt', }, 'GL_EXT_sparse_texture' : { 'esnumber' : 240, 'flags' : {", "'GL_OES_blend_subtract' : { 'esnumber' : 3, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/APPLE/APPLE_clip_distance.txt', }, 'GL_APPLE_color_buffer_packed_float' : { 'esnumber' : 194, 'flags' :", "180, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_post_depth_coverage.txt', }, 'GL_ARB_program_interface_query' : {", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_spotlight_cutoff.txt', }, 'GL_SGIX_sprite' : { 'number' : 52, 'flags'", "}, 'GL_OES_compressed_paletted_texture' : { 'number' : 294, 'esnumber' : 6, 'flags' : {", "}, 'url' : 'extensions/SGIX/SGIX_slim.txt', }, 'GL_SGIX_spotlight_cutoff' : { 'number' : 131, 'flags' :", ": { 'number' : 164, 'flags' : { 'public' }, 'supporters' : {", "'extensions/ARB/ARB_fragment_coord_conventions.txt', }, 'GL_ARB_fragment_layer_viewport' : { 'arbnumber' : 129, 'flags' : { 'public' },", "'number' : 370, 
'flags' : { 'public' }, 'supporters' : { 'APPLE' },", ": 'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', }, 'GL_SGIX_flush_raster' : { 'number' : 61, 'flags' : { 'public'", "'extensions/NV/NV_texture_env_combine4.txt', }, 'GL_NV_texture_expand_normal' : { 'number' : 286, 'flags' : { 'public' },", "15, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "2, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_func_separate.txt', }, 'GL_OES_blend_subtract' : {", "'url' : 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', }, 'GL_SGIX_complex_polar' : { 'flags' : { 'incomplete' }, 'url'", ": 'extensions/AMD/AMD_texture_gather_bias_lod.txt', }, 'GL_AMD_texture_texture4' : { 'number' : 362, 'flags' : { 'public'", "'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_compressed_paletted_texture.txt', }, 'GL_OES_copy_image' : { 'esnumber'", "}, 'GL_EXT_422_pixels' : { 'number' : 178, 'flags' : { 'public' }, 'supporters'", "'GL_NV_fragment_program' : { 'number' : 282, 'flags' : { 'public' }, 'supporters' :", ": { 'GLX_EXT_framebuffer_sRGB', 'WGL_EXT_framebuffer_sRGB' }, }, 'GL_EXT_geometry_shader' : { 'esnumber' : 177, 'flags'", "'TransGaming' }, 'url' : 'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl' : 'extensions/EXT/EXT_separate_shader_objects.gles.txt', 'comments' : 'Different that the", "'GL_SGIX_list_priority' : { 'number' : 80, 'flags' : { 'public' }, 'supporters' :", ": { 'arbnumber' : 12, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/EXT/EXT_packed_depth_stencil.txt', }, 'GL_EXT_packed_float' : { 'number' : 328, 'flags'", ": { 'number' : 39, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/EXT/EXT_shader_io_blocks.txt', }, 'GL_EXT_shader_non_constant_global_initializers' : { 'esnumber' : 264, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_size_array.txt', }, 'GL_OES_point_sprite' : { 'esnumber'", "}, 'GL_EXT_vertex_array_bgra' : { 'number' : 354, 'flags' : { 'public' }, 
'supporters'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_vector_ops.txt', }, 'GL_SGIX_vertex_array_object' : { 'flags'", "'extensions/ATI/ATI_meminfo.txt', }, 'GL_ATI_pn_triangles' : { 'number' : 246, 'flags' : { 'public' },", ": 154, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_rgtc.txt', }, 'GL_EXT_texture_compression_s3tc' : {", ": { 'number' : 177, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_pixel_texture.txt', }, 'GL_SGIS_point_line_texgen' : {", "99, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "'url' : 'extensions/OES/OES_read_format.txt', }, 'GL_OES_required_internalformat' : { 'esnumber' : 115, 'flags' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_sRGB.txt', },", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_texture.txt', }, 'GL_ARB_derivative_control'", ": { 'arbnumber' : 23, 'flags' : { 'public' }, 'supporters' : {", "'GL_AMD_sparse_texture' : { 'number' : 426, 'flags' : { 'public' }, 'supporters' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program_shadow.txt', }, 'GL_ARB_fragment_shader' : {", "}, 'GL_SGIS_texture_lod' : { 'number' : 24, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/ARB/ARB_shader_objects.txt', }, 'GL_ARB_shader_precision' : { 'arbnumber' : 98, 'flags' :", "'ATI' }, 'url' : 'extensions/ATI/ATI_envmap_bumpmap.txt', }, 'GL_ATI_fragment_shader' : { 'number' : 245, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clip_volume_hint.txt', }, 'GL_EXT_cmyka' : { 'number'", "{ 'GL_ANGLE_texture_compression_dxt1', 'GL_ANGLE_texture_compression_dxt5' }, }, 'GL_ANGLE_texture_usage' : { 'esnumber' : 112, 'flags' :", "'extensions/ARB/ARB_texture_compression_rgtc.txt', }, 'GL_ARB_texture_cube_map' : { 'arbnumber' : 7, 'flags' : { 
'public' },", "}, 'GLX_SGIX_visual_select_group' : { 'number' : 234, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_YUV_target.txt', }, 'GL_EXT_abgr' : { 'number' : 1,", "477, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_command_list.txt', }, 'GL_NV_compute_program5' : {", "'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback2.txt', }, 'GL_ARB_transform_feedback3' : { 'arbnumber' : 94, 'flags'", "}, 'GL_NV_compute_program5' : { 'number' : 421, 'flags' : { 'public' }, 'supporters'", "{ 'arbnumber' : 127, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_compatibility.txt', },", "'public' }, 'url' : 'extensions/EXT/EXT_texture_storage.txt', }, 'GL_EXT_texture_swizzle' : { 'number' : 356, 'flags'", "'extensions/EXT/EXT_unpack_subimage.txt', }, 'GL_EXT_vertex_array' : { 'number' : 30, 'flags' : { 'public' },", "'GL_ARB_seamless_cube_map' : { 'arbnumber' : 65, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_window_pos.txt', }, 'GL_ARM_mali_program_binary' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clip_cull_distance.txt', }, 'GL_EXT_clip_volume_hint' : { 'number'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_clipmap.txt', }, 'GL_SGIX_color_matrix_accuracy' : { 'flags' : { 'incomplete'", "}, 'url' : 'extensions/SGIX/SGIX_fog_offset.txt', }, 'GL_SGIX_fog_patchy' : { 'flags' : { 'incomplete' },", "}, 'url' : 'extensions/ATI/ATI_texture_float.txt', }, 'GL_ATI_texture_mirror_once' : { 'number' : 221, 'flags' :", "{ 'number' : 248, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_vertex_shader.txt', },", ": 'extensions/ARB/ARB_derivative_control.txt', }, 'GL_ARB_direct_state_access' : { 'arbnumber' : 164, 'flags' : { 'public'", "418, 'esnumber' : 197, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', }, 'GL_ARB_vertex_type_2_10_10_10_rev' : { 'arbnumber' : 86, 'flags' : { 'public'", ": 334, 'flags' : { 
'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'SUN' }, 'url' : 'extensions/EXT/EXT_blend_minmax.txt', }, 'GL_EXT_blend_subtract' : { 'number' : 38, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_float.txt', 'alias' : { 'GL_OES_texture_half_float' },", "}, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_parallel_arrays.txt', }, 'GL_INTEL_performance_query' : {", "{ 'number' : 361, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "143, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'GLX_SGIX_swap_barrier' : { 'number' : 92, 'flags' : { 'public' }, 'supporters'", "}, 'GL_SGIX_shadow' : { 'number' : 34, 'flags' : { 'public' }, 'supporters'", ": 237, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", "'extensions/EXT/EXT_texture_rg.txt', }, 'GL_EXT_texture_sRGB' : { 'number' : 315, 'flags' : { 'public' },", "'number' : 92, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'number' : 466, 'esnumber' : 232, 'flags' : { 'public' }, 'url' :", "{ 'esnumber' : 267, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_viewport_array.txt', },", ": 'extensions/OES/OES_texture_npot.txt', }, 'GL_OES_texture_stencil8' : { 'esnumber' : 173, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_map_buffer_alignment.txt', }, 'GL_ARB_map_buffer_range' : { 'arbnumber'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression_rgtc.txt', }, 'GL_ARB_texture_cube_map' : { 'arbnumber'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_shared_exponent.txt', }, 'GL_EXT_texture_snorm' :", "11, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_pixel_buffer_object.txt',", ": { 'number' : 34, 'flags' : { 'public' }, 'supporters' : {", ": 59, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "}, 'GL_OES_tessellation_shader' : { 'esnumber' : 214, 'flags' : { 
'public' }, 'url'", ": { 'arbnumber' : 189, 'esnumber' : 249, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_instanced.txt', }, 'GL_ARB_enhanced_layouts'", ": { 'arbnumber' : 140, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_levels.txt',", "'WGL_EXT_depth_float' : { 'number' : 177, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 417, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_map_texture.txt', }, 'GL_INTEL_blackhole_render' :", ": { 'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_texture_compression_FXT1.txt', },", ": { 'number' : 522, 'esnumber' : 301, 'flags' : { 'public' },", "145, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_texture.txt', }, 'GL_ARB_clip_control' : {", "'WGL_ARB_pixel_format_float' }, }, 'GL_ARB_compatibility' : { 'arbnumber' : 58, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/EXT/EXT_external_objects.txt', 'alias' : { 'GL_EXT_semaphore' }, }, 'GL_EXT_memory_object_fd' :", "'number' : 323, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context_robustness.txt', }, 'WGL_ARB_extensions_string' : { 'arbnumber' : 8,", "27, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": 363, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture4D.txt', }, 'GL_SGIS_texture_border_clamp' : { 'number'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control_tear.txt', },", "'extensions/ARB/ARB_robustness.txt', }, 'GLX_ARB_robustness_application_isolation' : { 'arbnumber' : 142, 'flags' : { 'public' },", ": { 'AMD' }, 'url' : 'extensions/ATI/ATI_meminfo.txt', }, 'GL_ATI_pn_triangles' : { 'number' :", "{ 'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_color_table.txt', 'comments' : 'Partial 
HP support.',", "'supporters' : { 'ES', 'HP', 'SGI' }, 'url' : 'extensions/EXT/EXT_copy_texture.txt', }, 'GLX_EXT_create_context_es2_profile' :", "{ 'number' : 190, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'esnumber' : 205, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multi_draw_indirect.txt',", "'esnumber' : 259, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture2.txt', }, 'GL_EXT_static_vertex_array'", "'extensions/SGIX/SGIX_bali_r_instruments.txt', }, 'GL_SGIX_bali_timer_instruments' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_timer_instruments.txt',", ": { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_layer_viewport.txt', }, 'GL_ARB_fragment_program' : { 'arbnumber' :", "'GL_NV_texture_multisample' : { 'number' : 403, 'flags' : { 'public' }, 'supporters' :", "'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_compiled_vertex_array.txt', }, 'GL_EXT_compressed_ETC1_RGB8_sub_texture' : { 'esnumber' : 188,", "'GL_NV_blend_equation_advanced' : { 'number' : 433, 'esnumber' : 163, 'flags' : { 'public'", "{ 'number' : 457, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, }, 'GLX_SGIX_video_resize' : { 'number' : 83, 'flags' : { 'public' },", "'GL_SGIS_shared_multisample' : { 'number' : 143, 'flags' : { 'incomplete' }, 'supporters' :", ": { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_wait_group.txt', }, 'GL_SGIX_ycrcb' : { 'number' :", ": 143, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_multisample.txt', }, 'GL_NV_framebuffer_multisample_coverage' :", ": { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_multisample.txt', 'alias' : { 'GL_EXT_multisample' }, },", "{ 'esnumber' : 81, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_shader_binary.txt', },", "'url' : 'extensions/EXT/EXT_depth_bounds_test.txt', }, 'GL_EXT_direct_state_access' : { 'number' : 353, 'flags' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_robustness.txt', }, 'GL_EXT_sRGB' : { 'esnumber' : 105,", 
"'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', }, 'GL_QCOM_tiled_rendering' : { 'esnumber' : 70, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_clear_texture.txt', }, 'GL_ARB_clip_control' : { 'arbnumber' : 160, 'flags' :", "'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', }, 'GL_EXT_texture_norm16' : { 'esnumber' : 207, 'flags' : { 'public' },", "'extensions/HP/HP_occlusion_test.txt', }, 'GL_HP_texture_lighting' : { 'number' : 111, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow.txt', },", ": { 'public' }, 'url' : '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', }, 'GL_OES_blend_equation_separate' : { 'esnumber' :", "}, 'url' : 'extensions/NV/NV_non_square_matrices.txt', }, 'GL_NV_occlusion_query' : { 'number' : 261, 'flags' :", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_dxt1.txt', }, 'GL_EXT_texture_compression_latc' : { 'number' : 331, 'flags'", "'url' : 'extensions/APPLE/APPLE_object_purgeable.txt', }, 'GL_APPLE_rgb_422' : { 'number' : 373, 'esnumber' : 76,", "'esnumber' : 292, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_trinary_minmax.txt', }, 'GL_AMD_sparse_texture' : { 'number' : 426,", "{ 'arbnumber' : 158, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture.txt', },", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_clip_band_hint.txt', }, 'GLX_SGIS_color_range' :", "}, 'GL_IGLOO_toggle_color_and_lightXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt',", "{ 'number' : 288, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "'GL_INTEL_blackhole_render' : { 'number' : 521, 'esnumber' : 300, 'flags' : { 'public'", "'extensions/ARB/ARB_gpu_shader_int64.txt', }, 'GL_ARB_half_float_pixel' : { 'arbnumber' : 40, 'flags' : { 'public' },", "{ 'GLX_NV_copy_image', 'WGL_NV_copy_image' }, }, 'GL_NV_coverage_sample' : { 
'esnumber' : 72, 'flags' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_occlusion_query2.txt', }, 'GL_ARB_parallel_shader_compile' : { 'arbnumber' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', }, 'GL_NV_bindless_texture'", "{ 'public' }, 'url' : 'extensions/OES/OES_gpu_shader5.txt', }, 'GL_OES_mapbuffer' : { 'esnumber' : 29,", "'url' : 'extensions/SGI/SGI_texture_color_table.txt', }, 'GLX_SGI_transparent_pixel' : { 'number' : 153, 'flags' : {", "'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Alias to GLX_ARB_create_context_profile not needed - see arbnumber 75.', },", "{ 'arbnumber' : 9, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "}, 'GL_ARB_matrix_palette' : { 'arbnumber' : 16, 'flags' : { 'public' }, 'supporters'", "'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_usage.txt', }, 'GL_WIN_phong_shading' :", "41, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_storage.txt', }, 'GL_EXT_texture_swizzle' : { 'number' :", "'number' : 65, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "}, 'url' : 'extensions/NV/NV_copy_buffer.txt', }, 'GL_NV_copy_depth_to_color' : { 'number' : 243, 'flags' :", "142, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_blit.txt', }, 'GL_NV_framebuffer_mixed_samples' : {", "'extensions/ARB/ARB_shader_objects.txt', }, 'GL_ARB_shader_precision' : { 'arbnumber' : 98, 'flags' : { 'public' },", ": 147, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_perturb_normal.txt', }, 'GL_EXT_texture_rg' :", "'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_compressed_paletted_texture.txt', }, 'GL_OES_copy_image' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_norm16.txt', }, 'GL_EXT_texture_object' : { 'number' : 20,", "'public' }, 'url' : 'extensions/ARB/ARB_framebuffer_no_attachments.txt', }, 'GL_ARB_framebuffer_object' : 
{ 'arbnumber' : 45, 'flags'", ": 156, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_draw_parameters.txt', }, 'GL_ARB_shader_group_vote' :", "{ 'arbnumber' : 87, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_indirect.txt', },", "'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_two_side.txt', }, 'GL_EXT_stencil_wrap' : { 'number' : 176,", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_sRGB.txt', }, 'GL_EXT_texture_sRGB_R8' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_color_buffer_float.txt', 'alias' : { 'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float' },", "{ 'number' : 80, 'flags' : { 'public' }, 'supporters' : { 'SGI'", ": { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', }, 'GL_APPLE_texture_max_level' : { 'esnumber' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback2.txt', }, 'GL_ARB_transform_feedback3' : { 'arbnumber' :", "109, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_instanced_arrays.txt', }, 'GL_ANGLE_pack_reverse_row_order' : {", "{ 'esnumber' : 111, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias'", "{ 'arbnumber' : 131, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query2.txt', },", ": { 'number' : 96, 'flags' : { 'public' }, 'supporters' : {", "'number' : 249, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", "'extensions/ARB/ARB_texture_cube_map_array.txt', }, 'GL_ARB_texture_env_add' : { 'arbnumber' : 6, 'flags' : { 'public' },", ": 177, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_geometry_shader.txt', 'alias' : {", ": 116, 'flags' : { 'obsolete' }, 'supporters' : { 'SGI' }, 'url'", "'public' }, 'url' : 'extensions/ARB/ARB_gl_spirv.txt', }, 'GL_ARB_gpu_shader5' : { 'arbnumber' : 88, 'flags'", "}, 'url' : 'extensions/NV/NV_gpu_program4.txt', }, 'GL_NV_gpu_program5' : { 'number' : 388, 'flags' :", "'url' : 'extensions/EXT/EXT_misc_attribute.txt', }, 
'GL_EXT_multi_draw_arrays' : { 'number' : 148, 'esnumber' : 69,", "{ 'number' : 45, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'extensions/SGIX/SGIX_pixel_texture_lod.txt', }, 'GL_SGIX_pixel_tiles' : { 'number' : 46, 'flags' : { 'obsolete' },", ": { 'public' }, 'supporters' : { 'REND' }, 'url' : 'extensions/REND/REND_screen_coordinates.txt', },", "'GL_MESA_tile_raster_order' : { 'number' : 515, 'esnumber' : 292, 'flags' : { 'public'", ": 385, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers_indexed.txt', }, 'GL_EXT_draw_elements_base_vertex' : { 'esnumber' : 204, 'flags'", "'SUN' }, 'url' : 'extensions/SUN/GLX_SUN_get_transparent_index.txt', }, 'GL_SUN_global_alpha' : { 'number' : 164, 'flags'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_make_current_read.txt', }, 'GLX_SGI_swap_control' : {", "'GL_ARB_vertex_array_object' : { 'arbnumber' : 54, 'flags' : { 'public' }, 'supporters' :", "'extensions/NV/NV_texture_compression_s3tc_update.txt', }, 'GL_NV_texture_compression_vtc' : { 'number' : 228, 'flags' : { 'public' },", "}, 'url' : 'extensions/I3D/WGL_I3D_genlock.txt', }, 'WGL_I3D_image_buffer' : { 'number' : 253, 'flags' :", "'alias' : { 'GL_EXT_shader_framebuffer_fetch_non_coherent' }, }, 'GL_EXT_shader_group_vote' : { 'esnumber' : 254, 'flags'", "}, 'url' : 'extensions/EXT/EXT_texture_border_clamp.txt', }, 'GL_EXT_texture_buffer' : { 'esnumber' : 183, 'flags' :", "'GL_OES_point_sprite' : { 'esnumber' : 15, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_triangle_list.txt',", "{ 'public' }, 'url' : 'extensions/OES/OES_point_sprite.txt', }, 'GL_OES_primitive_bounding_box' : { 'esnumber' : 212,", ": { 'MESA', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', }, 'GL_EXT_texture_integer' : { 'number'", ": 'extensions/SGIX/SGIX_pixel_texture_lod.txt', }, 'GL_SGIX_pixel_tiles' : { 'number' : 46, 
'flags' : { 'obsolete'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_invalidate_subdata.txt', }, 'GL_ARB_map_buffer_alignment' : { 'arbnumber' : 113,", ": { 'number' : 38, 'flags' : { 'public' }, 'supporters' : {", "'GL_KHR_robust_buffer_access_behavior' : { 'arbnumber' : 169, 'esnumber' : 189, 'flags' : { 'public'", "'GL_ARB_texture_non_power_of_two' : { 'arbnumber' : 34, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_EXT_separate_specular_color' : { 'number' : 144, 'flags' : { 'public' }, 'url'", "{ 'number' : 460, 'esnumber' : 252, 'flags' : { 'public' }, 'url'", "{ 'esnumber' : 169, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_shading.txt', },", "that the OpenGL extension with the same name string.', }, 'GL_EXT_separate_specular_color' : {", "}, 'url' : 'extensions/ARB/ARB_draw_indirect.txt', }, 'GL_ARB_draw_instanced' : { 'arbnumber' : 44, 'flags' :", "'extensions/AMD/AMD_shader_stencil_export.txt', }, 'GL_AMD_shader_stencil_value_export' : { 'number' : 444, 'flags' : { 'public' },", ": 265, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'url' : 'extensions/NV/NV_vertex_program1_1.txt', }, 'GL_NV_vertex_program2' : { 'number' : 287, 'flags' :", ": { 'number' : 420, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_EXT_texture_lod_bias' : { 'number' : 186, 'esnumber' : 60, 'flags' : {", "{ 'public' }, 'url' : 'extensions/NV/NV_robustness_video_memory_purge.txt', }, 'GL_NV_sRGB_formats' : { 'esnumber' : 148,", ": 'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias' : { 'GL_SGIS_color_range' }, }, 'GL_SGIS_detail_texture' : { 'number' :", "}, 'GL_OML_subsample' : { 'number' : 240, 'flags' : { 'public' }, 'supporters'", "'arbnumber' : 160, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clip_control.txt', }, 'GL_ARB_color_buffer_float'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_equation_advanced.txt', 'alias' : { 'GL_NV_blend_equation_advanced_coherent' }, },", "315, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 
'url' :", "'extensions/NV/NV_depth_buffer_float.txt', }, 'GL_NV_depth_clamp' : { 'number' : 260, 'flags' : { 'public' },", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_rgtc.txt', }, 'GL_EXT_texture_compression_s3tc' : { 'number' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_light_max_exponent.txt', }, 'GL_NV_multisample_coverage' : { 'number' : 393,", "30, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_rgb8_rgba8.txt', }, 'GL_OES_sample_shading' : {", ": 'extensions/SGIX/SGIX_quad_mesh.txt', }, 'GL_SGIX_reference_plane' : { 'number' : 60, 'flags' : { 'public'", ": 'extensions/ARB/ARB_sparse_buffer.txt', }, 'GL_ARB_sparse_texture' : { 'arbnumber' : 158, 'flags' : { 'public'", ": { 'number' : 278, 'flags' : { 'public' }, 'supporters' : {", "'number' : 181, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", ": { 'number' : 1, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { '3DL' }, 'url' : 'extensions/3DL/WGL_3DL_stereo_control.txt',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_depth_bounds_test.txt',", "'public' }, 'supporters' : { 'ES', 'HP', 'IBM', 'SGI', 'SUN' }, 'url' :", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments' :", "}, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float.txt', }, 'GL_AMD_gpu_shader_half_float_fetch' : { 'number' : 519, 'flags' :", "}, 'GL_EXT_map_buffer_range' : { 'esnumber' : 121, 'flags' : { 'public' }, 'url'", "{ 'number' : 356, 'flags' : { 'public' }, 'supporters' : { 'IdSoftware',", "'public' }, 'url' : 'extensions/NV/NV_sample_mask_override_coverage.txt', }, 'GL_NV_shader_atomic_counters' : { 'number' : 423, 'flags'", "{ 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_clip_band_hint.txt', }, 'GLX_SGIS_color_range' : { 'number' : 115,", ": 'extensions/NV/WGL_NV_DX_interop.txt', }, 'WGL_NV_DX_interop2' : { 'number' : 412, 'flags' : { 'public'", "}, 'url' : 
'extensions/OML/GLX_OML_sync_control.txt', }, 'GL_OVR_multiview' : { 'number' : 478, 'esnumber' :", "}, 'GL_ARB_geometry_shader4' : { 'arbnumber' : 47, 'flags' : { 'public' }, 'supporters'", "'public' }, 'url' : 'extensions/ARB/ARB_clear_texture.txt', }, 'GL_ARB_clip_control' : { 'arbnumber' : 160, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_norm16.txt', }, 'GL_EXT_texture_object' : { 'number'", ": 228, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_pass_instrument.txt', }, 'GL_SGIX_depth_texture'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_sprite.txt', }, 'GL_ARB_polygon_offset_clamp' :", ": 'extensions/EXT/EXT_vertex_shader.txt', }, 'GL_EXT_vertex_weighting' : { 'number' : 188, 'flags' : { 'public'", "{ 'esnumber' : 46, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_type_10_10_10_2.txt', },", "'ARB' }, 'url' : 'extensions/ARB/ARB_multitexture.txt', }, 'GL_ARB_occlusion_query' : { 'arbnumber' : 29, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_derivative_control.txt', }, 'GL_ARB_direct_state_access' : { 'arbnumber'", "}, 'supporters' : { 'GOOGLE', 'NVIDIA', 'VMware' }, 'url' : 'extensions/EXT/EXT_window_rectangles.txt', }, 'GL_EXT_x11_sync_object'", ": 110, 'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url'", "246, 'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_framebuffer_CMAA.txt', }, 'GL_INTEL_map_texture' : {", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_float.txt', }, 'GL_ARB_texture_gather' :", "'GL_EXT_shader_pixel_local_storage2' : { 'esnumber' : 253, 'flags' : { 'public' }, 'url' :", "'esnumber' : 159, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_explicit_attrib_location.txt', }, 'GL_NV_explicit_multisample'", "'number' : 442, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", ": { 'esnumber' : 177, 'flags' : { 'public' }, 
'url' : 'extensions/EXT/EXT_geometry_shader.txt',", "'SGI' }, 'url' : 'extensions/EXT/EXT_index_material.txt', }, 'GL_EXT_index_texture' : { 'number' : 93, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_instanced.txt',", "152, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'CodeWeavers', 'NVIDIA', 'TransGaming'", "'arbnumber' : 73, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_lod.txt', }, 'GL_ARB_texture_rectangle'", "'esnumber' : 131, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_platform_binary.txt', }, 'GL_NV_point_sprite'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_array_bgra.txt', }, 'GL_ARB_vertex_array_object' : { 'arbnumber' :", "'WGL_3DL_stereo_control' : { 'number' : 313, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/ARB/ARB_invalidate_subdata.txt', }, 'GL_ARB_map_buffer_alignment' : { 'arbnumber' : 113, 'flags' : {", "'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pbuffer.txt', }, 'WGL_ARB_pixel_format' : { 'arbnumber' : 9, 'flags'", "'extensions/SGIX/GLX_SGIX_video_resize_float.txt', }, 'GLX_SGIX_video_source' : { 'number' : 43, 'flags' : { 'public' },", "169, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url'", "435, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", ": { 'number' : 205, 'flags' : { 'incomplete' }, 'supporters' : {", ": 32, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": { 'IBM' }, 'url' : 'extensions/EXT/EXT_static_vertex_array.txt', }, 'GL_EXT_stencil_clear_tag' : { 'number' :", "}, 'url' : 'extensions/MESA/GLX_MESA_pixmap_colormap.txt', }, 'GL_MESA_program_binary_formats' : { 'number' : 516, 'esnumber' :", "'esnumber' : 257, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clip_cull_distance.txt', }, 'GL_EXT_clip_volume_hint'", "'public' }, 'url' : 'extensions/EXT/EXT_protected_textures.txt', }, 'GL_EXT_provoking_vertex' : { 'number' : 364, 'flags'", "141, 'flags' : { 'public' }, 'supporters' : { 
'3DFX', '3DL', 'SGI' },", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_interlace.txt',", "'public' }, 'url' : 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', }, 'GL_AMD_shader_image_load_store_lod' : { 'number' : 513, 'flags'", "'number' : 257, 'flags' : { 'public' }, 'supporters' : { 'SUN' },", "{ 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_single_precision.txt', }, 'GL_OES_standard_derivatives'", "'number' : 406, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_weighting.txt', }, 'GLX_EXT_visual_info' : { 'number'", "'url' : 'extensions/EXT/EXT_blend_color.txt', }, 'GL_EXT_blend_equation_separate' : { 'number' : 299, 'flags' : {", ": 392, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shader_image_load_store.txt', }, 'GL_EXT_shader_implicit_conversions'", "}, 'supporters' : { 'MESA', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', }, 'GL_EXT_texture_integer' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_shader.txt', }, 'GL_ARB_fragment_shader_interlock'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_pvrtc_sRGB.txt', }, 'GL_EXT_raster_multisample' : { 'number' :", ": 'extensions/EXT/EXT_shader_pixel_local_storage.txt', }, 'GL_EXT_shader_pixel_local_storage2' : { 'esnumber' : 253, 'flags' : { 'public'", ": 'extensions/EXT/EXT_texture_env_add.txt', }, 'GL_EXT_texture_env_combine' : { 'number' : 158, 'flags' : { 'public'", "270, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_bindless_texture.txt', }, 'GL_IMG_framebuffer_downsample' : {", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_emboss.txt', }, 'GL_NV_texgen_reflection' : { 'number' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_clip_cull_distance.txt', }, 'GL_EXT_clip_volume_hint' : { 'number' : 79,", "{ 'NVIDIA' }, 'url' : 
'extensions/NV/NV_vertex_buffer_unified_memory.txt', }, 'GL_NV_vertex_program' : { 'number' : 233,", "{ 'number' : 167, 'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_display_color_table.txt', },", "'url' : 'extensions/ARB/ARB_shader_bit_encoding.txt', }, 'GL_ARB_shader_clock' : { 'arbnumber' : 184, 'flags' : {", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_datapipe.txt', }, 'GL_SGIX_decimation' : { 'number' : 125, 'flags'", "{ 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', }, 'GL_SGIX_flush_raster' : { 'number' : 61,", ": 'extensions/SUN/SUN_convolution_border_modes.txt', }, 'GLX_SUN_get_transparent_index' : { 'number' : 183, 'flags' : { 'public'", "}, 'url' : 'extensions/REND/REND_screen_coordinates.txt', }, 'GL_S3_s3tc' : { 'number' : 276, 'flags' :", "'extensions/EXT/EXT_provoking_vertex.txt', }, 'GL_EXT_pvrtc_sRGB' : { 'esnumber' : 155, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/EXT/WGL_EXT_multisample.txt', 'alias' : { 'GL_EXT_multisample' }, }, 'WGL_EXT_pbuffer' :", ": 128, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_uniform_location.txt', }, 'GL_ARB_fragment_coord_conventions' :", ": 131, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_platform_binary.txt', }, 'GL_NV_point_sprite' :", "'number' : 345, 'flags' : { 'public' }, 'supporters' : { 'GREMEDY' },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_path_rendering.txt', }, 'GL_NV_path_rendering_shared_edge' : { 'number' :", "{ 'esnumber' : 116, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_surfaceless_context.txt', },", ": 'extensions/SGIX/SGIX_color_table_index_mode.txt', }, 'GLX_SGIX_color_type' : { 'number' : 89, 'flags' : { 'incomplete'", "'GL_AMD_gcn_shader' : { 'number' : 453, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/APPLE/APPLE_flush_buffer_range.txt', }, 'GL_APPLE_framebuffer_multisample' : { 'esnumber' : 78, 'flags' : {", ": 'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', }, 'GL_SGIX_fog_layers' : { 'flags' : { 
'incomplete' }, 'url' :", ": { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_material.txt',", ": { 'arbnumber' : 66, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sync.txt',", "'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program4.txt', }, 'GL_NV_gpu_program5' : { 'number' : 388, 'flags'", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_pn_triangles.txt', }, 'GL_ATI_separate_stencil' : { 'number'", "{ 'flags' : { 'obsolete' }, 'url' : 'extensions/SGIX/SGIX_vertex_array_object.txt', }, 'GL_SGIX_vertex_preclip' : {", "'GL_SGIS_point_line_texgen' : { 'number' : 213, 'flags' : { 'public' }, 'supporters' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program_option.txt', }, 'GL_NV_fragment_shader_interlock' : {", "'GL_EXT_multisampled_render_to_texture2' : { 'esnumber' : 275, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/OES/OES_shader_io_blocks.txt', }, 'GL_OES_shader_multisample_interpolation' : { 'esnumber' : 172, 'flags' :", "'extensions/OES/OES_matrix_palette.txt', }, 'GL_OES_packed_depth_stencil' : { 'esnumber' : 44, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource.txt',", "}, 'url' : 'extensions/ARB/ARB_shader_texture_image_samples.txt', }, 'GL_ARB_shader_texture_lod' : { 'arbnumber' : 60, 'flags' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_storage_buffer_object.txt', }, 'GL_ARB_shader_subroutine' : { 'arbnumber' : 90,", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_client_storage.txt', },", "}, 'GL_EXT_polygon_offset_clamp' : { 'number' : 460, 'esnumber' : 252, 'flags' : {", "{ 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_vertex_array_lists.txt', }, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX'", "'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_histogram.txt', }, 'GLX_EXT_import_context' : { 'number'", "}, 'url' : 
'extensions/AMD/AMD_pinned_memory.txt', }, 'GL_AMD_program_binary_Z400' : { 'esnumber' : 48, 'flags' :", "'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_marker.txt', }, 'GL_EXT_depth_bounds_test' : { 'number' : 297, 'flags'", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_float.txt', }, 'GL_ARB_texture_gather' : { 'arbnumber' : 72, 'flags'", "'extensions/NV/NV_read_buffer.txt', }, 'GL_NV_read_depth_stencil' : { 'esnumber' : 94, 'flags' : { 'public' },", "}, }, 'GLX_SGIX_color_typeXXX' : { 'number' : 72, 'flags' : { 'incomplete' },", "'esnumber' : 43, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture.txt', }, 'GL_OES_depth_texture_cube_map'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias' : { 'GLX_ARB_robustness_share_group_isolation' },", "'GL_AMD_multi_draw_indirect' : { 'number' : 408, 'flags' : { 'public' }, 'supporters' :", "'extensions/EXT/EXT_pixel_transform.txt', }, 'GL_EXT_pixel_transform_color_table' : { 'number' : 139, 'flags' : { 'public' },", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_pbuffer.txt', }, 'GL_SGIX_pixel_texture' : { 'number'", "'GL_EXT_texture_filter_anisotropic' : { 'number' : 187, 'esnumber' : 41, 'flags' : { 'public'", "'GL_NVX_blend_equation_advanced_multi_draw_buffers' : { 'number' : 492, 'esnumber' : 266, 'flags' : { 'public'", "shipped but was not fully specified. 
Similar to ARB_texture_cube_map.', }, 'GL_EXT_texture_cube_map_array' : {", "'url' : 'extensions/EXT/EXT_texture3D.txt', }, 'GL_EXT_texture_array' : { 'number' : 329, 'flags' : {", ": { 'public' }, 'supporters' : { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_light_texture.txt', }, 'GL_EXT_map_buffer_range' : {", "'extensions/ARB/ARB_gpu_shader_fp64.txt', }, 'GL_ARB_gpu_shader_int64' : { 'arbnumber' : 178, 'flags' : { 'public' },", "'url' : 'extensions/EXT/EXT_blend_func_extended.txt', }, 'GL_EXT_blend_func_separate' : { 'number' : 173, 'flags' : {", "238, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", "'GL_EXT_framebuffer_blit' : { 'number' : 316, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_win32.txt', 'alias' : { 'GL_EXT_semaphore_win32' }, }, 'GL_EXT_float_blend'", "'url' : 'extensions/ARB/ARB_transform_feedback_overflow_query.txt', }, 'GL_ARB_transpose_matrix' : { 'arbnumber' : 3, 'flags' : {", ": { 'number' : 289, 'flags' : { 'public' }, 'supporters' : {", "74, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_subtable.txt', }, 'GL_EXT_compiled_vertex_array' : {", "82, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_rgba8.txt', }, 'GL_ARM_shader_framebuffer_fetch' : {", "'url' : 'extensions/EXT/EXT_convolution.txt', }, 'GL_EXT_coordinate_frame' : { 'number' : 156, 'flags' : {", ": 'extensions/NV/NV_texture_compression_s3tc.txt', }, 'GL_NV_texture_compression_s3tc_update' : { 'esnumber' : 95, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fbo_render_mipmap.txt', }, 'GL_OES_fixed_point' : { 'number'", "}, 'url' : 'extensions/OES/OES_texture_npot.txt', }, 'GL_OES_texture_stencil8' : { 'esnumber' : 173, 'flags' :", ": 120, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_scene_marker.txt', 'alias' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_video_out.txt', }, 
'GL_NV_viewport_array'", "'url' : 'extensions/EXT/EXT_blend_func_separate.txt', }, 'GL_EXT_blend_logic_op' : { 'number' : 39, 'flags' : {", "'GL_SUNX_constant_data' : { 'number' : 163, 'flags' : { 'public' }, 'supporters' :", "}, 'GLX_MESA_swap_control' : { 'number' : 514, 'flags' : { 'public' }, 'supporters'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2.txt', }, 'GL_NV_vertex_program2_option' : { 'number' :", "'public' }, 'url' : 'extensions/NV/NV_non_square_matrices.txt', }, 'GL_NV_occlusion_query' : { 'number' : 261, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_base_instance.txt', }, 'GL_ARB_bindless_texture' : { 'arbnumber'", "299, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_sRGB_override.txt', }, 'GLX_EXT_texture_from_pixmap' : {", "'public' }, 'url' : 'extensions/EXT/EXT_coordinate_frame.txt', }, 'GL_EXT_copy_image' : { 'esnumber' : 175, 'flags'", "'url' : 'extensions/EXT/EXT_gpu_shader4.txt', }, 'GL_EXT_gpu_shader5' : { 'esnumber' : 178, 'flags' : {", "'GL_EXT_discard_framebuffer' : { 'esnumber' : 64, 'flags' : { 'public' }, 'url' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig.txt', }, 'GLX_SGIX_fbconfig_float' : { 'flags' : {", ": { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_blit.txt', }, 'GL_ANGLE_framebuffer_multisample' : { 'esnumber' :", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_packed_float.txt', 'alias' : { 'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float' }, }, 'GL_EXT_packed_pixels'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_path_rendering.txt', }, 'GL_NV_path_rendering_shared_edge' : {", "'GL_EXT_texture_env' : { 'number' : 146, 'flags' : { 'public' }, 'url' :", "'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_byte_coordinates.txt', }, 'GL_OES_compressed_ETC1_RGB8_texture' : { 'esnumber'", "'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias' : { 'GL_ANGLE_texture_compression_dxt1', 
'GL_ANGLE_texture_compression_dxt5'", "'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias' : { 'GL_SGIX_vertex_preclip_hint' }, }, 'GLX_SGIX_video_resize' : { 'number' : 83,", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_flush_buffer_range.txt', }, 'GL_APPLE_framebuffer_multisample' : { 'esnumber' : 78, 'flags'", ": { 'esnumber' : 73, 'flags' : { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt',", ": 164, 'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_performance_query.txt', }, 'GLX_INTEL_swap_event' :", "{ 'public' }, 'url' : 'extensions/NV/NV_pack_subimage.txt', }, 'GL_NV_packed_depth_stencil' : { 'number' : 226,", "'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_clamp.txt', }, 'GL_NV_depth_nonlinear' : { 'esnumber' : 73, 'flags'", ": 'extensions/SGIX/SGIX_fog_patchy.txt', }, 'GL_SGIX_fog_scale' : { 'number' : 161, 'flags' : { 'incomplete'", "{ 'arbnumber' : 186, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture2.txt', },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_weighting.txt',", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_dm_buffer.txt',", ": 362, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "'GL_ARB_stencil_texturing' : { 'arbnumber' : 138, 'flags' : { 'public' }, 'url' :", "'extensions/EXT/EXT_raster_multisample.txt', }, 'GL_EXT_read_format_bgra' : { 'esnumber' : 66, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_conservative_rasterization.txt', },", "}, 'GLX_SGIX_video_resize_float' : { 'number' : 184, 'flags' : { 'incomplete', 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_internalformat_sample_query.txt', }, 'GL_NV_light_max_exponent' : { 'number'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_explicit_multisample.txt',", ": { 'number' : 313, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 
'extensions/ARB/ARB_transform_feedback3.txt', }, 'GL_ARB_transform_feedback_instanced' : { 'arbnumber' : 109, 'flags' :", ": { 'esnumber' : 221, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_R8.txt',", ": { 'public' }, 'url' : 'extensions/EXT/EXT_shader_texture_lod.txt', }, 'GL_EXT_shadow_funcs' : { 'number' :", "'public' }, 'url' : 'extensions/APPLE/APPLE_texture_packed_float.txt', }, 'GL_APPLE_texture_range' : { 'number' : 367, 'flags'", "390, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'esnumber' : 144, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_generate_mipmap_sRGB.txt', }, 'GL_NV_geometry_program4'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_swap_control_tear.txt', },", "'supporters' : { 'HP', 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_rating.txt', }, 'GL_EXT_win32_keyed_mutex' :", "154, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'url' : 'extensions/OES/OES_tessellation_shader.txt', }, 'GL_OES_texture_3D' : { 'esnumber' : 34, 'flags' :", "'url' : 'extensions/INGR/INGR_color_clamp.txt', }, 'GL_INGR_interlace_read' : { 'number' : 175, 'flags' : {", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_interleaved_elements.txt', }, 'GL_AMD_multi_draw_indirect' : { 'number' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_pixel_data_range.txt', }, 'GL_NV_platform_binary' : { 'esnumber'", "'GL_IMG_shader_binary' : { 'esnumber' : 68, 'flags' : { 'public' }, 'url' :", ": { 'number' : 21, 'flags' : { 'public' }, 'supporters' : {", "'extensions/IBM/IBM_vertex_array_lists.txt', }, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader3.txt', }, 'GL_NV_transform_feedback' : {", "'public' }, 'url' : 'extensions/NV/NV_sRGB_formats.txt', }, 'GL_NV_sample_locations' : { 'number' : 472, 'esnumber'", "'supporters' : { 'ARB' }, 'url' : 
'extensions/ARB/WGL_ARB_extensions_string.txt', }, 'WGL_ARB_make_current_read' : { 'arbnumber'", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_quad_mesh.txt', }, 'GL_SGIX_reference_plane' : { 'number' : 60,", ": 'extensions/EXT/EXT_transform_feedback.txt', }, 'GL_EXT_transform_feedback2' : { 'flags' : { 'incomplete', 'obsolete' }, 'url'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_copy_image.txt', 'alias' : {", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_fp64.txt', }, 'GL_ARB_gpu_shader_int64' : { 'arbnumber'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_storage.txt', }, 'GL_EXT_memory_object' : { 'number'", ": 'extensions/AMD/AMD_conservative_depth.txt', }, 'GL_AMD_debug_output' : { 'number' : 395, 'flags' : { 'public'", "}, 'url' : 'extensions/EXT/EXT_index_func.txt', }, 'GL_EXT_index_material' : { 'number' : 94, 'flags' :", "196, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_window_pos.txt', }, 'GL_ARM_mali_program_binary'", "'extensions/NV/NV_texture_npot_2D_mipmap.txt', }, 'GL_NV_texture_rectangle' : { 'number' : 229, 'flags' : { 'public' },", ": 409, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' },", "'url' : 'extensions/EXT/EXT_copy_image.txt', }, 'GL_EXT_copy_texture' : { 'number' : 10, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_transform_hint.txt',", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object.txt', }, 'GL_NV_parameter_buffer_object2' : {", "'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' :", "{ 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_func.txt', },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range2.txt', },", ": { 'public' }, 'url' : 'extensions/NV/NV_alpha_to_coverage_dither_control.txt', }, 
'GL_NV_bgr' : { 'esnumber' :", "}, 'url' : 'extensions/SGI/GLU_SGI_filter4_parameters.txt', }, 'GLX_SGI_make_current_read' : { 'number' : 42, 'flags' :", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_label.txt', }, 'GL_EXT_debug_marker'", "459, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_uniform_buffer_unified_memory.txt', }, 'GL_NV_vdpau_interop' : {", "'extensions/SGIX/SGIX_list_priority.txt', }, 'GL_SGIX_mpeg1' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg1.txt',", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sparse_texture.txt', }, 'GL_AMD_stencil_operation_extended' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_dilate.txt', }, 'GL_NV_conservative_raster_pre_snap' : { 'number'", "}, 'GL_OES_EGL_sync' : { 'esnumber' : 75, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_name_gen_delete.txt',", "{ 'arbnumber' : 161, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conditional_render_inverted.txt', },", "}, 'GL_OES_byte_coordinates' : { 'number' : 291, 'esnumber' : 4, 'flags' : {", "'extensions/QCOM/QCOM_alpha_test.txt', }, 'GL_QCOM_binning_control' : { 'esnumber' : 119, 'flags' : { 'public' },", "{ 'arbnumber' : 157, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_group_vote.txt', },", "'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_multisample.txt', }, 'GL_3DFX_tbuffer' :", "2, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_gpu_memory_info.txt', }, 'GL_NVX_linked_gpu_multicast'", "'number' : 260, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_read_format.txt', }, 'GL_OES_required_internalformat'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_disjoint_timer_query.txt', }, 'GL_EXT_draw_buffers' : { 
'esnumber' :", "}, 'GL_SGIX_slim' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_slim.txt', },", "74, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Included with", ": { 'GL_OES_texture_half_float_linear' }, }, 'GL_OES_texture_mirrored_repeat' : { 'esnumber' : 22, 'flags' :", "WGL_ARB_create_context.', }, 'WGL_ARB_create_context_robustness' : { 'arbnumber' : 102, 'flags' : { 'public' },", "}, 'GL_ARB_pipeline_statistics_query' : { 'arbnumber' : 171, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_64bit.txt', }, 'GL_ARB_vertex_attrib_binding' : { 'arbnumber' : 125, 'flags'", "'extensions/OES/OES_texture_cube_map.txt', }, 'GL_OES_texture_cube_map_array' : { 'esnumber' : 217, 'flags' : { 'public' },", "'number' : 412, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_NV_framebuffer_blit' : { 'esnumber' : 142, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_map_buffer_range.txt', }, 'GL_EXT_misc_attribute' : { 'number' :", "}, 'GL_NV_viewport_array' : { 'esnumber' : 202, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_attrib_location.txt', }, 'GL_ARB_explicit_uniform_location' : { 'arbnumber'", "'extensions/EXT/EXT_vertex_attrib_64bit.txt', }, 'GL_EXT_vertex_shader' : { 'number' : 248, 'flags' : { 'public' },", "124, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_sync.txt', }, 'GL_APPLE_texture_2D_limited_npot' : {", "22, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'public' }, 'url' : 'extensions/APPLE/APPLE_copy_texture_levels.txt', }, 'GL_APPLE_element_array' : { 'number' : 271,", ": 42, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'GL_ARB_vertex_array_bgra' : { 'arbnumber' : 68, 'flags' : { 'public' }, 'url' :", "'public' }, 'url' : 'extensions/EXT/EXT_texture_filter_minmax.txt', }, 'GL_EXT_texture_format_BGRA8888' : { 'esnumber' : 51, 
'flags'", "'GL_NV_vertex_buffer_unified_memory' : { 'number' : 380, 'flags' : { 'public' }, 'supporters' :", "{ 'esnumber' : 48, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_program_binary_Z400.txt', },", "'esnumber' : 268, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_conservative_depth.txt', }, 'GL_EXT_convolution'", ": 219, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_elements_base_vertex.txt', }, 'GL_OES_draw_texture' :", ": { 'KHR' }, 'url' : 'extensions/OES/OES_single_precision.txt', }, 'GL_OES_standard_derivatives' : { 'esnumber' :", "}, 'GL_NV_multisample_coverage' : { 'number' : 393, 'flags' : { 'public' }, 'supporters'", "}, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_usage.txt', }, 'GL_WIN_phong_shading' : {", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_filter_anisotropic.txt', }, 'GL_ARB_texture_filter_minmax' :", "'extensions/ARB/ARB_texture_multisample.txt', }, 'GL_ARB_texture_non_power_of_two' : { 'arbnumber' : 34, 'flags' : { 'public' },", "}, 'GL_EXT_buffer_storage' : { 'esnumber' : 239, 'flags' : { 'public' }, 'url'", "'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_subtract.txt', }, 'GLX_EXT_buffer_age' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_cube_map_array.txt', }, 'GL_EXT_texture_env' : { 'number' : 146,", "'esnumber' : 164, 'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_performance_query.txt', }, 'GLX_INTEL_swap_event'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_evaluators.txt',", "}, 'url' : 'extensions/NV/NV_explicit_attrib_location.txt', }, 'GL_NV_explicit_multisample' : { 'number' : 357, 'flags' :", ": 75, 'flags' : { 'public' }, 'url' : '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', }, 'GL_OES_blend_equation_separate' :", ": { 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_draw_range_elements.txt', },", "{ 'esnumber' : 44, 'flags' : { 'public' }, 'url' : 
'extensions/OES/OES_packed_depth_stencil.txt', },", "'url' : 'extensions/SGIS/SGIS_texture_color_mask.txt', }, 'GL_SGIS_texture_edge_clamp' : { 'number' : 35, 'flags' : {", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_convolution_accuracy.txt', }, 'GL_SGIX_cube_map' : { 'number' : 130,", "'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias' : { 'GL_KHR_blend_equation_advanced_coherent' }, }, 'GL_KHR_context_flush_control' : { 'arbnumber' : 168,", "51, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'esnumber' : 195, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_packed_float.txt', },", ": 21, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_impact_pixel_texture.txt',", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object2.txt', }, 'GL_NV_path_rendering' : { 'number' : 410,", ": { 'esnumber' : 182, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_border_clamp.txt',", ": 381, 'esnumber' : 271, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_occlusion_query.txt', }, 'GL_ARB_occlusion_query2' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_buffers2.txt', }, 'GL_EXT_draw_buffers_indexed' :", "'public' }, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform_color_table.txt', }, 'GL_EXT_point_parameters'", ": 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', }, 'GLU_EXT_object_space_tess' : { 'number' : 75, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_sRGB_override.txt', }, 'GLX_EXT_texture_from_pixmap' : { 'number'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_polynomial_ffd.txt', }, 'GL_SGIX_quad_mesh' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_square.txt',", ": { 'arbnumber' : 3, 'flags' : { 'public' }, 
'supporters' : {", ": { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_program_parameters.txt',", "}, 'url' : 'extensions/SGIS/SGIS_texture_color_mask.txt', }, 'GL_SGIS_texture_edge_clamp' : { 'number' : 35, 'flags' :", "'number' : 51, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", ": 290, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', }, 'GL_ARB_texture_mirrored_repeat' : { 'arbnumber' :", ": { 'number' : 150, 'flags' : { 'incomplete' }, 'supporters' : {", ": 150, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_disjoint_timer_query.txt', }, 'GL_EXT_draw_buffers' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_read_format_bgra.txt', }, 'GL_EXT_render_snorm' : { 'esnumber' : 206,", ": { 'number' : 286, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 152, 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_datapipe.txt',", "'GL_OES_shader_image_atomic' : { 'esnumber' : 171, 'flags' : { 'public' }, 'url' :", "'extensions/SGIX/SGIX_spotlight_cutoff.txt', }, 'GL_SGIX_sprite' : { 'number' : 52, 'flags' : { 'public' },", "'number' : 315, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'arbnumber' : 102, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context_robustness.txt', }, 'WGL_ARB_extensions_string'", "'esnumber' : 282, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_win32.txt', 'alias' :", "343, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'GL_OES_texture_half_float_linear' }, }, 'GL_OES_texture_mirrored_repeat' : { 'esnumber' : 22, 'flags' : {", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cylinder_texgen.txt', }, 'GL_SGIX_datapipe'", ": 'extensions/AMD/AMD_name_gen_delete.txt', }, 'GL_AMD_occlusion_query_event' : { 'number' : 442, 'flags' : { 'public'", "}, 'supporters' : { 'HP', 'KGC', 'SGI', 'SUN' }, 'url' : 
'extensions/EXT/EXT_convolution.txt', },", ": 'extensions/EXT/WGL_EXT_display_color_table.txt', }, 'WGL_EXT_extensions_string' : { 'number' : 168, 'flags' : { 'public'", ": 132, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_invalidate_subdata.txt', }, 'GL_ARB_map_buffer_alignment' :", "}, 'url' : 'extensions/AMD/AMD_shader_image_load_store_lod.txt', }, 'GL_AMD_shader_stencil_export' : { 'number' : 382, 'flags' :", ": 'extensions/EXT/EXT_separate_shader_objects.gles.txt', 'comments' : 'Different that the OpenGL extension with the same name", ": { 'number' : 520, 'esnumber' : 122, 'flags' : { 'public' },", "'url' : 'extensions/NV/NV_viewport_swizzle.txt', }, 'GL_OES_EGL_image' : { 'esnumber' : 23, 'flags' : {", ": 83, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'number' : 251, 'flags' : { 'public' }, 'supporters' : { 'I3D' },", "'extensions/AMD/AMD_gcn_shader.txt', }, 'GLX_AMD_gpu_association' : { 'number' : 398, 'flags' : { 'public' },", "{ 'number' : 405, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "}, 'url' : 'extensions/ARB/ARB_map_buffer_alignment.txt', }, 'GL_ARB_map_buffer_range' : { 'arbnumber' : 50, 'flags' :", "'url' : 'extensions/NV/NV_tessellation_program5.txt', }, 'GL_NV_texgen_emboss' : { 'number' : 193, 'flags' : {", "{ 'GLX_EXT_create_context_es_profile' }, }, 'GL_EXT_cull_vertex' : { 'number' : 98, 'flags' : {", "'url' : 'extensions/NV/NV_texture_env_combine4.txt', }, 'GL_NV_texture_expand_normal' : { 'number' : 286, 'flags' : {", ": 18, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", "}, 'GL_NV_EGL_stream_consumer_external' : { 'esnumber' : 104, 'flags' : { 'public' }, 'url'", ": 317, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample.txt', }, 'GL_EXT_framebuffer_multisample_blit_scaled' :", "'extensions/ARB/ARB_sync.txt', }, 'GL_ARB_tessellation_shader' : { 'arbnumber' : 91, 'flags' : { 'public' },", "}, 'GL_ARB_instanced_arrays' : { 'arbnumber' : 49, 'flags' : { 'public' }, 
'supporters'", ": 84, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'url' : 'extensions/OES/OES_texture_storage_multisample_2d_array.txt', }, 'GL_OES_texture_view' : { 'esnumber' : 218, 'flags' : {", "'public' }, 'url' : 'extensions/APPLE/APPLE_framebuffer_multisample.txt', }, 'GL_APPLE_object_purgeable' : { 'number' : 371, 'flags'", "'GL_OES_fragment_precision_high' : { 'esnumber' : 28, 'flags' : { 'public' }, 'url' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_async.txt', }, 'GL_SGIX_async_histogram' : { 'number' : 134, 'flags'", "'url' : 'extensions/SGIX/SGIX_cylinder_texgen.txt', }, 'GL_SGIX_datapipe' : { 'number' : 152, 'flags' : {", "'AMD' }, 'url' : 'extensions/AMD/AMD_multi_draw_indirect.txt', }, 'GL_AMD_name_gen_delete' : { 'number' : 394, 'flags'", ": 'extensions/ATI/ATI_map_object_buffer.txt', }, 'GL_ATI_meminfo' : { 'number' : 359, 'flags' : { 'public'", "133, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url'", "233, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "with the same name string.', }, 'GL_EXT_separate_specular_color' : { 'number' : 144, 'flags'", "'extensions/IMG/IMG_shader_binary.txt', }, 'GL_IMG_texture_compression_pvrtc' : { 'esnumber' : 54, 'flags' : { 'public' },", ": 292, 'esnumber' : 9, 'flags' : { 'public' }, 'supporters' : {", "'GL_EXT_debug_label' : { 'number' : 439, 'esnumber' : 98, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/OVR/OVR_multiview.txt', }, 'GL_OVR_multiview2' : { 'number' : 479,", "'GLX_SGIX_hyperpipe' : { 'number' : 307, 'flags' : { 'public' }, 'supporters' :", ": 163, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_view.txt', }, 'GL_ARB_timer_query' : { 'arbnumber' : 85,", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_explicit_multisample.txt', }, 'GL_NV_fbo_color_attachments' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_multicast.txt', }, 
'GL_NV_gpu_program4' :", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_swap_control.txt', }, 'GL_SGI_texture_color_table'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_polygon_offset_clamp.txt', }, 'GL_ARB_post_depth_coverage' : { 'arbnumber'", "'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_program_parameters.txt', }, 'GL_EXT_gpu_shader4' : { 'number' : 326,", ": 270, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", "{ 'arbnumber' : 191, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_create_context_no_error.txt', 'comments'", "'GL_NV_texture_expand_normal' : { 'number' : 286, 'flags' : { 'public' }, 'supporters' :", "'Different that the OpenGL extension with the same name string.', }, 'GL_EXT_separate_specular_color' :", "{ 'public' }, 'url' : 'extensions/OES/OES_shader_io_blocks.txt', }, 'GL_OES_shader_multisample_interpolation' : { 'esnumber' : 172,", "'url' : 'extensions/EXT/EXT_shader_texture_lod.txt', }, 'GL_EXT_shadow_funcs' : { 'number' : 267, 'flags' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_texture.txt', }, 'GL_NV_draw_vulkan_image'", "273, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_framebuffer_foveated.txt', }, 'GL_QCOM_texture_foveated' : {", "'url' : 'extensions/ARB/ARB_parallel_shader_compile.txt', }, 'GL_ARB_pipeline_statistics_query' : { 'arbnumber' : 171, 'flags' : {", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_geometry_shader4.txt', }, 'GLX_ARB_get_proc_address' : {", ": { 'MESA' }, 'url' : 'extensions/MESA/MESA_window_pos.txt', }, 'GL_MESA_ycbcr_texture' : { 'number' :", "'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query2.txt', }, 'GL_ARB_invalidate_subdata' : { 'arbnumber' : 132, 'flags'", ": { 'number' : 478, 'esnumber' : 241, 'flags' : { 'public' },", "}, }, 'GL_OES_texture_float_linear' : { 'esnumber' : 35, 'flags' : { 'public' },", "'url' : 'extensions/EXT/EXT_packed_depth_stencil.txt', }, 
'GL_EXT_packed_float' : { 'number' : 328, 'flags' : {", "'esnumber' : 153, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB_write_control.txt', }, 'GL_EXT_scene_marker'", "{ 'number' : 311, 'flags' : { 'public' }, 'supporters' : { 'GREMEDY'", "'esnumber' : 34, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_3D.txt', }, 'GL_OES_texture_border_clamp'", "159, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "}, 'url' : 'extensions/EXT/WGL_EXT_extensions_string.txt', }, 'WGL_EXT_make_current_read' : { 'number' : 169, 'flags' :", "}, 'url' : 'extensions/EXT/WGL_EXT_multisample.txt', 'alias' : { 'GL_EXT_multisample' }, }, 'WGL_EXT_pbuffer' : {", "'arbnumber' : 36, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_texture_lighting.txt', }, 'GL_IBM_cull_vertex' : { 'number'", "'GL_SGIX_decimation' : { 'number' : 125, 'flags' : { 'incomplete' }, 'supporters' :", "referred to by some other vendor extensions, but shipped as ARB_transform_feedback2.', }, 'GL_EXT_unpack_subimage'", "'extensions/EXT/EXT_texture_env_dot3.txt', }, 'GL_EXT_texture_filter_anisotropic' : { 'number' : 187, 'esnumber' : 41, 'flags' :", ": { 'GL_KHR_texture_compression_astc_ldr' }, }, 'GL_KHR_texture_compression_astc_sliced_3d' : { 'arbnumber' : 189, 'esnumber' :", ": 'extensions/IMG/IMG_shader_binary.txt', }, 'GL_IMG_texture_compression_pvrtc' : { 'esnumber' : 54, 'flags' : { 'public'", "}, 'url' : 'extensions/AMD/AMD_shader_stencil_value_export.txt', }, 'GL_AMD_shader_trinary_minmax' : { 'number' : 428, 'flags' :", "{ 'number' : 290, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program_option.txt',", ": 'extensions/OES/OES_shader_multisample_interpolation.txt', }, 'GL_OES_single_precision' : { 'number' : 293, 'esnumber' : 18, 'flags'", ": 465, 'esnumber' : 228, 'flags' : { 'public' }, 'url' : 
'extensions/NV/NV_conservative_raster.txt',", ": { 'KHR' }, 'url' : 'extensions/OES/OES_read_format.txt', }, 'GL_OES_required_internalformat' : { 'esnumber' :", "}, 'GL_ARB_texture_border_clamp' : { 'arbnumber' : 13, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 360, 'esnumber' : 50, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/ARB/ARB_arrays_of_arrays.txt', }, 'GL_ARB_base_instance' : { 'arbnumber' : 107, 'flags'", ": { 'esnumber' : 26, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_element_index_uint.txt',", "}, 'GL_ARB_framebuffer_no_attachments' : { 'arbnumber' : 130, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_vertex_hints.txt', }, 'GL_QCOM_alpha_test' : {", ": 215, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", "{ 'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB' }, }, 'GL_ARB_geometry_shader4' : { 'arbnumber' : 47, 'flags' :", "'extensions/NV/NV_gpu_program4.txt', }, 'GL_NV_gpu_program5' : { 'number' : 388, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_render_snorm.txt', }, 'GL_EXT_rescale_normal' : { 'number' : 27,", "'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_framebuffer_CMAA.txt', }, 'GL_INTEL_map_texture' : { 'number'", "}, 'url' : 'extensions/NV/NV_conditional_render.txt', }, 'GL_NV_conservative_raster' : { 'number' : 465, 'esnumber' :", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_timer_instruments.txt', }, 'GL_SGIX_blend_alpha_minmax' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_interlace.txt', }, 'GL_SGIX_ir_instrument1' :", ": 338, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'esnumber' : 64, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_discard_framebuffer.txt', },", ": 258, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url'", ": 74, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Included", "'number' 
: 41, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_multimode_draw_arrays.txt', }, 'GL_IBM_rasterpos_clip' :", ": { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_underestimation.txt', }, 'GLX_NV_copy_buffer' : { 'number' :", "}, 'url' : 'extensions/NV/NV_platform_binary.txt', }, 'GL_NV_point_sprite' : { 'number' : 262, 'flags' :", "}, 'url' : 'extensions/OES/OES_matrix_get.txt', }, 'GL_OES_matrix_palette' : { 'esnumber' : 12, 'flags' :", "'extensions/NV/NV_polygon_mode.txt', }, 'GL_NV_present_video' : { 'number' : 347, 'flags' : { 'public' },", ": 142, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_rgb_422.txt', }, 'GL_APPLE_row_bytes' : {", "{ 'GLX_SGIS_shared_multisample' }, }, 'GL_SGIS_sharpen_texture' : { 'number' : 22, 'flags' : {", "'url' : 'extensions/SGIX/SGIX_occlusion_instrument.txt', }, 'GL_SGIX_packed_6bytes' : { 'number' : 162, 'flags' : {", "'esnumber' : 180, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_io_blocks.txt', }, 'GL_EXT_shader_non_constant_global_initializers'", "'number' : 198, 'esnumber' : 154, 'flags' : { 'public' }, 'supporters' :", "294, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", ": { 'arbnumber' : 102, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context_robustness.txt',", "'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context_robustness.txt', }, 'WGL_ARB_extensions_string' : { 'arbnumber'", ": 357, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'public' }, 'url' : 'extensions/NV/NV_sRGB_formats.txt', }, 'GL_NV_sample_locations' : { 'number' :", "{ 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_blackhole_render.txt', }, 'GL_INTEL_parallel_arrays' : { 'number' : 136,", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', }, 'GL_EXT_texture_norm16' : { 'esnumber' : 207,", 
"'extensions/OES/OES_sample_variables.txt', }, 'GL_OES_shader_image_atomic' : { 'esnumber' : 171, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_make_current_read.txt', },", "'GL_SGIX_texture_coordinate_clamp' : { 'number' : 235, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/SGI/SGI_complex.txt', }, 'GL_SGI_complex_type' : { 'number' : 88, 'flags' :", ": { 'HP', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture.txt', }, 'GL_EXT_texture3D' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_derivative_control.txt', }, 'GL_ARB_direct_state_access' : { 'arbnumber' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_polynomial_ffd.txt', }, 'GL_SGIX_quad_mesh' : { 'flags' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object.txt', }, 'GL_NV_parameter_buffer_object2' :", "{ 'public' }, 'url' : 'extensions/OES/OES_standard_derivatives.txt', }, 'GL_OES_stencil1' : { 'esnumber' : 31,", ": { 'ES', 'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_texture_color_table.txt', }, 'GLX_SGI_transparent_pixel' :", ": 355, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_conservative_rasterization.txt', }, 'GL_INTEL_fragment_shader_ordering' :", "'number' : 66, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SUN'", "132, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_invalidate_subdata.txt', }, 'GL_ARB_map_buffer_alignment' : {", ": { 'public' }, 'url' : 'extensions/SGIX/SGIX_fog_texture.txt', }, 'GL_SGIX_fragment_lighting_space' : { 'number' :", ": 401, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch.txt', }, 'GL_ARM_shader_framebuffer_fetch_depth_stencil' : { 'esnumber'", "'url' : 'extensions/ATI/ATI_vertex_attrib_array_object.txt', }, 'GL_ATI_vertex_streams' : { 'number' : 249, 'flags' : 
{", "'number' : 522, 'esnumber' : 301, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/NV/NV_shader_thread_shuffle.txt', }, 'GL_NV_shadow_samplers_array' : { 'esnumber' : 146, 'flags' : {", "{ 'number' : 179, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "21, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_env_crossbar.txt', }, 'GL_OES_texture_float' : {", "'GL_OVR_multiview2' : { 'number' : 479, 'esnumber' : 242, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_hyperpipe.txt',", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_label.txt', }, 'GL_EXT_debug_marker' : {", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_shared_multisample.txt', 'alias' : {", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_pn_triangles.txt', }, 'GL_ATI_separate_stencil'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_transpose_matrix.txt', }, 'GL_ARB_uniform_buffer_object'", "'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_shader_stencil_value_export.txt', }, 'GL_AMD_shader_trinary_minmax' : { 'number'", "}, 'url' : 'extensions/EXT/EXT_depth_bounds_test.txt', }, 'GL_EXT_direct_state_access' : { 'number' : 353, 'flags' :", "117, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'GL_APPLE_transform_hint' : { 'number' : 160, 'flags' : { 'public' }, 'supporters' :", "'GL_ARB_framebuffer_object' : { 'arbnumber' : 45, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_convolution_accuracy.txt',", ": 'extensions/EXT/EXT_unpack_subimage.txt', }, 'GL_EXT_vertex_array' : { 'number' : 30, 'flags' : { 'public'", "}, 'GL_OES_sample_variables' : { 'esnumber' : 170, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/NV/NV_draw_buffers.txt', }, 'GL_NV_draw_instanced' : { 'esnumber' : 141, 'flags' :", "}, 'url' : 
'extensions/IMG/IMG_multisampled_render_to_texture.txt', }, 'GL_IMG_program_binary' : { 'esnumber' : 67, 'flags' :", "'private' }, 'comments' : 'Draft spec location unknown.', }, 'GL_OES_point_size_array' : { 'esnumber'", "230, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'url' : 'extensions/SGIX/SGIX_convolution_accuracy.txt', }, 'GL_SGIX_cube_map' : { 'number' : 130, 'flags' :", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_ycbcr_422.txt', }, 'GL_ARB_ES2_compatibility' :", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_viewport_index.txt',", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_swap_control.txt', }, 'GLX_EXT_swap_control_tear' : { 'number'", "'number' : 283, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'extensions/NV/NV_depth_clamp.txt', }, 'GL_NV_depth_nonlinear' : { 'esnumber' : 73, 'flags' : { 'public' },", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias' : { 'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB' },", "'url' : 'extensions/EXT/EXT_multiview_draw_buffers.txt', }, 'GLU_EXT_nurbs_tessellator' : { 'number' : 100, 'flags' : {", ": { 'esnumber' : 125, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multiview_draw_buffers.txt',", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sRGB_formats.txt', }, 'GL_NV_sample_locations' : { 'number'", "}, 'GL_ARB_clip_control' : { 'arbnumber' : 160, 'flags' : { 'public' }, 'url'", "'extensions/NV/NV_geometry_shader4.txt', }, 'GL_NV_geometry_shader_passthrough' : { 'number' : 470, 'esnumber' : 233, 'flags' :", "}, 'url' : 'extensions/ARB/ARB_depth_clamp.txt', }, 'GL_ARB_depth_texture' : { 'arbnumber' : 22, 'flags' :", ": { 'INGR' }, 'url' : 'extensions/EXT/EXT_422_pixels.txt', }, 'GL_EXT_YUV_target' : { 'esnumber' :", "{ 'number' : 286, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_create_context_no_error.txt', 
'comments' : 'Shares extension spec with", "'AMD' }, 'url' : 'extensions/AMD/WGL_AMD_gpu_association.txt', }, 'WGL_ARB_buffer_region' : { 'arbnumber' : 4, 'flags'", "'esnumber' : 274, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_counters.txt', }, 'GL_NV_shader_atomic_float' : { 'number' :", "{ 'number' : 498, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'arbnumber' : 34, 'flags' : { 'public' }, 'supporters' : {", ": 274, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", "{ 'number' : 66, 'flags' : { 'public' }, 'supporters' : { 'HP',", "'GL_NV_draw_vulkan_image' : { 'number' : 501, 'esnumber' : 274, 'flags' : { 'public'", ": 156, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_coordinate_frame.txt', }, 'GL_EXT_copy_image' :", "'esnumber' : 100, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_occlusion_query_boolean.txt', }, 'GL_EXT_packed_depth_stencil'", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_element_array.txt', }, 'GL_ATI_envmap_bumpmap' : { 'number' :", ": 'extensions/NV/NV_transform_feedback.txt', }, 'GL_NV_transform_feedback2' : { 'number' : 358, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range2.txt',", "'url' : 'extensions/ARB/ARB_program_interface_query.txt', }, 'GL_ARB_provoking_vertex' : { 'arbnumber' : 64, 'flags' : {", ": 'extensions/ARB/ARB_direct_state_access.txt', }, 'GL_ARB_draw_buffers' : { 'arbnumber' : 37, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_float_blend.txt', }, 'GL_EXT_fog_coord' : { 'number' :", ": 459, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_uniform_buffer_unified_memory.txt', }, 'GL_NV_vdpau_interop' :", "378, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 139, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SUN' },", "}, }, 'GL_NV_fog_distance' : { 'number' : 192, 'flags' : { 'public' },", "{ 'number' : 282, 
'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_program_binary_formats.txt',", ": 'extensions/EXT/EXT_external_objects_win32.txt', 'alias' : { 'GL_EXT_semaphore_win32' }, }, 'GL_EXT_float_blend' : { 'esnumber' :", "}, 'url' : 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', }, 'GL_EXT_EGL_image_storage' : { 'number' : 522, 'esnumber' :", "}, 'comments' : 'Draft spec location unknown.', }, 'GL_OES_point_size_array' : { 'esnumber' :", "{ 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_extensions_string.txt', }, 'WGL_EXT_make_current_read' : { 'number' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_conditional_render.txt', }, 'GL_NV_conservative_raster' : { 'number' : 465,", ": 'extensions/AMD/AMD_pinned_memory.txt', }, 'GL_AMD_program_binary_Z400' : { 'esnumber' : 48, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_image_buffer.txt',", "'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUNX/SUNX_constant_data.txt', }, 'GL_SUN_convolution_border_modes' :", "'url' : 'extensions/NV/NV_clip_space_w_scaling.txt', }, 'GL_NV_command_list' : { 'number' : 477, 'flags' : {", ": 26, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_element_index_uint.txt', }, 'GL_OES_extended_matrix_palette' :", "'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias' : { 'GL_SGIS_color_range' }, }, 'GL_SGIS_detail_texture' : { 'number' : 21,", "'url' : 'extensions/SGIX/SGIX_texture_lod_bias.txt', }, 'GL_SGIX_texture_mipmap_anisotropic' : { 'flags' : { 'incomplete' }, 'url'", "{ 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_shader_binary.txt', }, 'GL_NVX_blend_equation_advanced_multi_draw_buffers' : { 'number' :", "}, 'GL_ARB_texture_query_lod' : { 'arbnumber' : 73, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias' : { 'GL_ANGLE_texture_compression_dxt1', 
'GL_ANGLE_texture_compression_dxt5' }, },", "'url' : 'extensions/NV/NV_texture_shader3.txt', }, 'GL_NV_transform_feedback' : { 'number' : 341, 'flags' : {", ": { 'public' }, 'url' : 'extensions/VIV/VIV_shader_binary.txt', }, 'WGL_3DL_stereo_control' : { 'number' :", "{ 'esnumber' : 214, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_tessellation_shader.txt', },", ": 202, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", ": { 'arbnumber' : 28, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/SGIX/GLX_SGIX_swap_group.txt', }, 'GL_SGIX_tag_sample_buffer' : { 'number' : 58, 'flags' :", ": 102, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context_robustness.txt', }, 'WGL_ARB_extensions_string' :", ": { 'number' : 411, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap.txt', }, 'GL_NV_conservative_raster_pre_snap_triangles' : { 'number'", ": 44, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": 95, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' },", "'extensions/EXT/EXT_frag_depth.txt', }, 'GL_EXT_fragment_lighting' : { 'number' : 102, 'flags' : { 'public' },", ": 310, 'flags' : { 'public' }, 'supporters' : { '3DL', 'ATI', 'INTEL',", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_storage_buffer_object.txt', }, 'GL_NV_shader_thread_group'", "'url' : 'extensions/I3D/WGL_I3D_gamma.txt', }, 'WGL_I3D_genlock' : { 'number' : 252, 'flags' : {", ": 399, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_add_env.txt', }, 'GL_SGIX_texture_coordinate_clamp' : { 'number' : 235, 'flags'", ": { 'arbnumber' : 143, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt',", "}, 'GL_ARB_vertex_blend' : { 'arbnumber' : 15, 'flags' : { 'public' }, 
'supporters'", "{ 'number' : 76, 'flags' : { 'public' }, 'supporters' : { 'TGS'", "84, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', }, 'GL_ANGLE_instanced_arrays' : {", "}, 'url' : 'extensions/EXT/EXT_texture_format_BGRA8888.txt', }, 'GL_EXT_texture_format_sRGB_override' : { 'esnumber' : 299, 'flags' :", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_packed_float.txt', 'alias' : { 'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float' },", "{ 'number' : 449, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_image_load_formatted.txt', },", "'url' : 'extensions/NV/NV_vertex_array_range.txt', }, 'GL_NV_vertex_array_range2' : { 'number' : 232, 'flags' : {", "}, 'url' : 'extensions/S3/S3_s3tc.txt', }, 'GLX_SGIS_blended_overlay' : { 'number' : 142, 'flags' :", "'url' : 'extensions/SGIX/SGIX_shadow.txt', }, 'GL_SGIX_shadow_ambient' : { 'number' : 90, 'flags' : {", "'public' }, 'url' : 'extensions/EXT/EXT_read_format_bgra.txt', }, 'GL_EXT_render_snorm' : { 'esnumber' : 206, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_storage_buffer_object.txt', }, 'GL_ARB_shader_subroutine' : { 'arbnumber'", "'url' : 'extensions/ARB/ARB_texture_multisample.txt', }, 'GL_ARB_texture_non_power_of_two' : { 'arbnumber' : 34, 'flags' : {", "'number' : 403, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'extensions/NV/NV_packed_float.txt', }, 'GL_NV_parameter_buffer_object' : { 'number' : 339, 'flags' : { 'public' },", "{ 'number' : 166, 'flags' : { 'public' }, 'supporters' : { 'SUN'", "'url' : 'extensions/EXT/EXT_win32_keyed_mutex.txt', }, 'GL_EXT_window_rectangles' : { 'number' : 490, 'esnumber' : 263,", "'GL_SGIX_cube_map' : { 'number' : 130, 'flags' : { 'incomplete' }, 'supporters' :", "}, 'GL_ARB_texture_barrier' : { 'arbnumber' : 167, 'flags' : { 'public' }, 'url'", "61, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_writeonly_rendering.txt', }, 'GL_QCOM_framebuffer_foveated' : {", ": 
'extensions/EXT/EXT_texture_cube_map_array.txt', }, 'GL_EXT_texture_env' : { 'number' : 146, 'flags' : { 'public'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_storage_buffer_object.txt', }, 'GL_NV_shader_thread_group' :", ": { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Alias to WGL_ARB_create_context_profile not", "263, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'ATI' }, 'url' : 'extensions/EXT/EXT_texture_env_dot3.txt', }, 'GL_EXT_texture_filter_anisotropic' : { 'number' : 187, 'esnumber'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vdpau_interop.txt', }, 'GL_NV_vertex_array_range' : { 'number'", "}, 'url' : 'extensions/EXT/EXT_texture3D.txt', }, 'GL_EXT_texture_array' : { 'number' : 329, 'flags' :", "'GL_NV_fragment_program4' : { 'number' : 335, 'flags' : { 'public' }, 'supporters' :", "236, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'GL_SGIX_texture_scale_bias' : { 'number' : 56, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', }, 'GL_ARB_compute_shader' : { 'arbnumber'", ": { 'number' : 65, 'flags' : { 'public' }, 'supporters' : {", "'extensions/AMD/AMD_shader_trinary_minmax.txt', }, 'GL_AMD_sparse_texture' : { 'number' : 426, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_matrix_palette.txt',", "}, 'url' : 'extensions/ARB/ARB_fragment_coord_conventions.txt', }, 'GL_ARB_fragment_layer_viewport' : { 'arbnumber' : 129, 'flags' :", "243, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_depth_to_color.txt', }, 'GL_NV_copy_image' : {", "'url' : 'extensions/ARB/ARB_transform_feedback_instanced.txt', }, 'GL_ARB_transform_feedback_overflow_query' : { 'arbnumber' : 173, 'flags' : {", "{ 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_cull_vertex.txt', }, 'GL_EXT_debug_label' : { 'number' :", "'url' : 
'extensions/NV/NV_primitive_restart.txt', }, 'GL_NV_query_resource' : { 'number' : 511, 'flags' : {", ": 'extensions/NV/NV_shader_buffer_load.txt', }, 'GL_NV_shader_buffer_store' : { 'number' : 390, 'flags' : { 'public'", ": 'extensions/I3D/WGL_I3D_image_buffer.txt', }, 'WGL_I3D_swap_frame_lock' : { 'number' : 254, 'flags' : { 'public'", ": 56, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', }, 'GL_QCOM_tiled_rendering' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_polygon_offset_clamp.txt', }, 'GL_EXT_post_depth_coverage' : { 'number'", "'GL_ARB_seamless_cubemap_per_texture' : { 'arbnumber' : 155, 'flags' : { 'public' }, 'url' :", "'SUN' }, 'url' : 'extensions/SUN/SUN_mesh_array.txt', }, 'GL_SUN_slice_accum' : { 'number' : 258, 'flags'", "{ 'arbnumber' : 81, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sampler_objects.txt', },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program.txt',", "'extensions/ARB/WGL_ARB_pixel_format.txt', }, 'WGL_ARB_render_texture' : { 'arbnumber' : 20, 'flags' : { 'public' },", "}, 'GL_AMD_shader_ballot' : { 'number' : 497, 'flags' : { 'public' }, 'supporters'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_add.txt', }, 'GL_ARB_texture_env_combine' : { 'arbnumber' : 17,", ": 'extensions/EXT/EXT_shader_texture_lod.txt', }, 'GL_EXT_shadow_funcs' : { 'number' : 267, 'flags' : { 'public'", ": 'extensions/ANGLE/ANGLE_program_binary.txt', }, 'GL_ANGLE_texture_compression_dxt3' : { 'esnumber' : 111, 'flags' : { 'public'", "'url' : 'extensions/NV/NV_gpu_multicast.txt', }, 'GL_NV_gpu_program4' : { 'number' : 322, 'flags' : {", "}, 'GL_ARB_stencil_texturing' : { 'arbnumber' : 138, 'flags' : { 'public' }, 'url'", ": 239, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", "'supporters' : { 'APPLE', 'CodeWeavers', 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_decode.txt', }, 'GL_EXT_texture_shared_exponent'", ": 
{ 'esnumber' : 110, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt',", "'extensions/ARB/ARB_draw_indirect.txt', }, 'GL_ARB_draw_instanced' : { 'arbnumber' : 44, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader5.txt', }, 'GL_ARB_gpu_shader_fp64' : { 'arbnumber' : 89,", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', }, 'GL_AMD_gpu_shader_int16' : {", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_tag_sample_buffer.txt', }, 'GL_SGIX_texture_add_env' : {", ": { 'number' : 497, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/NV/NV_geometry_shader_passthrough.txt', }, 'GL_NV_gpu_multicast' : { 'number' : 494, 'flags' : {", "'url' : 'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', }, 'GL_SGIX_fog_layers' : { 'flags' : { 'incomplete' }, 'url'", "}, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_polygon_offset.txt',", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_rgb_422.txt', }, 'GL_APPLE_row_bytes' : { 'number' :", ": 'extensions/AMD/AMD_texture_texture4.txt', }, 'GL_AMD_transform_feedback3_lines_triangles' : { 'number' : 397, 'flags' : { 'public'", ": 'extensions/ARB/ARB_vertex_blend.txt', }, 'GL_ARB_vertex_buffer_object' : { 'arbnumber' : 28, 'flags' : { 'public'", "}, 'GL_NV_multisample_filter_hint' : { 'number' : 259, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc.txt', }, 'GL_NV_texture_compression_s3tc_update' : { 'esnumber' :", "'GL_MESA_shader_integer_functions' : { 'number' : 495, 'flags' : { 'public' }, 'supporters' :", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_integer.txt', }, 'GL_EXT_texture_lod_bias' : { 'number' : 186,", "'GL_ARB_clip_control' : { 'arbnumber' : 160, 'flags' : { 'public' }, 'url' :", ": 191, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_create_context_no_error.txt', 'comments' : 'Shares", "411, 
'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "}, 'GL_SGIX_color_table_index_mode' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_table_index_mode.txt', },", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_vertex.txt', }, 'GL_ARB_indirect_parameters' : { 'arbnumber'", "}, 'url' : 'extensions/MESA/MESA_program_binary_formats.txt', }, 'GLX_MESA_query_renderer' : { 'number' : 446, 'flags' :", ": 152, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'CodeWeavers', 'NVIDIA',", "{ 'number' : 368, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "}, 'url' : 'extensions/OVR/OVR_multiview.txt', }, 'GL_OVR_multiview2' : { 'number' : 479, 'esnumber' :", "}, 'url' : 'extensions/OES/OES_vertex_half_float.txt', }, 'GL_OES_vertex_type_10_10_10_2' : { 'esnumber' : 46, 'flags' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_clamp.txt', }, 'GL_NV_depth_nonlinear' : { 'esnumber' :", "}, 'GL_EXT_debug_marker' : { 'number' : 440, 'esnumber' : 99, 'flags' : {", "'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc.txt', }, 'GL_EXT_texture_compression_s3tc_srgb' : {", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp.txt', }, 'GL_EXT_texture_mirror_clamp_to_edge' : { 'esnumber' :", "229, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'url' : 'extensions/EXT/EXT_static_vertex_array.txt', }, 'GL_EXT_stencil_clear_tag' : { 'number' : 314, 'flags' : {", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_border_clamp.txt', }, 'GL_ARB_texture_buffer_object' : { 'arbnumber' :", "'GL_EXT_vertex_weighting' : { 'number' : 188, 'flags' : { 'public' }, 'supporters' :", "'extensions/OES/OES_fbo_render_mipmap.txt', }, 'GL_OES_fixed_point' : { 'number' : 292, 'esnumber' : 9, 'flags' :", "'url' : 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', }, 'GL_MESA_shader_integer_functions' : { 'number' : 495, 'flags' : {", "}, 'GL_SGIX_cylinder_texgen' : { 'number' : 140, 'flags' : { 
'incomplete' }, 'supporters'", ": { 'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI', 'SUN' }, 'url'", "}, 'url' : 'extensions/FJ/FJ_shader_binary_GCCSO.txt', }, 'GL_GREMEDY_frame_terminator' : { 'number' : 345, 'flags' :", "'esnumber' : 287, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'GL_ARB_texture_filter_minmax' : { 'arbnumber' : 188, 'flags' : { 'public' }, 'url'", "'supporters' : { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_detail_texture.txt', }, 'GL_SGIS_fog_function' : {", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_streams.txt', }, 'GL_DMP_program_binary' : { 'esnumber'", ": 208, 'flags' : { 'public' }, 'supporters' : { '3DFX' }, 'url'", ": { 'number' : 47, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_pixel_buffer_object.txt', }, 'GL_NV_pixel_data_range' : { 'number'", ": { 'arbnumber' : 26, 'flags' : { 'public' }, 'supporters' : {", ": 279, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "'url' : 'extensions/OES/OES_compressed_paletted_texture.txt', }, 'GL_OES_copy_image' : { 'esnumber' : 208, 'flags' : {", ": 70, 'flags' : { 'public' }, 'supporters' : { 'QCOM' }, 'url'", ": 'extensions/EXT/GLX_EXT_visual_rating.txt', }, 'GL_EXT_win32_keyed_mutex' : { 'number' : 506, 'esnumber' : 283, 'flags'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program1_1.txt', }, 'GL_NV_vertex_program2' : { 'number' :", "'number' : 210, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", ": 'extensions/NV/NV_shader_atomic_counters.txt', }, 'GL_NV_shader_atomic_float' : { 'number' : 419, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context_robustness.txt', }, 'WGL_ARB_extensions_string' : { 'arbnumber' : 8, 'flags'", "{ 'number' : 462, 'esnumber' : 226, 'flags' : { 'public' }, 'url'", "'GL_EXT_shadow_funcs' : { 'number' : 267, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/ARB/ARB_multisample.txt', 'alias' : { 'GLX_ARB_multisample', 
'WGL_ARB_multisample' }, }, 'GL_ARB_multitexture' : {", "}, 'GL_ARB_copy_buffer' : { 'arbnumber' : 59, 'flags' : { 'public' }, 'url'", "'extensions/SGIX/GLX_SGIX_video_source.txt', }, 'GLX_SGIX_visual_select_group' : { 'number' : 234, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_ES3_1_compatibility.txt', }, 'GL_ARB_ES3_2_compatibility' : { 'arbnumber' : 176,", "}, 'url' : 'extensions/EXT/EXT_packed_float.txt', 'alias' : { 'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float' }, }, 'GL_EXT_packed_pixels' :", "176, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'NVIDIA' }, 'url'", "}, 'GL_NV_uniform_buffer_unified_memory' : { 'number' : 459, 'flags' : { 'public' }, 'url'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program1_1.txt', }, 'GL_NV_vertex_program2' : { 'number' : 287,", "'number' : 307, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'extensions/ARB/ARB_shader_atomic_counter_ops.txt', }, 'GL_ARB_shader_atomic_counters' : { 'arbnumber' : 114, 'flags' : { 'public' },", "}, 'GL_EXT_texture' : { 'number' : 4, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_tessellation_shader.txt', }, 'GL_ARB_texture_barrier' : { 'arbnumber' :", "}, 'GL_SGIX_complex_polar' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_complex_polar.txt', },", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_gather_bias_lod.txt',", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_alpha_minmax.txt',", "}, 'GL_SGIX_reference_plane' : { 'number' : 60, 'flags' : { 'public' }, 'supporters'", "'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture.txt', }, 'GL_EXT_texture3D' : { 'number' :", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_shader_integer_functions.txt', },", "471, 'esnumber' : 234, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_path_rendering_shared_edge.txt', },", "'url' : 
'extensions/ARB/ARB_debug_output.txt', }, 'GL_ARB_depth_buffer_float' : { 'arbnumber' : 43, 'flags' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_load.txt', }, 'GL_NV_shader_buffer_store' : { 'number' : 390,", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_shared_multisample.txt',", ": { 'number' : 134, 'flags' : { 'incomplete', 'public' }, 'supporters' :", ": 274, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'GL_AMD_sparse_texture' : { 'number' : 426, 'flags' : { 'public' }, 'supporters'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program4.txt', }, 'GL_NV_fragment_program_option' :", ": 271, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", ": 105, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness.txt', }, 'GLX_ARB_robustness_application_isolation' :", "'esnumber' : 161, 'flags' : { 'public' }, 'supporters' : { 'INTEL' },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader2.txt', }, 'GL_NV_texture_shader3' : { 'number' : 265, 'flags'", "'GL_ARB_get_program_binary' : { 'arbnumber' : 96, 'flags' : { 'public' }, 'url' :", ": { 'arbnumber' : 108, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_420pack.txt',", "'url' : 'extensions/ARB/ARB_shader_stencil_export.txt', }, 'GL_ARB_shader_storage_buffer_object' : { 'arbnumber' : 137, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_shader4.txt', }, 'GL_NV_geometry_shader_passthrough' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_reflection.txt', }, 'GL_NV_texture_array' : { 'esnumber'", ": { 'arbnumber' : 63, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_coord_conventions.txt',", "}, 'GLX_MESA_pixmap_colormap' : { 'number' : 216, 'flags' : { 'public' }, 'supporters'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias' : { 'GLX_EXT_framebuffer_sRGB', 
'WGL_EXT_framebuffer_sRGB' }, }, 'GL_EXT_geometry_shader'", "}, 'GL_SUN_convolution_border_modes' : { 'number' : 182, 'flags' : { 'public' }, 'supporters'", "'extensions/AMD/AMD_depth_clamp_separate.txt', }, 'GL_AMD_draw_buffers_blend' : { 'number' : 366, 'flags' : { 'public' },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_delay_before_swap.txt', }, 'WGL_NV_gpu_affinity' : { 'number' : 355,", "'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', }, 'GL_IMG_bindless_texture' : { 'esnumber' : 270,", ": { 'public' }, 'supporters' : { 'ES', 'INGR', 'SGI' }, 'url' :", "56, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', }, 'GL_QCOM_tiled_rendering' : {", ": 19, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil_wrap.txt', }, 'GL_OES_surfaceless_context' :", ": { 'public' }, 'url' : 'extensions/OES/OES_vertex_half_float.txt', }, 'GL_OES_vertex_type_10_10_10_2' : { 'esnumber' :", "}, 'supporters' : { 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_edge_clamp.txt', }, 'GL_SGIS_texture_filter4'", ": { 'number' : 459, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_uniform_buffer_unified_memory.txt',", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader3.txt', }, 'GL_NV_transform_feedback' : { 'number'", "{ 'number' : 402, 'esnumber' : 152, 'flags' : { 'public' }, 'supporters'", ": 'extensions/ARB/ARB_shader_bit_encoding.txt', }, 'GL_ARB_shader_clock' : { 'arbnumber' : 184, 'flags' : { 'public'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_env_combine4.txt', }, 'GL_NV_texture_expand_normal' : { 'number' : 286,", "'url' : 'extensions/EXT/EXT_multisample_compatibility.txt', }, 'GL_EXT_multisampled_render_to_texture' : { 'esnumber' : 106, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_shader_interlock.txt', }, 'GL_NV_framebuffer_blit' : { 'esnumber'", "'number' : 141, 'flags' : { 'public' }, 'supporters' : { '3DFX', '3DL',", ": 
{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shader_objects.txt', },", "'esnumber' : 109, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_instanced_arrays.txt', }, 'GL_ANGLE_pack_reverse_row_order'", ": 'extensions/ARB/ARB_ES2_compatibility.txt', }, 'GL_ARB_ES3_1_compatibility' : { 'arbnumber' : 159, 'flags' : { 'public'", "{ 'arbnumber' : 17, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "{ 'number' : 333, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_string_marker.txt', }, 'GL_HP_convolution_border_modes' : { 'number' :", "{ 'arbnumber' : 124, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_view.txt', },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_shader5.txt', }, 'GL_NV_half_float' : {", ": { 'number' : 351, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/EXT/EXT_gpu_shader4.txt', }, 'GL_EXT_gpu_shader5' : { 'esnumber' : 178, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_packing.txt', }, 'GL_ARB_shadow' : { 'arbnumber' :", ": { 'arbnumber' : 99, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_64bit.txt',", "'url' : 'extensions/NV/NV_bindless_multi_draw_indirect.txt', }, 'GL_NV_bindless_multi_draw_indirect_count' : { 'number' : 456, 'flags' : {", "'url' : 'extensions/MESA/GLX_MESA_agp_offset.txt', }, 'GLX_MESA_copy_sub_buffer' : { 'number' : 215, 'flags' : {", "{ 'esnumber' : 30, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_rgb8_rgba8.txt', },", "}, 'GL_MESA_shader_integer_functions' : { 'number' : 495, 'flags' : { 'public' }, 'supporters'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_expand_normal.txt', }, 'GL_NV_texture_multisample' :", "86, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_frag_depth.txt', }, 'GL_EXT_fragment_lighting' : {", "}, 'GL_ARB_bindless_texture' : { 'arbnumber' : 152, 'flags' : { 'public' }, 'url'", "{ 
'ATI' }, 'url' : 'extensions/ATI/ATI_element_array.txt', }, 'GL_ATI_envmap_bumpmap' : { 'number' : 244,", "'url' : 'extensions/ARB/ARB_texture_mirrored_repeat.txt', }, 'GL_ARB_texture_multisample' : { 'arbnumber' : 67, 'flags' : {", "'GL_INTEL_conservative_rasterization' : { 'number' : 491, 'esnumber' : 265, 'flags' : { 'public'", "{ 'number' : 143, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_swap_control.txt', }, 'GLX_EXT_swap_control_tear' : { 'number' : 414, 'flags'", "}, 'GL_EXT_debug_label' : { 'number' : 439, 'esnumber' : 98, 'flags' : {", "{ 'number' : 7, 'flags' : { 'public' }, 'supporters' : { 'KGC',", ": 'extensions/ARB/WGL_ARB_extensions_string.txt', }, 'WGL_ARB_make_current_read' : { 'arbnumber' : 10, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_binding.txt', }, 'GL_ARB_vertex_blend' : { 'arbnumber' : 15,", ": { 'esnumber' : 97, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_half_float.txt',", "'GLX_EXT_scene_marker' }, }, 'GL_EXT_secondary_color' : { 'number' : 145, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_texture.txt', },", "}, 'url' : 'extensions/NV/NV_texture_shader3.txt', }, 'GL_NV_transform_feedback' : { 'number' : 341, 'flags' :", "}, 'GL_NV_framebuffer_blit' : { 'esnumber' : 142, 'flags' : { 'public' }, 'url'", "22, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_mirrored_repeat.txt', }, 'GL_OES_texture_npot' : {", "}, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_swap_method.txt', }, 'GLX_OML_sync_control' : {", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_pack_invert.txt', }, 'GLX_MESA_pixmap_colormap' :", ": 101, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' },", "}, 'WGL_ARB_pixel_format' : { 'arbnumber' : 9, 'flags' : { 'public' }, 'supporters'", ": { 'esnumber' : 208, 'flags' : { 'public' }, 'url' : 
'extensions/OES/OES_copy_image.txt',", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_swap_control.txt', }, 'GL_SGI_texture_color_table' :", ": { 'arbnumber' : 106, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_stencil_export.txt',", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_clock.txt', }, 'GL_ARB_shader_draw_parameters' : { 'arbnumber' :", ": { 'number' : 454, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_framebuffer_sample_positions.txt',", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_read_buffer.txt', }, 'GL_NV_read_depth_stencil' : { 'esnumber'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_pixel.txt', }, 'GL_SGIX_bali_g_instruments' : { 'flags' :", "'GL_SUN_multi_draw_arrays' }, }, 'GL_EXT_multi_draw_indirect' : { 'esnumber' : 205, 'flags' : { 'public'", ": 31, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_misc_attribute.txt', }, 'GL_EXT_multi_draw_arrays' :", ": { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_rasterpos_clip.txt', },", "{ 'public' }, 'url' : 'extensions/IMG/IMG_multisampled_render_to_texture.txt', }, 'GL_IMG_program_binary' : { 'esnumber' : 67,", ": { 'public' }, 'supporters' : { 'ES', 'HP', 'SGI' }, 'url' :", "'obsolete' }, 'url' : 'extensions/SGIX/SGIX_vertex_array_object.txt', }, 'GL_SGIX_vertex_preclip' : { 'number' : 210, 'flags'", "'arbnumber' : 123, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_image.txt', }, 'GLX_ARB_create_context'", "'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias' : { 'GLX_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_robustness_isolation' : { 'arbnumber' : 126,", "'extensions/ARB/ARB_shading_language_100.txt', }, 'GL_ARB_shading_language_420pack' : { 'arbnumber' : 108, 'flags' : { 'public' },", "{ 'number' : 110, 'flags' : { 'public' }, 'supporters' : { 'IBM'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_uniform_buffer_unified_memory.txt', }, 'GL_NV_vdpau_interop' : { 'number'", 
"'number' : 513, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", ": 'extensions/ARB/ARB_program_interface_query.txt', }, 'GL_ARB_provoking_vertex' : { 'arbnumber' : 64, 'flags' : { 'public'", "'number' : 462, 'esnumber' : 226, 'flags' : { 'public' }, 'url' :", "{ 'esnumber' : 185, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_view.txt', },", "}, 'url' : 'extensions/SGIX/SGIX_bali_r_instruments.txt', }, 'GL_SGIX_bali_timer_instruments' : { 'flags' : { 'incomplete' },", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_pixel.txt', }, 'GL_ARB_half_float_vertex' : { 'arbnumber'", "'AMD' }, 'url' : 'extensions/AMD/AMD_texture_texture4.txt', }, 'GL_AMD_transform_feedback3_lines_triangles' : { 'number' : 397, 'flags'", "some other vendor extensions, but shipped as ARB_transform_feedback2.', }, 'GL_EXT_unpack_subimage' : { 'esnumber'", ": 'extensions/OES/OES_vertex_type_10_10_10_2.txt', }, 'GL_OES_viewport_array' : { 'esnumber' : 267, 'flags' : { 'public'", "ARB_texture_cube_map.', }, 'GL_EXT_texture_cube_map_array' : { 'esnumber' : 184, 'flags' : { 'public' },", "'number' : 190, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'public' }, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_flush_raster.txt',", "'AMD' }, 'url' : 'extensions/AMD/AMD_pinned_memory.txt', }, 'GL_AMD_program_binary_Z400' : { 'esnumber' : 48, 'flags'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_non_power_of_two.txt', }, 'GL_ARB_texture_query_levels'", ": 54, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": 40, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": 519, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "{ 'number' : 268, 'flags' : { 'public' }, 'supporters' : { 'APPLE',", "'extensions/SGIX/SGIX_vector_ops.txt', }, 'GL_SGIX_vertex_array_object' : { 'flags' : { 'obsolete' }, 'url' : 
'extensions/SGIX/SGIX_vertex_array_object.txt',", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_blend_cmultiply.txt', }, 'GL_SGIX_calligraphic_fragment' : { 'number' :", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/AMD/AMD_shader_ballot.txt',", "'url' : 'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', }, 'GL_IMG_texture_filter_cubic' : { 'esnumber' : 251, 'flags' : {", "'number' : 90, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI'", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_client_storage.txt', }, 'GL_APPLE_clip_distance' : {", "'url' : 'extensions/NV/NV_fragment_shader_interlock.txt', }, 'GL_NV_framebuffer_blit' : { 'esnumber' : 142, 'flags' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_storage_buffer_object.txt', }, 'GL_ARB_shader_subroutine' : { 'arbnumber' :", ": 'extensions/EXT/EXT_texture_norm16.txt', }, 'GL_EXT_texture_object' : { 'number' : 20, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform.txt',", ": { 'esnumber' : 272, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_transform_feedback.txt',", "{ 'number' : 376, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'number' : 22, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'IBM', 'INGR' }, 'url' : 'extensions/EXT/EXT_blend_func_separate.txt', }, 'GL_EXT_blend_logic_op' : { 'number' : 39,", ": { 'public' }, 'url' : 'extensions/NV/NV_clip_space_w_scaling.txt', }, 'GL_NV_command_list' : { 'number' :", ": 364, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' },", "}, 'GL_SGIX_shadow_ambient' : { 'number' : 90, 'flags' : { 'public' }, 'supporters'", "{ 'esnumber' : 123, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_copy_texture_levels.txt', },", ": 169, 'esnumber' : 189, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robust_buffer_access_behavior.txt',", ": { 'number' : 521, 'esnumber' : 300, 'flags' : { 'public' 
},", "'public' }, 'url' : 'extensions/OES/OES_stencil_wrap.txt', }, 'GL_OES_surfaceless_context' : { 'esnumber' : 116, 'flags'", ": 'extensions/EXT/EXT_shadow_samplers.txt', }, 'GL_EXT_shared_texture_palette' : { 'number' : 141, 'flags' : { 'public'", ": 'extensions/SGIX/SGIX_ycrcba.txt', }, 'GL_SGI_color_matrix' : { 'number' : 13, 'flags' : { 'public'", ": { 'esnumber' : 74, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_multisampled_render_to_texture.txt',", ": 136, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url'", "}, 'supporters' : { 'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_color_table.txt', 'comments' :", "}, 'url' : 'extensions/NV/NV_compute_program5.txt', }, 'GL_NV_conditional_render' : { 'number' : 346, 'esnumber' :", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLU_SGI_filter4_parameters.txt',", "'arbnumber' : 70, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_shading.txt', }, 'GL_ARB_sampler_objects'", ": 454, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_framebuffer_sample_positions.txt', }, 'GL_AMD_gcn_shader' :", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_stencil_export.txt', }, 'GL_AMD_shader_stencil_value_export' : { 'number'", ": 91, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_tessellation_shader.txt', }, 'GL_ARB_texture_barrier' :", "'GL_EXT_texture_border_clamp' : { 'esnumber' : 182, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/EXT/EXT_sparse_texture.txt', }, 'GL_EXT_sparse_texture2' : { 'number' : 463, 'esnumber' : 259,", "'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_extensions_string.txt', }, 'WGL_EXT_make_current_read' : { 'number' : 169,", "'GL_EXT_semaphore_fd' }, }, 'GL_EXT_memory_object_win32' : { 'number' : 505, 'esnumber' : 282, 'flags'", "155, 'flags' : { 'public' }, 'supporters' : { 'REND' }, 'url' :", "'number' : 361, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", ": 83, 'flags' : { 'public' }, 'url' : 
'extensions/ARB/ARB_texture_rgb10_a2ui.txt', }, 'GL_ARB_texture_stencil8' :", "'GL_NV_multisample_filter_hint' : { 'number' : 259, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sync.txt', }, 'GL_ARB_tessellation_shader' : { 'arbnumber'", ": 'extensions/NVX/NVX_gpu_memory_info.txt', }, 'GL_NVX_linked_gpu_multicast' : { 'number' : 493, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_underestimation.txt', }, 'GLX_NV_copy_buffer' : { 'number' : 457,", ": 2, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_func_separate.txt', }, 'GL_OES_blend_subtract' :", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_border_clamp.txt', }, 'GL_ARB_texture_buffer_object' : { 'arbnumber' : 51, 'flags'", ": { 'number' : 410, 'esnumber' : 199, 'flags' : { 'public' },", "'SGI' }, 'url' : 'extensions/EXT/EXT_subtexture.txt', }, 'GL_EXT_swap_control' : { 'number' : 375, 'flags'", "}, 'GL_INTEL_blackhole_render' : { 'number' : 521, 'esnumber' : 300, 'flags' : {", ": 'extensions/AMD/AMD_stencil_operation_extended.txt', }, 'GL_AMD_texture_gather_bias_lod' : { 'number' : 502, 'flags' : { 'public'", "'number' : 367, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", ": { 'number' : 256, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_sRGB.txt', }, 'GL_EXT_sRGB_write_control' : { 'esnumber' : 153,", "'url' : 'extensions/EXT/EXT_texture_filter_minmax.txt', }, 'GL_EXT_texture_format_BGRA8888' : { 'esnumber' : 51, 'flags' : {", "'../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', }, 'GL_NV_draw_buffers' : { 'esnumber' : 91, 'flags' : { 'public' },", ": { 'esnumber' : 70, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_ATI_vertex_streams' : { 'number' : 249, 'flags' : { 'public' }, 'supporters'", "'extensions/SGIS/SGIS_texture_color_mask.txt', }, 'GL_SGIS_texture_edge_clamp' : { 'number' : 35, 'flags' : { 'public' },", ": { 'NVIDIA' }, 'url' : 
'extensions/NV/NV_primitive_restart.txt', }, 'GL_NV_query_resource' : { 'number' :", "}, 'GL_ARB_sparse_texture2' : { 'arbnumber' : 186, 'flags' : { 'public' }, 'url'", "'GL_NV_query_resource_tag' : { 'number' : 512, 'flags' : { 'public' }, 'supporters' :", "'number' : 506, 'esnumber' : 283, 'flags' : { 'public' }, 'url' :", "234, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'arbnumber' : 189, 'esnumber' : 249, 'flags' : { 'public' }, 'url'", "{ 'arbnumber' : 79, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_attrib_location.txt', },", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_cushion.txt',", "125, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multiview_draw_buffers.txt', }, 'GLU_EXT_nurbs_tessellator' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_RG8.txt', }, 'GL_EXT_texture_sRGB_decode' : { 'number'", "'public' }, 'url' : 'extensions/ARB/ARB_multi_bind.txt', }, 'GL_ARB_multi_draw_indirect' : { 'arbnumber' : 133, 'flags'", "36, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_float.txt', 'alias' : { 'GL_OES_texture_half_float'", ": { 'arbnumber' : 112, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query.txt',", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', }, 'GL_ARB_texture_buffer_range' : { 'arbnumber'", "{ 'number' : 363, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "'arbnumber' : 17, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": 11, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": 427, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range2.txt', }, 'GL_NV_vertex_attrib_integer_64bit'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_non_power_of_two.txt', }, 'GL_ARB_texture_query_levels' : {", "}, 'GL_ARB_texture_query_levels' : { 'arbnumber' : 
140, 'flags' : { 'public' }, 'url'", "'GLX_SUN_get_transparent_index' : { 'number' : 183, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 446, 'flags' : { 'public' }, 'url' : 'extensions/MESA/GLX_MESA_query_renderer.txt',", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_reference_plane.txt', }, 'GL_SGIX_resample' : { 'number' : 212,", "}, 'url' : 'extensions/IMG/IMG_read_format.txt', }, 'GL_IMG_shader_binary' : { 'esnumber' : 68, 'flags' :", "{ 'esnumber' : 247, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_blend_func_extended.txt', },", "'number' : 147, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_perturb_normal.txt', }, 'GL_EXT_texture_rg'", ": { 'esnumber' : 269, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clear_texture.txt',", "153, 'flags' : { 'obsolete' }, 'url' : 'extensions/SGI/GLX_SGI_transparent_pixel.txt', }, 'GLX_SGI_video_sync' : {", ": 'extensions/ARB/ARB_create_context_no_error.txt', 'comments' : 'Shares extension spec with WGL_ARB_create_context_no_error.', 'alias' : { 'WGL_ARB_create_context_no_error'", "{ 'arbnumber' : 66, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sync.txt', },", "'GL_ARB_shading_language_420pack' : { 'arbnumber' : 108, 'flags' : { 'public' }, 'url' :", "{ 'arbnumber' : 137, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_storage_buffer_object.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_stencil_texturing.txt', }, 'GL_ARB_sync' : { 'arbnumber'", "'AMD' }, 'url' : 'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', }, 'GL_AMD_transform_feedback4' : { 'number' : 450, 'flags'", "'url' : 'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias' : { 'GL_KHR_blend_equation_advanced_coherent' }, }, 'GL_KHR_context_flush_control' : { 'arbnumber'", ": 'extensions/ARB/ARB_draw_buffers.txt', }, 'GL_ARB_draw_buffers_blend' : { 'arbnumber' : 69, 'flags' : { 'public'", "{ 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_pixmap_colormap.txt', }, 
'GL_MESA_program_binary_formats' : { 'number' : 516,", "231, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_mixed_samples.txt', }, 'GL_NV_framebuffer_multisample' : {", "'extensions/SGIX/SGIX_texture_scale_bias.txt', }, 'GL_SGIX_texture_supersample' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_supersample.txt',", "'extensions/AMD/AMD_gpu_shader_int64.txt', }, 'GL_AMD_interleaved_elements' : { 'number' : 431, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_group_vote.txt', }, 'GL_ARB_shader_image_load_store' : { 'arbnumber'", ": { 'public' }, 'url' : 'extensions/OES/OES_EGL_image.txt', }, 'GL_OES_EGL_image_external' : { 'esnumber' :", "'number' : 276, 'flags' : { 'public' }, 'supporters' : { 'ATI', 'NVIDIA'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', }, 'GL_ARB_texture_buffer_range' : { 'arbnumber' : 139,", ": { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects.txt', 'alias' : { 'GL_EXT_semaphore' }, },", "}, 'url' : 'extensions/ARB/ARB_sample_shading.txt', }, 'GL_ARB_sampler_objects' : { 'arbnumber' : 81, 'flags' :", "243, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_no_error.txt', }, 'GL_KHR_parallel_shader_compile' : {", ": 291, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', }, 'GL_EXT_texture_norm16' :", "'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias' : { 'GL_KHR_texture_compression_astc_ldr' }, }, 'GL_KHR_texture_compression_astc_sliced_3d' : { 'arbnumber' : 189,", "}, 'GL_NV_platform_binary' : { 'esnumber' : 131, 'flags' : { 'public' }, 'url'", ": 'extensions/EXT/EXT_base_instance.txt', }, 'GL_EXT_bgra' : { 'number' : 129, 'flags' : { 'public'", "'GL_ARB_indirect_parameters' : { 'arbnumber' : 154, 'flags' : { 'public' }, 'url' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader.txt', }, 'GL_NV_texture_shader2' : { 'number' : 231,", ": 222, 'esnumber' : 52, 'flags' : { 
'public' }, 'supporters' : {", ": { 'number' : 328, 'flags' : { 'public' }, 'supporters' : {", "'ARB' }, 'url' : 'extensions/ARB/ARB_shader_objects.txt', }, 'GL_ARB_shader_precision' : { 'arbnumber' : 98, 'flags'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_get_texture_sub_image.txt', }, 'GL_ARB_gl_spirv' : { 'arbnumber' : 190,", "'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_vertex_array_bgra.txt', }, 'GL_EXT_vertex_array_set' : { 'flags'", "{ 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/WIN/WIN_phong_shading.txt', }, 'GL_WIN_scene_markerXXX'", ": { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap.txt', }, 'GL_NV_conservative_raster_pre_snap_triangles' : { 'number' :", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_program_evaluators.txt',", "{ 'arbnumber' : 163, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_derivative_control.txt', },", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_buffer_float.txt', }, 'GL_ARB_depth_clamp' : {", "}, 'url' : 'extensions/EXT/WGL_EXT_display_color_table.txt', }, 'WGL_EXT_extensions_string' : { 'number' : 168, 'flags' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB.txt', }, 'GL_EXT_sRGB_write_control' : { 'esnumber' :", ": 'extensions/ARB/WGL_ARB_buffer_region.txt', }, 'WGL_ARB_create_context' : { 'arbnumber' : 55, 'flags' : { 'public'", "'esnumber' : 53, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_read_format.txt', }, 'GL_IMG_shader_binary'", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_env_combine3.txt', }, 'GL_ATI_texture_float' : { 'number' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_pass_instrument.txt', }, 'GL_SGIX_depth_texture' : { 'number'", "'url' : 'extensions/KHR/KHR_debug.txt', }, 'GL_KHR_no_error' : { 'arbnumber' : 175, 'esnumber' : 243,", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_offset.txt', }, 'GL_SGIX_fog_patchy' : { 'flags'", "}, 
'GL_ARB_multi_draw_indirect' : { 'arbnumber' : 133, 'flags' : { 'public' }, 'url'", "'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_multisample.txt', }, 'GL_3DFX_tbuffer' : { 'number'", "515, 'esnumber' : 292, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_image_atomic.txt', }, 'GL_OES_shader_io_blocks' : { 'esnumber'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_fd.txt', 'alias' : { 'GL_EXT_semaphore_fd' }, }, 'GL_EXT_memory_object_win32'", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_map_object_buffer.txt', }, 'GL_ATI_meminfo' : { 'number' :", "291, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', }, 'GL_EXT_texture_norm16' : {", ": { 'arbnumber' : 20, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias' : { 'GL_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_sample_locations' :", ": { 'esnumber' : 193, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_clip_distance.txt',", "{ 'number' : 218, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "155, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', }, 'GL_ARB_separate_shader_objects' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_debug_output.txt',", "'GL_NV_texture_shader3' : { 'number' : 265, 'flags' : { 'public' }, 'supporters' :", ": 63, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI' },", ": 205, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multi_draw_indirect.txt', }, 'GL_EXT_multiple_textures' :", "'number' : 333, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', }, 'GL_MESA_pack_invert' : { 'number' : 300, 'flags' : { 'public'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_transpose_matrix.txt', }, 'GL_ARB_uniform_buffer_object' :", 
": { 'esnumber' : 119, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_binning_control.txt',", "'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform.txt', }, 'GL_EXT_pixel_transform_color_table' : { 'number' : 139,", "}, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_program_parameters.txt', }, 'GL_EXT_gpu_shader4' :", "'GL_ARB_ES3_1_compatibility' : { 'arbnumber' : 159, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program4.txt', }, 'GL_NV_video_capture'", ": 63, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get2.txt', }, 'GL_QCOM_performance_monitor_global_mode' :", ": { 'public' }, 'url' : 'extensions/OES/OES_standard_derivatives.txt', }, 'GL_OES_stencil1' : { 'esnumber' :", "'extensions/I3D/WGL_I3D_image_buffer.txt', }, 'WGL_I3D_swap_frame_lock' : { 'number' : 254, 'flags' : { 'public' },", "}, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_fragment_shader_ordering.txt', }, 'GL_INTEL_framebuffer_CMAA' : {", ": { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_digital_video_control.txt', },", "}, 'GL_ARB_gpu_shader_fp64' : { 'arbnumber' : 89, 'flags' : { 'public' }, 'url'", "'number' : 328, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_sample_locations.txt', }, 'GL_ARB_sample_shading' : { 'arbnumber' : 70,", "'public' }, 'url' : 'extensions/EXT/EXT_raster_multisample.txt', }, 'GL_EXT_read_format_bgra' : { 'esnumber' : 66, 'flags'", "'number' : 248, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_vertex_shader.txt', }, 'GL_EXT_vertex_weighting'", "'esnumber' : 188, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', }, 'GL_EXT_conservative_depth'", "{ 'number' : 4, 'flags' : { 'public' }, 'supporters' : { 'HP',", ": { 'public' }, 'url' : 'extensions/NV/NV_instanced_arrays.txt', }, 'GL_NV_internalformat_sample_query' : { 
'number' :", "'GLX_SGIX_dm_buffer' : { 'number' : 86, 'flags' : { 'public' }, 'supporters' :", "{ 'incomplete' }, 'url' : 'extensions/SGIX/GLU_SGIX_icc_compress.txt', }, 'GL_SGIX_icc_texture' : { 'number' : 154,", "{ 'public' }, 'supporters' : { 'ES', 'HP', 'IBM', 'SGI', 'SUN' }, 'url'", "'extensions/OML/OML_interlace.txt', }, 'GL_OML_resample' : { 'number' : 241, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_buffer_float.txt', }, 'GL_ARB_depth_clamp' :", "'GL_NV_pack_subimage' : { 'esnumber' : 132, 'flags' : { 'public' }, 'url' :", "'GL_NV_texture_barrier' : { 'number' : 381, 'esnumber' : 271, 'flags' : { 'public'", "}, 'url' : 'extensions/OES/OES_geometry_shader.txt', }, 'GL_OES_get_program_binary' : { 'esnumber' : 47, 'flags' :", ": { 'esnumber' : 217, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map_array.txt',", "{ 'number' : 177, 'flags' : { 'public' }, 'supporters' : { 'INGR'", "'comments' : 'Supported on Visual Workstation 320 / 540 only.', }, 'GL_SGIX_ycrcba' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_color_buffer_float.txt',", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_coverage.txt', },", "}, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_transform_feedback.txt', }, 'GL_EXT_transform_feedback2' :", "'GL_ATI_texture_float' : { 'number' : 280, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_occlusion_instrument.txt',", "100, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_occlusion_query_boolean.txt', }, 'GL_EXT_packed_depth_stencil' : {", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow.txt', }, 'GL_ARB_shadow_ambient' : {", ": 66, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SUN' 
},", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_buffer_region.txt', },", "{ 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc.txt', }, 'GL_EXT_texture_compression_s3tc_srgb' : { 'esnumber' :", ": { 'number' : 25, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_flush_raster.txt', }, 'GL_SGIX_fog_blend'", "{ 'esnumber' : 20, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map.txt', },", ": { 'number' : 506, 'esnumber' : 283, 'flags' : { 'public' },", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_subsample.txt', }, 'GLX_SGIX_swap_barrier' : { 'number' :", "'esnumber' : 37, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_npot.txt', }, 'GL_OES_texture_stencil8'", "'number' : 434, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_NV_viewport_array2' : { 'number' : 476, 'esnumber' : 237, 'flags' : { 'public'", "{ 'number' : 441, 'flags' : { 'public' }, 'supporters' : { 'INTEL'", ": { 'number' : 66, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rg.txt', }, 'GL_ARB_texture_rgb10_a2ui'", "'GL_EXT_semaphore' }, }, 'GL_EXT_memory_object_fd' : { 'number' : 504, 'esnumber' : 281, 'flags'", ": 312, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_packed_depth_stencil.txt', }, 'GL_EXT_packed_float' :", ": 'Draft spec location unknown.', }, 'GL_OES_point_size_array' : { 'esnumber' : 14, 'flags'", ": 503, 'esnumber' : 280, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects.txt',", "}, 'GL_IMG_program_binary' : { 'esnumber' : 67, 'flags' : { 'public' }, 'url'", "'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_array_formats.txt', }, 'GL_EXT_index_func' : {", "{ 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_pbuffer.txt', }, 'GL_SGIX_pixel_texture' : { 'number' : 499,", "}, 'supporters' : { 'AMD' }, 'url' : 
'extensions/AMD/AMD_shader_trinary_minmax.txt', }, 'GL_AMD_sparse_texture' : {", "'url' : 'extensions/WIN/WIN_specular_fog.txt', }, 'WGL_NV_DX_interop' : { 'number' : 407, 'flags' : {", "}, 'url' : 'extensions/EXT/EXT_texenv_op.txt', 'comments' : 'Evolved into EXT_texture_env_combine.', }, 'GL_EXT_texture' : {", "'public' }, 'url' : 'extensions/INTEL/INTEL_performance_query.txt', }, 'GLX_INTEL_swap_event' : { 'number' : 384, 'flags'", "}, 'url' : 'extensions/NV/NV_query_resource.txt', }, 'GL_NV_query_resource_tag' : { 'number' : 512, 'flags' :", "'extensions/NV/NV_shader_atomic_float.txt', }, 'GL_NV_shader_atomic_float64' : { 'number' : 488, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_layer.txt', },", "'url' : 'extensions/REND/REND_screen_coordinates.txt', }, 'GL_S3_s3tc' : { 'number' : 276, 'flags' : {", "}, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture.txt', },", ": { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex_type.txt', }, 'GLX_SGI_cushion' : { 'number' :", "}, 'url' : 'extensions/EXT/EXT_rescale_normal.txt', }, 'GL_EXT_robustness' : { 'esnumber' : 107, 'flags' :", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_texture4.txt', }, 'GL_AMD_transform_feedback3_lines_triangles' : { 'number' : 397,", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_scalebias_hint.txt', },", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_light_texture.txt', }, 'GL_EXT_map_buffer_range'", ": { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_select.txt', }, 'GL_SGIX_async' : { 'number' :", "157, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'url' : 'extensions/EXT/EXT_texture_norm16.txt', }, 'GL_EXT_texture_object' : { 'number' : 20, 'flags' :", "'url' : 'extensions/ARB/ARB_shader_texture_lod.txt', }, 'GL_ARB_shader_viewport_layer_array' : { 'arbnumber' : 185, 'flags' : {", "{ 'number' : 339, 'flags' : { 'public' 
}, 'supporters' : { 'NVIDIA'", ": { 'arbnumber' : 129, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_layer_viewport.txt',", "34, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url'", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_subdiv_patch.txt', }, 'GL_SGIX_subsample' : { 'number' :", "82, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_blend.txt', }, 'GL_SGIX_fog_factor_to_alpha' : { 'flags' :", "'SGI' }, 'url' : 'extensions/EXT/EXT_cmyka.txt', }, 'GL_EXT_color_buffer_float' : { 'esnumber' : 137, 'flags'", "6, 'flags' : { 'public' }, 'supporters' : { 'ES', 'HP', 'IBM', 'SGI',", ": 'extensions/EXT/EXT_transform_feedback2.txt', 'comments' : 'Draft extension which is referred to by some other", ": { 'public' }, 'url' : 'extensions/IMG/IMG_texture_filter_cubic.txt', }, 'GL_IMG_user_clip_plane' : { 'esnumber' :", "}, 'url' : 'extensions/KHR/KHR_debug.txt', }, 'GL_KHR_no_error' : { 'arbnumber' : 175, 'esnumber' :", "'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_label.txt', }, 'GL_EXT_debug_marker' : { 'number' : 440, 'esnumber'", "'public' }, 'url' : 'extensions/AMD/AMD_compressed_3DC_texture.txt', }, 'GL_AMD_compressed_ATC_texture' : { 'esnumber' : 40, 'flags'", "'esnumber' : 20, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map.txt', }, 'GL_OES_texture_cube_map_array'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_pinned_memory.txt', }, 'GL_AMD_program_binary_Z400' : { 'esnumber' :", "}, 'url' : 'extensions/SGIX/SGIX_igloo_interface.txt', }, 'GL_SGIX_image_compression' : { 'flags' : { 'incomplete' },", ": { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_2_compatibility.txt', }, 'GL_ARB_ES3_compatibility' : { 'arbnumber' :", "}, 'url' : 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', }, 'GL_ANGLE_program_binary' : { 'esnumber' : 139, 'flags' :", "'public' }, 'url' : 'extensions/EXT/EXT_scene_marker.txt', 'alias' : { 'GLX_EXT_scene_marker' }, }, 
'GL_EXT_secondary_color' :", "{ 'esnumber' : 70, 'flags' : { 'public' }, 'supporters' : { 'QCOM'", "'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_extensions_string.txt', }, 'WGL_EXT_make_current_read'", "'extensions/ATI/ATI_separate_stencil.txt', }, 'GL_ATI_text_fragment_shader' : { 'number' : 269, 'flags' : { 'public' },", "'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_subtract.txt', }, 'GLX_EXT_buffer_age' : { 'number' : 427,", ": { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_pixmap_colormap.txt', }, 'GL_MESA_program_binary_formats' : { 'number' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_texture.txt', }, 'GLX_SGIX_dm_buffer' : { 'number' : 86, 'flags'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_hyperpipe.txt', }, 'GLU_SGIX_icc_compress' :", "'esnumber' : 33, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil8.txt', }, 'GL_OES_stencil_wrap'", "'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/WGL_EXT_depth_float.txt', }, 'WGL_EXT_display_color_table' : { 'number'", ": 245, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", ": 'extensions/EXT/EXT_blend_equation_separate.txt', }, 'GL_EXT_blend_func_extended' : { 'esnumber' : 247, 'flags' : { 'public'", "}, 'url' : 'extensions/NV/NV_evaluators.txt', }, 'GL_NV_explicit_attrib_location' : { 'esnumber' : 159, 'flags' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_copy_image.txt', }, 'GLX_ARB_create_context' : { 'arbnumber' : 56,", "}, 'GL_EXT_framebuffer_multisample_blit_scaled' : { 'number' : 409, 'flags' : { 'public' }, 'supporters'", "'GL_NV_texture_compression_vtc' : { 'number' : 228, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/ARB/ARB_framebuffer_object.txt', }, 'GL_ARB_framebuffer_sRGB' : { 'arbnumber' : 46, 'flags' :", "{ 'number' : 383, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "'number' : 265, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'esnumber' : 75, 'flags' : { 'public' }, 'url' 
: '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', }, 'GL_OES_blend_equation_separate'", "'public' }, 'url' : 'extensions/ARB/ARB_texture_compression_bptc.txt', }, 'GL_ARB_texture_compression_rgtc' : { 'arbnumber' : 52, 'flags'", "{ 'GL_OES_texture_half_float' }, }, 'GL_OES_texture_float_linear' : { 'esnumber' : 35, 'flags' : {", "'extensions/AMD/AMD_compressed_ATC_texture.txt', }, 'GL_AMD_conservative_depth' : { 'number' : 385, 'flags' : { 'public' },", ": 95, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc_update.txt', }, 'GL_NV_texture_compression_vtc' :", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nurbs_eval.txt', }, 'GL_SGIX_occlusion_instrument' : { 'number'", "{ 'number' : 116, 'flags' : { 'obsolete' }, 'supporters' : { 'SGI'", ": 178, 'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url'", "{ 'MESA' }, 'url' : 'extensions/MESA/MESA_resize_buffers.txt', }, 'GLX_MESA_set_3dfx_mode' : { 'number' : 218,", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_aux_depth_stencil.txt', }, 'GL_APPLE_client_storage' : { 'number' : 270, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_read_depth_stencil.txt', }, 'GL_NV_register_combiners' : { 'number'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_cushion.txt', }, 'GL_SGI_fft' : {", "}, 'WGL_3DL_stereo_control' : { 'number' : 313, 'flags' : { 'public' }, 'supporters'", ": 'extensions/ARB/ARB_fragment_layer_viewport.txt', }, 'GL_ARB_fragment_program' : { 'arbnumber' : 27, 'flags' : { 'public'", "'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_import_context.txt', }, 'GL_EXT_index_array_formats' : { 'number' : 96,", "'GL_EXT_vertex_attrib_64bit' : { 'number' : 387, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 149, 'flags' : { 'public' }, 'supporters' : {", "'NVIDIA' }, 'url' : 'extensions/NVX/NVX_linked_gpu_multicast.txt', }, 'GL_NV_3dvision_settings' : { 'esnumber' : 129, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 
'extensions/SGIX/SGIX_clipmap.txt',", "'extensions/EXT/GLX_EXT_libglvnd.txt', }, 'GL_EXT_light_texture' : { 'number' : 117, 'flags' : { 'public' },", "{ 'number' : 503, 'esnumber' : 280, 'flags' : { 'public' }, 'url'", "{ 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_debug_output.txt', }, 'GL_AMD_depth_clamp_separate' : { 'number' : 401,", "394, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'GL_NV_conservative_raster_dilate' : { 'number' : 480, 'flags' : { 'public' }, 'url'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_datapipe.txt', }, 'GL_SGIX_decimation' : { 'number'", ": { 'number' : 498, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_pixel_buffer_object.txt', }, 'GL_ARB_point_parameters' : { 'arbnumber'", "'url' : 'extensions/ATI/ATI_fragment_shader.txt', }, 'GL_ATI_map_object_buffer' : { 'number' : 288, 'flags' : {", "'ES', 'SGI' }, 'url' : 'extensions/EXT/EXT_cmyka.txt', }, 'GL_EXT_color_buffer_float' : { 'esnumber' : 137,", "79, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clip_volume_hint.txt', }, 'GL_EXT_cmyka' : {", "}, 'GL_EXT_draw_buffers_indexed' : { 'esnumber' : 176, 'flags' : { 'public' }, 'url'", "{ 'esnumber' : 264, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', },", "'extensions/EXT/EXT_texture_buffer.txt', }, 'GL_EXT_texture_buffer_object' : { 'number' : 330, 'flags' : { 'public' },", ": { 'number' : 238, 'flags' : { 'public' }, 'supporters' : {", "{ 'esnumber' : 178, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_gpu_shader5.txt', },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_420pack.txt', }, 'GL_ARB_shading_language_include' : { 'arbnumber' : 76,", ": 478, 'esnumber' : 241, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview.txt',", "'public' }, 'url' : 'extensions/ARM/ARM_rgba8.txt', }, 'GL_ARM_shader_framebuffer_fetch' : { 'esnumber' : 165, 'flags'", 
"'GL_ANGLE_framebuffer_blit' : { 'esnumber' : 83, 'flags' : { 'public' }, 'url' :", "{ 'number' : 63, 'flags' : { 'public' }, 'supporters' : { 'HP',", "'SGI' }, 'url' : 'extensions/EXT/EXT_packed_pixels.txt', }, 'GL_EXT_paletted_texture' : { 'number' : 78, 'flags'", "'extensions/SGIX/SGIX_fog_layers.txt', }, 'GL_SGIX_fog_offset' : { 'number' : 65, 'flags' : { 'public' },", ": { 'number' : 133, 'flags' : { 'incomplete', 'public' }, 'supporters' :", "}, 'GL_ARB_shader_subroutine' : { 'arbnumber' : 90, 'flags' : { 'public' }, 'url'", "'extensions/OES/OES_EGL_image.txt', }, 'GL_OES_EGL_image_external' : { 'esnumber' : 87, 'flags' : { 'public' },", "376, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "195, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": 'extensions/ATI/WGL_ATI_pixel_format_float.txt', }, 'WGL_EXT_colorspace' : { 'number' : 498, 'flags' : { 'public'", "'GL_ARB_direct_state_access' : { 'arbnumber' : 164, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/QCOM/QCOM_extended_get2.txt', }, 'GL_QCOM_performance_monitor_global_mode' : { 'esnumber' : 56, 'flags' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader2.txt', }, 'GL_NV_texture_shader3' : {", "'url' : 'extensions/ATI/ATI_texture_float.txt', }, 'GL_ATI_texture_mirror_once' : { 'number' : 221, 'flags' : {", "'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_snorm.txt', }, 'GL_EXT_texture_storage' : {", "}, 'GL_EXT_texture_mirror_clamp_to_edge' : { 'esnumber' : 291, 'flags' : { 'public' }, 'url'", ": 192, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'arbnumber' : 180, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_post_depth_coverage.txt',", "'url' : 'extensions/NV/WGL_NV_video_output.txt', }, 'WGL_OML_sync_control' : { 'number' : 242, 'flags' : {", "{ 'esnumber' : 176, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers_indexed.txt', },", "'NVIDIA' }, 'url' : 
'extensions/NV/NV_shader_atomic_counters.txt', }, 'GL_NV_shader_atomic_float' : { 'number' : 419, 'flags'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource_tag.txt', }, 'GL_NV_read_buffer' : { 'esnumber' :", "}, 'url' : 'extensions/EXT/EXT_multiview_draw_buffers.txt', }, 'GLU_EXT_nurbs_tessellator' : { 'number' : 100, 'flags' :", ": 201, 'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url'", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', },", ": 'extensions/DMP/DMP_program_binary.txt', }, 'GL_DMP_shader_binary' : { 'esnumber' : 88, 'flags' : { 'public'", "'number' : 95, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI'", "{ 'SGI' }, 'url' : 'extensions/SGIS/SGIS_fog_function.txt', }, 'GL_SGIS_generate_mipmap' : { 'number' : 32,", ": { 'KHR' }, 'url' : 'extensions/OML/OML_interlace.txt', }, 'GL_OML_resample' : { 'number' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_swizzle.txt', }, 'GL_ARB_texture_view' : { 'arbnumber' :", "spec location unknown.', }, 'GL_OES_point_size_array' : { 'esnumber' : 14, 'flags' : {", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_bptc.txt', }, 'GL_EXT_texture_compression_dxt1' : { 'number' : 309,", ": { 'public' }, 'url' : 'extensions/NV/NV_geometry_shader_passthrough.txt', }, 'GL_NV_gpu_multicast' : { 'number' :", "'public' }, 'url' : 'extensions/ARB/ARB_robust_buffer_access_behavior.txt', }, 'GL_ARB_robustness' : { 'arbnumber' : 105, 'flags'", "{ 'public' }, 'supporters' : { 'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_color_table.txt',", ": { 'number' : 13, 'flags' : { 'public' }, 'supporters' : {", "'Alias to GLX_ARB_create_context_profile not needed - see arbnumber 75.', }, 'GLX_ARB_create_context_no_error' : {", "'url' : 'extensions/NV/NV_draw_texture.txt', }, 'GL_NV_draw_vulkan_image' : { 'number' : 501, 'esnumber' : 274,", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_barrier.txt', }, 
'GL_NV_texture_border_clamp' : { 'esnumber'", ": 22, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_mirrored_repeat.txt', }, 'GL_OES_texture_npot' :", ": 'extensions/ARB/ARB_texture_query_levels.txt', }, 'GL_ARB_texture_query_lod' : { 'arbnumber' : 73, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/ANGLE/ANGLE_instanced_arrays.txt', }, 'GL_ANGLE_pack_reverse_row_order' : { 'esnumber' : 110,", "'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', }, 'GL_EXT_framebuffer_object' : { 'number' : 310, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_sRGB.txt', }, 'GL_EXT_texture_sRGB_R8'", "}, 'GL_SGIX_pixel_texture' : { 'number' : 499, 'flags' : { 'public' }, 'supporters'", "{ 'esnumber' : 131, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_platform_binary.txt', },", "'url' : 'extensions/NV/GLX_NV_video_out.txt', }, 'GL_NV_viewport_array' : { 'esnumber' : 202, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_gpu_shader_int64.txt', }, 'GL_ARB_half_float_pixel' : { 'arbnumber' : 40, 'flags' :", ": { 'public' }, 'url' : 'extensions/NV/NV_EGL_stream_consumer_external.txt', }, 'GL_NV_alpha_to_coverage_dither_control' : { 'number' :", "'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_add.txt', }, 'GL_EXT_texture_env_combine' : { 'number' : 158,", "}, 'GL_ARB_conditional_render_inverted' : { 'arbnumber' : 161, 'flags' : { 'public' }, 'url'", ": 176, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers_indexed.txt', }, 'GL_EXT_draw_elements_base_vertex' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_dm_buffer.txt', },", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sample_positions.txt', }, 'GL_AMD_seamless_cubemap_per_texture' :", "'number' : 346, 'esnumber' : 198, 'flags' : { 'public' }, 'supporters' :", ": 273, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", 
"'extensions/NV/NV_uniform_buffer_unified_memory.txt', }, 'GL_NV_vdpau_interop' : { 'number' : 396, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc_update.txt', }, 'GL_NV_texture_compression_vtc' : { 'number' :", ": 42, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', }, 'GL_ARB_texture_mirrored_repeat' : { 'arbnumber' : 21,", "}, 'GL_AMD_performance_monitor' : { 'number' : 360, 'esnumber' : 50, 'flags' : {", "'MESA' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float.txt', }, 'GL_AMD_gpu_shader_half_float_fetch' : { 'number' : 519, 'flags'", "{ 'number' : 198, 'esnumber' : 154, 'flags' : { 'public' }, 'supporters'", ": 223, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_RG8.txt', }, 'GL_EXT_texture_sRGB_decode' :", ": 'extensions/ARB/ARB_instanced_arrays.txt', }, 'GL_ARB_internalformat_query' : { 'arbnumber' : 112, 'flags' : { 'public'", "{ 'esnumber' : 24, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth24.txt', },", "}, 'url' : 'extensions/SGIX/SGIX_mpeg2.txt', }, 'GL_SGIX_nonlinear_lighting_pervertex' : { 'flags' : { 'incomplete' },", "'url' : 'extensions/IMG/IMG_bindless_texture.txt', }, 'GL_IMG_framebuffer_downsample' : { 'esnumber' : 255, 'flags' : {", ": 'extensions/NV/NV_conservative_raster_dilate.txt', }, 'GL_NV_conservative_raster_pre_snap' : { 'number' : 517, 'esnumber' : 297, 'flags'", "}, 'supporters' : { 'MS', 'SGI' }, 'url' : 'extensions/EXT/EXT_paletted_texture.txt', }, 'GL_EXT_pixel_buffer_object' :", "'url' : 'extensions/EXT/EXT_texture_cube_map_array.txt', }, 'GL_EXT_texture_env' : { 'number' : 146, 'flags' : {", "'GL_EXT_texture_type_2_10_10_10_REV' : { 'esnumber' : 42, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/ARB/ARB_fragment_shader_interlock.txt', }, 'GL_ARB_framebuffer_no_attachments' : { 'arbnumber' : 130, 'flags' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' 
: 'extensions/NV/NV_video_capture.txt', 'alias' :", "'GL_KHR_blend_equation_advanced' : { 'arbnumber' : 174, 'esnumber' : 168, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_geometry_shader4.txt', }, 'GLX_EXT_stereo_tree'", "{ 'public' }, 'url' : 'extensions/NV/NV_framebuffer_multisample.txt', }, 'GL_NV_framebuffer_multisample_coverage' : { 'number' : 336,", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_source.txt',", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program4.txt', }, 'GL_NV_gpu_program5' : { 'number' :", "'extensions/NV/NV_vertex_array_range.txt', }, 'GL_NV_vertex_array_range2' : { 'number' : 232, 'flags' : { 'public' },", "'WGL_ARB_extensions_string' : { 'arbnumber' : 8, 'flags' : { 'public' }, 'supporters' :", "{ 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias' : { 'GL_SGIS_color_range' }, }, 'GL_SGIS_detail_texture'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_shader5.txt', }, 'GL_NV_half_float' : { 'number' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/GLX_ARB_get_proc_address.txt', }, 'GL_ARB_get_program_binary'", "'extensions/NV/NV_texture_shader2.txt', }, 'GL_NV_texture_shader3' : { 'number' : 265, 'flags' : { 'public' },", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_texture.txt', }, 'GL_ARB_derivative_control' : {", "}, 'url' : 'extensions/SGIX/SGIX_dvc.txt', }, 'GLX_SGIX_fbconfig' : { 'number' : 49, 'flags' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_render_texture.txt', }, 'WGL_ARB_robustness_application_isolation' : { 'arbnumber'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression.txt', }, 'GL_ARB_texture_compression_bptc' : { 'arbnumber' : 77,", "}, 'GL_ARB_debug_output' : { 'arbnumber' : 104, 'flags' : { 'public' }, 'url'", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_fence.txt', }, 'GL_APPLE_float_pixels' :", ": { 'ARB' }, 
'url' : 'extensions/ARB/ARB_multitexture.txt', }, 'GL_ARB_occlusion_query' : { 'arbnumber' :", "403, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_clear_texture.txt', }, 'GL_ARB_clip_control' : { 'arbnumber' : 160,", "51, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_clip_control.txt', }, 'GL_ARB_color_buffer_float' : { 'arbnumber' : 39,", "'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_buffer_unified_memory.txt', }, 'GL_NV_vertex_program' : { 'number' : 233, 'flags'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_filter_minmax.txt', }, 'GL_ARB_texture_float' : { 'arbnumber' : 41,", ": { 'esnumber' : 107, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_robustness.txt',", ": 'extensions/NV/NV_blend_minmax_factor.txt', }, 'GL_NV_blend_square' : { 'number' : 194, 'flags' : { 'public'", "'number' : 398, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop2.txt', }, 'WGL_NV_delay_before_swap' : { 'number'", ": { 'REND' }, 'url' : 'extensions/REND/REND_screen_coordinates.txt', }, 'GL_S3_s3tc' : { 'number' :", "}, 'GL_NVX_conditional_render' : { 'number' : 425, 'flags' : { 'public' }, 'supporters'", ": 240, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture.txt', }, 'GL_EXT_sparse_texture2' :", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_multi_draw_indirect.txt', },", "{ 'esnumber' : 170, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_variables.txt', },", ": 'extensions/KHR/KHR_no_error.txt', }, 'GL_KHR_parallel_shader_compile' : { 'arbnumber' : 192, 'esnumber' : 288, 'flags'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb.txt', }, 'GL_SGIX_ycrcb_subsample' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_color_buffer_float.txt', 'alias'", "}, 
'GL_KHR_blend_equation_advanced' : { 'arbnumber' : 174, 'esnumber' : 168, 'flags' : {", "}, }, 'GLX_ARB_create_context_profile' : { 'arbnumber' : 75, 'flags' : { 'public' },", "{ 'obsolete' }, 'url' : 'extensions/SGI/GLX_SGI_transparent_pixel.txt', }, 'GLX_SGI_video_sync' : { 'number' : 41,", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_impact_pixel_texture.txt', },", "'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_gamma.txt', }, 'WGL_I3D_genlock' :", "}, 'url' : 'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', }, 'GL_ARB_texture_mirrored_repeat' : { 'arbnumber' : 21, 'flags' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multitexture.txt', }, 'GL_ARB_occlusion_query' : { 'arbnumber'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_include.txt', }, 'GL_ARB_shading_language_packing' : { 'arbnumber'", "184, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_cube_map_array.txt', }, 'GL_EXT_texture_env' : {", "{ 'IBM' }, 'url' : 'extensions/IBM/IBM_rasterpos_clip.txt', }, 'GL_IBM_static_data' : { 'number' : 223,", "'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', }, 'GL_OES_compressed_paletted_texture' : { 'number' : 294, 'esnumber' : 6, 'flags' :", "'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_misc_hints.txt', }, 'GL_PGI_vertex_hints' : { 'number'", "}, 'GL_NV_parameter_buffer_object2' : { 'number' : 378, 'flags' : { 'public' }, 'supporters'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_sRGB.txt', }, 'GL_EXT_texture_sRGB_R8' : {", ": { 'MESA' }, 'url' : 'extensions/SGIX/SGIX_igloo_interface.txt', }, 'GL_SGIX_image_compression' : { 'flags' :", ": 61, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_depth_clamp.txt', }, 'GL_ARB_depth_texture' :", "37, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_reference_plane.txt', }, 'GL_SGIX_resample' : {", ": { 
'number' : 352, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'url' : 'extensions/IMG/IMG_framebuffer_downsample.txt', }, 'GL_IMG_multisampled_render_to_texture' : { 'esnumber' : 74,", "}, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_resample.txt', }, 'GL_OML_subsample' : {", "'GL_EXT_texture_cube_map_array' : { 'esnumber' : 184, 'flags' : { 'public' }, 'url' :", "'number' : 505, 'esnumber' : 282, 'flags' : { 'public' }, 'url' :", "'extensions/INTEL/GLX_INTEL_swap_event.txt', }, 'GL_INTEL_texture_scissor' : { 'number' : 135, 'flags' : { 'public' },", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_subsample.txt', }, 'GLX_SGIX_swap_barrier' :", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_streams.txt', }, 'GL_DMP_program_binary' : { 'esnumber' :", "'GL_NV_shader_atomic_counters' : { 'number' : 423, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/AMD/GLX_AMD_gpu_association.txt', }, 'GL_AMD_gpu_shader_half_float' : { 'number' : 496, 'flags' : { 'public'", "'url' : 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', }, 'GL_ARB_compute_shader' : { 'arbnumber' : 122, 'flags' : {", "410, 'esnumber' : 199, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Included with arbnumber 56, GLX_ARB_create_context.', }, 'GLX_ARB_create_context_robustness' : {", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_list_priority.txt',", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_program4.txt', }, 'GL_NV_geometry_shader4' : { 'number'", ": { 'number' : 140, 'flags' : { 'incomplete' }, 'supporters' : {", "{ 'esnumber' : 84, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', },", "'GL_KHR_no_error' : { 'arbnumber' : 175, 'esnumber' : 243, 'flags' : { 'public'", "}, 'GL_AMD_occlusion_query_event' : { 'number' : 442, 'flags' : { 'public' }, 'supporters'", "}, 
'GL_ARM_mali_shader_binary' : { 'esnumber' : 81, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/INTEL/GLX_INTEL_swap_event.txt', }, 'GL_INTEL_texture_scissor' : {", "'number' : 498, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 110, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', }, 'GL_ANGLE_program_binary' :", ": 'extensions/EXT/EXT_vertex_array_bgra.txt', }, 'GL_EXT_vertex_array_set' : { 'flags' : { 'public' }, 'supporters' :", "'GLU_SGIX_icc_compress' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLU_SGIX_icc_compress.txt', }, 'GL_SGIX_icc_texture'", "'extensions/OES/OES_point_size_array.txt', }, 'GL_OES_point_sprite' : { 'esnumber' : 15, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_BGRA8888.txt', }, 'GL_EXT_texture_format_sRGB_override' : { 'esnumber' : 299,", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program2.txt',", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sample_positions.txt', }, 'GL_AMD_seamless_cubemap_per_texture' : { 'number'", "}, 'url' : 'extensions/SGIX/SGIX_fragments_instrument.txt', }, 'GL_SGIX_framezoom' : { 'number' : 57, 'flags' :", ": 510, 'esnumber' : 285, 'flags' : { 'public' }, 'supporters' : {", "'GL_OES_matrix_palette' : { 'esnumber' : 12, 'flags' : { 'public' }, 'url' :", ": { 'number' : 355, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 44, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_icc_texture.txt', }, 'GL_SGIX_igloo_interface' : { 'number'", ": 'extensions/NV/WGL_NV_render_texture_rectangle.txt', }, 'WGL_NV_swap_group' : { 'number' : 351, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_filter_anisotropic.txt',", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_edge_clamp.txt', }, 'GL_SGIS_texture_filter4' : { 
'number' : 7, 'flags'", "'public' }, 'url' : 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', }, 'GL_EXT_shader_pixel_local_storage' : { 'esnumber' : 167, 'flags'", "}, 'GL_EXT_multiple_textures' : { 'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_multiple_textures.txt', },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_env_combine4.txt', }, 'GL_NV_texture_expand_normal' :", "'url' : 'extensions/APPLE/APPLE_aux_depth_stencil.txt', }, 'GL_APPLE_client_storage' : { 'number' : 270, 'flags' : {", ": { 'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query2.txt', }, 'GL_ARB_invalidate_subdata' : { 'arbnumber' :", "{ 'esnumber' : 39, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_3DC_texture.txt', },", "124, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_view.txt', }, 'GL_ARB_timer_query' : {", ": { 'number' : 90, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/NV/NV_texture_compression_vtc.txt', }, 'GL_NV_texture_env_combine4' : { 'number' : 195, 'flags' : { 'public'", ": 'extensions/NV/NV_viewport_array.txt', }, 'GL_NV_viewport_array2' : { 'number' : 476, 'esnumber' : 237, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_texture_range.txt',", "'url' : 'extensions/APPLE/APPLE_vertex_array_object.txt', }, 'GL_APPLE_vertex_array_range' : { 'number' : 274, 'flags' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_framebuffer_multisample_coverage.txt', }, 'GL_NV_generate_mipmap_sRGB' : { 'esnumber' : 144,", ": 268, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_conservative_depth.txt', }, 'GL_EXT_convolution' :", ": { 'arbnumber' : 161, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conditional_render_inverted.txt',", ": 85, 'flags' : { 'public' }, 'url' : 'extensions/VIV/VIV_shader_binary.txt', }, 'WGL_3DL_stereo_control' :", "{ 'number' : 520, 'esnumber' : 122, 
'flags' : { 'public' }, 'url'", ": 'extensions/ARB/ARB_parallel_shader_compile.txt', }, 'GL_ARB_pipeline_statistics_query' : { 'arbnumber' : 171, 'flags' : { 'public'", "'esnumber' : 256, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_protected_textures.txt', }, 'GL_EXT_provoking_vertex'", "{ 'arbnumber' : 139, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_range.txt', },", "'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI', 'SUN'", ": 517, 'esnumber' : 297, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap.txt',", "'extensions/NV/NV_compute_program5.txt', }, 'GL_NV_conditional_render' : { 'number' : 346, 'esnumber' : 198, 'flags' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_size.txt', }, 'GL_ARB_shader_objects' : { 'arbnumber' :", "{ 'MESA' }, 'url' : 'extensions/MESA/MESA_pack_invert.txt', }, 'GLX_MESA_pixmap_colormap' : { 'number' : 216,", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_instrument_error.txt', }, 'GL_SGIX_instruments' :", "{ 'number' : 113, 'flags' : { 'public' }, 'supporters' : { 'MS'", "'number' : 15, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "}, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_vertex_array_lists.txt', }, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' : {", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb.txt', }, 'GL_SGIX_ycrcb_subsample' : { 'number' : 204, 'flags'", "'url' : 'extensions/OES/OES_blend_func_separate.txt', }, 'GL_OES_blend_subtract' : { 'esnumber' : 3, 'flags' : {", ": 450, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_transform_feedback4.txt', }, 'GL_AMD_vertex_shader_layer' :", "'url' : 'extensions/SGIS/SGIS_texture_lod.txt', }, 'GL_SGIS_texture_select' : { 'number' : 51, 'flags' : {", "{ 'IBM' }, 'url' : 'extensions/IBM/IBM_multimode_draw_arrays.txt', }, 'GL_IBM_rasterpos_clip' : { 'number' : 110,", "}, 'url' : 'extensions/OES/OES_texture_storage_multisample_2d_array.txt', }, 
'GL_OES_texture_view' : { 'esnumber' : 218, 'flags' :", ": 'extensions/ARB/ARB_texture_non_power_of_two.txt', }, 'GL_ARB_texture_query_levels' : { 'arbnumber' : 140, 'flags' : { 'public'", "}, 'GL_EXT_texture_shared_exponent' : { 'number' : 333, 'flags' : { 'public' }, 'supporters'", "}, 'GL_SGIX_occlusion_instrument' : { 'number' : 151, 'flags' : { 'incomplete' }, 'supporters'", "'url' : 'extensions/ARB/ARB_half_float_pixel.txt', }, 'GL_ARB_half_float_vertex' : { 'arbnumber' : 48, 'flags' : {", "'public' }, 'url' : 'extensions/ARB/ARB_program_interface_query.txt', }, 'GL_ARB_provoking_vertex' : { 'arbnumber' : 64, 'flags'", "221, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_R8.txt', }, 'GL_EXT_texture_sRGB_RG8' : {", "}, 'GL_NV_shader_atomic_float' : { 'number' : 419, 'flags' : { 'public' }, 'supporters'", "'extensions/ATI/ATI_element_array.txt', }, 'GL_ATI_envmap_bumpmap' : { 'number' : 244, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/OES/OES_texture_cube_map.txt', }, 'GL_OES_texture_cube_map_array' : { 'esnumber' : 217, 'flags'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_lod.txt', }, 'GL_SGIX_pixel_tiles' : { 'number' :", "238, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_polygon_mode.txt', }, 'GL_NV_present_video' : {", ": 302, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "162, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "}, }, 'GL_ARB_compatibility' : { 'arbnumber' : 58, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/WGL_ARB_pixel_format.txt', }, 'WGL_ARB_render_texture' : { 'arbnumber' : 20, 'flags' :", "'url' : 'extensions/ARB/ARB_draw_buffers.txt', }, 'GL_ARB_draw_buffers_blend' : { 'arbnumber' : 69, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_attrib_64bit.txt', }, 'GL_EXT_vertex_shader' :", "253, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage2.txt', }, 'GL_EXT_shader_texture_lod' : 
{", "'url' : 'extensions/EXT/EXT_gpu_shader5.txt', }, 'GL_EXT_histogram' : { 'number' : 11, 'flags' : {", "'AMD' }, 'url' : 'extensions/AMD/AMD_sample_positions.txt', }, 'GL_AMD_seamless_cubemap_per_texture' : { 'number' : 383, 'flags'", "'SUN' }, 'url' : 'extensions/SUN/SUN_vertex.txt', }, 'GL_VIV_shader_binary' : { 'esnumber' : 85, 'flags'", "'public' }, 'supporters' : { 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_edge_clamp.txt', },", "'url' : 'extensions/EXT/EXT_float_blend.txt', }, 'GL_EXT_fog_coord' : { 'number' : 149, 'flags' : {", ": { 'number' : 302, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 448, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_shuffle.txt',", ": 'extensions/ARB/ARB_draw_elements_base_vertex.txt', }, 'GL_ARB_draw_indirect' : { 'arbnumber' : 87, 'flags' : { 'public'", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex_type.txt',", "'extensions/SGIX/SGIX_impact_pixel_texture.txt', }, 'GL_SGIX_instrument_error' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_instrument_error.txt',", "}, 'GL_AMD_conservative_depth' : { 'number' : 385, 'flags' : { 'public' }, 'supporters'", "'GL_NV_gpu_multicast' : { 'number' : 494, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', }, 'GL_AMD_shader_image_load_store_lod' : { 'number' : 513,", "'GL_NV_point_sprite' : { 'number' : 262, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_viewport_layer_array.txt', }, 'GL_ARB_shading_language_100' : { 'arbnumber' : 33,", ": 411, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "}, 'GL_ARB_buffer_storage' : { 'arbnumber' : 144, 'flags' : { 'public' }, 'url'", "'number' : 441, 'flags' : { 'public' }, 'supporters' : { 'INTEL' },", "'GL_OES_gpu_shader5' : { 'esnumber' : 211, 'flags' : { 'public' }, 'url' :", "}, 'GL_NV_viewport_array2' : { 'number' 
: 476, 'esnumber' : 237, 'flags' : {", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multisample.txt', 'alias' : { 'GLX_ARB_multisample',", ": 'extensions/NVX/NVX_linked_gpu_multicast.txt', }, 'GL_NV_3dvision_settings' : { 'esnumber' : 129, 'flags' : { 'public'", "'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_resample.txt', }, 'GL_OML_subsample' :", "'GL_SGIX_fog_blend' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_blend.txt', }, 'GL_SGIX_fog_factor_to_alpha'", "'url' : 'extensions/APPLE/APPLE_texture_range.txt', }, 'GL_APPLE_transform_hint' : { 'number' : 160, 'flags' : {", "'public' }, 'url' : 'extensions/ARB/ARB_shader_draw_parameters.txt', }, 'GL_ARB_shader_group_vote' : { 'arbnumber' : 157, 'flags'", "'url' : 'extensions/ANGLE/ANGLE_depth_texture.txt', }, 'GL_ANGLE_framebuffer_blit' : { 'esnumber' : 83, 'flags' : {", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_mirror_once.txt', }, 'GL_ATI_vertex_array_object' :", "138, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_stencil_texturing.txt', }, 'GL_ARB_sync' : {", "{ 'IdSoftware', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_swizzle.txt', }, 'GL_EXT_texture_type_2_10_10_10_REV' : { 'esnumber' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_equation_advanced.txt', 'alias' : { 'GL_NV_blend_equation_advanced_coherent' }, }, 'GL_NV_blend_minmax_factor'", "'extensions/ARB/ARB_shader_image_load_store.txt', }, 'GL_ARB_shader_image_size' : { 'arbnumber' : 136, 'flags' : { 'public' },", "'number' : 24, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'GL_SGIS_texture_edge_clamp' : { 'number' : 35, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'url' : 'extensions/OES/OES_depth24.txt', }, 'GL_OES_depth32' : { 'esnumber' :", ": 'extensions/ARB/ARB_shading_language_100.txt', }, 'GL_ARB_shading_language_420pack' : { 'arbnumber' : 108, 'flags' : { 'public'", "'url' : 'extensions/OES/OES_viewport_array.txt', }, 
'GL_OML_interlace' : { 'number' : 239, 'flags' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_compute_program5.txt', },", "{ 'number' : 86, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "}, 'GL_SGIX_cube_map' : { 'number' : 130, 'flags' : { 'incomplete' }, 'supporters'", ": 13, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": { 'esnumber' : 273, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_framebuffer_foveated.txt',", "'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc2.txt', }, 'GL_IMG_texture_env_enhanced_fixed_function' : { 'esnumber' : 58, 'flags'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_bindless_texture.txt', }, 'GL_ARB_blend_func_extended' : { 'arbnumber' : 78,", "'number' : 427, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA'", ": { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_gamma.txt', },", ": 'extensions/I3D/WGL_I3D_genlock.txt', }, 'WGL_I3D_image_buffer' : { 'number' : 253, 'flags' : { 'public'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_pixel.txt', }, 'GL_ARB_half_float_vertex' :", ": 85, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_timer_query.txt', }, 'GL_ARB_transform_feedback2' :", "'url' : 'extensions/MESA/GLX_MESA_query_renderer.txt', }, 'GLX_MESA_release_buffers' : { 'number' : 217, 'flags' : {", "'extensions/APPLE/APPLE_row_bytes.txt', }, 'GL_APPLE_specular_vector' : { 'number' : 159, 'flags' : { 'public' },", "}, 'url' : 'extensions/EXT/EXT_scene_marker.txt', 'alias' : { 'GLX_EXT_scene_marker' }, }, 'GL_EXT_secondary_color' : {", ": { 'number' : 254, 'flags' : { 'public' }, 'supporters' : {", "'obsolete' }, 'url' : 'extensions/EXT/EXT_multiple_textures.txt', }, 'GL_EXT_multisample_compatibility' : { 'esnumber' : 248, 'flags'", ": 267, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'public' }, 'url' : 'extensions/OES/OES_packed_depth_stencil.txt', }, 'GL_OES_paletted_texture' : { 
'esnumber' : 13, 'flags'", "}, 'WGL_I3D_swap_frame_lock' : { 'number' : 254, 'flags' : { 'public' }, 'supporters'", ": 'extensions/AMD/AMD_sample_positions.txt', }, 'GL_AMD_seamless_cubemap_per_texture' : { 'number' : 383, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample.txt', }, 'GL_EXT_framebuffer_multisample_blit_scaled' : { 'number' : 409,", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_coverage_to_color.txt', }, 'GL_NV_fragment_program' : { 'number'", "}, 'GLX_SGIX_video_source' : { 'number' : 43, 'flags' : { 'public' }, 'supporters'", "'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_program.txt', }, 'GL_ARB_vertex_shader' : { 'arbnumber' : 31, 'flags'", "'GLX_ARB_multisample', 'WGL_ARB_multisample' }, }, 'GL_ARB_multitexture' : { 'arbnumber' : 1, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_transform_feedback.txt', }, 'GL_EXT_external_buffer' : { 'number'", "}, 'url' : 'extensions/EXT/EXT_texture_env.txt', }, 'GL_EXT_texture_env_add' : { 'number' : 185, 'flags' :", "'GL_EXT_static_vertex_array' : { 'flags' : { 'public' }, 'supporters' : { 'IBM' },", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_include.txt', }, 'GL_ARB_shading_language_packing' : { 'arbnumber' :", ": { 'public' }, 'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_string_marker.txt', },", "'esnumber' : 58, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', }, 'GL_IMG_texture_filter_cubic'", "{ 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_provoking_vertex.txt', },", "{ 'esnumber' : 160, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_non_square_matrices.txt', },", ": 51, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'number' : 221, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_program_evaluators.txt', }, 
'GL_APPLE_ycbcr_422'", "}, 'GL_NV_shader_buffer_store' : { 'number' : 390, 'flags' : { 'public' }, 'supporters'", "'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', }, 'GL_AMD_gpu_shader_int16' : { 'number' : 507, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture2.txt', }, 'GL_ARB_sparse_texture_clamp' : { 'arbnumber'", "}, 'GL_EXT_texture_compression_latc' : { 'number' : 331, 'flags' : { 'public' }, 'supporters'", "}, 'GL_EXT_rescale_normal' : { 'number' : 27, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_attrib_integer_64bit.txt', }, 'GL_NV_vertex_buffer_unified_memory' : { 'number'", ": 424, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'GL_ARB_transform_feedback2' : { 'arbnumber' : 93, 'flags' : { 'public' }, 'url'", "5, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": 166, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', }, 'GL_ATI_draw_buffers' :", "'number' : 318, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", ": 'extensions/EXT/EXT_texture_compression_dxt1.txt', }, 'GL_EXT_texture_compression_latc' : { 'number' : 331, 'flags' : { 'public'", "'url' : 'extensions/EXT/EXT_geometry_shader.txt', 'alias' : { 'GL_EXT_geometry_point_size' }, }, 'GL_EXT_geometry_shader4' : { 'number'", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESAX/MESAX_texture_stack.txt', },", "'url' : 'extensions/ANGLE/ANGLE_instanced_arrays.txt', }, 'GL_ANGLE_pack_reverse_row_order' : { 'esnumber' : 110, 'flags' : {", "'GL_KHR_debug' : { 'arbnumber' : 119, 'esnumber' : 118, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_get.txt', }, 'GL_OES_matrix_palette' : { 'esnumber'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage.txt', }, 'GL_EXT_shader_pixel_local_storage2' : { 'esnumber' : 253,", "{ 'public' }, 'supporters' : { 'INGR', 
'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control.txt', },", "{ 'number' : 203, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "{ 'number' : 384, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "'url' : 'extensions/ARB/ARB_texture_rgb10_a2ui.txt', }, 'GL_ARB_texture_stencil8' : { 'arbnumber' : 150, 'flags' : {", "{ 'public' }, 'url' : 'extensions/ARB/ARB_sampler_objects.txt', }, 'GL_ARB_seamless_cube_map' : { 'arbnumber' : 65,", "'esnumber' : 158, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_buffer.txt', }, 'GL_NV_copy_depth_to_color'", "520, 'esnumber' : 122, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias'", "'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_pixel.txt', }, 'GL_ARB_half_float_vertex' : { 'arbnumber' : 48, 'flags'", "'GL_QCOM_framebuffer_foveated' : { 'esnumber' : 273, 'flags' : { 'public' }, 'url' :", "'number' : 359, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "}, 'GL_SGIX_color_matrix_accuracy' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_matrix_accuracy.txt', },", ": 186, 'esnumber' : 60, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_buffer_float.txt',", "{ 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_pixel.txt', },", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_object_purgeable.txt', }, 'GL_APPLE_rgb_422' : { 'number' : 373, 'esnumber'", "'esnumber' : 189, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robust_buffer_access_behavior.txt', }, 'GL_KHR_robustness'", "'url' : 'extensions/AMD/AMD_vertex_shader_layer.txt', }, 'GL_AMD_vertex_shader_tessellator' : { 'number' : 363, 'flags' : {", "'GL_ARB_shader_atomic_counter_ops' : { 'arbnumber' : 182, 'flags' : { 'public' }, 'url' :", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg1.txt', }, 'GL_SGIX_mpeg2' :", "}, 'url' : 'extensions/ARB/ARB_draw_buffers.txt', 
}, 'GL_ARB_draw_buffers_blend' : { 'arbnumber' : 69, 'flags' :", "}, 'url' : 'extensions/AMD/AMD_vertex_shader_layer.txt', }, 'GL_AMD_vertex_shader_tessellator' : { 'number' : 363, 'flags' :", "61, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_depth_clamp.txt', }, 'GL_ARB_depth_texture' : {", "{ 'number' : 432, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'INGR', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_wrap.txt', }, 'GL_EXT_subtexture' : { 'number'", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_performance_monitor.txt', }, 'GL_AMD_pinned_memory' : {", ": 'extensions/ARB/ARB_tessellation_shader.txt', }, 'GL_ARB_texture_barrier' : { 'arbnumber' : 167, 'flags' : { 'public'", "365, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url'", "{ 'number' : 246, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "'SUN' }, 'url' : 'extensions/SUNX/SUNX_constant_data.txt', }, 'GL_SUN_convolution_border_modes' : { 'number' : 182, 'flags'", "}, 'GL_3DFX_tbuffer' : { 'number' : 208, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 192, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_row_bytes.txt', }, 'GL_APPLE_specular_vector' :", "{ 'number' : 484, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_robustness_video_memory_purge.txt', },", "}, 'GL_OES_required_internalformat' : { 'esnumber' : 115, 'flags' : { 'public' }, 'url'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_matrix_palette.txt', }, 'GL_ARB_multi_bind' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_draw_parameters.txt', }, 'GL_ARB_shader_group_vote' : { 'arbnumber' :", "'number' : 335, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 'extensions/NV/NV_texture_rectangle.txt', }, 'GL_NV_texture_rectangle_compressed' : { 'number' : 509, 'flags' : { 'public'", "'esnumber' : 11, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_get.txt', }, 
'GL_OES_matrix_palette'", "478, 'esnumber' : 241, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview.txt', },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_video_capture.txt',", ": 'extensions/EXT/EXT_blend_func_extended.txt', }, 'GL_EXT_blend_func_separate' : { 'number' : 173, 'flags' : { 'public'", "'GL_EXT_texture_snorm' : { 'number' : 365, 'flags' : { 'public' }, 'supporters' :", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shadow_funcs.txt', }, 'GL_EXT_shadow_samplers' : { 'esnumber' : 102, 'flags'", ": { 'number' : 512, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_NV_geometry_program4' : { 'number' : 323, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_performance_monitor.txt', }, 'GL_AMD_pinned_memory'", "'esnumber' : 293, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_texture_foveated.txt', }, 'GL_QCOM_shader_framebuffer_fetch_noncoherent'", "30, 'flags' : { 'public' }, 'supporters' : { 'DEC', 'HP', 'IBM', 'INGR',", ": 509, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": 'extensions/IMG/IMG_texture_filter_cubic.txt', }, 'GL_IMG_user_clip_plane' : { 'esnumber' : 57, 'flags' : { 'public'", "452, 'flags' : { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_stereo_tree.txt', }, 'GL_EXT_gpu_program_parameters' : {", "'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/WIN/WIN_phong_shading.txt', }, 'GL_WIN_scene_markerXXX' :", "'url' : 'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias' : { 'GL_SUN_multi_draw_arrays' }, }, 'GL_EXT_multi_draw_indirect' : { 'esnumber'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_pixel_buffer_object.txt', },", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int16.txt', }, 'GL_AMD_gpu_shader_int64' : {", "{ 'number' : 413, 'flags' : { 'public' }, 'supporters' : { 'AMD'", ": 50, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'KHR' }, 'url' : 
'extensions/OES/OES_query_matrix.txt', }, 'GL_OES_read_format' : { 'number' : 295, 'esnumber'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_bindable_uniform.txt', }, 'GL_EXT_blend_color' : { 'number'", "}, 'url' : 'extensions/ARB/ARB_vertex_attrib_64bit.txt', }, 'GL_ARB_vertex_attrib_binding' : { 'arbnumber' : 125, 'flags' :", ": { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_blend_logic_op.txt', }, 'GL_EXT_blend_minmax'", "'extensions/EXT/EXT_conservative_depth.txt', }, 'GL_EXT_convolution' : { 'number' : 12, 'flags' : { 'public' },", "'extensions/SGIX/GLX_SGIX_swap_barrier.txt', }, 'GLX_SGIX_swap_group' : { 'number' : 91, 'flags' : { 'public' },", "'url' : 'extensions/ARB/ARB_robustness.txt', }, 'GLX_ARB_robustness_application_isolation' : { 'arbnumber' : 142, 'flags' : {", "{ 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pixel_format.txt', },", "}, 'GL_EXT_raster_multisample' : { 'number' : 462, 'esnumber' : 226, 'flags' : {", "shipped as ARB_transform_feedback2.', }, 'GL_EXT_unpack_subimage' : { 'esnumber' : 90, 'flags' : {", "239, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", "{ 'esnumber' : 74, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_multisampled_render_to_texture.txt', },", "'number' : 463, 'esnumber' : 259, 'flags' : { 'public' }, 'url' :", "'GL_SGI_color_table' : { 'number' : 14, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/NV/NV_blend_minmax_factor.txt', }, 'GL_NV_blend_square' : { 'number' : 194, 'flags' :", "}, 'url' : 'extensions/DMP/DMP_program_binary.txt', }, 'GL_DMP_shader_binary' : { 'esnumber' : 88, 'flags' :", "'public' }, 'url' : 'extensions/OES/OES_texture_float_linear.txt', 'alias' : { 'GL_OES_texture_half_float_linear' }, }, 'GL_OES_texture_mirrored_repeat' :", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_image_load_store_lod.txt', }, 'GL_AMD_shader_stencil_export' : { 'number' :", "'flags' : { 'public' }, 'url' : 
'extensions/OES/OES_gpu_shader5.txt', }, 'GL_OES_mapbuffer' : { 'esnumber'", "479, 'esnumber' : 242, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview2.txt', },", "}, 'GL_SGIX_list_priority' : { 'number' : 80, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUNX/SUNX_constant_data.txt', },", "'WGL_EXT_swap_control' : { 'number' : 172, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/OES/OES_stencil4.txt', }, 'GL_OES_stencil8' : { 'esnumber' : 33, 'flags' : {", ": 93, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' },", ": 'extensions/EXT/WGL_EXT_swap_control.txt', }, 'WGL_EXT_swap_control_tear' : { 'number' : 415, 'flags' : { 'public'", "}, 'url' : 'extensions/INTEL/INTEL_conservative_rasterization.txt', }, 'GL_INTEL_fragment_shader_ordering' : { 'number' : 441, 'flags' :", "'ATI', 'NVIDIA' }, 'url' : 'extensions/S3/S3_s3tc.txt', }, 'GLX_SGIS_blended_overlay' : { 'number' : 142,", "'public' }, 'url' : 'extensions/EXT/GLX_EXT_libglvnd.txt', }, 'GL_EXT_light_texture' : { 'number' : 117, 'flags'", ": { 'number' : 23, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/SGIX/GLX_SGIX_swap_group.txt', }, 'GL_SGIX_tag_sample_buffer' : { 'number' : 58, 'flags' : {", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_calligraphic_fragment.txt', }, 'GL_SGIX_clipmap'", "'number' : 187, 'esnumber' : 41, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_colorspace.txt', }, 'WGL_EXT_create_context_es2_profile' :", ": { 'public' }, 'url' : 'extensions/KHR/KHR_no_error.txt', }, 'GL_KHR_parallel_shader_compile' : { 'arbnumber' :", "'AMD' }, 'url' : 'extensions/AMD/GLX_AMD_gpu_association.txt', }, 'GL_AMD_gpu_shader_half_float' : { 'number' : 496, 'flags'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback.txt', }, 'GL_NV_transform_feedback2' : {", ": { 'Blizzard', 
'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_vertex_array_bgra.txt', }, 'GL_EXT_vertex_array_set' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_pixel.txt', }, 'GL_ARB_half_float_vertex' : {", "409, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url'", "{ 'public' }, 'url' : 'extensions/IMG/IMG_shader_binary.txt', }, 'GL_IMG_texture_compression_pvrtc' : { 'esnumber' : 54,", "{ 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_cull_vertex.txt', },", ": { 'number' : 74, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_subtable.txt',", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_array_object.txt', }, 'GL_ARB_vertex_attrib_64bit'", "{ 'number' : 278, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "{ 'arbnumber' : 194, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_clear_tag.txt', }, 'GL_EXT_stencil_two_side' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_io_blocks.txt', }, 'GL_EXT_shader_non_constant_global_initializers' : { 'esnumber'", ": { 'number' : 347, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/HP/HP_image_transform.txt', }, 'GL_HP_occlusion_test' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_group_vote.txt', }, 'GL_ARB_shader_image_load_store' : { 'arbnumber' :", "{ '3DFX', '3DL', 'SGI' }, 'url' : 'extensions/EXT/EXT_shared_texture_palette.txt', }, 'GL_EXT_sparse_texture' : { 'esnumber'", "'url' : 'extensions/NV/NV_bgr.txt', }, 'GL_NV_bindless_multi_draw_indirect' : { 'number' : 432, 'flags' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_1_compatibility.txt', }, 'GL_ARB_ES3_2_compatibility' : { 'arbnumber' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback3.txt', }, 'GL_ARB_transform_feedback_instanced' : { 'arbnumber' : 109,", ": { 'public' }, 'url' : 
'extensions/OES/OES_stencil8.txt', }, 'GL_OES_stencil_wrap' : { 'esnumber' :", "'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_color.txt', }, 'GL_EXT_blend_equation_separate' : { 'number'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_line_texgen.txt', }, 'GL_SGIS_multisample' : { 'number'", "'public' }, 'url' : 'extensions/ARB/ARB_cull_distance.txt', }, 'GL_ARB_debug_output' : { 'arbnumber' : 104, 'flags'", ": { 'esnumber' : 290, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program.txt',", "}, 'WGL_EXT_pbuffer' : { 'number' : 171, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 28, 'flags' : { 'public' }, 'supporters' : { 'IBM',", "'public' }, 'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_vertex_hints.txt', }, 'GL_QCOM_alpha_test' :", ": { 'number' : 371, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 'alias' : { 'GLX_EXT_create_context_es_profile' }, }, 'GL_EXT_cull_vertex' : { 'number'", "'url' : 'extensions/ARB/ARB_shader_group_vote.txt', }, 'GL_ARB_shader_image_load_store' : { 'arbnumber' : 115, 'flags' : {", "}, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_rasterpos_clip.txt', }, 'GL_IBM_static_data' : {", ": 87, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "'IBM', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_texture3D.txt', }, 'GL_EXT_texture_array' : { 'number' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multisample.txt', 'alias'", "'WGL_EXT_swap_control_tear' : { 'number' : 415, 'flags' : { 'public' }, 'supporters' :", "'extensions/APPLE/APPLE_rgb_422.txt', }, 'GL_APPLE_row_bytes' : { 'number' : 372, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_image_buffer.txt', }, 'WGL_I3D_swap_frame_lock' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_base_instance.txt', 
}, 'GL_EXT_bgra' : { 'number' :", ": { 'number' : 282, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_flush_buffer_range.txt', }, 'GL_APPLE_framebuffer_multisample' : { 'esnumber'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_cube_map_array.txt', }, 'GL_ARB_texture_env_add' : { 'arbnumber'", "'url' : 'extensions/NV/NV_fog_distance.txt', }, 'GL_NV_fragment_coverage_to_color' : { 'number' : 467, 'esnumber' : 229,", ": 235, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "{ 'esnumber' : 130, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_latc.txt', },", "190, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gl_spirv.txt', }, 'GL_ARB_gpu_shader5' : {", "'number' : 4, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR',", "{ 'number' : 273, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "'GL_EXT_EGL_image_array' : { 'esnumber' : 278, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/IBM/IBM_rasterpos_clip.txt', }, 'GL_IBM_static_data' : { 'number' : 223, 'flags' : {", "'GL_OES_texture_cube_map' : { 'esnumber' : 20, 'flags' : { 'public' }, 'url' :", ": 37, 'esnumber' : 65, 'flags' : { 'public' }, 'supporters' : {", "447, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_group.txt', }, 'GL_NV_shader_thread_shuffle' : {", "'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_subtract.txt', }, 'GLX_EXT_buffer_age' : { 'number'", "438, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 162, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", ": { 'number' : 445, 'flags' : { 'public' }, 'url' : 'extensions/NV/GLX_NV_delay_before_swap.txt',", "50, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'public' }, 'url' : 'extensions/OES/OES_sample_variables.txt', }, 'GL_OES_shader_image_atomic' : { 'esnumber' : 171,", "'number' : 334, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'number' 
: 296, 'esnumber' : 16, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_cushion.txt', }, 'GL_SGI_fft'", "'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_texture_compression_FXT1.txt', }, 'GL_AMD_blend_minmax_factor' :", "'number' : 11, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'KGC',", "}, 'url' : 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', }, 'GL_MESA_shader_integer_functions' : { 'number' : 495, 'flags' :", "'public' }, 'url' : 'extensions/OES/OES_required_internalformat.txt', }, 'GL_OES_rgb8_rgba8' : { 'esnumber' : 30, 'flags'", "'extensions/EXT/EXT_texture.txt', }, 'GL_EXT_texture3D' : { 'number' : 6, 'flags' : { 'public' },", "'alias' : { 'GL_ANGLE_texture_compression_dxt1', 'GL_ANGLE_texture_compression_dxt5' }, }, 'GL_ANGLE_texture_usage' : { 'esnumber' : 112,", "'public' }, 'url' : 'extensions/EXT/EXT_color_subtable.txt', }, 'GL_EXT_compiled_vertex_array' : { 'number' : 97, 'flags'", "{ 'public' }, 'url' : 'extensions/IBM/IBM_texture_mirrored_repeat.txt', }, 'GL_IBM_vertex_array_lists' : { 'number' : 201,", ": { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_map_texture.txt', },", "'extensions/EXT/EXT_texture_compression_bptc.txt', }, 'GL_EXT_texture_compression_dxt1' : { 'number' : 309, 'esnumber' : 49, 'flags' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_bit_encoding.txt', }, 'GL_ARB_shader_clock' : { 'arbnumber' :", "'url' : 'extensions/SGIX/SGIX_complex_polar.txt', }, 'GL_SGIX_convolution_accuracy' : { 'number' : 211, 'flags' : {", "45, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'GL_SGIX_vector_ops' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_vector_ops.txt', }, 'GL_SGIX_vertex_array_object'", "}, 'GL_SGIX_packed_6bytes' : { 'number' : 162, 'flags' : { 'incomplete' }, 'supporters'", "}, 'GL_SGIX_polynomial_ffd' : { 'number' : 59, 'flags' : { 'incomplete' }, 'supporters'", ": 289, 'flags' : 
{ 'public' }, 'supporters' : { 'ANGLE' }, 'url'", ": 'extensions/ARB/ARB_internalformat_query.txt', }, 'GL_ARB_internalformat_query2' : { 'arbnumber' : 131, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/OES/OES_extended_matrix_palette.txt', }, 'GL_OES_fbo_render_mipmap' : { 'esnumber' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_sRGB_override.txt', }, 'GLX_EXT_texture_from_pixmap' : { 'number' :", "{ 'esnumber' : 212, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_primitive_bounding_box.txt', },", "'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', }, 'GL_IGLOO_viewport_offsetXXX' : {", "{ 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_fragment_shader_ordering.txt', }, 'GL_INTEL_framebuffer_CMAA' : { 'number' : 481,", ": 'extensions/NV/NV_vertex_program2.txt', }, 'GL_NV_vertex_program2_option' : { 'number' : 305, 'flags' : { 'public'", "'Included with arbnumber 55, WGL_ARB_create_context.', }, 'WGL_ARB_create_context_robustness' : { 'arbnumber' : 102, 'flags'", "'AMD' }, 'url' : 'extensions/AMD/AMD_sparse_texture.txt', }, 'GL_AMD_stencil_operation_extended' : { 'number' : 413, 'flags'", "'supporters' : { 'ES', 'SGI' }, 'url' : 'extensions/EXT/EXT_cmyka.txt', }, 'GL_EXT_color_buffer_float' : {", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sparse_texture.txt', }, 'GL_AMD_stencil_operation_extended' : { 'number'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture.txt', }, 'GL_ARB_sparse_texture2' : { 'arbnumber' : 186,", "{ 'public' }, 'supporters' : { 'IBM', 'SUN' }, 'url' : 'extensions/EXT/EXT_rescale_normal.txt', },", "'GL_INTEL_parallel_arrays' : { 'number' : 136, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/WGL_AMD_gpu_association.txt', },", "{ 'arbnumber' : 29, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "}, 'GL_MESAX_texture_stack' : { 'number' : 318, 'flags' : { 'public' }, 'supporters'", 
"}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_program.txt', }, 'GL_ARB_vertex_shader' : {", ": 'extensions/SGIX/SGIX_blend_alpha_minmax.txt', }, 'GL_SGIX_blend_cadd' : { 'number' : 150, 'flags' : { 'incomplete'", ": 39, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": 'Alias to GLX_ARB_create_context_profile not needed - see arbnumber 75.', }, 'GLX_ARB_create_context_no_error' :", "'url' : 'extensions/MESA/GLX_MESA_release_buffers.txt', }, 'GL_MESA_resize_buffers' : { 'number' : 196, 'flags' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle_compressed.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_frag_depth.txt', }, 'GL_EXT_fragment_lighting' : { 'number'", "'extensions/SGIX/SGIX_ir_instrument1.txt', }, 'GL_SGIX_line_quality_hint' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_line_quality_hint.txt',", "80, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform_color_table.txt', }, 'GL_EXT_point_parameters' : { 'number' :", "{ 'number' : 65, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_transform_hint.txt', }, 'GL_APPLE_vertex_array_object' : { 'number'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_point_sprite.txt', }, 'GL_NV_polygon_mode' : { 'esnumber' : 238,", "{ 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_texture_rectangle.txt', }, 'WGL_NV_swap_group' : { 'number' : 351,", "'url' : 'extensions/EXT/EXT_stencil_clear_tag.txt', }, 'GL_EXT_stencil_two_side' : { 'number' : 268, 'flags' : {", "'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pixel_format.txt', }, 'WGL_EXT_swap_control'", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int16.txt',", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow.txt', }, 'GL_ARB_shadow_ambient' : { 'arbnumber' :", "'url' : 
'extensions/EXT/EXT_transform_feedback.txt', }, 'GL_EXT_transform_feedback2' : { 'flags' : { 'incomplete', 'obsolete' },", "'GL_EXT_clear_texture' : { 'esnumber' : 269, 'flags' : { 'public' }, 'url' :", "'GL_SGIX_async_pixel' : { 'number' : 133, 'flags' : { 'incomplete', 'public' }, 'supporters'", "'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Included with arbnumber 55, WGL_ARB_create_context.', },", "'extensions/EXT/EXT_texture_perturb_normal.txt', }, 'GL_EXT_texture_rg' : { 'esnumber' : 103, 'flags' : { 'public' },", "}, 'GL_NV_conservative_raster_pre_snap_triangles' : { 'number' : 487, 'esnumber' : 262, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt',", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_levels.txt', }, 'GL_ARB_texture_query_lod' : { 'arbnumber'", ": 'extensions/ARM/ARM_mali_shader_binary.txt', }, 'GL_ARM_rgba8' : { 'esnumber' : 82, 'flags' : { 'public'", "358, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "21, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'public' }, 'url' : 'extensions/ARB/ARB_texture_view.txt', }, 'GL_ARB_timer_query' : { 'arbnumber' : 85, 'flags'", "{ 'public' }, 'url' : 'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', }, 'GL_QCOM_tiled_rendering' : { 'esnumber' : 70,", "'GLX_SGIX_fbconfig_float' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', }, 'GL_SGIX_flush_raster'", ": 50, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", ": 53, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_read_format.txt', }, 'GL_IMG_shader_binary' :", "'GL_OVR_multiview_multisampled_render_to_texture' : { 'esnumber' : 250, 'flags' : { 'public' }, 'url' :", "{ 'arbnumber' : 75, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments'", "{ 'esnumber' : 143, 'flags' : { 'public' }, 'url' : 
'extensions/NV/NV_framebuffer_multisample.txt', },", ": { 'number' : 153, 'flags' : { 'obsolete' }, 'url' : 'extensions/SGI/GLX_SGI_transparent_pixel.txt',", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ir_instrument1.txt', }, 'GL_SGIX_line_quality_hint' : { 'flags'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_polygon_offset_clamp.txt', }, 'GL_ARB_post_depth_coverage'", "'number' : 302, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'public' }, 'url' : 'extensions/EXT/EXT_sRGB_write_control.txt', }, 'GL_EXT_scene_marker' : { 'number' : 120, 'flags'", "}, 'url' : 'extensions/EXT/EXT_multiple_textures.txt', }, 'GL_EXT_multisample_compatibility' : { 'esnumber' : 248, 'flags' :", "{ 'number' : 327, 'esnumber' : 157, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_color_clamp.txt', }, 'GL_INGR_interlace_read' : { 'number'", "'public' }, 'url' : 'extensions/NV/NV_packed_float.txt', }, 'GL_NV_parameter_buffer_object' : { 'number' : 339, 'flags'", "}, 'url' : 'extensions/MESA/MESA_shader_integer_functions.txt', }, 'GLX_MESA_swap_control' : { 'number' : 514, 'flags' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle.txt', }, 'GL_NV_texture_rectangle_compressed'", "{ 'number' : 72, 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', },", ": { 'esnumber' : 188, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt',", ": 358, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'GL_EXT_packed_pixels' : { 'number' : 23, 'flags' : { 'public' }, 'supporters'", "'HP', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture.txt', }, 'GL_EXT_texture3D' : { 'number'", "{ 'HP' }, 'url' : 'extensions/HP/HP_occlusion_test.txt', }, 'GL_HP_texture_lighting' : { 'number' : 111,", "'public' }, 'supporters' : { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' : 
'extensions/EXT/EXT_direct_state_access.txt',", ": { 'number' : 195, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'IBM', 'INGR' }, 'url' : 'extensions/EXT/EXT_blend_func_separate.txt', }, 'GL_EXT_blend_logic_op' :", "'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_ATC_texture.txt', }, 'GL_AMD_conservative_depth' : { 'number'", ": 10, 'flags' : { 'public' }, 'supporters' : { 'ES', 'HP', 'SGI'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_calligraphic_fragment.txt', }, 'GL_SGIX_clipmap' : { 'number' :", "{ 'number' : 479, 'esnumber' : 242, 'flags' : { 'public' }, 'url'", "'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_mesh_array.txt', }, 'GL_SUN_slice_accum' : { 'number'", "}, 'url' : 'extensions/EXT/EXT_timer_query.txt', }, 'GL_EXT_transform_feedback' : { 'number' : 352, 'flags' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_fp64.txt', }, 'GL_ARB_gpu_shader_int64' : { 'arbnumber' :", "'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_conservative_rasterization.txt',", ": { 'public' }, 'url' : 'extensions/EXT/EXT_multisample_compatibility.txt', }, 'GL_EXT_multisampled_render_to_texture' : { 'esnumber' :", "'extensions/EXT/EXT_robustness.txt', }, 'GL_EXT_sRGB' : { 'esnumber' : 105, 'flags' : { 'public' },", "119, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_binning_control.txt', }, 'GL_QCOM_driver_control' : {", ": { 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async.txt',", "'number' : 110, 'flags' : { 'public' }, 'supporters' : { 'IBM' },", "}, 'GL_AMD_shader_stencil_export' : { 'number' : 382, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 326, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'GL_EXT_shader_image_load_formatted' : { 'number' : 449, 'flags' : { 'public' }, 'url' :", ": { 'esnumber' : 293, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_texture_foveated.txt',", ": 
'extensions/ARB/ARB_enhanced_layouts.txt', }, 'GL_ARB_explicit_attrib_location' : { 'arbnumber' : 79, 'flags' : { 'public'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_light_max_exponent.txt', }, 'GL_NV_multisample_coverage' : { 'number' : 393, 'flags'", "215, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_border_clamp.txt', }, 'GL_OES_texture_buffer' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_deep_texture3D.txt', }, 'GLX_NV_delay_before_swap' : { 'number' : 445,", "'number' : 332, 'esnumber' : 286, 'flags' : { 'public' }, 'supporters' :", "'extensions/SGIS/SGIS_multitexture.txt', }, 'GL_SGIS_pixel_texture' : { 'number' : 15, 'flags' : { 'public' },", "'WGL_I3D_swap_frame_usage' : { 'number' : 255, 'flags' : { 'public' }, 'supporters' :", "'GL_OES_compressed_ETC1_RGB8_texture' : { 'esnumber' : 5, 'flags' : { 'public' }, 'url' :", "'extensions/WIN/WIN_scene_markerXXX.txt', }, 'GL_WIN_specular_fog' : { 'number' : 114, 'flags' : { 'public' },", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_detail_texture.txt', }, 'GL_SGIS_fog_function' : { 'number' : 64, 'flags'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc.txt', }, 'GL_EXT_texture_compression_s3tc_srgb' : { 'esnumber' : 289, 'flags'", "'number' : 25, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "{ 'public' }, 'supporters' : { 'APPLE', 'CodeWeavers', 'NVIDIA', 'TransGaming' }, 'url' :", "'extensions/NV/NV_copy_depth_to_color.txt', }, 'GL_NV_copy_image' : { 'number' : 376, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/AMD/AMD_shader_stencil_value_export.txt', }, 'GL_AMD_shader_trinary_minmax' : { 'number' :", "'arbnumber' : 16, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "}, 'GL_ARB_sample_locations' : { 'arbnumber' : 181, 'flags' : { 'public' }, 'url'", "'extensions/EXT/EXT_packed_float.txt', 'alias' : { 'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float' }, }, 'GL_EXT_packed_pixels' : { 'number' :", "173, 'flags' : { 'public' }, 
'supporters' : { 'IBM', 'INGR' }, 'url'", ": { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_dxt1.txt', }, 'GL_EXT_texture_compression_latc' : { 'number'", "100, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_viewport_array.txt', }, 'GL_ARB_window_pos' : {", "'public' }, 'url' : 'extensions/OES/OES_depth32.txt', }, 'GL_OES_depth_texture' : { 'esnumber' : 43, 'flags'", ": { 'number' : 421, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_NV_texture_compression_s3tc_update' : { 'esnumber' : 95, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/WIN/WIN_scene_markerXXX.txt', }, 'GL_WIN_specular_fog' : { 'number' : 114, 'flags' : {", "'public' }, 'url' : 'extensions/EXT/EXT_draw_transform_feedback.txt', }, 'GL_EXT_external_buffer' : { 'number' : 508, 'esnumber'", "}, 'GL_EXT_framebuffer_blit' : { 'number' : 316, 'flags' : { 'public' }, 'url'", "'GL_ARB_texture_filter_minmax' : { 'arbnumber' : 188, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fog_distance.txt', }, 'GL_NV_fragment_coverage_to_color' : {", ": 'extensions/NV/NV_stereo_view_rendering.txt', }, 'GLX_NV_swap_group' : { 'number' : 350, 'flags' : { 'public'", ": { 'MESA' }, 'url' : 'extensions/MESA/MESA_pack_invert.txt', }, 'GLX_MESA_pixmap_colormap' : { 'number' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_minmax_factor.txt',", ": 184, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI' },", ": 'extensions/ARB/ARB_texture_env_dot3.txt', }, 'GL_ARB_texture_filter_anisotropic' : { 'arbnumber' : 195, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program3.txt',", ": { 'HP' }, 'url' : 'extensions/HP/HP_texture_lighting.txt', }, 'GL_IBM_cull_vertex' : { 'number' :", "101, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url'", ": { 'public' }, 'url' : 'extensions/IMG/IMG_read_format.txt', }, 
'GL_IMG_shader_binary' : { 'esnumber' :", ": { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pixel_format.txt', }, 'WGL_EXT_swap_control' : { 'number'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_blend_func_extended.txt', }, 'GL_ARB_buffer_storage' : { 'arbnumber' :", "{ 'esnumber' : 203, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_base_instance.txt', },", ": 448, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_shuffle.txt', }, 'GL_NV_shadow_samplers_array' :", "'url' : 'extensions/AMD/AMD_shader_image_load_store_lod.txt', }, 'GL_AMD_shader_stencil_export' : { 'number' : 382, 'flags' : {", ": 'extensions/QCOM/QCOM_driver_control.txt', }, 'GL_QCOM_extended_get' : { 'esnumber' : 62, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_interlace_read.txt',", "}, 'url' : 'extensions/EXT/EXT_external_objects.txt', 'alias' : { 'GL_EXT_semaphore' }, }, 'GL_EXT_memory_object_fd' : {", ": 'extensions/NV/NV_occlusion_query.txt', }, 'GL_NV_pack_subimage' : { 'esnumber' : 132, 'flags' : { 'public'", "7, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": 109, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_instanced.txt', }, 'GL_ARB_transform_feedback_overflow_query' :", ": 'extensions/NV/NV_vertex_attrib_integer_64bit.txt', }, 'GL_NV_vertex_buffer_unified_memory' : { 'number' : 380, 'flags' : { 'public'", "'url' : 'extensions/SGIX/SGIX_vector_ops.txt', }, 'GL_SGIX_vertex_array_object' : { 'flags' : { 'obsolete' }, 'url'", "'arbnumber' : 12, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'number' : 480, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_dilate.txt', }, 'GL_NV_conservative_raster_pre_snap'", "'public' }, 'url' : 'extensions/ARB/ARB_shader_bit_encoding.txt', }, 'GL_ARB_shader_clock' : { 'arbnumber' : 184, 'flags'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias' : { 
'GL_EXT_shader_framebuffer_fetch_non_coherent' }, },", "unknown.', }, 'GL_OES_point_size_array' : { 'esnumber' : 14, 'flags' : { 'public' },", ": { 'number' : 265, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/ARB/ARB_seamless_cube_map.txt', }, 'GL_ARB_seamless_cubemap_per_texture' : { 'arbnumber' : 155, 'flags' : {", "'arbnumber' : 9, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_sprite.txt', },", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_video_sync.txt', }, 'GL_SUNX_constant_data'", ": { 'esnumber' : 186, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_primitive_bounding_box.txt',", ": 468, 'esnumber' : 230, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_shader_interlock.txt',", ": { 'number' : 212, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/EXT/EXT_shader_group_vote.txt', }, 'GL_EXT_shader_image_load_formatted' : { 'number' : 449, 'flags'", ": { 'number' : 248, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_vertex_shader.txt',", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_minmax_factor.txt', }, 'GL_NV_blend_square' : { 'number' : 194,", "{ 'public' }, 'url' : 'extensions/ARM/ARM_mali_shader_binary.txt', }, 'GL_ARM_rgba8' : { 'esnumber' : 82,", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_line_quality_hint.txt', }, 'GL_SGIX_list_priority' : { 'number' : 80,", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_mirrored_repeat.txt', }, 'GL_ARB_texture_multisample'", "168, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url'", "}, 'url' : 'extensions/HP/HP_occlusion_test.txt', }, 'GL_HP_texture_lighting' : { 'number' : 111, 'flags' :", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_buffer.txt', }, 'GL_OES_texture_compression_astc' : { 'esnumber' : 162,", "}, 'GLX_MESA_query_renderer' : { 'number' : 446, 'flags' : { 'public' }, 'url'", 
"'extensions/APPLE/APPLE_vertex_array_object.txt', }, 'GL_APPLE_vertex_array_range' : { 'number' : 274, 'flags' : { 'public' },", "419, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'url' : 'extensions/OES/OES_single_precision.txt', }, 'GL_OES_standard_derivatives' : { 'esnumber' : 45, 'flags' :", "'public' }, 'url' : 'extensions/NV/NV_viewport_array2.txt', }, 'GL_NV_viewport_swizzle' : { 'number' : 483, 'esnumber'", "'extensions/AMD/WGL_AMD_gpu_association.txt', }, 'WGL_ARB_buffer_region' : { 'arbnumber' : 4, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_vertex_shader.txt', }, 'GL_ARB_vertex_type_10f_11f_11f_rev' : { 'arbnumber' : 151, 'flags' :", "'GL_EXT_paletted_texture' : { 'number' : 78, 'flags' : { 'public' }, 'supporters' :", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nurbs_eval.txt', }, 'GL_SGIX_occlusion_instrument' : {", ": { 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize_float.txt',", ": 161, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conditional_render_inverted.txt', }, 'GL_ARB_conservative_depth' :", "'GL_ARB_texture_query_levels' : { 'arbnumber' : 140, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/ARB/ARB_draw_instanced.txt', }, 'GL_ARB_enhanced_layouts' : { 'arbnumber' : 146, 'flags' : {", "{ 'arbnumber' : 181, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_locations.txt', },", "'extensions/ARB/ARB_fragment_shader.txt', }, 'GL_ARB_fragment_shader_interlock' : { 'arbnumber' : 177, 'flags' : { 'public' },", "'url' : 'extensions/OES/OES_vertex_half_float.txt', }, 'GL_OES_vertex_type_10_10_10_2' : { 'esnumber' : 46, 'flags' : {", ": { 'number' : 203, 'flags' : { 'incomplete' }, 'supporters' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_deep_texture3D.txt', }, 'GLX_NV_delay_before_swap' : {", ": 'extensions/ARM/ARM_shader_framebuffer_fetch.txt', }, 'GL_ARM_shader_framebuffer_fetch_depth_stencil' : { 
'esnumber' : 166, 'flags' : { 'public'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_add_env.txt', }, 'GL_SGIX_texture_coordinate_clamp' : { 'number' :", "}, 'url' : 'extensions/EXT/EXT_win32_keyed_mutex.txt', }, 'GL_EXT_window_rectangles' : { 'number' : 490, 'esnumber' :", ": 181, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "}, 'url' : 'extensions/SGIX/SGIX_datapipe.txt', }, 'GL_SGIX_decimation' : { 'number' : 125, 'flags' :", "'GLX_SGI_swap_control' : { 'number' : 40, 'flags' : { 'public' }, 'supporters' :", ": { 'arbnumber' : 17, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_vertex.txt', },", "'url' : 'extensions/EXT/EXT_disjoint_timer_query.txt', }, 'GL_EXT_draw_buffers' : { 'esnumber' : 151, 'flags' : {", "{ 'arbnumber' : 35, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": 111, 'flags' : { 'public' }, 'supporters' : { 'HP' }, 'url'", "25, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "}, 'url' : 'extensions/EXT/EXT_shadow_samplers.txt', }, 'GL_EXT_shared_texture_palette' : { 'number' : 141, 'flags' :", "}, 'GL_OES_gpu_shader5' : { 'esnumber' : 211, 'flags' : { 'public' }, 'url'", ": { 'number' : 92, 'flags' : { 'public' }, 'supporters' : {", ": { 'IBM', 'IMG', 'SUN' }, 'url' : 'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias' : { 'GL_SUN_multi_draw_arrays'", "{ 'GL_EXT_tessellation_point_size' }, }, 'GL_EXT_texenv_op' : { 'flags' : { 'obsolete' }, 'url'", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', }, 'GL_AMD_gpu_shader_int16'", "{ 'number' : 467, 'esnumber' : 229, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/EXT/GLU_EXT_object_space_tess.txt', }, 'GL_EXT_occlusion_query_boolean' : { 'esnumber' : 100, 'flags' :", ": 188, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', }, 'GL_EXT_conservative_depth' :", "'url' : 
'extensions/EXT/EXT_shader_io_blocks.txt', }, 'GL_EXT_shader_non_constant_global_initializers' : { 'esnumber' : 264, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_pack_subimage.txt', }, 'GL_NV_packed_depth_stencil' : { 'number'", "'esnumber' : 93, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_read_buffer.txt', }, 'GL_NV_read_depth_stencil'", "'public' }, 'url' : 'extensions/ANGLE/ANGLE_program_binary.txt', }, 'GL_ANGLE_texture_compression_dxt3' : { 'esnumber' : 111, 'flags'", "}, 'url' : 'extensions/NV/NV_fragment_program.txt', }, 'GL_NV_fragment_program2' : { 'number' : 304, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_bind.txt', }, 'GL_ARB_multi_draw_indirect' : { 'arbnumber'", "'url' : 'extensions/APPLE/APPLE_ycbcr_422.txt', }, 'GL_ARB_ES2_compatibility' : { 'arbnumber' : 95, 'flags' : {", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_texture4.txt', }, 'GL_AMD_transform_feedback3_lines_triangles'", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', }, 'GL_AMD_transform_feedback4' : { 'number' : 450,", "}, 'GL_EXT_index_func' : { 'number' : 95, 'flags' : { 'public' }, 'supporters'", ": 'extensions/INTEL/GLX_INTEL_swap_event.txt', }, 'GL_INTEL_texture_scissor' : { 'number' : 135, 'flags' : { 'public'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex_type.txt', }, 'GLX_SGI_cushion' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_gpu_affinity.txt', }, 'WGL_NV_render_depth_texture'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture.txt', 'comments' : 'Previously shared extension", "'extensions/OES/OES_vertex_type_10_10_10_2.txt', }, 'GL_OES_viewport_array' : { 'esnumber' : 267, 'flags' : { 'public' },", "'GL_SUN_global_alpha' : { 'number' : 164, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 2, 'flags' : { 'public' }, 'supporters' : { 'HP',", ": 187, 'flags' : { 'public' }, 'url' 
: 'extensions/ARB/ARB_sparse_texture_clamp.txt', }, 'GL_ARB_spirv_extensions' :", "'public' }, 'url' : 'extensions/ARB/ARB_buffer_storage.txt', }, 'GL_ARB_cl_event' : { 'arbnumber' : 103, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback3.txt', }, 'GL_ARB_transform_feedback_instanced' : { 'arbnumber'", "{ 'number' : 389, 'esnumber' : 260, 'flags' : { 'public' }, 'supporters'", "{ 'ATI' }, 'url' : 'extensions/ATI/WGL_ATI_pixel_format_float.txt', }, 'WGL_EXT_colorspace' : { 'number' : 498,", "}, 'GL_SGIX_texture_add_env' : { 'number' : 69, 'flags' : { 'public' }, 'supporters'", ": 'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias' : { 'GL_KHR_texture_compression_astc_ldr' }, }, 'GL_KHR_texture_compression_astc_sliced_3d' : { 'arbnumber' :", "'number' : 262, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_swap_group.txt', }, 'WGL_NV_video_output' : {", "{ 'arbnumber' : 25, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_set.txt', }, 'GL_EXT_vertex_array_setXXX' : { 'flags'", ": 'extensions/SGIX/SGIX_async_pixel.txt', }, 'GL_SGIX_bali_g_instruments' : { 'flags' : { 'incomplete' }, 'url' :", "'url' : 'extensions/APPLE/APPLE_copy_texture_levels.txt', }, 'GL_APPLE_element_array' : { 'number' : 271, 'flags' : {", ": { 'arbnumber' : 103, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cl_event.txt',", "'ARB' }, 'url' : 'extensions/ARB/ARB_pixel_buffer_object.txt', }, 'GL_ARB_point_parameters' : { 'arbnumber' : 14, 'flags'", "{ 'arbnumber' : 34, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_pixel.txt', }, 'GL_ARB_half_float_vertex' : { 'arbnumber' :", "'GL_OES_byte_coordinates' : { 'number' : 291, 'esnumber' : 4, 'flags' : { 'public'", "'url' : 'extensions/ANGLE/ANGLE_texture_usage.txt', }, 'GL_ANGLE_translated_shader_source' : { 
'esnumber' : 113, 'flags' : {", "{ 'arbnumber' : 156, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_draw_parameters.txt', },", ": 'extensions/AMD/AMD_program_binary_Z400.txt', }, 'GL_AMD_query_buffer_object' : { 'number' : 420, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_timer_query.txt', }, 'GL_ARB_transform_feedback2' : { 'arbnumber' : 93,", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragment_lighting_space.txt', }, 'GL_SGIX_fragment_specular_lighting' :", "'extensions/EXT/EXT_clip_control.txt', }, 'WGL_I3D_digital_video_control' : { 'number' : 250, 'flags' : { 'public' },", "'url' : 'extensions/EXT/EXT_pixel_buffer_object.txt', }, 'GL_EXT_pixel_transform' : { 'number' : 138, 'flags' : {", "25, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth32.txt', }, 'GL_OES_depth_texture' : {", "{ 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_buffer_region.txt', }, 'WGL_ARB_create_context' : { 'arbnumber' : 55,", "'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig.txt', }, 'GLX_SGIX_fbconfig_float' : { 'flags' : { 'incomplete'", "247, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_int64.txt', }, 'GL_ARB_half_float_pixel' : { 'arbnumber' :", "{ 'esnumber' : 192, 'flags' : { 'public' }, 'url' : 'extensions/DMP/DMP_program_binary.txt', },", "'extensions/ARB/ARB_get_texture_sub_image.txt', }, 'GL_ARB_gl_spirv' : { 'arbnumber' : 190, 'flags' : { 'public' },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners.txt', }, 'GL_NV_register_combiners2' : {", ": 'extensions/SUN/SUN_triangle_list.txt', }, 'GL_SUN_vertex' : { 'number' : 166, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_gpu_affinity.txt',", ": 170, 'esnumber' : 190, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robustness.txt',", "'GL_NV_3dvision_settings' : { 'esnumber' : 129, 'flags' : { 
'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_buffer.txt', }, 'GL_NV_copy_depth_to_color' : { 'number'", "{ 'arbnumber' : 106, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_stencil_export.txt', },", "}, 'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_bgra.txt', }, 'GL_EXT_bindable_uniform' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_shader4.txt', }, 'GL_NV_geometry_shader_passthrough' : { 'number' : 470,", "{ 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url'", "'extensions/EXT/EXT_blend_logic_op.txt', }, 'GL_EXT_blend_minmax' : { 'number' : 37, 'esnumber' : 65, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_buffer_storage.txt', }, 'GL_EXT_clear_texture' : { 'esnumber'", "'url' : 'extensions/NV/NV_parameter_buffer_object2.txt', }, 'GL_NV_path_rendering' : { 'number' : 410, 'esnumber' : 199,", ": 'extensions/AMD/AMD_compressed_ATC_texture.txt', }, 'GL_AMD_conservative_depth' : { 'number' : 385, 'flags' : { 'public'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_primitive_restart.txt', }, 'GL_NV_query_resource' : { 'number' : 511, 'flags'", "}, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp.txt', }, 'GL_EXT_texture_mirror_clamp_to_edge' : { 'esnumber' : 291, 'flags' :", "{ 'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_texture_compression_FXT1.txt', }, 'GL_AMD_blend_minmax_factor'", ": 'extensions/NV/NV_texture_shader.txt', }, 'GL_NV_texture_shader2' : { 'number' : 231, 'flags' : { 'public'", "'GL_ARB_geometry_shader4' : { 'arbnumber' : 47, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/EXT/EXT_shader_group_vote.txt', }, 'GL_EXT_shader_image_load_formatted' : { 'number' : 449, 'flags' : { 'public'", "'esnumber' : 117, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias' :", "{ 'GL_EXT_shader_framebuffer_fetch_non_coherent' }, }, 'GL_EXT_shader_group_vote' : { 'esnumber' : 254, 'flags' : {", "{ 
'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_depth_texture.txt', }, 'WGL_NV_render_texture_rectangle'", "506, 'esnumber' : 283, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_win32_keyed_mutex.txt', },", "}, 'url' : 'extensions/OES/OES_sample_shading.txt', }, 'GL_OES_sample_variables' : { 'esnumber' : 170, 'flags' :", "}, 'GL_ARB_texture_buffer_range' : { 'arbnumber' : 139, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_make_current_read.txt', }, 'GLX_SGI_swap_control' : { 'number'", ": { 'number' : 402, 'esnumber' : 152, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/QCOM/QCOM_driver_control.txt', }, 'GL_QCOM_extended_get' : { 'esnumber' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_make_current_read.txt', }, 'WGL_ARB_pbuffer'", "{ 'number' : 416, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "'esnumber' : 47, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_get_program_binary.txt', }, 'GL_OES_gpu_shader5'", ": { 'number' : 335, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_EXT_shader_non_constant_global_initializers' : { 'esnumber' : 264, 'flags' : { 'public' }, 'url'", "'GL_EXT_stencil_clear_tag' : { 'number' : 314, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 347, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Alias to WGL_ARB_create_context_profile not needed - see", ": { 'esnumber' : 93, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_read_buffer.txt',", "{ 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_marker.txt', }, 'GL_EXT_depth_bounds_test' : { 'number' : 297,", ": { 'public' }, 'url' : 'extensions/OES/OES_fbo_render_mipmap.txt', }, 'GL_OES_fixed_point' : { 'number' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop2.txt', }, 
'WGL_NV_delay_before_swap' : { 'number' : 436,", "'public' }, 'url' : 'extensions/EXT/EXT_shader_implicit_conversions.txt', }, 'GL_EXT_shader_integer_mix' : { 'number' : 437, 'esnumber'", "}, 'GL_INTEL_fragment_shader_ordering' : { 'number' : 441, 'flags' : { 'public' }, 'supporters'", ": 198, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'url' : 'extensions/EXT/EXT_rescale_normal.txt', }, 'GL_EXT_robustness' : { 'esnumber' : 107, 'flags' : {", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle_compressed.txt', }, 'GL_NV_texture_shader' : { 'number' :", "'number' : 327, 'esnumber' : 157, 'flags' : { 'public' }, 'supporters' :", "'arbnumber' : 47, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_clamp.txt', }, 'GL_NV_depth_nonlinear' : { 'esnumber' : 73,", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_program.txt', }, 'GL_ARB_vertex_shader' : { 'arbnumber'", "210, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_geometry_shader.txt', }, 'GL_OES_get_program_binary' : {", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_ycbcr_422.txt',", "'url' : 'extensions/OES/OES_point_size_array.txt', }, 'GL_OES_point_sprite' : { 'esnumber' : 15, 'flags' : {", "'public' }, 'url' : 'extensions/ANGLE/ANGLE_depth_texture.txt', }, 'GL_ANGLE_framebuffer_blit' : { 'esnumber' : 83, 'flags'", ": 'extensions/SGI/SGI_fft.txt', }, 'GLU_SGI_filter4_parameters' : { 'number' : 85, 'flags' : { 'public'", "480, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_dilate.txt', }, 'GL_NV_conservative_raster_pre_snap' : {", "'number' : 354, 'flags' : { 'public' }, 'supporters' : { 'Blizzard', 'NVIDIA',", "'extensions/NV/NV_bindless_multi_draw_indirect.txt', }, 'GL_NV_bindless_multi_draw_indirect_count' : { 'number' : 456, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_geometry_shader4.txt', }, 'GLX_EXT_stereo_tree' : { 'number' : 452, 
'flags'", ": 'extensions/EXT/EXT_provoking_vertex.txt', }, 'GL_EXT_pvrtc_sRGB' : { 'esnumber' : 155, 'flags' : { 'public'", "}, 'GL_OES_get_program_binary' : { 'esnumber' : 47, 'flags' : { 'public' }, 'url'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_pixel_buffer_object.txt', }, 'GL_EXT_pixel_transform' : { 'number' :", "{ 'public' }, 'url' : 'extensions/OES/OES_rgb8_rgba8.txt', }, 'GL_OES_sample_shading' : { 'esnumber' : 169,", ": 'extensions/SGIS/SGIS_generate_mipmap.txt', }, 'GL_SGIS_line_texgen' : { 'flags' : { 'incomplete' }, 'url' :", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_add.txt', }, 'GL_ARB_texture_env_combine' : { 'arbnumber' : 17, 'flags'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_perturb_normal.txt', }, 'GL_EXT_texture_rg' : { 'esnumber' :", "{ 'HP', 'SUN' }, 'url' : 'extensions/HP/HP_image_transform.txt', }, 'GL_HP_occlusion_test' : { 'number' :", ": { 'number' : 408, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'url' : 'extensions/ARM/ARM_mali_program_binary.txt', }, 'GL_ARM_mali_shader_binary' : { 'esnumber' :", "'number' : 167, 'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_display_color_table.txt', }, 'WGL_EXT_extensions_string'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_packed_float.txt',", "'url' : 'extensions/ATI/ATI_vertex_array_object.txt', }, 'GL_ATI_vertex_attrib_array_object' : { 'number' : 290, 'flags' : {", "'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_cull_vertex.txt', }, 'GL_EXT_debug_label'", "'extensions/EXT/EXT_texture_format_sRGB_override.txt', }, 'GLX_EXT_texture_from_pixmap' : { 'number' : 344, 'flags' : { 'public' },", "}, 'GL_EXT_shader_integer_mix' : { 'number' : 437, 'esnumber' : 161, 'flags' : {", "'number' : 344, 'flags' : { 'public' }, 'supporters' : { 'MESA', 'NVIDIA'", ": 'extensions/ARB/ARB_texture_env_crossbar.txt', }, 'GL_ARB_texture_env_dot3' : { 'arbnumber' : 19, 'flags' : { 'public'", "{ 
'arbnumber' : 160, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clip_control.txt', },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_multicast.txt', }, 'GL_NV_gpu_program4' : {", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_map_buffer_range.txt', }, 'GL_ARB_matrix_palette'", ": { 'number' : 381, 'esnumber' : 271, 'flags' : { 'public' },", "}, 'url' : 'extensions/NV/NV_gpu_multicast.txt', }, 'GL_NV_gpu_program4' : { 'number' : 322, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'REND' }, 'url' : 'extensions/REND/REND_screen_coordinates.txt',", "{ 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_texture.txt', }, 'GL_EXT_instanced_arrays' : { 'esnumber' :", "}, 'url' : 'extensions/SGIX/SGIX_texture_supersample.txt', }, 'GL_SGIX_vector_ops' : { 'flags' : { 'incomplete' },", "{ 'number' : 242, 'flags' : { 'public' }, 'supporters' : { 'KHR'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counter_ops.txt', }, 'GL_ARB_shader_atomic_counters' : { 'arbnumber' : 114,", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_blend_minmax_factor.txt', }, 'GL_AMD_compressed_3DC_texture' : { 'esnumber' : 39,", "{ 'public' }, 'url' : 'extensions/EXT/EXT_shader_texture_lod.txt', }, 'GL_EXT_shadow_funcs' : { 'number' : 267,", "{ 'number' : 509, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'extensions/ARB/ARB_stencil_texturing.txt', }, 'GL_ARB_sync' : { 'arbnumber' : 66, 'flags' : { 'public' },", ": { 'arbnumber' : 37, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_load.txt', }, 'GL_NV_shader_buffer_store' : {", "}, 'url' : 'extensions/SGIX/SGIX_blend_cadd.txt', }, 'GL_SGIX_blend_cmultiply' : { 'flags' : { 'incomplete' },", "'extensions/ARB/ARB_shading_language_420pack.txt', }, 'GL_ARB_shading_language_include' : { 'arbnumber' : 76, 'flags' : { 'public' },", ": { 'esnumber' : 275, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_multisampled_render_to_texture2.txt',", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_impact_pixel_texture.txt', }, 'GL_SGIX_instrument_error'", ": 'extensions/HP/HP_texture_lighting.txt', }, 'GL_IBM_cull_vertex' : { 'number' : 199, 'flags' : { 'public'", "'GL_ARB_provoking_vertex' : { 'arbnumber' : 64, 'flags' : { 'public' }, 'url' :", "'extensions/NV/NV_shader_noperspective_interpolation.txt', }, 'GL_NV_shader_storage_buffer_object' : { 'number' : 422, 'flags' : { 'public' },", "'arbnumber' : 1, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect.txt', }, 'GL_NV_bindless_multi_draw_indirect_count' : {", "'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', }, 'GL_IGLOO_toggle_color_and_lightXXX' : { 'flags' : {", ": 'extensions/ARB/ARB_transform_feedback_overflow_query.txt', }, 'GL_ARB_transpose_matrix' : { 'arbnumber' : 3, 'flags' : { 'public'", "{ 'arbnumber' : 92, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', },", "'esnumber' : 88, 'flags' : { 'public' }, 'url' : 'extensions/DMP/DMP_shader_binary.txt', }, 'GL_EXT_422_pixels'", "518, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_underestimation.txt', }, 'GLX_NV_copy_buffer' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_program.txt',", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_multisample.txt', }, 'GL_NV_texture_npot_2D_mipmap' : { 'esnumber' : 96,", ": { 'esnumber' : 257, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clip_cull_distance.txt',", "'url' : 'extensions/EXT/EXT_EGL_image_storage.txt', }, 'GL_EXT_memory_object' : { 'number' : 503, 'esnumber' : 280,", "'extensions/APPLE/APPLE_float_pixels.txt', }, 'GL_APPLE_flush_buffer_range' : { 'number' : 321, 'flags' : { 'public' },", "'url' : 
'extensions/NV/NV_texture_rectangle_compressed.txt', }, 'GL_NV_texture_shader' : { 'number' : 230, 'flags' : {", "'GL_IMG_texture_filter_cubic' : { 'esnumber' : 251, 'flags' : { 'public' }, 'url' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_add_env.txt', }, 'GL_SGIX_texture_coordinate_clamp' :", "'url' : 'extensions/ARB/ARB_point_parameters.txt', }, 'GL_ARB_point_sprite' : { 'arbnumber' : 35, 'flags' : {", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program.txt', }, 'GL_ARB_fragment_program_shadow' : { 'arbnumber'", "{ 'number' : 102, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_occlusion_query_boolean.txt', }, 'GL_EXT_packed_depth_stencil' : { 'number'", "'extensions/EXT/EXT_texture_format_BGRA8888.txt', }, 'GL_EXT_texture_format_sRGB_override' : { 'esnumber' : 299, 'flags' : { 'public' },", ": { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_fog_function.txt', }, 'GL_SGIS_generate_mipmap' : { 'number' :", "{ 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_image_buffer.txt', }, 'WGL_I3D_swap_frame_lock' : { 'number' : 254,", "'extensions/EXT/EXT_texture_filter_anisotropic.txt', }, 'GL_EXT_texture_filter_minmax' : { 'number' : 464, 'esnumber' : 227, 'flags' :", "'url' : 'extensions/EXT/EXT_vertex_array_bgra.txt', }, 'GL_EXT_vertex_array_set' : { 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_minmax.txt',", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_atomic_counter_ops.txt',", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_texture.txt', },", "}, 'url' : 'extensions/EXT/EXT_blend_func_extended.txt', }, 'GL_EXT_blend_func_separate' : { 'number' : 173, 'flags' :", ": 'extensions/NV/NV_deep_texture3D.txt', }, 'GLX_NV_delay_before_swap' : { 'number' : 445, 'flags' : { 'public'", "}, 'url' : 
'extensions/EXT/EXT_texture_format_sRGB_override.txt', }, 'GLX_EXT_texture_from_pixmap' : { 'number' : 344, 'flags' :", "}, 'url' : 'extensions/SGIS/SGIS_texture_select.txt', }, 'GL_SGIX_async' : { 'number' : 132, 'flags' :", "'GL_SGIX_ycrcba' : { 'number' : 203, 'flags' : { 'incomplete' }, 'supporters' :", "{ 'arbnumber' : 23, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'public' }, 'url' : 'extensions/EXT/EXT_base_instance.txt', }, 'GL_EXT_bgra' : { 'number' : 129, 'flags'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_scene_marker.txt', 'alias' : { 'GLX_EXT_scene_marker' }, }, 'GL_EXT_secondary_color'", "'public' }, 'url' : 'extensions/ARB/ARB_get_texture_sub_image.txt', }, 'GL_ARB_gl_spirv' : { 'arbnumber' : 190, 'flags'", "{ 'arbnumber' : 1, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_bptc.txt', }, 'GL_EXT_texture_compression_dxt1' : {", ": { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', }, 'GL_NVX_conditional_render' : { 'number' :", "'extensions/SGIX/SGIX_texture_range.txt', }, 'GL_SGIX_texture_scale_bias' : { 'number' : 56, 'flags' : { 'public' },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize.txt', }, 'GLX_SGIX_video_resize_float' : {", "}, 'url' : 'extensions/EXT/EXT_color_buffer_float.txt', }, 'GL_EXT_color_buffer_half_float' : { 'esnumber' : 97, 'flags' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_scene_marker.txt', 'alias' : { 'GLX_EXT_scene_marker' }, },", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_framezoom.txt', }, 'GLX_SGIX_hyperpipe'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_noperspective_interpolation.txt', }, 'GL_NV_shader_storage_buffer_object' : { 'number'", "'GL_NV_read_buffer' : { 'esnumber' : 93, 'flags' : { 'public' }, 'url' :", "'INGR' }, 'url' : 'extensions/EXT/EXT_422_pixels.txt', }, 'GL_EXT_YUV_target' : { 'esnumber' : 222, 
'flags'", "{ 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_line_texgen.txt', }, 'GL_SGIS_multisample' : { 'number' : 25,", ": { 'esnumber' : 170, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_variables.txt',", ": 'extensions/ARB/ARB_texture_rg.txt', }, 'GL_ARB_texture_rgb10_a2ui' : { 'arbnumber' : 83, 'flags' : { 'public'", "131, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'number' : 258, 'flags' : { 'public' }, 'supporters' : { 'SUN'", "17, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_image_load_store_lod.txt', },", "'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow.txt', }, 'GL_SGIX_shadow_ambient'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_buffers.txt', }, 'GL_ARB_draw_buffers_blend' :", "'public' }, 'url' : 'extensions/NV/NV_internalformat_sample_query.txt', }, 'GL_NV_light_max_exponent' : { 'number' : 189, 'flags'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_instanced.txt', }, 'GL_ARB_enhanced_layouts' : { 'arbnumber'", "'number' : 142, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", ": { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multisample.txt', 'alias' : { 'GLX_SGIS_multisample' }, },", "'extensions/ARB/ARB_copy_image.txt', }, 'GLX_ARB_create_context' : { 'arbnumber' : 56, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', }, 'GL_ARB_texture_buffer_range' : { 'arbnumber' : 139, 'flags'", "}, 'GL_IMG_multisampled_render_to_texture' : { 'esnumber' : 74, 'flags' : { 'public' }, 'url'", ": { 'number' : 496, 'flags' : { 'public' }, 'supporters' : {", "'extensions/INTEL/INTEL_fragment_shader_ordering.txt', }, 'GL_INTEL_framebuffer_CMAA' : { 'number' : 481, 'esnumber' : 246, 'flags' :", ": { 'arbnumber' : 69, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_buffers_blend.txt',", "'public' }, 
'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url'", "{ 'esnumber' : 153, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB_write_control.txt', },", "'arbnumber' : 119, 'esnumber' : 118, 'flags' : { 'public' }, 'url' :", "'public' }, 'url' : 'extensions/NV/NV_texture_border_clamp.txt', }, 'GL_NV_texture_compression_latc' : { 'esnumber' : 130, 'flags'", "'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_interlace.txt', }, 'GL_OML_resample' :", "182, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counter_ops.txt', }, 'GL_ARB_shader_atomic_counters' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop.txt', },", "240, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", "{ 'number' : 138, 'flags' : { 'public' }, 'supporters' : { 'HP',", "{ 'public' }, 'url' : 'extensions/ARB/ARB_gl_spirv.txt', }, 'GL_ARB_gpu_shader5' : { 'arbnumber' : 88,", "'GL_NV_geometry_program4' : { 'number' : 323, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_compression_vtc.txt', },", "'public' }, 'url' : 'extensions/ARB/ARB_compute_shader.txt', }, 'GL_ARB_compute_variable_group_size' : { 'arbnumber' : 153, 'flags'", ": 316, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_blit.txt', }, 'GL_EXT_framebuffer_multisample' :", "}, 'GL_NV_geometry_shader4' : { 'number' : 338, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/EXT/EXT_blend_logic_op.txt', }, 'GL_EXT_blend_minmax' : { 'number' : 37, 'esnumber' :", "'url' : 'extensions/EXT/EXT_direct_state_access.txt', }, 'GL_EXT_discard_framebuffer' : { 'esnumber' : 64, 'flags' : {", ": 17, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", ": { 'public' }, 'url' : 'extensions/QCOM/QCOM_alpha_test.txt', }, 'GL_QCOM_binning_control' : { 'esnumber' :", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 
'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', },", "}, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_buffer_age.txt', }, 'GL_EXT_buffer_storage' :", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_object.txt', }, 'GL_APPLE_vertex_array_range' : { 'number' : 274,", "'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias' : { 'GL_SUN_multi_draw_arrays' }, }, 'GL_EXT_multi_draw_indirect' : { 'esnumber' : 205,", ": { 'public' }, 'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_misc_hints.txt', },", "'number' : 407, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_label.txt', },", "{ 'public' }, 'url' : 'extensions/MESA/GLX_MESA_query_renderer.txt', }, 'GLX_MESA_release_buffers' : { 'number' : 217,", "'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_texture.txt', }, 'GL_NV_draw_vulkan_image' : { 'number' : 501, 'esnumber'", ": 'extensions/NV/NV_image_formats.txt', }, 'GL_NV_instanced_arrays' : { 'esnumber' : 145, 'flags' : { 'public'", "{ 'arbnumber' : 122, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compute_shader.txt', },", "33, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", ": 6, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", "{ 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_make_current_read.txt', }, 'GLX_SGI_swap_control' : { 'number' : 40,", "'GL_SGIX_fog_scale' : { 'number' : 161, 'flags' : { 'incomplete' }, 'supporters' :", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_texture.txt', }, 'GL_ARB_derivative_control' : { 'arbnumber' : 163,", "{ 'esnumber' : 67, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_program_binary.txt', },", "'extensions/MESA/GLX_MESA_swap_control.txt', }, 'GL_MESA_tile_raster_order' : { 'number' : 515, 'esnumber' : 292, 'flags' :", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_client_storage.txt', }, 'GL_APPLE_clip_distance' :", ": 
'extensions/NV/NV_texture_env_combine4.txt', }, 'GL_NV_texture_expand_normal' : { 'number' : 286, 'flags' : { 'public'", "'GL_ANDROID_extension_pack_es31a' : { 'esnumber' : 187, 'flags' : { 'public' }, 'url' :", "'number' : 365, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming'", "{ 'number' : 54, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "}, 'GL_ARB_pixel_buffer_object' : { 'arbnumber' : 42, 'flags' : { 'public' }, 'supporters'", "'GL_OES_depth32' : { 'esnumber' : 25, 'flags' : { 'public' }, 'url' :", "74.', }, 'WGL_ARB_create_context_profile' : { 'arbnumber' : 74, 'flags' : { 'public' },", "'extensions/ARB/ARB_create_context_no_error.txt', 'comments' : 'Shares extension spec with WGL_ARB_create_context_no_error.', 'alias' : { 'WGL_ARB_create_context_no_error' },", "'GL_APPLE_aux_depth_stencil' : { 'number' : 370, 'flags' : { 'public' }, 'supporters' :", "'GL_APPLE_texture_range' : { 'number' : 367, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', }, 'GL_MESA_pack_invert' : { 'number' : 300, 'flags' :", "}, 'GL_SGIX_fog_factor_to_alpha' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', },", "'url' : 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', }, 'GL_ANGLE_depth_texture' : { 'esnumber' : 138, 'flags' : {", "'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_interlace_read.txt', }, 'GL_INTEL_conservative_rasterization' : { 'number'", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map_array.txt', }, 'GL_OES_texture_env_crossbar' : { 'esnumber' :", "'supporters' : { 'MESA', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', }, 'GL_EXT_texture_integer' : {", "'number' : 199, 'flags' : { 'public' }, 'supporters' : { 'IBM' },", "'url' : 'extensions/NV/NV_shader_atomic_counters.txt', }, 'GL_NV_shader_atomic_float' : { 'number' : 419, 'flags' : {", "'supporters' : { 'APPLE' }, 'url' : 
'extensions/APPLE/APPLE_specular_vector.txt', }, 'GL_APPLE_sync' : { 'esnumber'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_blit.txt', }, 'GL_EXT_framebuffer_multisample' : { 'number' :", "'public' }, 'url' : 'extensions/NV/NV_draw_instanced.txt', }, 'GL_NV_draw_texture' : { 'number' : 430, 'esnumber'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_sparse_buffer.txt', }, 'GL_ARB_sparse_texture' : { 'arbnumber' : 158,", "'GL_AMD_name_gen_delete' : { 'number' : 394, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_blended_overlay.txt', }, 'GL_SGIS_clip_band_hint'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_parameters.txt', },", "{ 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', }, 'GL_IGLOO_toggle_color_and_lightXXX' :", "'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control.txt', }, 'WGL_EXT_swap_control_tear' : { 'number' : 415,", "{ 'arbnumber' : 51, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_int64.txt', }, 'GL_ARB_half_float_pixel' : { 'arbnumber'", "'url' : 'extensions/ARB/ARB_transpose_matrix.txt', }, 'GL_ARB_uniform_buffer_object' : { 'arbnumber' : 57, 'flags' : {", ": { 'number' : 3, 'flags' : { 'public' }, 'supporters' : {", "'number' : 111, 'flags' : { 'public' }, 'supporters' : { 'HP' },", ": { 'esnumber' : 29, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_mapbuffer.txt',", ": 'extensions/EXT/EXT_color_buffer_float.txt', }, 'GL_EXT_color_buffer_half_float' : { 'esnumber' : 97, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_sharpen_texture.txt', },", "}, 'GL_QCOM_binning_control' : { 'esnumber' : 119, 'flags' : { 'public' }, 'url'", "53, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", ": { 'public' }, 'url' : 
'extensions/ARB/ARB_copy_image.txt', }, 'GLX_ARB_create_context' : { 'arbnumber' :", ": 'extensions/ARB/ARB_texture_border_clamp.txt', }, 'GL_ARB_texture_buffer_object' : { 'arbnumber' : 51, 'flags' : { 'public'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners.txt', }, 'GL_NV_register_combiners2' : { 'number'", "'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/EXT_422_pixels.txt',", "'number' : 473, 'esnumber' : 236, 'flags' : { 'public' }, 'url' :", ": { 'number' : 36, 'flags' : { 'public' }, 'supporters' : {", "'esnumber' : 111, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias' :", "'GLX_EXT_create_context_es2_profile' : { 'number' : 399, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_alpha_minmax.txt', },", ": 'extensions/NV/NV_register_combiners2.txt', }, 'GL_NV_robustness_video_memory_purge' : { 'number' : 484, 'flags' : { 'public'", ": 331, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'number' : 64, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'GL_ARB_point_sprite' : { 'arbnumber' : 35, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_post_depth_coverage.txt', }, 'GL_ARB_program_interface_query' : { 'arbnumber' :", "'GLX_EXT_buffer_age' : { 'number' : 427, 'flags' : { 'public' }, 'supporters' :", ": 417, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", ": { 'arbnumber' : 172, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_buffer.txt',", "'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_rasterpos_clip.txt', }, 'GL_IBM_static_data' : { 'number'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil_wrap.txt', }, 'GL_OES_surfaceless_context' : { 'esnumber'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', }, 'GL_NV_bindless_texture' :", ": 
{ 'number' : 346, 'esnumber' : 198, 'flags' : { 'public' },", "'number' : 230, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 396, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'GL_EXT_disjoint_timer_query' : { 'esnumber' : 150, 'flags' : { 'public' }, 'url' :", ": { 'ANGLE' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', }, 'GL_EXT_texture_cube_map' : { 'flags' :", "'WGL_NV_copy_image' }, }, 'GL_NV_coverage_sample' : { 'esnumber' : 72, 'flags' : { 'public'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_make_current_read.txt', }, 'GLX_SGI_swap_control' :", "'GL_EXT_YUV_target' : { 'esnumber' : 222, 'flags' : { 'public' }, 'url' :", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_program_parameters.txt', }, 'GL_EXT_gpu_shader4' : { 'number' : 326, 'flags'", ": 'extensions/EXT/GLU_EXT_object_space_tess.txt', }, 'GL_EXT_occlusion_query_boolean' : { 'esnumber' : 100, 'flags' : { 'public'", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_element_array.txt', }, 'GL_APPLE_fence' : { 'number' : 272,", "{ 'number' : 442, 'flags' : { 'public' }, 'supporters' : { 'AMD'", ": 'extensions/EXT/EXT_gpu_shader5.txt', }, 'GL_EXT_histogram' : { 'number' : 11, 'flags' : { 'public'", "'url' : 'extensions/EXT/EXT_pvrtc_sRGB.txt', }, 'GL_EXT_raster_multisample' : { 'number' : 462, 'esnumber' : 226,", "{ 'esnumber' : 93, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_read_buffer.txt', },", ": { 'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_tbuffer.txt', },", "}, 'WGL_EXT_display_color_table' : { 'number' : 167, 'flags' : { 'public' }, 'url'", "'number' : 129, 'flags' : { 'public' }, 'supporters' : { 'MS' },", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_element_array.txt', },", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_gather_bias_lod.txt', }, 'GL_AMD_texture_texture4' : { 'number'", "'extensions/NV/NV_viewport_array.txt', }, 
'GL_NV_viewport_array2' : { 'number' : 476, 'esnumber' : 237, 'flags' :", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_complex_polar.txt', }, 'GL_SGIX_convolution_accuracy' : {", "'url' : 'extensions/EXT/EXT_shadow_funcs.txt', }, 'GL_EXT_shadow_samplers' : { 'esnumber' : 102, 'flags' : {", "'public' }, 'url' : 'extensions/NV/NV_conservative_raster_dilate.txt', }, 'GL_NV_conservative_raster_pre_snap' : { 'number' : 517, 'esnumber'", ": { 'arbnumber' : 156, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_draw_parameters.txt',", "19, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_store.txt', }, 'GL_NV_shader_noperspective_interpolation' : { 'esnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_texture.txt', }, 'GL_ARB_clip_control' : { 'arbnumber'", "}, 'GL_IBM_vertex_array_lists' : { 'number' : 201, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/OES/OES_rgb8_rgba8.txt', }, 'GL_OES_sample_shading' : { 'esnumber' : 169, 'flags' :", ": { 'arbnumber' : 30, 'flags' : { 'public' }, 'supporters' : {", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_occlusion_query_event.txt', }, 'GL_AMD_performance_monitor' : { 'number' :", "'public' }, 'url' : 'extensions/ARB/ARB_query_buffer_object.txt', }, 'GL_ARB_robust_buffer_access_behavior' : { 'arbnumber' : 135, 'flags'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_post_depth_coverage.txt', }, 'GL_EXT_primitive_bounding_box' : { 'esnumber' : 186,", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_group_vote.txt', }, 'GL_EXT_shader_image_load_formatted' : { 'number'", "226, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'number' : 447, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_group.txt', }, 'GL_NV_shader_thread_shuffle'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_filter_anisotropic.txt', }, 'GL_EXT_texture_filter_minmax' : { 'number' :", "}, 
'GL_NV_parameter_buffer_object' : { 'number' : 339, 'flags' : { 'public' }, 'supporters'", "'public' }, 'url' : 'extensions/OES/OES_vertex_type_10_10_10_2.txt', }, 'GL_OES_viewport_array' : { 'esnumber' : 267, 'flags'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_source.txt', }, 'GLX_SGIX_visual_select_group' : {", "75, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Included with", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_attrib_integer_64bit.txt', }, 'GL_NV_vertex_buffer_unified_memory' : { 'number' : 380,", ": 190, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'extensions/SGI/GLX_SGI_swap_control.txt', }, 'GL_SGI_texture_color_table' : { 'number' : 17, 'flags' : { 'public' },", "{ 'number' : 435, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "'extensions/AMD/AMD_multi_draw_indirect.txt', }, 'GL_AMD_name_gen_delete' : { 'number' : 394, 'flags' : { 'public' },", "}, 'GL_ARB_texture_buffer_object_rgb32' : { 'arbnumber' : 92, 'flags' : { 'public' }, 'url'", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_pixel_buffer_object.txt', }, 'GL_EXT_pixel_transform' : { 'number' : 138,", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_blend.txt', }, 'GL_SGIX_fog_factor_to_alpha' : { 'flags' : { 'incomplete'", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_patchy.txt', }, 'GL_SGIX_fog_scale' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_resample.txt', }, 'GL_SGIX_scalebias_hint' :", ": 496, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", "'extensions/SGIS/SGIS_generate_mipmap.txt', }, 'GL_SGIS_line_texgen' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_line_texgen.txt',", ": 379, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "290, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", "'incomplete' }, 'url' : 'extensions/SGIS/SGIS_line_texgen.txt', }, 'GL_SGIS_multisample' : { 'number' : 25, 
'flags'", "'GL_NV_blend_equation_advanced_coherent' }, }, 'GL_NV_blend_minmax_factor' : { 'number' : 510, 'esnumber' : 285, 'flags'", ": 168, 'esnumber' : 191, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_context_flush_control.txt',", "{ 'public' }, 'url' : 'extensions/EXT/WGL_EXT_multisample.txt', 'alias' : { 'GL_EXT_multisample' }, }, 'WGL_EXT_pbuffer'", "'alias' : { 'GLX_EXT_scene_marker' }, }, 'GL_EXT_secondary_color' : { 'number' : 145, 'flags'", "'extensions/SGIX/SGIX_async_pixel.txt', }, 'GL_SGIX_bali_g_instruments' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_g_instruments.txt',", ": 'extensions/AMD/AMD_shader_atomic_counter_ops.txt', }, 'GL_AMD_shader_ballot' : { 'number' : 497, 'flags' : { 'public'", "'SGI' }, 'url' : 'extensions/EXT/EXT_polygon_offset.txt', }, 'GL_EXT_polygon_offset_clamp' : { 'number' : 460, 'esnumber'", ": 176, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_2_compatibility.txt', }, 'GL_ARB_ES3_compatibility' :", ": 'extensions/ARB/ARB_uniform_buffer_object.txt', }, 'GL_ARB_vertex_array_bgra' : { 'arbnumber' : 68, 'flags' : { 'public'", "}, 'GL_OES_viewport_array' : { 'esnumber' : 267, 'flags' : { 'public' }, 'url'", "'esnumber' : 251, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_filter_cubic.txt', }, 'GL_IMG_user_clip_plane'", "'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_single_precision.txt', }, 'GL_OES_standard_derivatives' : { 'esnumber'", ": 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Alias to WGL_ARB_create_context_profile not needed - see arbnumber 74.',", "}, 'url' : 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', }, 'GL_MESAX_texture_stack' : { 'number' : 318, 'flags' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle_compressed.txt', }, 'GL_NV_texture_shader' : {", "{ 'number' : 492, 'esnumber' : 266, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/MESA/MESA_resize_buffers.txt', }, 
'GLX_MESA_set_3dfx_mode' : { 'number' : 218, 'flags' : {", "'GL_NV_robustness_video_memory_purge' : { 'number' : 484, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/EXT/EXT_pixel_transform_color_table.txt', }, 'GL_EXT_point_parameters' : { 'number' : 54, 'flags' :", "}, 'url' : 'extensions/APPLE/APPLE_vertex_array_object.txt', }, 'GL_APPLE_vertex_array_range' : { 'number' : 274, 'flags' :", "'arbnumber' : 111, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conservative_depth.txt', }, 'GL_ARB_copy_buffer'", "'extensions/EXT/EXT_texture_compression_dxt1.txt', }, 'GL_EXT_texture_compression_latc' : { 'number' : 331, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_direct_state_access.txt', }, 'GL_ARB_draw_buffers' : { 'arbnumber' : 37,", "'number' : 497, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias' : { 'GLX_EXT_framebuffer_sRGB', 'WGL_EXT_framebuffer_sRGB' }, }, 'GL_EXT_geometry_shader' : { 'esnumber' :", ": 'extensions/EXT/EXT_color_subtable.txt', }, 'GL_EXT_compiled_vertex_array' : { 'number' : 97, 'flags' : { 'public'", "'esnumber' : 83, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_blit.txt', }, 'GL_ANGLE_framebuffer_multisample'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers.txt', }, 'GL_EXT_draw_buffers2' : { 'number' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', }, 'GL_NV_bindless_texture' : { 'number'", ": { 'number' : 271, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/ATI/ATI_meminfo.txt',", ": { 'arbnumber' : 19, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_EXT_static_vertex_array' : { 'flags' : { 'public' }, 'supporters' : { 'IBM'", "'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias' : { 'WGL_EXT_create_context_es_profile' }, }, 'WGL_EXT_depth_float' :", 
"'GL_EXT_semaphore_win32' }, }, 'GL_EXT_float_blend' : { 'esnumber' : 224, 'flags' : { 'public'", "'GL_OES_texture_stencil8' : { 'esnumber' : 173, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_packed_float.txt', 'alias' :", ": { 'arbnumber' : 33, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_EXT_color_subtable' : { 'number' : 74, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_blend_equation_separate.txt',", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fence.txt', }, 'GL_NV_fill_rectangle'", "'extensions/SGIX/SGIX_slim.txt', }, 'GL_SGIX_spotlight_cutoff' : { 'number' : 131, 'flags' : { 'incomplete' },", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_row_bytes.txt', }, 'GL_APPLE_specular_vector'", "}, 'WGL_NV_render_texture_rectangle' : { 'number' : 264, 'flags' : { 'public' }, 'supporters'", "'extensions/ATI/ATI_envmap_bumpmap.txt', }, 'GL_ATI_fragment_shader' : { 'number' : 245, 'flags' : { 'public' },", ": { 'arbnumber' : 55, 'flags' : { 'public' }, 'supporters' : {", "'GL_NV_vertex_program2' : { 'number' : 287, 'flags' : { 'public' }, 'supporters' :", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_aux_depth_stencil.txt', }, 'GL_APPLE_client_storage' : {", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_dot3.txt', }, 'GL_ARB_texture_filter_anisotropic' : { 'arbnumber'", "'url' : 'extensions/EXT/EXT_coordinate_frame.txt', }, 'GL_EXT_copy_image' : { 'esnumber' : 175, 'flags' : {", ": { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external.txt', }, 'GL_OES_EGL_image_external_essl3' : { 'esnumber' :", "'extensions/SUN/SUN_global_alpha.txt', }, 'GL_SUN_mesh_array' : { 'number' : 257, 'flags' : { 'public' },", "'extensions/SGIX/SGIX_fog_blend.txt', }, 'GL_SGIX_fog_factor_to_alpha' : { 'flags' : { 'incomplete' }, 'url' : 
'extensions/SGIX/SGIX_fog_factor_to_alpha.txt',", ": { 'number' : 367, 'flags' : { 'public' }, 'supporters' : {", ": 178, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_gpu_shader5.txt', }, 'GL_EXT_histogram' :", "}, 'url' : 'extensions/EXT/EXT_packed_depth_stencil.txt', }, 'GL_EXT_packed_float' : { 'number' : 328, 'flags' :", "339, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'extensions/SGIX/SGIX_complex_polar.txt', }, 'GL_SGIX_convolution_accuracy' : { 'number' : 211, 'flags' : { 'public' },", ": 20, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "}, 'supporters' : { 'IBM', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_abgr.txt', },", "'GL_ARB_viewport_array' : { 'arbnumber' : 100, 'flags' : { 'public' }, 'url' :", "}, 'GL_NV_path_rendering' : { 'number' : 410, 'esnumber' : 199, 'flags' : {", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_specular_vector.txt', },", "}, 'GL_NV_query_resource_tag' : { 'number' : 512, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 432, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_NV_texgen_reflection' : { 'number' : 179, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/OES/OES_point_size_array.txt', }, 'GL_OES_point_sprite' : { 'esnumber' :", "13, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'GL_NV_shader_atomic_float' : { 'number' : 419, 'flags' : { 'public' }, 'supporters' :", "'extensions/AMD/AMD_transform_feedback4.txt', }, 'GL_AMD_vertex_shader_layer' : { 'number' : 417, 'flags' : { 'public' },", "'number' : 17, 'flags' : { 'public' }, 'supporters' : { 'ES', 'HP',", "}, 'GL_ARB_gpu_shader_int64' : { 'arbnumber' : 178, 'flags' : { 'public' }, 'url'", "'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias' : { 'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB' }, }, 'GL_ARB_geometry_shader4'", "'GL_EXT_color_buffer_float' : { 'esnumber' : 137, 'flags' : { 'public' }, 'url' :", 
"'GL_OES_packed_depth_stencil' : { 'esnumber' : 44, 'flags' : { 'public' }, 'url' :", "'WGL_NV_gpu_affinity' : { 'number' : 355, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/GLX_AMD_gpu_association.txt', }, 'GL_AMD_gpu_shader_half_float'", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/SGIX/SGIX_igloo_interface.txt', }, 'GL_SGIX_image_compression' : {", ": 407, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'number' : 221, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_envmap_bumpmap.txt', }, 'GL_ATI_fragment_shader' :", "'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture_clamp.txt', }, 'GL_ARB_spirv_extensions' : { 'arbnumber' : 194, 'flags'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cube_map.txt', }, 'GL_ARB_seamless_cubemap_per_texture' : { 'arbnumber' : 155,", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_swap_group.txt', }, 'WGL_NV_video_output'", "'supporters' : { 'MS' }, 'url' : 'extensions/WIN/WIN_specular_fog.txt', }, 'WGL_NV_DX_interop' : { 'number'", "}, 'url' : 'extensions/ARB/ARB_texture_multisample.txt', }, 'GL_ARB_texture_non_power_of_two' : { 'arbnumber' : 34, 'flags' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_range.txt', }, 'GL_SGIX_texture_scale_bias' : { 'number'", ": { 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_scale_bias.txt',", "'esnumber' : 209, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_buffers_indexed.txt', }, 'GL_OES_draw_elements_base_vertex'", "174, 'esnumber' : 168, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object.txt', }, 'GL_NV_parameter_buffer_object2'", "'url' : 'extensions/ARB/ARB_cl_event.txt', }, 'GL_ARB_clear_buffer_object' : { 
'arbnumber' : 121, 'flags' : {", ": 232, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'arbnumber' : 149, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', }, 'GL_ARB_texture_mirrored_repeat'", "'KHR' }, 'url' : 'extensions/OES/OES_single_precision.txt', }, 'GL_OES_standard_derivatives' : { 'esnumber' : 45, 'flags'", ": 'extensions/QCOM/QCOM_texture_foveated.txt', }, 'GL_QCOM_shader_framebuffer_fetch_noncoherent' : { 'esnumber' : 277, 'flags' : { 'public'", "}, 'url' : 'extensions/EXT/EXT_shader_integer_mix.txt', }, 'GL_EXT_shader_io_blocks' : { 'esnumber' : 180, 'flags' :", ": { 'number' : 428, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/EXT/EXT_texture_snorm.txt', }, 'GL_EXT_texture_storage' : { 'esnumber' : 108, 'flags' : { 'public'", "}, 'GL_INTEL_texture_scissor' : { 'number' : 135, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/SGIS/SGIS_texture_edge_clamp.txt', }, 'GL_SGIS_texture_filter4' : { 'number' : 7, 'flags' :", "{ 'number' : 459, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_uniform_buffer_unified_memory.txt', },", "'number' : 484, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_robustness_video_memory_purge.txt', }, 'GL_NV_sRGB_formats'", "'GL_WIN_phong_shading' : { 'number' : 113, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_EXT_instanced_arrays' : { 'esnumber' : 156, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_genlock.txt', },", "{ 'arbnumber' : 22, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'url' : 'extensions/EXT/EXT_index_material.txt', }, 'GL_EXT_index_texture' : { 'number' : 93, 'flags' : {", "'number' : 139, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SUN'", ": 'extensions/MESA/GLX_MESA_agp_offset.txt', }, 'GLX_MESA_copy_sub_buffer' : { 'number' : 215, 'flags' : { 'public'", "58, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', }, 
'GL_IMG_texture_filter_cubic' : {", ": 124, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_sync.txt', }, 'GL_APPLE_texture_2D_limited_npot' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_buffer_float.txt', }, 'GL_NV_depth_clamp' : { 'number'", "'esnumber' : 181, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_tessellation_shader.txt', 'alias' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_vertex.txt', }, 'GL_ARB_indirect_parameters' : {", "'url' : 'extensions/AMD/AMD_compressed_3DC_texture.txt', }, 'GL_AMD_compressed_ATC_texture' : { 'esnumber' : 40, 'flags' : {", "}, 'GL_AMD_depth_clamp_separate' : { 'number' : 401, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_clear_buffer_object.txt', }, 'GL_ARB_clear_texture' : { 'arbnumber' :", ": { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_array_formats.txt',", "'url' : 'extensions/NV/NV_pixel_buffer_object.txt', }, 'GL_NV_pixel_data_range' : { 'number' : 284, 'flags' : {", "{ 'number' : 502, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "}, 'url' : 'extensions/EXT/EXT_post_depth_coverage.txt', }, 'GL_EXT_primitive_bounding_box' : { 'esnumber' : 186, 'flags' :", "'esnumber' : 77, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_texture_lod.txt', }, 'GL_EXT_shadow_funcs'", "'GL_ARB_texture_storage_multisample' : { 'arbnumber' : 141, 'flags' : { 'public' }, 'url' :", "93, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback2.txt', }, 'GL_ARB_transform_feedback3' : {", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_interlace.txt', }, 'GL_SGIX_ir_instrument1' : {", "}, 'GL_IBM_static_data' : { 'number' : 223, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_interleaved_elements.txt', }, 'GL_AMD_multi_draw_indirect' : {", "'number' : 360, 'esnumber' : 50, 'flags' : { 'public' }, 'supporters' :", "'number' : 490, 
'esnumber' : 263, 'flags' : { 'public' }, 'supporters' :", "15, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'public' }, 'url' : 'extensions/ARB/ARB_shader_image_size.txt', }, 'GL_ARB_shader_objects' : { 'arbnumber' : 30, 'flags'", ": 81, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sampler_objects.txt', }, 'GL_ARB_seamless_cube_map' :", "}, 'GL_EXT_convolution' : { 'number' : 12, 'flags' : { 'public' }, 'supporters'", "216, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", ": 'extensions/ARB/WGL_ARB_pbuffer.txt', }, 'WGL_ARB_pixel_format' : { 'arbnumber' : 9, 'flags' : { 'public'", "}, 'url' : 'extensions/EXT/EXT_sRGB_write_control.txt', }, 'GL_EXT_scene_marker' : { 'number' : 120, 'flags' :", "{ 'number' : 77, 'flags' : { 'public' }, 'supporters' : { 'TGS'", "}, 'GL_ARB_derivative_control' : { 'arbnumber' : 163, 'flags' : { 'public' }, 'url'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_cube_map.txt', }, 'GL_ARB_texture_cube_map_array' : { 'arbnumber' :", "'SGI' }, 'url' : 'extensions/EXT/EXT_point_parameters.txt', }, 'GL_EXT_polygon_offset' : { 'number' : 3, 'flags'", "{ 'esnumber' : 38, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_half_float.txt', },", "}, 'url' : 'extensions/EXT/EXT_shader_implicit_conversions.txt', }, 'GL_EXT_shader_integer_mix' : { 'number' : 437, 'esnumber' :", ": 7, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map_array.txt', }, 'GL_OES_texture_env_crossbar' : { 'esnumber' : 21,", "{ 'number' : 64, 'flags' : { 'public' }, 'supporters' : { 'SGI'", ": 220, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external_essl3.txt', }, 'GL_OES_EGL_sync' :", "'url' : 'extensions/NV/GLX_NV_copy_buffer.txt', }, 'GL_NV_copy_buffer' : { 'esnumber' : 158, 'flags' : {", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', }, 'GL_SGIX_texture_lod_bias' : { 'number' : 84,", "{ 'number' : 234, 'flags' : { 
'public' }, 'supporters' : { 'SGI'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_tessellation_program5.txt', },", "'extensions/EXT/EXT_texture_view.txt', }, 'GL_EXT_timer_query' : { 'number' : 319, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp.txt',", "}, 'url' : 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias' : { 'GLX_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_robustness_isolation' : {", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias'", "'extensions/SGIX/SGIX_ycrcba.txt', }, 'GL_SGI_color_matrix' : { 'number' : 13, 'flags' : { 'public' },", "}, 'GL_EXT_multisampled_render_to_texture' : { 'esnumber' : 106, 'flags' : { 'public' }, 'url'", "{ 'arbnumber' : 65, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cube_map.txt', },", ": { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture2.txt', }, 'GL_EXT_static_vertex_array' : { 'flags' :", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_range.txt', }, 'GL_APPLE_vertex_program_evaluators' : {", "}, 'GLX_EXT_buffer_age' : { 'number' : 427, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_unpack_subimage.txt', }, 'GL_EXT_vertex_array' : { 'number' :", "{ 'SGI' }, 'url' : 'extensions/SGI/SGI_complex.txt', }, 'GL_SGI_complex_type' : { 'number' : 88,", "}, 'WGL_ARB_robustness_application_isolation' : { 'arbnumber' : 143, 'flags' : { 'public' }, 'url'", ": { 'arbnumber' : 93, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback2.txt',", "'flags' : { 'public' }, 'supporters' : { 'KGC', 'SGI' }, 'url' :", "}, 'GL_WIN_specular_fog' : { 'number' : 114, 'flags' : { 'public' }, 'supporters'", "'GL_NV_copy_depth_to_color' : { 'number' : 243, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_rg.txt', }, 
'GL_EXT_texture_sRGB' : { 'number' :", "76, 'flags' : { 'public' }, 'supporters' : { 'TGS' }, 'url' :", "}, 'GL_ARB_timer_query' : { 'arbnumber' : 85, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize.txt', },", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments' : 'Supported on Visual Workstation", "'extensions/EXT/EXT_vertex_shader.txt', }, 'GL_EXT_vertex_weighting' : { 'number' : 188, 'flags' : { 'public' },", ": { 'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB' }, }, 'GL_ARB_geometry_shader4' : { 'arbnumber' : 47, 'flags'", "'extensions/MESA/GLX_MESA_pixmap_colormap.txt', }, 'GL_MESA_program_binary_formats' : { 'number' : 516, 'esnumber' : 294, 'flags' :", "'url' : 'extensions/APPLE/APPLE_float_pixels.txt', }, 'GL_APPLE_flush_buffer_range' : { 'number' : 321, 'flags' : {", "'number' : 436, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'HP', 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_rating.txt', }, 'GL_EXT_win32_keyed_mutex' : { 'number' :", "}, 'url' : 'extensions/I3D/WGL_I3D_gamma.txt', }, 'WGL_I3D_genlock' : { 'number' : 252, 'flags' :", ": { 'arbnumber' : 139, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_range.txt',", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_program_evaluators.txt', },", ": 'extensions/IMG/IMG_user_clip_plane.txt', }, 'GL_INGR_color_clamp' : { 'number' : 174, 'flags' : { 'public'", "}, 'GL_SGIX_bali_g_instruments' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_g_instruments.txt', },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_float.txt', }, 'GL_EXT_color_buffer_half_float' : { 'esnumber' : 97,", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_interleaved_elements.txt', }, 'GL_AMD_multi_draw_indirect' : { 'number'", "'public' }, 'supporters' : { '3DFX', '3DL', 'SGI' }, 'url' : 
'extensions/EXT/EXT_shared_texture_palette.txt', },", "}, }, 'GL_SGIS_detail_texture' : { 'number' : 21, 'flags' : { 'public' },", "{ 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', }, 'GL_IGLOO_viewport_offsetXXX' :", "'GL_APPLE_fence' : { 'number' : 272, 'flags' : { 'public' }, 'supporters' :", "'extensions/SGIX/SGIX_sprite.txt', }, 'GL_SGIX_subdiv_patch' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_subdiv_patch.txt',", ": { 'number' : 362, 'flags' : { 'public' }, 'supporters' : {", "'extensions/AMD/AMD_performance_monitor.txt', }, 'GL_AMD_pinned_memory' : { 'number' : 411, 'flags' : { 'public' },", "'public' }, 'url' : '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', }, 'GL_OES_blend_equation_separate' : { 'esnumber' : 1, 'flags'", "}, 'url' : 'extensions/SGIX/SGIX_texture_add_env.txt', }, 'GL_SGIX_texture_coordinate_clamp' : { 'number' : 235, 'flags' :", "'GL_EXT_clip_volume_hint' : { 'number' : 79, 'flags' : { 'public' }, 'url' :", "}, 'GL_APPLE_texture_2D_limited_npot' : { 'esnumber' : 59, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/QCOM/QCOM_alpha_test.txt', }, 'GL_QCOM_binning_control' : { 'esnumber' : 119, 'flags' : {", ": 19, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "}, 'GL_NV_point_sprite' : { 'number' : 262, 'flags' : { 'public' }, 'supporters'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_swap_control_tear.txt', }, 'GL_EXT_tessellation_shader' : { 'esnumber' :", "'url' : 'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', }, 'GL_ARB_texture_mirrored_repeat' : { 'arbnumber' : 21, 'flags' : {", "399, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' }, }, 'GL_EXT_texture_compression_bptc' : { 'esnumber' : 287, 'flags' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_primitive_restart.txt', }, 'GL_NV_query_resource' :", "{ 'public' }, 'url' : 
'extensions/ARB/ARB_shader_image_size.txt', }, 'GL_ARB_shader_objects' : { 'arbnumber' : 30,", "}, 'GL_NV_robustness_video_memory_purge' : { 'number' : 484, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture2.txt', }, 'GL_EXT_static_vertex_array' : { 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map.txt', }, 'GL_OES_texture_cube_map_array' : { 'esnumber'", "'arbnumber' : 80, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_occlusion_query2.txt', }, 'GL_ARB_parallel_shader_compile'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_lod.txt', }, 'GL_SGIX_pixel_tiles' : {", "'flags' : { 'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_convolution_border_modes.txt',", "{ 'number' : 437, 'esnumber' : 161, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_program_binary_formats.txt', }, 'GLX_MESA_query_renderer' : { 'number'", ": { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_map_texture.txt', }, 'GL_INTEL_blackhole_render' : { 'number' :", "}, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig.txt', }, 'GLX_SGIX_fbconfig_float' : { 'flags' : { 'incomplete' },", "'esnumber' : 240, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture.txt', }, 'GL_EXT_sparse_texture2'", "{ 'number' : 24, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'MESA' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float.txt', }, 'GL_AMD_gpu_shader_half_float_fetch' : { 'number' : 519,", ": 48, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_program_binary_Z400.txt', }, 'GL_AMD_query_buffer_object' :", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_client_storage.txt', }, 'GL_APPLE_clip_distance' : { 'esnumber' : 193,", "}, 'url' : 'extensions/ATI/WGL_ATI_pixel_format_float.txt', }, 'WGL_EXT_colorspace' : { 'number' : 498, 'flags' :", "{ 'public' }, 'url' : 'extensions/NV/NV_generate_mipmap_sRGB.txt', }, 
'GL_NV_geometry_program4' : { 'number' : 323,", ": 'extensions/ARB/ARB_transpose_matrix.txt', }, 'GL_ARB_uniform_buffer_object' : { 'arbnumber' : 57, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'GOOGLE', 'NVIDIA', 'VMware' }, 'url'", "'url' : 'extensions/SGIX/GLX_SGIX_swap_barrier.txt', }, 'GLX_SGIX_swap_group' : { 'number' : 91, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners.txt', }, 'GL_NV_register_combiners2' : { 'number' : 227, 'flags'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shader_objects.txt', }, 'GL_ARB_shader_precision'", "'GL_EXT_shader_non_constant_global_initializers' : { 'esnumber' : 264, 'flags' : { 'public' }, 'url' :", ": { 'number' : 267, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_query_buffer_object.txt', }, 'GL_AMD_sample_positions'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_coverage.txt', }, 'GL_NV_multisample_filter_hint' : { 'number'", "58, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", ": { 'number' : 171, 'flags' : { 'public' }, 'supporters' : {", "'extensions/ARB/ARB_shader_group_vote.txt', }, 'GL_ARB_shader_image_load_store' : { 'arbnumber' : 115, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_texture_range.txt', }, 'GL_APPLE_transform_hint'", "}, 'url' : 'extensions/OES/OES_texture_mirrored_repeat.txt', }, 'GL_OES_texture_npot' : { 'esnumber' : 37, 'flags' :", "'extensions/EXT/EXT_texture_norm16.txt', }, 'GL_EXT_texture_object' : { 'number' : 20, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_separate_shader_objects.txt', }, 'GL_ARB_shader_atomic_counter_ops' : { 'arbnumber'", "'WGL_ARB_create_context' : { 'arbnumber' : 55, 'flags' : { 'public' }, 'supporters' :", "207, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_norm16.txt', }, 'GL_EXT_texture_object' : 
{", "{ 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context_robustness.txt', }, 'GL_ARB_cull_distance' : { 'arbnumber' : 162,", "'extensions/ARB/ARB_provoking_vertex.txt', }, 'GL_ARB_query_buffer_object' : { 'arbnumber' : 148, 'flags' : { 'public' },", ": { 'number' : 481, 'esnumber' : 246, 'flags' : { 'public' },", "}, 'GL_ARB_shader_atomic_counters' : { 'arbnumber' : 114, 'flags' : { 'public' }, 'url'", "{ 'number' : 240, 'flags' : { 'public' }, 'supporters' : { 'KHR'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_filter_hint.txt', }, 'GL_NV_non_square_matrices' : {", "146, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_array.txt', }, 'GL_NV_shadow_samplers_cube' : {", ": 159, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_1_compatibility.txt', }, 'GL_ARB_ES3_2_compatibility' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program4.txt', }, 'GL_NV_fragment_program_option' : { 'number' : 303,", "'extensions/NV/NV_deep_texture3D.txt', }, 'GLX_NV_delay_before_swap' : { 'number' : 445, 'flags' : { 'public' },", "'GL_NV_blend_minmax_factor' : { 'number' : 510, 'esnumber' : 285, 'flags' : { 'public'", ": 28, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fragment_precision_high.txt', }, 'GL_OES_framebuffer_object' :", "'extensions/OES/OES_read_format.txt', }, 'GL_OES_required_internalformat' : { 'esnumber' : 115, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_shading_language_100.txt', }, 'GL_ARB_shading_language_420pack' : { 'arbnumber' : 108, 'flags' :", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', }, 'GL_SGIX_texture_multi_buffer' : { 'number' :", ": 'extensions/EXT/EXT_static_vertex_array.txt', }, 'GL_EXT_stencil_clear_tag' : { 'number' : 314, 'flags' : { 'public'", ": { 'esnumber' : 109, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_instanced_arrays.txt',", "}, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/NV/NV_conditional_render.txt', }, 'GL_NV_conservative_raster' : {", "}, 'GL_NV_shader_atomic_int64' : { 'number' : 455, 'flags' : { 'public' }, 'supporters'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_present_video.txt', 'alias' : { 'GLX_NV_present_video', 'WGL_NV_present_video' }, }, 'GL_NV_primitive_restart'", "{ 'public' }, 'url' : 'extensions/AMD/AMD_compressed_3DC_texture.txt', }, 'GL_AMD_compressed_ATC_texture' : { 'esnumber' : 40,", "'alias' : { 'GL_EXT_multisample' }, }, 'WGL_EXT_pbuffer' : { 'number' : 171, 'flags'", "'extensions/GREMEDY/GREMEDY_string_marker.txt', }, 'GL_HP_convolution_border_modes' : { 'number' : 67, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/OES/OES_texture_storage_multisample_2d_array.txt', }, 'GL_OES_texture_view' : { 'esnumber' : 218, 'flags'", "{ 'number' : 436, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'url' : 'extensions/SGIX/SGIX_reference_plane.txt', }, 'GL_SGIX_resample' : { 'number' : 212, 'flags' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_geometry_shader4.txt', }, 'GLX_ARB_get_proc_address' : { 'arbnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture_cube_map.txt', }, 'GL_OES_draw_buffers_indexed' : { 'esnumber'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_array.txt', }, 'GL_EXT_texture_border_clamp' : { 'esnumber' : 182, 'flags'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program.txt', }, 'GL_NV_vertex_program1_1' : {", "'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_vertex_array.txt', }, 'GL_EXT_vertex_array_bgra' : {", ": 'extensions/INGR/INGR_interlace_read.txt', }, 'GL_INTEL_conservative_rasterization' : { 'number' : 491, 'esnumber' : 265, 'flags'", "'extensions/ARB/ARB_map_buffer_alignment.txt', }, 'GL_ARB_map_buffer_range' : { 'arbnumber' : 50, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_interlace.txt', },", "}, 'GL_EXT_texture_filter_anisotropic' : { 
'number' : 187, 'esnumber' : 41, 'flags' : {", "{ 'number' : 329, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'number' : 501, 'esnumber' : 274, 'flags' : { 'public' },", "}, 'GL_EXT_pixel_buffer_object' : { 'number' : 302, 'flags' : { 'public' }, 'supporters'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_filter_anisotropic.txt', }, 'GL_ARB_texture_filter_minmax' : {", "'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program.txt', }, 'GL_ARB_fragment_program_shadow' : { 'arbnumber' : 36, 'flags'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_matrix_palette.txt', }, 'GL_ARB_multi_bind' : { 'arbnumber'", "'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_framebuffer_foveated.txt', }, 'GL_QCOM_texture_foveated' : { 'esnumber'", "166, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_image_samples.txt', }, 'GL_ARB_shader_texture_lod' : {", ": 198, 'esnumber' : 154, 'flags' : { 'public' }, 'supporters' : {", "'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_frame_terminator.txt', }, 'GL_GREMEDY_string_marker' : { 'number' : 311, 'flags'", ": 146, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_array.txt', }, 'GL_NV_shadow_samplers_cube' :", ": 15, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "{ 'arbnumber' : 30, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_pipeline_statistics_query.txt', }, 'GL_ARB_pixel_buffer_object' : { 'arbnumber'", "}, 'url' : 'extensions/EXT/EXT_vertex_weighting.txt', }, 'GLX_EXT_visual_info' : { 'number' : 28, 'flags' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_half_float.txt', }, 'GL_NV_image_formats' : {", ": { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_multimode_draw_arrays.txt', },", ": 237, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array2.txt', }, 'GL_NV_viewport_swizzle' :", "{ 'number' : 440, 'esnumber' : 99, 'flags' : { 'public' }, 
'supporters'", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_float.txt', 'alias' : { 'GL_OES_texture_half_float' }, }, 'GL_OES_texture_float_linear'", "}, 'url' : 'extensions/AMD/AMD_transform_feedback4.txt', }, 'GL_AMD_vertex_shader_layer' : { 'number' : 417, 'flags' :", "}, 'url' : 'extensions/NV/NV_parameter_buffer_object.txt', }, 'GL_NV_parameter_buffer_object2' : { 'number' : 378, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_blackhole_render.txt',", "'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_equation_advanced.txt', 'alias' : { 'GL_NV_blend_equation_advanced_coherent' }, }, 'GL_NV_blend_minmax_factor' :", "251, 'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' :", "}, 'GLX_INTEL_swap_event' : { 'number' : 384, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUNX/SUNX_constant_data.txt',", "'url' : 'extensions/EXT/WGL_EXT_pixel_format.txt', }, 'WGL_EXT_swap_control' : { 'number' : 172, 'flags' : {", "'public' }, 'url' : 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', }, 'GL_ARB_compute_shader' : { 'arbnumber' : 122, 'flags'", ": 'extensions/QCOM/QCOM_extended_get2.txt', }, 'GL_QCOM_performance_monitor_global_mode' : { 'esnumber' : 56, 'flags' : { 'public'", ": 'Included with arbnumber 55, WGL_ARB_create_context.', }, 'WGL_ARB_create_context_robustness' : { 'arbnumber' : 102,", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_equation_advanced.txt', 'alias' : { 'GL_NV_blend_equation_advanced_coherent'", ": { 'esnumber' : 176, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers_indexed.txt',", "'esnumber' : 265, 'flags' : { 'public' }, 'supporters' : { 'INTEL' },", "'number' : 321, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_cube_map.txt',", "'url' : 'extensions/IBM/IBM_multimode_draw_arrays.txt', }, 
'GL_IBM_rasterpos_clip' : { 'number' : 110, 'flags' : {", ": { 'public' }, 'url' : 'extensions/EXT/EXT_tessellation_shader.txt', 'alias' : { 'GL_EXT_tessellation_point_size' }, },", "174, 'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url' :", "}, 'url' : 'extensions/NV/NV_framebuffer_multisample.txt', }, 'GL_NV_framebuffer_multisample_coverage' : { 'number' : 336, 'flags' :", "96, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_npot_2D_mipmap.txt', }, 'GL_NV_texture_rectangle' : {", "'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_buffer_region.txt', }, 'WGL_ARB_create_context' : { 'arbnumber' : 55, 'flags'", "'esnumber' : 60, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_EXT_multiple_textures' : { 'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_multiple_textures.txt', }, 'GL_EXT_multisample_compatibility'", "'number' : 228, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'url' : 'extensions/NV/NV_half_float.txt', }, 'GL_NV_image_formats' : { 'esnumber' : 200, 'flags' : {", "'extensions/KHR/KHR_no_error.txt', }, 'GL_KHR_parallel_shader_compile' : { 'arbnumber' : 192, 'esnumber' : 288, 'flags' :", "'extensions/OES/OES_gpu_shader5.txt', }, 'GL_OES_mapbuffer' : { 'esnumber' : 29, 'flags' : { 'public' },", "'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', }, 'GL_SGIX_texture_multi_buffer' : { 'number' : 53, 'flags' : { 'public' },", ": 155, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', }, 'GL_ARB_separate_shader_objects' :", "}, 'GL_EXT_texture_view' : { 'esnumber' : 185, 'flags' : { 'public' }, 'url'", "'extensions/EXT/EXT_blend_subtract.txt', }, 'GLX_EXT_buffer_age' : { 'number' : 427, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/EXT/EXT_post_depth_coverage.txt', }, 'GL_EXT_primitive_bounding_box' : { 'esnumber' : 186, 'flags'", ": { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', }, 'GL_MESA_pack_invert' : { 'number' :", "'url' : 
'extensions/ARB/ARB_texture_compression_bptc.txt', }, 'GL_ARB_texture_compression_rgtc' : { 'arbnumber' : 52, 'flags' : {", "}, 'url' : 'extensions/NV/NV_shader_atomic_float.txt', }, 'GL_NV_shader_atomic_float64' : { 'number' : 488, 'flags' :", "249, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', }, 'GL_MESAX_texture_stack' : {", ": { 'public' }, 'url' : 'extensions/APPLE/APPLE_copy_texture_levels.txt', }, 'GL_APPLE_element_array' : { 'number' :", ": 'extensions/EXT/GLX_EXT_import_context.txt', }, 'GL_EXT_index_array_formats' : { 'number' : 96, 'flags' : { 'public'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_store.txt', }, 'GL_NV_shader_noperspective_interpolation' : { 'esnumber'", ": { 'public' }, 'url' : 'extensions/OES/OES_sample_variables.txt', }, 'GL_OES_shader_image_atomic' : { 'esnumber' :", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sparse_texture.txt', },", "'url' : 'extensions/EXT/EXT_transform_feedback2.txt', 'comments' : 'Draft extension which is referred to by some", "'url' : 'extensions/EXT/EXT_color_buffer_half_float.txt', }, 'GL_EXT_color_subtable' : { 'number' : 74, 'flags' : {", ": { 'number' : 469, 'esnumber' : 231, 'flags' : { 'public' },", ": 'extensions/NV/NV_shader_atomic_int64.txt', }, 'GL_NV_shader_buffer_load' : { 'number' : 379, 'flags' : { 'public'", "}, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize.txt', }, 'GLX_SGIX_video_resize_float' : { 'number' : 184, 'flags' :", "120, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_arrays_of_arrays.txt', }, 'GL_ARB_base_instance' : {", ": { 'public' }, 'supporters' : { 'IBM', 'IMG', 'SUN' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/GLX_NV_delay_before_swap.txt', }, 'GL_NV_depth_buffer_float' : { 'number'", "90, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_unpack_subimage.txt', }, 'GL_EXT_vertex_array' : {", ": 'extensions/ARB/ARB_ES3_compatibility.txt', }, 
'GL_ARB_arrays_of_arrays' : { 'arbnumber' : 120, 'flags' : { 'public'", "250, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', }, 'GL_PGI_misc_hints' : {", "}, 'url' : 'extensions/SUN/SUN_global_alpha.txt', }, 'GL_SUN_mesh_array' : { 'number' : 257, 'flags' :", "'obsolete' }, 'url' : 'extensions/WIN/WIN_scene_markerXXX.txt', }, 'GL_WIN_specular_fog' : { 'number' : 114, 'flags'", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_x11_sync_object.txt', }, 'GL_FJ_shader_binary_GCCSO' : { 'esnumber' : 114,", "'flags' : { 'public' }, 'supporters' : { 'MESA', 'NVIDIA' }, 'url' :", "{ 'esnumber' : 220, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external_essl3.txt', },", ": { 'number' : 214, 'flags' : { 'incomplete', 'public' }, 'url' :", "{ 'number' : 100, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'url' : 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias' : { 'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' }, }, 'GL_EXT_texture_compression_bptc' : { 'esnumber'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_source.txt', }, 'GLX_SGIX_visual_select_group' :", ": 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', }, 'GL_EXT_texture_norm16' : { 'esnumber' : 207, 'flags' : { 'public'", "'extensions/REND/REND_screen_coordinates.txt', }, 'GL_S3_s3tc' : { 'number' : 276, 'flags' : { 'public' },", "}, 'url' : 'extensions/SGIX/SGIX_nurbs_eval.txt', }, 'GL_SGIX_occlusion_instrument' : { 'number' : 151, 'flags' :", ": { 'esnumber' : 204, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_elements_base_vertex.txt',", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_lod_bias.txt', },", "}, 'WGL_ARB_create_context_robustness' : { 'arbnumber' : 102, 'flags' : { 'public' }, 'url'", "'esnumber' : 147, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_cube.txt', }, 'GL_NV_stereo_view_rendering'", ": 135, 
'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robust_buffer_access_behavior.txt', }, 'GL_ARB_robustness' :", "'esnumber' : 38, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_half_float.txt', }, 'GL_OES_vertex_type_10_10_10_2'", "'extensions/OML/OML_subsample.txt', }, 'GLX_OML_swap_method' : { 'number' : 237, 'flags' : { 'public' },", ": { 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_histogram.txt',", "'extensions/AMD/AMD_shader_ballot.txt', }, 'GL_AMD_shader_explicit_vertex_parameter' : { 'number' : 485, 'flags' : { 'public' },", "'number' : 399, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shadow_funcs.txt', }, 'GL_EXT_shadow_samplers' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture.txt', 'comments' : 'Previously shared", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_array_object.txt', }, 'GL_ATI_vertex_attrib_array_object' : { 'number'", "}, }, 'GL_KHR_texture_compression_astc_sliced_3d' : { 'arbnumber' : 189, 'esnumber' : 249, 'flags' :", "'GL_NV_framebuffer_multisample_coverage' : { 'number' : 336, 'flags' : { 'public' }, 'supporters' :", ": 71, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_array_object.txt', }, 'GL_OES_vertex_half_float' :", "{ 'number' : 57, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'number' : 255, 'flags' : { 'public' }, 'supporters' : { 'I3D'", "}, 'url' : 'extensions/EXT/EXT_cull_vertex.txt', }, 'GL_EXT_debug_label' : { 'number' : 439, 'esnumber' :", "'arbnumber' : 190, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gl_spirv.txt', }, 'GL_ARB_gpu_shader5'", "}, 'url' : 'extensions/EXT/EXT_shared_texture_palette.txt', }, 'GL_EXT_sparse_texture' : { 'esnumber' : 240, 'flags' :", "'number' : 386, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'extensions/ARB/ARB_multi_bind.txt', }, 'GL_ARB_multi_draw_indirect' : { 
'arbnumber' : 133, 'flags' : { 'public' },", "'GL_ARB_depth_texture' : { 'arbnumber' : 22, 'flags' : { 'public' }, 'supporters' :", "'GL_EXT_timer_query' : { 'number' : 319, 'flags' : { 'public' }, 'supporters' :", "73, 'flags' : { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', }, 'GL_NV_draw_buffers' : {", "}, 'url' : 'extensions/SGIS/SGIS_detail_texture.txt', }, 'GL_SGIS_fog_function' : { 'number' : 64, 'flags' :", "}, 'GL_EXT_gpu_shader4' : { 'number' : 326, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 418, 'esnumber' : 197, 'flags' : { 'public' },", "'number' : 275, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", ": 'Partial HP support.', }, 'GL_SGI_complex' : { 'number' : 87, 'flags' :", "}, 'url' : 'extensions/ARB/ARB_conservative_depth.txt', }, 'GL_ARB_copy_buffer' : { 'arbnumber' : 59, 'flags' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_weighting.txt', }, 'GLX_EXT_visual_info'", ": 'extensions/EXT/EXT_window_rectangles.txt', }, 'GL_EXT_x11_sync_object' : { 'number' : 406, 'flags' : { 'public'", "'supporters' : { 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_histogram.txt', }, 'GLX_EXT_import_context'", ": 'extensions/SGIX/SGIX_texture_supersample.txt', }, 'GL_SGIX_vector_ops' : { 'flags' : { 'incomplete' }, 'url' :", "'extensions/EXT/EXT_EGL_image_storage.txt', }, 'GL_EXT_memory_object' : { 'number' : 503, 'esnumber' : 280, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias' : { 'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' },", "'public' }, 'url' : 'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', }, 'GL_APPLE_texture_format_BGRA8888' : { 'esnumber' : 79, 'flags'", "'url' : 'extensions/SGIX/SGIX_fog_layers.txt', }, 'GL_SGIX_fog_offset' : { 'number' : 65, 'flags' : {", ": { 'number' : 269, 'flags' : { 'public' }, 'supporters' : {", "336, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 
'url' :", "408, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "{ 'esnumber' : 62, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get.txt', },", ": { 'number' : 280, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/EXT/EXT_texture_border_clamp.txt', }, 'GL_EXT_texture_buffer' : { 'esnumber' : 183, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/IMG/IMG_bindless_texture.txt', }, 'GL_IMG_framebuffer_downsample' : { 'esnumber' : 255,", ": { 'number' : 45, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_shared_exponent.txt', }, 'GL_EXT_texture_snorm'", "'url' : 'extensions/ARB/ARB_matrix_palette.txt', }, 'GL_ARB_multi_bind' : { 'arbnumber' : 147, 'flags' : {", ": { 'arbnumber' : 192, 'esnumber' : 288, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_swap_control.txt', }, 'GL_MESA_tile_raster_order' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multisample.txt', 'alias' : { 'GLX_ARB_multisample', 'WGL_ARB_multisample'", "}, }, 'GL_EXT_packed_pixels' : { 'number' : 23, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2_option.txt', }, 'GL_NV_vertex_program3' : { 'number' : 306, 'flags'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_dm_buffer.txt', }, 'GL_SGIX_dvc'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rg.txt', }, 'GL_ARB_texture_rgb10_a2ui' : {", ": 129, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_layer_viewport.txt', }, 'GL_ARB_fragment_program' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_texture.txt', }, 'GL_ARB_derivative_control' : { 'arbnumber' :", "129, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_layer_viewport.txt', }, 'GL_ARB_fragment_program' : {", "'url' : 'extensions/EXT/EXT_shader_implicit_conversions.txt', }, 'GL_EXT_shader_integer_mix' : { 
'number' : 437, 'esnumber' : 161,", "'public' }, 'supporters' : { 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_border_clamp.txt', },", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture.txt', 'comments' : 'Previously", ": 71, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_cube_map_array.txt', }, 'GL_ARB_texture_env_add' :", ": 'extensions/EXT/EXT_map_buffer_range.txt', }, 'GL_EXT_misc_attribute' : { 'number' : 31, 'flags' : { 'public'", ": 10, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_framebuffer_object.txt', }, 'GL_OES_geometry_shader' :", "'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_set.txt',", "77, 'flags' : { 'public' }, 'supporters' : { 'TGS' }, 'url' :", "}, 'url' : 'extensions/EXT/EXT_texture_snorm.txt', }, 'GL_EXT_texture_storage' : { 'esnumber' : 108, 'flags' :", "'number' : 409, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA'", ": 'extensions/EXT/GLX_EXT_swap_control_tear.txt', }, 'GL_EXT_tessellation_shader' : { 'esnumber' : 181, 'flags' : { 'public'", "{ 'number' : 56, 'flags' : { 'public' }, 'supporters' : { 'HP',", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_attrib_64bit.txt', }, 'GL_EXT_vertex_shader' : { 'number' : 248,", ": { 'esnumber' : 82, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_rgba8.txt',", "'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture.txt', }, 'GL_EXT_texture3D' : { 'number' : 6,", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_subdiv_patch.txt', }, 'GL_SGIX_subsample' :", "'url' : 'extensions/SGIX/SGIX_ycrcba.txt', }, 'GL_SGI_color_matrix' : { 'number' : 13, 'flags' : {", ": { 'public' }, 'url' : 'extensions/IMG/IMG_multisampled_render_to_texture.txt', }, 'GL_IMG_program_binary' : { 'esnumber' :", ": 'extensions/OES/OES_fbo_render_mipmap.txt', }, 'GL_OES_fixed_point' : { 'number' : 292, 'esnumber' : 9, 'flags'", "'esnumber' : 48, 'flags' : { 'public' }, 'url' : 
'extensions/AMD/AMD_program_binary_Z400.txt', }, 'GL_AMD_query_buffer_object'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', }, 'GL_EXT_texture_view' : { 'esnumber' : 185,", ": { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', }, 'GL_IMG_bindless_texture' : { 'esnumber'", "'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gcn_shader.txt', }, 'GLX_AMD_gpu_association' : { 'number'", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_npot.txt', }, 'GL_OES_texture_stencil8' : { 'esnumber' :", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb.txt',", "'url' : 'extensions/EXT/WGL_EXT_swap_control_tear.txt', }, 'GL_EXT_clip_control' : { 'esnumber' : 290, 'flags' : {", "'url' : 'extensions/NV/NV_texture_expand_normal.txt', }, 'GL_NV_texture_multisample' : { 'number' : 403, 'flags' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_copy_buffer.txt', }, 'GL_ARB_copy_image' : { 'arbnumber' :", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_agp_offset.txt',", "}, 'url' : 'extensions/EXT/EXT_external_objects_fd.txt', 'alias' : { 'GL_EXT_semaphore_fd' }, }, 'GL_EXT_memory_object_win32' : {", "{ 'number' : 494, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_multisample.txt', }, 'GL_NV_texture_npot_2D_mipmap'", "'url' : 'extensions/SGIX/SGIX_calligraphic_fragment.txt', }, 'GL_SGIX_clipmap' : { 'number' : 33, 'flags' : {", "}, 'GL_SUN_global_alpha' : { 'number' : 164, 'flags' : { 'public' }, 'supporters'", "to GLX_ARB_create_context_profile not needed - see arbnumber 75.', }, 'GLX_ARB_create_context_no_error' : { 'arbnumber'", "'extensions/EXT/EXT_pixel_buffer_object.txt', }, 'GL_EXT_pixel_transform' : { 'number' : 138, 'flags' : { 'public' },", "384, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'public' }, 'supporters' : { 'ARB' }, 
'url' : 'extensions/ARB/ARB_color_buffer_float.txt', 'alias' : {", ": { 'number' : 492, 'esnumber' : 266, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_barrier.txt', }, 'GL_NV_texture_border_clamp' : { 'esnumber' : 149, 'flags'", "'url' : 'extensions/OES/OES_matrix_get.txt', }, 'GL_OES_matrix_palette' : { 'esnumber' : 12, 'flags' : {", ": 324, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get.txt', }, 'GL_QCOM_extended_get2' : { 'esnumber' : 63,", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg1.txt', }, 'GL_SGIX_mpeg2' : { 'flags'", ": 97, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_half_float.txt', }, 'GL_EXT_color_subtable' :", "'GL_EXT_texture_sRGB' : { 'number' : 315, 'flags' : { 'public' }, 'supporters' :", "137, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_storage_buffer_object.txt', }, 'GL_ARB_shader_subroutine' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_packed_depth_stencil.txt', },", "'GL_NV_uniform_buffer_unified_memory' : { 'number' : 459, 'flags' : { 'public' }, 'url' :", ": 59, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', }, 'GL_APPLE_texture_format_BGRA8888' :", "'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_layer.txt', }, 'GL_AMD_vertex_shader_tessellator' : { 'number' : 363, 'flags'", "{ 'arbnumber' : 164, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_direct_state_access.txt', },", "'GL_EXT_scene_marker' : { 'number' : 120, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_query_matrix.txt', },", ": 'extensions/ATI/ATI_texture_env_combine3.txt', }, 'GL_ATI_texture_float' : { 'number' : 280, 'flags' : { 'public'", "'extensions/AMD/AMD_debug_output.txt', }, 'GL_AMD_depth_clamp_separate' : { 'number' : 401, 'flags' : { 'public' },", "'url' : 
'extensions/ARB/ARB_clip_control.txt', }, 'GL_ARB_color_buffer_float' : { 'arbnumber' : 39, 'flags' : {", ": 114, 'flags' : { 'public' }, 'url' : 'extensions/FJ/FJ_shader_binary_GCCSO.txt', }, 'GL_GREMEDY_frame_terminator' :", "}, 'url' : 'extensions/VIV/VIV_shader_binary.txt', }, 'WGL_3DL_stereo_control' : { 'number' : 313, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_static_vertex_array.txt',", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_lod.txt', },", ": { 'esnumber' : 77, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_texture_lod.txt',", "}, 'GL_ARB_get_texture_sub_image' : { 'arbnumber' : 165, 'flags' : { 'public' }, 'url'", "{ 'ATI' }, 'url' : 'extensions/ATI/ATI_envmap_bumpmap.txt', }, 'GL_ATI_fragment_shader' : { 'number' : 245,", "274, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_bptc.txt',", ": { 'public' }, 'url' : 'extensions/INTEL/INTEL_performance_query.txt', }, 'GLX_INTEL_swap_event' : { 'number' :", "'public' }, 'url' : 'extensions/EXT/EXT_conservative_depth.txt', }, 'GL_EXT_convolution' : { 'number' : 12, 'flags'", "'number' : 126, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pbuffer.txt', }, 'WGL_EXT_pixel_format'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_point_parameters.txt', }, 'GL_ARB_point_sprite' : { 'arbnumber' : 35,", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_ycbcr_texture.txt', },", "}, 'url' : 'extensions/EXT/EXT_clip_cull_distance.txt', }, 'GL_EXT_clip_volume_hint' : { 'number' : 79, 'flags' :", "'number' : 341, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture4D.txt',", "'public' }, 'supporters' : { 'NVIDIA' }, 
'url' : 'extensions/NV/NV_query_resource_tag.txt', }, 'GL_NV_read_buffer' :", "'url' : 'extensions/EXT/EXT_swap_control.txt', }, 'GLX_EXT_swap_control_tear' : { 'number' : 414, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_colorspace.txt', }, 'WGL_EXT_create_context_es2_profile' : { 'number' : 400, 'flags'", "}, 'url' : 'extensions/OES/OES_stencil4.txt', }, 'GL_OES_stencil8' : { 'esnumber' : 33, 'flags' :", ": { 'INGR' }, 'url' : 'extensions/INGR/INGR_interlace_read.txt', }, 'GL_INTEL_conservative_rasterization' : { 'number' :", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_lod_bias.txt', }, 'GL_EXT_texture_mirror_clamp' : { 'number' : 298,", "246, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", "'extensions/EXT/EXT_light_texture.txt', }, 'GL_EXT_map_buffer_range' : { 'esnumber' : 121, 'flags' : { 'public' },", "'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_color.txt', }, 'GL_EXT_blend_equation_separate' : { 'number' : 299,", "'NVIDIA' }, 'url' : 'extensions/NV/NV_framebuffer_multisample_coverage.txt', }, 'GL_NV_generate_mipmap_sRGB' : { 'esnumber' : 144, 'flags'", "}, 'url' : 'extensions/AMD/AMD_depth_clamp_separate.txt', }, 'GL_AMD_draw_buffers_blend' : { 'number' : 366, 'flags' :", "'GL_SGIX_fog_factor_to_alpha' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', }, 'GL_SGIX_fog_layers'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners.txt', }, 'GL_NV_register_combiners2' :", ": 'extensions/3DFX/3DFX_multisample.txt', }, 'GL_3DFX_tbuffer' : { 'number' : 208, 'flags' : { 'public'", "{ 'arbnumber' : 151, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', },", "'number' : 410, 'esnumber' : 199, 'flags' : { 'public' }, 'supporters' :", ": 38, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "}, 'GL_NV_shader_buffer_load' : { 'number' : 379, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 
'extensions/EXT/EXT_sparse_texture2.txt', }, 'GL_EXT_static_vertex_array' : { 'flags' : { 'public' },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_swap_group.txt', }, 'WGL_NV_video_output' : { 'number' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig.txt', },", "'url' : 'extensions/NV/NV_internalformat_sample_query.txt', }, 'GL_NV_light_max_exponent' : { 'number' : 189, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_texture_rectangle.txt', }, 'GL_ARB_texture_rg' : { 'arbnumber' : 53, 'flags' :", "{ 'esnumber' : 219, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_elements_base_vertex.txt', },", ": { 'number' : 180, 'flags' : { 'incomplete' }, 'supporters' : {", "'GL_ARB_compatibility' : { 'arbnumber' : 58, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multitexture.txt',", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multitexture.txt', }, 'GL_ARB_occlusion_query' : {", ": 210, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_geometry_shader.txt', }, 'GL_OES_get_program_binary' :", "319, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 67, 'flags' : { 'public' }, 'supporters' : { 'HP' }, 'url'", "'url' : 'extensions/SUN/SUN_slice_accum.txt', }, 'GL_SUN_triangle_list' : { 'number' : 165, 'flags' : {", ": { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_pixel_texture.txt', }, 'GL_SGIS_point_line_texgen' : { 'number' :", "}, 'url' : 'extensions/EXT/EXT_texture_sRGB_R8.txt', }, 'GL_EXT_texture_sRGB_RG8' : { 'esnumber' : 223, 'flags' :", "'public' }, 'url' : 'extensions/IMG/IMG_texture_filter_cubic.txt', }, 'GL_IMG_user_clip_plane' : { 'esnumber' : 57, 'flags'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader3.txt', }, 'GL_NV_transform_feedback' : { 'number' : 341, 'flags'", "}, 'GL_NV_tessellation_program5' : { 'number' : 391, 'flags' : { 'public' }, 'supporters'", ": { 'SGI' }, 'url' : 
'extensions/SGIX/GLX_SGIX_swap_barrier.txt', }, 'GLX_SGIX_swap_group' : { 'number' :", "'public' }, 'url' : 'extensions/OES/OES_stencil8.txt', }, 'GL_OES_stencil_wrap' : { 'esnumber' : 19, 'flags'", "377, 'esnumber' : 101, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA',", "13, 'flags' : { 'incomplete', 'private' }, 'comments' : 'Draft spec location unknown.',", "}, 'GL_EXT_pixel_transform' : { 'number' : 138, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_window_pos.txt', },", "}, 'url' : 'extensions/SGI/GLX_SGI_video_sync.txt', }, 'GL_SUNX_constant_data' : { 'number' : 163, 'flags' :", "}, 'url' : 'extensions/NV/NV_bgr.txt', }, 'GL_NV_bindless_multi_draw_indirect' : { 'number' : 432, 'flags' :", "'url' : 'extensions/NV/NV_texture_barrier.txt', }, 'GL_NV_texture_border_clamp' : { 'esnumber' : 149, 'flags' : {", "}, 'GL_NV_viewport_swizzle' : { 'number' : 483, 'esnumber' : 258, 'flags' : {", ": 'extensions/OML/OML_subsample.txt', }, 'GLX_OML_swap_method' : { 'number' : 237, 'flags' : { 'public'", "'extensions/OES/OES_texture_border_clamp.txt', }, 'GL_OES_texture_buffer' : { 'esnumber' : 216, 'flags' : { 'public' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop.txt', }, 'WGL_NV_DX_interop2' : { 'number'", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/WGL_AMD_gpu_association.txt', }, 'WGL_ARB_buffer_region' : { 'arbnumber'", "241, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview.txt', }, 'GL_OVR_multiview2' : {", "}, 'GL_ARB_texture_swizzle' : { 'arbnumber' : 84, 'flags' : { 'public' }, 'url'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_clipmap.txt', }, 'GL_SGIX_color_matrix_accuracy' : { 'flags'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_swap_group.txt', }, 'GL_NV_tessellation_program5' : {", "'extensions/NV/NV_fragment_coverage_to_color.txt', }, 'GL_NV_fragment_program' : { 'number' : 282, 'flags' : { 'public' },", 
"'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_polynomial_ffd.txt',", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_multi_buffer.txt', }, 'GL_SGIX_texture_phase' : {", "{ 'public' }, 'url' : 'extensions/OES/OES_vertex_type_10_10_10_2.txt', }, 'GL_OES_viewport_array' : { 'esnumber' : 267,", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_program_binary_formats.txt', },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias'", "{ 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_border_clamp.txt', }, 'GL_SGIS_texture_color_mask' : { 'number'", "'GL_NV_pixel_buffer_object' : { 'esnumber' : 134, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_env_crossbar.txt', }, 'GL_OES_texture_float' : { 'esnumber' : 36,", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_program4.txt', }, 'GL_NV_geometry_shader4' : { 'number' :", "'extensions/AMD/AMD_vertex_shader_viewport_index.txt', }, 'GL_ANDROID_extension_pack_es31a' : { 'esnumber' : 187, 'flags' : { 'public' },", "'number' : 118, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "118, 'esnumber' : 117, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range2.txt', }, 'GL_NV_vertex_attrib_integer_64bit' : { 'number' :", "140, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc2.txt', }, 'GL_IMG_texture_env_enhanced_fixed_function' : {", "'number' : 408, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "{ 'arbnumber' : 107, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_base_instance.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth24.txt', }, 'GL_OES_depth32' : { 'esnumber'", "'WGL_I3D_swap_frame_lock' : { 'number' : 254, 'flags' : { 'public' }, 'supporters' :", ": { 'NVIDIA' }, 'url' 
: 'extensions/NV/NV_pixel_data_range.txt', }, 'GL_NV_platform_binary' : { 'esnumber' :", "}, 'GL_ARB_half_float_pixel' : { 'arbnumber' : 40, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/NV/NV_viewport_swizzle.txt', }, 'GL_OES_EGL_image' : { 'esnumber' :", "'GL_ARB_explicit_uniform_location' : { 'arbnumber' : 128, 'flags' : { 'public' }, 'url' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_decimation.txt', }, 'GL_SGIX_depth_pass_instrument' : { 'number' : 205, 'flags'", ": { 'public' }, 'supporters' : { 'QCOM' }, 'url' : 'extensions/QCOM/QCOM_tiled_rendering.txt', },", ": { 'arbnumber' : 36, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/ARB/ARB_shader_texture_lod.txt', }, 'GL_ARB_shader_viewport_layer_array' : { 'arbnumber' : 185, 'flags' :", "'GLX_MESA_query_renderer' : { 'number' : 446, 'flags' : { 'public' }, 'url' :", "{ 'esnumber' : 172, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_multisample_interpolation.txt', },", "'url' : 'extensions/ARB/ARB_texture_non_power_of_two.txt', }, 'GL_ARB_texture_query_levels' : { 'arbnumber' : 140, 'flags' : {", "'number' : 325, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "69, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'number' : 378, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'public' }, 'url' : 'extensions/OES/OES_shader_multisample_interpolation.txt', }, 'GL_OES_single_precision' : { 'number' :", ": 'extensions/NV/NV_fragment_program4.txt', }, 'GL_NV_fragment_program_option' : { 'number' : 303, 'flags' : { 'public'", "}, 'GL_EXT_geometry_shader4' : { 'number' : 324, 'flags' : { 'public' }, 'supporters'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_vulkan_image.txt', }, 'GL_NV_evaluators' : { 'number' : 225, 'flags'", "{ 'arbnumber' : 180, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_post_depth_coverage.txt', },", "}, 'url' : 'extensions/EXT/EXT_sRGB.txt', }, 'GL_EXT_sRGB_write_control' : { 'esnumber' : 153, 
'flags' :", "212, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_primitive_bounding_box.txt', }, 'GL_OES_query_matrix' : {", "{ 'number' : 400, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'number' : 217, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'GL_ARB_shading_language_include' : { 'arbnumber' : 76, 'flags' : { 'public' }, 'url' :", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_timer_instruments.txt', }, 'GL_SGIX_blend_alpha_minmax' : { 'number'", "'GL_NV_register_combiners' : { 'number' : 191, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_ARB_texture_env_dot3' : { 'arbnumber' : 19, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_convolution_border_modes.txt', }, 'GLX_SUN_get_transparent_index' : { 'number'", "}, 'GL_EXT_bindable_uniform' : { 'number' : 342, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 224, 'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_texture_mirrored_repeat.txt',", ": 420, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "{ 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/WGL_OML_sync_control.txt', }, }", "268, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_conservative_depth.txt', }, 'GL_EXT_convolution' : {", "'number' : 175, 'flags' : { 'public' }, 'supporters' : { 'INGR' },", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture4D.txt', }, 'GL_SGIS_texture_border_clamp'", "{ 'number' : 318, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'VMware' }, 'url' : 'extensions/EXT/EXT_window_rectangles.txt', }, 'GL_EXT_x11_sync_object' : { 'number' : 406, 'flags'", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_range.txt', }, 'GL_APPLE_vertex_program_evaluators'", "'extensions/ATI/ATI_vertex_array_object.txt', }, 'GL_ATI_vertex_attrib_array_object' : { 'number' : 290, 'flags' : { 'public' },", "52, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 
'url' :", "{ 'number' : 452, 'flags' : { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_stereo_tree.txt', },", "'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', }, 'GL_IMG_bindless_texture' : {", ": { 'esnumber' : 133, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_array.txt',", "'arbnumber' : 8, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": 'extensions/OES/OES_gpu_shader5.txt', }, 'GL_OES_mapbuffer' : { 'esnumber' : 29, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_shader_integer_functions.txt',", ": 'extensions/GREMEDY/GREMEDY_string_marker.txt', }, 'GL_HP_convolution_border_modes' : { 'number' : 67, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_element_array.txt',", "}, 'url' : 'extensions/NV/NV_texture_npot_2D_mipmap.txt', }, 'GL_NV_texture_rectangle' : { 'number' : 229, 'flags' :", "144, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_generate_mipmap_sRGB.txt', }, 'GL_NV_geometry_program4' : {", ": 'extensions/SGIX/SGIX_async_histogram.txt', }, 'GL_SGIX_async_pixel' : { 'number' : 133, 'flags' : { 'incomplete',", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_extensions_string.txt', }, 'WGL_ARB_make_current_read'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_debug_output.txt', }, 'GL_AMD_depth_clamp_separate' : {", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ir_instrument1.txt', }, 'GL_SGIX_line_quality_hint' :", ": 107, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_base_instance.txt', }, 'GL_ARB_bindless_texture' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shader_objects.txt', }, 'GL_ARB_shader_precision' : {", "'extensions/ARB/ARB_sparse_texture2.txt', }, 'GL_ARB_sparse_texture_clamp' : { 'arbnumber' : 187, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 
'url' : 'extensions/ATI/ATI_envmap_bumpmap.txt',", ": 146, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_enhanced_layouts.txt', }, 'GL_ARB_explicit_attrib_location' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cl_event.txt', }, 'GL_ARB_clear_buffer_object' : { 'arbnumber'", "'url' : 'extensions/ARB/ARB_sync.txt', }, 'GL_ARB_tessellation_shader' : { 'arbnumber' : 91, 'flags' : {", "'url' : 'extensions/EXT/EXT_cmyka.txt', }, 'GL_EXT_color_buffer_float' : { 'esnumber' : 137, 'flags' : {", "257, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clip_cull_distance.txt', }, 'GL_EXT_clip_volume_hint' : {", "'url' : 'extensions/SGIX/GLX_SGIX_pbuffer.txt', }, 'GL_SGIX_pixel_texture' : { 'number' : 499, 'flags' : {", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_scale_bias.txt', }, 'GL_SGIX_texture_supersample' : { 'flags' : { 'incomplete'", ": 11, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'KGC', 'SGI',", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_tile_raster_order.txt', }, 'GL_MESA_window_pos' : { 'number'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture2.txt', }, 'GL_ARB_sparse_texture_clamp' : { 'arbnumber' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_vulkan_image.txt', }, 'GL_NV_evaluators'", "'GL_OES_stencil8' : { 'esnumber' : 33, 'flags' : { 'public' }, 'url' :", ": { 'SUN' }, 'url' : 'extensions/SUN/SUN_vertex.txt', }, 'GL_VIV_shader_binary' : { 'esnumber' :", "'url' : 'extensions/ARB/WGL_ARB_pbuffer.txt', }, 'WGL_ARB_pixel_format' : { 'arbnumber' : 9, 'flags' : {", ": { 'arbnumber' : 148, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_query_buffer_object.txt',", ": { 'arbnumber' : 53, 'flags' : { 'public' }, 'supporters' : {", "'GL_AMD_shader_explicit_vertex_parameter' : { 'number' : 485, 'flags' : { 'public' }, 'url' :", "'extensions/OES/OES_get_program_binary.txt', }, 'GL_OES_gpu_shader5' : { 'esnumber' : 211, 'flags' : { 'public' },", "'esnumber' : 65, 'flags' : { 
'public' }, 'supporters' : { 'HP', 'IBM',", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_framezoom.txt', }, 'GLX_SGIX_hyperpipe' : { 'number' : 307, 'flags'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_blend.txt', }, 'GL_SGIX_fog_factor_to_alpha' : {", ": { 'esnumber' : 39, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_3DC_texture.txt',", ": { 'arbnumber' : 78, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_blend_func_extended.txt',", "'ATI' }, 'url' : 'extensions/ATI/ATI_pn_triangles.txt', }, 'GL_ATI_separate_stencil' : { 'number' : 289, 'flags'", ": { 'number' : 231, 'flags' : { 'public' }, 'supporters' : {", "'extensions/APPLE/APPLE_object_purgeable.txt', }, 'GL_APPLE_rgb_422' : { 'number' : 373, 'esnumber' : 76, 'flags' :", "'url' : 'extensions/OES/OES_depth_texture.txt', }, 'GL_OES_depth_texture_cube_map' : { 'esnumber' : 136, 'flags' : {", "}, 'GL_ARB_texture_env_add' : { 'arbnumber' : 6, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/NV/NV_internalformat_sample_query.txt', }, 'GL_NV_light_max_exponent' : { 'number' : 189,", ": 134, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI' },", "'obsolete' }, 'url' : 'extensions/SGI/GLX_SGI_transparent_pixel.txt', }, 'GLX_SGI_video_sync' : { 'number' : 41, 'flags'", "'GL_OES_depth_texture_cube_map' : { 'esnumber' : 136, 'flags' : { 'public' }, 'url' :", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_geometry_shader4.txt', }, 'GLX_ARB_get_proc_address' : { 'arbnumber' : 2,", "}, 'url' : 'extensions/SGIX/GLX_SGIX_swap_barrier.txt', }, 'GLX_SGIX_swap_group' : { 'number' : 91, 'flags' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_depth_texture.txt', }, 'WGL_NV_render_texture_rectangle' : { 'number' :", "'extensions/ARB/ARB_pipeline_statistics_query.txt', }, 'GL_ARB_pixel_buffer_object' : { 'arbnumber' : 42, 'flags' : { 'public' },", "'esnumber' : 99, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "'esnumber' : 272, 'flags' : { 
'public' }, 'url' : 'extensions/EXT/EXT_draw_transform_feedback.txt', }, 'GL_EXT_external_buffer'", "'GL_OML_resample' : { 'number' : 241, 'flags' : { 'public' }, 'supporters' :", ": 498, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_occlusion_query.txt',", "'number' : 43, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", ": { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/ATI/ATI_text_fragment_shader.txt', }, 'GL_ATI_texture_env_combine3' : { 'number'", "'GL_APPLE_row_bytes' : { 'number' : 372, 'flags' : { 'public' }, 'supporters' :", "'GL_SGIS_texture_border_clamp' : { 'number' : 36, 'flags' : { 'public' }, 'supporters' :", "'number' : 420, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", ": 'extensions/EXT/EXT_direct_state_access.txt', }, 'GL_EXT_discard_framebuffer' : { 'esnumber' : 64, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', }, 'GL_EXT_texture_view' : { 'esnumber' :", "'url' : 'extensions/INTEL/INTEL_parallel_arrays.txt', }, 'GL_INTEL_performance_query' : { 'number' : 443, 'esnumber' : 164,", "'extensions/IBM/IBM_rasterpos_clip.txt', }, 'GL_IBM_static_data' : { 'number' : 223, 'flags' : { 'public' },", ": 'extensions/SUNX/SUNX_constant_data.txt', }, 'GL_SUN_convolution_border_modes' : { 'number' : 182, 'flags' : { 'public'", "434, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "154, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url'", "'arbnumber' : 83, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_rgb10_a2ui.txt', }, 'GL_ARB_texture_stencil8'", "{ 'number' : 369, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multitexture.txt', }, 'GL_SGIS_pixel_texture' : { 'number'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth32.txt', }, 'GL_OES_depth_texture' : { 'esnumber'", "{ 'public' }, 
'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_float.txt', }, 'GL_NV_shader_atomic_float64'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_gpu_memory_info.txt',", "'url' : 'extensions/EXT/EXT_paletted_texture.txt', }, 'GL_EXT_pixel_buffer_object' : { 'number' : 302, 'flags' : {", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_deep_texture3D.txt', }, 'GLX_NV_delay_before_swap' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_gpu_shader5.txt', }, 'GL_EXT_histogram' : { 'number'", "'public' }, 'url' : 'extensions/OES/OES_vertex_array_object.txt', }, 'GL_OES_vertex_half_float' : { 'esnumber' : 38, 'flags'", "{ 'number' : 241, 'flags' : { 'public' }, 'supporters' : { 'KHR'", "200, 'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' :", "'public' }, 'url' : 'extensions/EXT/EXT_unpack_subimage.txt', }, 'GL_EXT_vertex_array' : { 'number' : 30, 'flags'", ": 296, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_stereo_view_rendering.txt', }, 'GLX_NV_swap_group' :", "482, 'flags' : { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_libglvnd.txt', }, 'GL_EXT_light_texture' : {", "}, 'GL_OES_texture_npot' : { 'esnumber' : 37, 'flags' : { 'public' }, 'url'", "{ 'number' : 213, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'extensions/I3D/WGL_I3D_digital_video_control.txt', }, 'WGL_I3D_gamma' : { 'number' : 251, 'flags' : { 'public' },", "}, 'url' : 'extensions/EXT/EXT_bindable_uniform.txt', }, 'GL_EXT_blend_color' : { 'number' : 2, 'flags' :", "{ 'number' : 334, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'GL_NV_shader_thread_shuffle' : { 'number' : 448, 'flags' : { 'public' }, 'url'", "54, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias' : { 'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' }, }, 'GL_EXT_texture_compression_bptc'", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 
'extensions/SGIX/SGIX_cube_map.txt', }, 'GL_SGIX_cylinder_texgen' :", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/AMD/AMD_shader_ballot.txt', }, 'GL_AMD_shader_explicit_vertex_parameter' : {", ": 'extensions/EXT/WGL_EXT_pbuffer.txt', }, 'WGL_EXT_pixel_format' : { 'number' : 170, 'flags' : { 'public'", "}, 'url' : 'extensions/SGIX/SGIX_color_table_index_mode.txt', }, 'GLX_SGIX_color_type' : { 'number' : 89, 'flags' :", "{ 'number' : 9, 'flags' : { 'public' }, 'supporters' : { 'HP',", "'url' : 'extensions/ARB/ARB_buffer_storage.txt', }, 'GL_ARB_cl_event' : { 'arbnumber' : 103, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_texture_non_power_of_two.txt', }, 'GL_ARB_texture_query_levels' : { 'arbnumber' : 140, 'flags' :", "}, 'GL_EXT_texture_perturb_normal' : { 'number' : 147, 'flags' : { 'public' }, 'url'", ": 'extensions/EXT/WGL_EXT_swap_control_tear.txt', }, 'GL_EXT_clip_control' : { 'esnumber' : 290, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_swap_group.txt', },", "'GL_AMD_vertex_shader_viewport_index' : { 'number' : 416, 'flags' : { 'public' }, 'supporters' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_subsample.txt', }, 'GLX_SGIX_swap_barrier' : {", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_cadd.txt', }, 'GL_SGIX_blend_cmultiply' : { 'flags' : { 'incomplete'", ": 'extensions/ARB/ARB_occlusion_query.txt', }, 'GL_ARB_occlusion_query2' : { 'arbnumber' : 80, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_buffer_age.txt',", "'url' : 'extensions/NV/NV_register_combiners2.txt', }, 'GL_NV_robustness_video_memory_purge' : { 'number' : 484, 'flags' : {", "'GL_ARB_ES3_compatibility' : { 'arbnumber' : 127, 'flags' : { 'public' }, 'url' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_env_combine4.txt', }, 'GL_NV_texture_expand_normal' : { 'number'", "'GL_WIN_scene_markerXXX' : { 'flags' : { 'obsolete' }, 'url' : 
'extensions/WIN/WIN_scene_markerXXX.txt', }, 'GL_WIN_specular_fog'", ": { 'arbnumber' : 15, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/ARB/ARB_texture_rg.txt', }, 'GL_ARB_texture_rgb10_a2ui' : { 'arbnumber' : 83, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_blend_equation_separate.txt', }, 'GL_EXT_blend_func_extended' : { 'esnumber' : 247, 'flags' :", ": 'extensions/ARB/ARB_invalidate_subdata.txt', }, 'GL_ARB_map_buffer_alignment' : { 'arbnumber' : 113, 'flags' : { 'public'", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_mirrored_repeat.txt', }, 'GL_ARB_texture_multisample' : { 'arbnumber' : 67, 'flags'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_crossbar.txt', }, 'GL_ARB_texture_env_dot3' : {", "{ 'number' : 95, 'flags' : { 'public' }, 'supporters' : { 'INTEL',", "'number' : 379, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_OES_primitive_bounding_box' : { 'esnumber' : 212, 'flags' : { 'public' }, 'url' :", "{ 'number' : 420, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "{ 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_query_matrix.txt', }, 'GL_OES_read_format'", "{ 'GL_SGIX_color_type' }, }, 'GLX_SGIX_color_typeXXX' : { 'number' : 72, 'flags' : {", ": 'extensions/EXT/EXT_framebuffer_blit.txt', }, 'GL_EXT_framebuffer_multisample' : { 'number' : 317, 'flags' : { 'public'", "{ 'arbnumber' : 114, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counters.txt', },", "{ 'number' : 50, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "}, 'url' : 'extensions/SGIX/SGIX_interlace.txt', }, 'GL_SGIX_ir_instrument1' : { 'number' : 81, 'flags' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_group.txt', },", "'AMD' }, 'url' : 'extensions/ATI/ATI_meminfo.txt', }, 'GL_ATI_pn_triangles' : { 'number' : 246, 'flags'", "'GL_AMD_program_binary_Z400' : { 'esnumber' : 48, 'flags' : { 'public' }, 'url' :", ": 
'extensions/ARB/ARB_query_buffer_object.txt', }, 'GL_ARB_robust_buffer_access_behavior' : { 'arbnumber' : 135, 'flags' : { 'public'", "}, 'url' : 'extensions/EXT/EXT_packed_pixels.txt', }, 'GL_EXT_paletted_texture' : { 'number' : 78, 'flags' :", "}, 'GL_HP_texture_lighting' : { 'number' : 111, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_storage_buffer_object.txt', }, 'GL_NV_shader_thread_group' : { 'number'", "}, 'url' : 'extensions/SGIX/SGIX_subsample.txt', }, 'GLX_SGIX_swap_barrier' : { 'number' : 92, 'flags' :", "'url' : 'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias' : { 'GL_EXT_shader_framebuffer_fetch_non_coherent' }, }, 'GL_EXT_shader_group_vote' : { 'esnumber'", "}, 'url' : 'extensions/NV/GLX_NV_swap_group.txt', }, 'GL_NV_tessellation_program5' : { 'number' : 391, 'flags' :", ": 'extensions/EXT/GLX_EXT_libglvnd.txt', }, 'GL_EXT_light_texture' : { 'number' : 117, 'flags' : { 'public'", ": 'extensions/EXT/EXT_external_objects.txt', 'alias' : { 'GL_EXT_semaphore' }, }, 'GL_EXT_memory_object_fd' : { 'number' :", "}, 'GL_EXT_protected_textures' : { 'esnumber' : 256, 'flags' : { 'public' }, 'url'", "}, 'GL_NV_blend_equation_advanced' : { 'number' : 433, 'esnumber' : 163, 'flags' : {", "'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 'alias' : { 'GLX_EXT_create_context_es_profile' }, }, 'GL_EXT_cull_vertex' : { 'number' : 98,", "199, 'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' :", "{ 'number' : 398, 'flags' : { 'public' }, 'supporters' : { 'AMD'", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_element_array.txt', },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program.txt', }, 'GL_NV_fragment_program2' : { 'number'", "}, 'GL_EXT_draw_range_elements' : { 'number' : 112, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pbuffer.txt',", "'SGI' }, 'url' : 
'extensions/EXT/EXT_shared_texture_palette.txt', }, 'GL_EXT_sparse_texture' : { 'esnumber' : 240, 'flags'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_gpu_memory_info.txt', }, 'GL_NVX_linked_gpu_multicast' : {", "see arbnumber 74.', }, 'WGL_ARB_create_context_profile' : { 'arbnumber' : 74, 'flags' : {", "'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_multiple_textures.txt', }, 'GL_EXT_multisample_compatibility' : { 'esnumber'", "'arbnumber' : 22, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'number' : 341, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/NV/NV_shader_noperspective_interpolation.txt', }, 'GL_NV_shader_storage_buffer_object' : { 'number' : 422, 'flags' : {", ": { 'number' : 53, 'flags' : { 'public' }, 'supporters' : {", ": { 'WGL_ARB_robustness_share_group_isolation' }, }, 'WGL_ATI_pixel_format_float' : { 'number' : 278, 'flags' :", "221, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", "{ 'number' : 238, 'flags' : { 'public' }, 'supporters' : { 'KHR'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_sprite.txt', }, 'GL_ARB_polygon_offset_clamp'", "}, 'url' : 'extensions/ATI/ATI_separate_stencil.txt', }, 'GL_ATI_text_fragment_shader' : { 'number' : 269, 'flags' :", "'esnumber' : 236, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sample_mask_override_coverage.txt', }, 'GL_NV_shader_atomic_counters'", "'url' : 'extensions/EXT/WGL_EXT_extensions_string.txt', }, 'WGL_EXT_make_current_read' : { 'number' : 169, 'flags' : {", ": { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_label.txt', }, 'GL_EXT_debug_marker' : { 'number' :", ": { 'esnumber' : 156, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_instanced_arrays.txt',", ": { 'public' }, 'url' : 'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias' : { 'GL_ARB_robustness_share_group_isolation' }, },", "'GL_NV_geometry_shader4' : { 'number' : 338, 'flags' : { 'public' }, 'supporters' :", "}, 
'GL_NV_sample_locations' : { 'number' : 472, 'esnumber' : 235, 'flags' : {", ": 69, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'url' : 'extensions/ARB/ARB_map_buffer_range.txt', }, 'GL_ARB_matrix_palette' : { 'arbnumber' : 16, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_program_binary.txt', }, 'GL_ANGLE_texture_compression_dxt3' : { 'esnumber'", "'extensions/EXT/EXT_instanced_arrays.txt', }, 'GLX_EXT_libglvnd' : { 'number' : 482, 'flags' : { 'public' },", "'url' : 'extensions/EXT/EXT_scene_marker.txt', 'alias' : { 'GLX_EXT_scene_marker' }, }, 'GL_EXT_secondary_color' : { 'number'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_reflection.txt', },", "{ 'public' }, 'url' : 'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', }, 'GL_IMG_texture_filter_cubic' : { 'esnumber' : 251,", "'extensions/ARB/ARB_vertex_shader.txt', }, 'GL_ARB_vertex_type_10f_11f_11f_rev' : { 'arbnumber' : 151, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_copy_buffer.txt', }, 'GL_NV_copy_buffer'", "'number' : 146, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_env.txt', }, 'GL_EXT_texture_env_add'", "{ 'esnumber' : 47, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_get_program_binary.txt', },", "'alias' : { 'GL_EXT_tessellation_point_size' }, }, 'GL_EXT_texenv_op' : { 'flags' : { 'obsolete'", "38, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_half_float.txt', }, 'GL_OES_vertex_type_10_10_10_2' : {", ": 35, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": { 'SGI' }, 'url' : 'extensions/EXT/EXT_point_parameters.txt', }, 'GL_EXT_polygon_offset' : { 'number' :", "'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control.txt', }, 'WGL_EXT_swap_control_tear' : { 'number' : 415, 'flags'", "{ 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_gamma.txt', }, 'WGL_I3D_genlock'", "'public' }, 'supporters' : { 
'ARB' }, 'url' : 'extensions/ARB/ARB_texture_mirrored_repeat.txt', }, 'GL_ARB_texture_multisample' :", "'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/AMD/AMD_draw_buffers_blend.txt', }, 'GL_AMD_framebuffer_sample_positions'", "'esnumber' : 64, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_discard_framebuffer.txt', }, 'GL_EXT_disjoint_timer_query'", ": 164, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_direct_state_access.txt', }, 'GL_ARB_draw_buffers' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_instruments.txt', }, 'GL_SGIX_interlace' : { 'number' : 45, 'flags'", "{ 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Included with arbnumber 56, GLX_ARB_create_context.',", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_layer_viewport.txt', }, 'GL_ARB_fragment_program' : { 'arbnumber'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_instanced_arrays.txt', }, 'GL_ARB_internalformat_query' : {", "76, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "'extensions/EXT/EXT_blend_equation_separate.txt', }, 'GL_EXT_blend_func_extended' : { 'esnumber' : 247, 'flags' : { 'public' },", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shader_image_load_store.txt', }, 'GL_EXT_shader_implicit_conversions' : { 'esnumber' : 179,", "}, 'GL_EXT_texture_buffer' : { 'esnumber' : 183, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_timer_query.txt', },", "}, 'url' : 'extensions/EXT/EXT_transform_feedback.txt', }, 'GL_EXT_transform_feedback2' : { 'flags' : { 'incomplete', 'obsolete'", "'GL_IBM_static_data' : { 'number' : 223, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object.txt',", "{ 'number' : 392, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'arbnumber' : 168, 'esnumber' : 191, 'flags' : { 'public' }, 'url' :", "'arbnumber' : 56, 
'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' :", ": { 'number' : 504, 'esnumber' : 281, 'flags' : { 'public' },", "'url' : 'extensions/ARB/ARB_texture_view.txt', }, 'GL_ARB_timer_query' : { 'arbnumber' : 85, 'flags' : {", "'number' : 133, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI'", ": 140, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "}, 'GL_APPLE_object_purgeable' : { 'number' : 371, 'flags' : { 'public' }, 'supporters'", "485, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', }, 'GL_AMD_shader_image_load_store_lod' : {", ": 98, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", ": { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_provoking_vertex.txt',", "}, 'url' : 'extensions/EXT/EXT_unpack_subimage.txt', }, 'GL_EXT_vertex_array' : { 'number' : 30, 'flags' :", "'esnumber' : 267, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_viewport_array.txt', }, 'GL_OML_interlace'", "177, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_geometry_shader.txt', 'alias' : { 'GL_EXT_geometry_point_size'", "}, 'GL_NV_shadow_samplers_array' : { 'esnumber' : 146, 'flags' : { 'public' }, 'url'", "'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_blended_overlay.txt', }, 'GL_SGIS_clip_band_hint' : { 'flags' : { 'incomplete'", "92, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', }, 'GL_ARB_texture_buffer_range' : {", "}, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_cull_vertex.txt', }, 'GL_IBM_multimode_draw_arrays' : {", ": 'extensions/EXT/EXT_pixel_buffer_object.txt', }, 'GL_EXT_pixel_transform' : { 'number' : 138, 'flags' : { 'public'", "111, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias' : { 'GL_ANGLE_texture_compression_dxt1',", "'esnumber' : 217, 'flags' : { 'public' }, 'url' : 
'extensions/OES/OES_texture_cube_map_array.txt', }, 'GL_OES_texture_env_crossbar'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Alias to WGL_ARB_create_context_profile", ": 'extensions/APPLE/APPLE_transform_hint.txt', }, 'GL_APPLE_vertex_array_object' : { 'number' : 273, 'flags' : { 'public'", "{ 'number' : 406, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_gpu_affinity.txt', },", "{ 'number' : 302, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'public' }, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url'", ": { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_conditional_render.txt', }, 'GL_NVX_gpu_memory_info' : { 'number' :", ": { 'arbnumber' : 120, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_arrays_of_arrays.txt',", "'esnumber' : 182, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_border_clamp.txt', }, 'GL_EXT_texture_buffer'", "'url' : 'extensions/SUNX/SUNX_constant_data.txt', }, 'GL_SUN_convolution_border_modes' : { 'number' : 182, 'flags' : {", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_trinary_minmax.txt', }, 'GL_AMD_sparse_texture' : { 'number' :", "'flags' : { 'obsolete' }, 'url' : 'extensions/SGIX/SGIX_vertex_array_object.txt', }, 'GL_SGIX_vertex_preclip' : { 'number'", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_clear_tag.txt', }, 'GL_EXT_stencil_two_side' : { 'number' : 268,", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_storage_buffer_object.txt',", "{ 'number' : 517, 'esnumber' : 297, 'flags' : { 'public' }, 'url'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_multitexture.txt', }, 'GL_ARB_occlusion_query' : { 'arbnumber' : 29,", "{ 'esnumber' : 209, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_buffers_indexed.txt', },", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_table_index_mode.txt', }, 'GLX_SGIX_color_type' : { 
'number' : 89,", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_complex_polar.txt', }, 'GL_SGIX_convolution_accuracy' : { 'number' : 211, 'flags'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_draw_transform_feedback.txt', }, 'GL_EXT_external_buffer' : { 'number' :", ": { 'public' }, 'url' : 'extensions/NV/NV_uniform_buffer_unified_memory.txt', }, 'GL_NV_vdpau_interop' : { 'number' :", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_impact_pixel_texture.txt', }, 'GL_SGIX_instrument_error' :", "'AMD' }, 'url' : 'extensions/AMD/AMD_occlusion_query_event.txt', }, 'GL_AMD_performance_monitor' : { 'number' : 360, 'esnumber'", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression_rgtc.txt', }, 'GL_ARB_texture_cube_map' : { 'arbnumber' : 7, 'flags'", "'GL_NV_texture_rectangle' : { 'number' : 229, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'url' : 'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', }, 'GL_QCOM_tiled_rendering' : { 'esnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fill_rectangle.txt', }, 'GL_NV_float_buffer' : { 'number'", ": 257, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clip_cull_distance.txt', }, 'GL_EXT_clip_volume_hint' :", "'esnumber' : 84, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', }, 'GL_ANGLE_instanced_arrays'", "'esnumber' : 70, 'flags' : { 'public' }, 'supporters' : { 'QCOM' },", "131, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query2.txt', }, 'GL_ARB_invalidate_subdata' : {", "'AMD' }, 'url' : 'extensions/AMD/AMD_performance_monitor.txt', }, 'GL_AMD_pinned_memory' : { 'number' : 411, 'flags'", "}, 'url' : 'extensions/EXT/EXT_coordinate_frame.txt', }, 'GL_EXT_copy_image' : { 'esnumber' : 175, 'flags' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', }, 'GL_SGIX_texture_lod_bias' : {", "{ 'number' : 150, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", ": { 
'public' }, 'url' : 'extensions/EXT/EXT_copy_image.txt', }, 'GL_EXT_copy_texture' : { 'number' :", ": 'extensions/APPLE/APPLE_texture_range.txt', }, 'GL_APPLE_transform_hint' : { 'number' : 160, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_video_output.txt', },", "{ 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias' : { 'GL_SGIX_color_type' }, }, 'GLX_SGIX_color_typeXXX'", "'number' : 138, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SUN'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_evaluators.txt', },", "161, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' :", "'public' }, 'url' : 'extensions/NV/NV_copy_depth_to_color.txt', }, 'GL_NV_copy_image' : { 'number' : 376, 'flags'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_query_buffer_object.txt', }, 'GL_ARB_robust_buffer_access_behavior' : { 'arbnumber' :", "}, 'GL_ARB_explicit_uniform_location' : { 'arbnumber' : 128, 'flags' : { 'public' }, 'url'", ": 107, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_robustness.txt', }, 'GL_EXT_sRGB' :", "}, 'GL_ARB_ES2_compatibility' : { 'arbnumber' : 95, 'flags' : { 'public' }, 'url'", "{ 'number' : 227, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 113, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_translated_shader_source.txt', }, 'GL_APPLE_aux_depth_stencil' :", ": 286, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'esnumber' : 85, 'flags' : { 'public' }, 'url' : 'extensions/VIV/VIV_shader_binary.txt', }, 'WGL_3DL_stereo_control'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_coverage.txt', }, 'GL_NV_multisample_filter_hint' : { 'number' : 259,", "'GL_OES_texture_border_clamp' : { 'esnumber' : 215, 'flags' : { 'public' }, 'url' :", ": { 'number' : 322, 'flags' : { 'public' }, 'supporters' : {", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program_shadow.txt', }, 'GL_ARB_fragment_shader' : { 'arbnumber' : 32,", 
"'comments' : 'Different that the OpenGL extension with the same name string.', },", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_pixel.txt', }, 'GL_SGIX_bali_g_instruments' :", "}, 'url' : 'extensions/EXT/EXT_vertex_attrib_64bit.txt', }, 'GL_EXT_vertex_shader' : { 'number' : 248, 'flags' :", ": { 'number' : 294, 'esnumber' : 6, 'flags' : { 'public' },", "'number' : 74, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_subtable.txt', }, 'GL_EXT_compiled_vertex_array'", ": 'extensions/EXT/GLX_EXT_stereo_tree.txt', }, 'GL_EXT_gpu_program_parameters' : { 'number' : 320, 'flags' : { 'public'", "'extensions/IMG/IMG_framebuffer_downsample.txt', }, 'GL_IMG_multisampled_render_to_texture' : { 'esnumber' : 74, 'flags' : { 'public' },", "'GL_EXT_copy_image' : { 'esnumber' : 175, 'flags' : { 'public' }, 'url' :", "'esnumber' : 2, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_func_separate.txt', }, 'GL_OES_blend_subtract'", "'HP', 'SGI' }, 'url' : 'extensions/EXT/EXT_copy_texture.txt', }, 'GLX_EXT_create_context_es2_profile' : { 'number' : 399,", ": { 'esnumber' : 51, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_BGRA8888.txt',", "'GL_MESA_resize_buffers' : { 'number' : 196, 'flags' : { 'public' }, 'supporters' :", "'esnumber' : 149, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_border_clamp.txt', }, 'GL_NV_texture_compression_latc'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_enhanced_layouts.txt', }, 'GL_ARB_explicit_attrib_location' : { 'arbnumber' : 79,", "'esnumber' : 145, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_instanced_arrays.txt', }, 'GL_NV_internalformat_sample_query'", "'extensions/OES/OES_texture_buffer.txt', }, 'GL_OES_texture_compression_astc' : { 'esnumber' : 162, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle.txt', }, 'GL_NV_texture_rectangle_compressed' : { 'number' : 509, 'flags'", "70, 'flags' : { 'public' }, 'supporters' : { 
'QCOM' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_texture_foveated.txt', }, 'GL_QCOM_shader_framebuffer_fetch_noncoherent' : { 'esnumber'", ": 'extensions/SGIX/SGIX_spotlight_cutoff.txt', }, 'GL_SGIX_sprite' : { 'number' : 52, 'flags' : { 'public'", "'NVIDIA' }, 'url' : 'extensions/NVX/NVX_conditional_render.txt', }, 'GL_NVX_gpu_memory_info' : { 'number' : 438, 'flags'", "}, 'GL_SGIX_ycrcb_subsample' : { 'number' : 204, 'flags' : { 'incomplete' }, 'supporters'", "}, 'GL_NV_draw_instanced' : { 'esnumber' : 141, 'flags' : { 'public' }, 'url'", "{ 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_add.txt', }, 'GL_EXT_texture_env_combine' : { 'number' :", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cube_map.txt', }, 'GL_SGIX_cylinder_texgen'", ": 'extensions/SGIX/SGIX_decimation.txt', }, 'GL_SGIX_depth_pass_instrument' : { 'number' : 205, 'flags' : { 'incomplete'", "'extensions/OES/OES_depth_texture.txt', }, 'GL_OES_depth_texture_cube_map' : { 'esnumber' : 136, 'flags' : { 'public' },", "'extensions/SGIX/SGIX_blend_cmultiply.txt', }, 'GL_SGIX_calligraphic_fragment' : { 'number' : 82, 'flags' : { 'incomplete' },", "}, 'url' : 'extensions/NV/NV_copy_depth_to_color.txt', }, 'GL_NV_copy_image' : { 'number' : 376, 'flags' :", "'url' : 'extensions/DMP/DMP_shader_binary.txt', }, 'GL_EXT_422_pixels' : { 'number' : 178, 'flags' : {", ": { 'public' }, 'url' : 'extensions/IMG/IMG_bindless_texture.txt', }, 'GL_IMG_framebuffer_downsample' : { 'esnumber' :", "'extensions/NV/NV_texture_expand_normal.txt', }, 'GL_NV_texture_multisample' : { 'number' : 403, 'flags' : { 'public' },", "}, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_gamma.txt', }, 'WGL_I3D_genlock' : {", "151, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', }, 'GL_ARB_vertex_type_2_10_10_10_rev' : {", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shadow_funcs.txt', }, 'GL_EXT_shadow_samplers' : { 'esnumber' 
:", "'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc2.txt', }, 'GL_IMG_texture_env_enhanced_fixed_function' : { 'esnumber'", "113, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_map_buffer_alignment.txt', }, 'GL_ARB_map_buffer_range' : {", ": 'extensions/SGIX/SGIX_pixel_tiles.txt', }, 'GL_SGIX_polynomial_ffd' : { 'number' : 59, 'flags' : { 'incomplete'", "{ 'esnumber' : 186, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_primitive_bounding_box.txt', },", "'url' : 'extensions/EXT/EXT_texture_mirror_clamp.txt', }, 'GL_EXT_texture_mirror_clamp_to_edge' : { 'esnumber' : 291, 'flags' : {", "'url' : 'extensions/SGIX/SGIX_convolution_accuracy.txt', }, 'GL_SGIX_cube_map' : { 'number' : 130, 'flags' : {", "'extensions/SGIS/SGIS_detail_texture.txt', }, 'GL_SGIS_fog_function' : { 'number' : 64, 'flags' : { 'public' },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vdpau_interop.txt', }, 'GL_NV_vertex_array_range' : { 'number' :", "'GL_EXT_gpu_shader5' : { 'esnumber' : 178, 'flags' : { 'public' }, 'url' :", "'GL_OES_stencil4' : { 'esnumber' : 32, 'flags' : { 'public' }, 'url' :", "'GL_ARB_explicit_attrib_location' : { 'arbnumber' : 79, 'flags' : { 'public' }, 'url' :", "'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_snorm.txt', }, 'GL_EXT_texture_storage'", "'url' : 'extensions/ARB/ARB_vertex_shader.txt', }, 'GL_ARB_vertex_type_10f_11f_11f_rev' : { 'arbnumber' : 151, 'flags' : {", "{ 'esnumber' : 22, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_mirrored_repeat.txt', },", "52, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'url' : 'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias' : { 'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB' }, }, 'GL_ARB_geometry_shader4' :", ": { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_snorm.txt',", "}, 'GL_AMD_debug_output' : { 'number' : 395, 'flags' : { 'public' }, 
'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_clip_distance.txt', }, 'GL_APPLE_color_buffer_packed_float' : { 'esnumber'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5.txt', }, 'GL_NV_gpu_program5_mem_extended' : { 'number' : 434,", "'GL_EXT_texture_format_sRGB_override' : { 'esnumber' : 299, 'flags' : { 'public' }, 'url' :", ": 501, 'esnumber' : 274, 'flags' : { 'public' }, 'supporters' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_vdpau_interop.txt', }, 'GL_NV_vertex_array_range' : { 'number' : 190, 'flags'", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ir_instrument1.txt', },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_video_capture.txt', 'alias' : {", "'GL_EXT_geometry_point_size' }, }, 'GL_EXT_geometry_shader4' : { 'number' : 324, 'flags' : { 'public'", ": { 'esnumber' : 203, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_base_instance.txt',", "370, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "'url' : 'extensions/INTEL/INTEL_texture_scissor.txt', }, 'GL_KHR_blend_equation_advanced' : { 'arbnumber' : 174, 'esnumber' : 168,", ": { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_storage.txt', }, 'GL_EXT_memory_object' : { 'number' :", "87, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external.txt', }, 'GL_OES_EGL_image_external_essl3' : {", "}, 'GL_ARB_ES3_2_compatibility' : { 'arbnumber' : 176, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_x11_sync_object.txt', }, 'GL_FJ_shader_binary_GCCSO'", ": 'extensions/QCOM/QCOM_tiled_rendering.txt', }, 'GL_QCOM_writeonly_rendering' : { 'esnumber' : 61, 'flags' : { 'public'", "{ 'GL_EXT_semaphore_win32' }, }, 'GL_EXT_float_blend' : { 'esnumber' : 224, 'flags' : {", ": { 'arbnumber' : 160, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clip_control.txt',", "{ 'esnumber' : 134, 'flags' : { 'public' }, 'url' : 
'extensions/NV/NV_pixel_buffer_object.txt', },", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/EXT/EXT_texture_env_dot3.txt', },", "'GL_ARB_conditional_render_inverted' : { 'arbnumber' : 161, 'flags' : { 'public' }, 'url' :", "'GL_KHR_texture_compression_astc_ldr' }, }, 'GL_KHR_texture_compression_astc_sliced_3d' : { 'arbnumber' : 189, 'esnumber' : 249, 'flags'", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_swap_control.txt', }, 'GL_MESA_tile_raster_order'", "}, 'GL_ARB_vertex_type_2_10_10_10_rev' : { 'arbnumber' : 86, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_lod_bias.txt', }, 'GL_EXT_texture_mirror_clamp' : {", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', }, 'GL_MESA_shader_integer_functions'", "'extensions/SGIX/SGIX_scalebias_hint.txt', }, 'GL_SGIX_shadow' : { 'number' : 34, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_shader_binary.txt', }, 'GL_ARM_rgba8' : { 'esnumber'", "{ 'number' : 277, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_depth_clamp.txt', }, 'GL_ARB_depth_texture' : { 'arbnumber' : 22,", "'number' : 402, 'esnumber' : 152, 'flags' : { 'public' }, 'supporters' :", "with arbnumber 55, WGL_ARB_create_context.', }, 'WGL_ARB_create_context_robustness' : { 'arbnumber' : 102, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float.txt',", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program2.txt', },", "173, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_overflow_query.txt', }, 'GL_ARB_transpose_matrix' : {", ": 'extensions/EXT/EXT_geometry_shader.txt', 'alias' : { 'GL_EXT_geometry_point_size' }, }, 'GL_EXT_geometry_shader4' : { 'number' :", "'url' : 'extensions/ARB/ARB_ES3_2_compatibility.txt', }, 
'GL_ARB_ES3_compatibility' : { 'arbnumber' : 127, 'flags' : {", ": { 'esnumber' : 185, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_view.txt',", "'url' : 'extensions/3DL/WGL_3DL_stereo_control.txt', }, 'WGL_AMD_gpu_association' : { 'number' : 361, 'flags' : {", "'extensions/SGIX/SGIX_texture_supersample.txt', }, 'GL_SGIX_vector_ops' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_vector_ops.txt',", "'GLX_EXT_swap_control_tear' : { 'number' : 414, 'flags' : { 'public' }, 'supporters' :", "'extensions/EXT/GLX_EXT_swap_control_tear.txt', }, 'GL_EXT_tessellation_shader' : { 'esnumber' : 181, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture2.txt', }, 'GL_EXT_multiview_draw_buffers' : { 'esnumber'", "'extensions/EXT/EXT_shader_image_load_store.txt', }, 'GL_EXT_shader_implicit_conversions' : { 'esnumber' : 179, 'flags' : { 'public' },", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/EXT/EXT_texture_cube_map.txt', 'comments' : 'Extension shipped", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_image.txt', }, 'GLX_ARB_create_context' : { 'arbnumber'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_point_sprite.txt', }, 'GL_NV_polygon_mode' : { 'esnumber'", "}, 'url' : 'extensions/NV/NV_vertex_attrib_integer_64bit.txt', }, 'GL_NV_vertex_buffer_unified_memory' : { 'number' : 380, 'flags' :", "'url' : 'extensions/NV/NV_shader_storage_buffer_object.txt', }, 'GL_NV_shader_thread_group' : { 'number' : 447, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_separate_shader_objects.txt', }, 'GL_ARB_shader_atomic_counter_ops' : { 'arbnumber' : 182, 'flags' :", "'esnumber' : 96, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_npot_2D_mipmap.txt', }, 'GL_NV_texture_rectangle'", "}, 'url' : 'extensions/EXT/GLX_EXT_import_context.txt', }, 'GL_EXT_index_array_formats' : { 'number' : 96, 'flags' :", "'arbnumber' : 86, 'flags' : { 'public' }, 'url' : 
'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', }, 'GL_ARB_viewport_array'", "'extensions/ARB/ARB_indirect_parameters.txt', }, 'GL_ARB_instanced_arrays' : { 'arbnumber' : 49, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' :", "}, 'url' : 'extensions/ARB/ARB_clip_control.txt', }, 'GL_ARB_color_buffer_float' : { 'arbnumber' : 39, 'flags' :", ": { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/EXT/EXT_transform_feedback2.txt', 'comments' :", ": 'extensions/EXT/EXT_vertex_attrib_64bit.txt', }, 'GL_EXT_vertex_shader' : { 'number' : 248, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc.txt', }, 'GL_IMG_texture_compression_pvrtc2' : { 'esnumber' : 140, 'flags'", "'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_texture.txt', }, 'GLX_SGIX_dm_buffer' : {", "'extensions/EXT/EXT_shader_io_blocks.txt', }, 'GL_EXT_shader_non_constant_global_initializers' : { 'esnumber' : 264, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_vertex_preclip.txt',", ": { 'number' : 499, 'flags' : { 'public' }, 'supporters' : {", "'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_array_formats.txt', }, 'GL_EXT_index_func' : { 'number' : 95,", "}, 'url' : 'extensions/ARB/ARB_debug_output.txt', }, 'GL_ARB_depth_buffer_float' : { 'arbnumber' : 43, 'flags' :", "{ 'number' : 512, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_range.txt', }, 'GL_APPLE_vertex_program_evaluators' :", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_window_pos.txt', }, 'GL_MESA_ycbcr_texture' : {", "{ 'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_texenv_op.txt', 'comments' : 'Evolved into", "'url' : 'extensions/OES/OES_packed_depth_stencil.txt', }, 'GL_OES_paletted_texture' : { 'esnumber' : 13, 'flags' : {", 
"'GL_OES_texture_mirrored_repeat' : { 'esnumber' : 22, 'flags' : { 'public' }, 'url' :", ": 'extensions/SGIX/SGIX_mpeg2.txt', }, 'GL_SGIX_nonlinear_lighting_pervertex' : { 'flags' : { 'incomplete' }, 'url' :", "{ 'esnumber' : 210, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_geometry_shader.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_max_level.txt', }, 'GL_APPLE_texture_packed_float' : { 'esnumber'", ": 41, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "}, 'url' : 'extensions/EXT/EXT_transform_feedback2.txt', 'comments' : 'Draft extension which is referred to by", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_texture_rectangle.txt', }, 'WGL_NV_swap_group' : { 'number'", "}, 'url' : 'extensions/OES/OES_EGL_image.txt', }, 'GL_OES_EGL_image_external' : { 'esnumber' : 87, 'flags' :", "'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_dm_buffer.txt', }, 'GL_SGIX_dvc' : { 'flags' : { 'incomplete'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_gl_spirv.txt', }, 'GL_ARB_gpu_shader5' : { 'arbnumber' :", "'incomplete', 'obsolete' }, 'url' : 'extensions/EXT/EXT_transform_feedback2.txt', 'comments' : 'Draft extension which is referred", "'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_edge_clamp.txt', }, 'GL_SGIS_texture_filter4' : { 'number' :", ": 'extensions/APPLE/APPLE_vertex_array_object.txt', }, 'GL_APPLE_vertex_array_range' : { 'number' : 274, 'flags' : { 'public'", "170, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_variables.txt', }, 'GL_OES_shader_image_atomic' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_swap_control.txt', }, 'GL_SGI_texture_color_table' : { 'number'", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', }, 'GL_SGIX_fog_layers' :", ": { 'number' : 185, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 135, 'flags' : { 'public' }, 'supporters' : {", "'SGI' }, 'url' : 
'extensions/SGI/SGI_color_matrix.txt', }, 'GL_SGI_color_table' : { 'number' : 14, 'flags'", "'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_extensions_string.txt', }, 'WGL_EXT_make_current_read' : { 'number' : 169, 'flags'", "'IBM', 'IMG', 'SUN' }, 'url' : 'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias' : { 'GL_SUN_multi_draw_arrays' }, },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program.txt', }, 'GL_NV_fragment_program2' :", "'url' : 'extensions/ARB/ARB_polygon_offset_clamp.txt', }, 'GL_ARB_post_depth_coverage' : { 'arbnumber' : 180, 'flags' : {", "'extensions/EXT/EXT_texture_env_add.txt', }, 'GL_EXT_texture_env_combine' : { 'number' : 158, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program.txt', },", "'../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', }, 'GL_OES_blend_equation_separate' : { 'esnumber' : 1, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/DMP/DMP_shader_binary.txt', }, 'GL_EXT_422_pixels' : { 'number' : 178,", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_lod_bias.txt', }, 'GL_SGIX_texture_mipmap_anisotropic' :", "'extensions/SGIS/SGIS_multisample.txt', 'alias' : { 'GLX_SGIS_multisample' }, }, 'GL_SGIS_multitexture' : { 'number' : 116,", ": { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_cull_vertex.txt',", "'GL_ARB_occlusion_query2' : { 'arbnumber' : 80, 'flags' : { 'public' }, 'url' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_group.txt', }, 'GL_SGIX_tag_sample_buffer' :", "121, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_map_buffer_range.txt', }, 'GL_EXT_misc_attribute' : {", "{ 'esnumber' : 66, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_read_format_bgra.txt', },", ": 229, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'number' : 373, 'esnumber' : 76, 'flags' : { 'public' }, 'supporters' :", ": { 'esnumber' : 
66, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_read_format_bgra.txt',", ": { 'number' : 50, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/MESA/MESA_ycbcr_texture.txt', }, 'GL_MTK_program_binary' : { 'esnumber' : 245, 'flags' : { 'incomplete',", "'arbnumber' : 107, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_base_instance.txt', }, 'GL_ARB_bindless_texture'", ": { 'esnumber' : 84, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt',", ": 'extensions/SGIS/SGIS_texture_select.txt', }, 'GL_SGIX_async' : { 'number' : 132, 'flags' : { 'incomplete',", ": 'extensions/SGIX/GLX_SGIX_visual_select_group.txt', }, 'GLX_SGIX_wait_group' : { 'flags' : { 'incomplete' }, 'url' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_render_texture.txt', }, 'WGL_ARB_robustness_application_isolation' :", "'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_blit.txt', }, 'GL_ANGLE_framebuffer_multisample' : { 'esnumber'", "'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_color_clamp.txt',", "{ 'number' : 207, 'flags' : { 'public' }, 'supporters' : { '3DFX'", "}, 'GL_QCOM_performance_monitor_global_mode' : { 'esnumber' : 56, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/OES/OES_byte_coordinates.txt', }, 'GL_OES_compressed_ETC1_RGB8_texture' : { 'esnumber' : 5, 'flags' :", "}, 'GL_EXT_clear_texture' : { 'esnumber' : 269, 'flags' : { 'public' }, 'url'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_spotlight_cutoff.txt', }, 'GL_SGIX_sprite' : { 'number'", "437, 'esnumber' : 161, 'flags' : { 'public' }, 'supporters' : { 'INTEL'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counter_ops.txt', }, 'GL_ARB_shader_atomic_counters' : { 'arbnumber'", "'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', }, 'GL_AMD_transform_feedback4' : { 'number' : 450, 'flags' : { 'public' },", ": 92, 'flags' : { 'public' }, 
'supporters' : { 'SGI' }, 'url'", "{ 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture_object.txt', }, 'GL_EXT_texture_perturb_normal' : {", ": '../EGL/extensions/NV/EGL_NV_coverage_sample.txt', }, 'GL_NV_deep_texture3D' : { 'number' : 424, 'flags' : { 'public'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_half_float.txt', }, 'GL_NV_image_formats' :", "}, 'url' : 'extensions/NV/NV_vertex_program2.txt', }, 'GL_NV_vertex_program2_option' : { 'number' : 305, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_provoking_vertex.txt', }, 'GL_EXT_pvrtc_sRGB' : { 'esnumber' : 155, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', }, 'GL_IMG_texture_filter_cubic' : { 'esnumber'", "'arbnumber' : 87, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_indirect.txt', }, 'GL_ARB_draw_instanced'", "'arbnumber' : 99, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_64bit.txt', }, 'GL_ARB_vertex_attrib_binding'", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_release_buffers.txt', }, 'GL_MESA_resize_buffers' :", "'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_frame_terminator.txt', }, 'GL_GREMEDY_string_marker' : { 'number'", ": { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_blackhole_render.txt', }, 'GL_INTEL_parallel_arrays' : { 'number' :", "'esnumber' : 225, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_post_depth_coverage.txt', }, 'GL_EXT_primitive_bounding_box'", ": 131, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", ": 'extensions/OES/OES_framebuffer_object.txt', }, 'GL_OES_geometry_shader' : { 'esnumber' : 210, 'flags' : { 'public'", ": 'extensions/MESA/GLX_MESA_swap_control.txt', }, 'GL_MESA_tile_raster_order' : { 'number' : 515, 'esnumber' : 292, 'flags'", ": 84, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', }, 'GL_ANGLE_instanced_arrays' :", 
"'GL_NV_conditional_render' : { 'number' : 346, 'esnumber' : 198, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_group.txt',", "'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_RG8.txt', }, 'GL_EXT_texture_sRGB_decode' : { 'number' : 402, 'esnumber'", ": 'extensions/EXT/EXT_stencil_two_side.txt', }, 'GL_EXT_stencil_wrap' : { 'number' : 176, 'flags' : { 'public'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners2.txt', }, 'GL_NV_robustness_video_memory_purge' : { 'number' :", "}, 'url' : 'extensions/EXT/EXT_clip_volume_hint.txt', }, 'GL_EXT_cmyka' : { 'number' : 18, 'flags' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_x11_sync_object.txt', },", "}, 'GL_NV_copy_buffer' : { 'esnumber' : 158, 'flags' : { 'public' }, 'url'", "'extensions/ARB/ARB_blend_func_extended.txt', }, 'GL_ARB_buffer_storage' : { 'arbnumber' : 144, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_uniform_buffer_object.txt', }, 'GL_ARB_vertex_array_bgra' : { 'arbnumber' : 68, 'flags' :", "'url' : 'extensions/EXT/EXT_draw_buffers.txt', }, 'GL_EXT_draw_buffers2' : { 'number' : 340, 'flags' : {", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_packed_6bytes.txt', }, 'GLX_SGIX_pbuffer'", "{ 'arbnumber' : 94, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback3.txt', },", "'url' : 'extensions/NV/NV_image_formats.txt', }, 'GL_NV_instanced_arrays' : { 'esnumber' : 145, 'flags' : {", ": { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_video_sync.txt', }, 'GL_SUNX_constant_data' : { 'number' :", ": 41, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_performance_query.txt', }, 'GLX_INTEL_swap_event' : { 'number'", "'number' : 401, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'supporters' : { 'REND' }, 'url' : 'extensions/REND/REND_screen_coordinates.txt', }, 
'GL_S3_s3tc' : {", "300, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' :", "{ 'arbnumber' : 58, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compatibility.txt', },", "'url' : 'extensions/EXT/EXT_texture_array.txt', }, 'GL_EXT_texture_border_clamp' : { 'esnumber' : 182, 'flags' : {", ": { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_visual_select_group.txt', }, 'GLX_SGIX_wait_group' : { 'flags' :", "'extensions/SGIX/SGIX_depth_pass_instrument.txt', }, 'GL_SGIX_depth_texture' : { 'number' : 63, 'flags' : { 'public' },", "'GL_ARB_shader_image_size' : { 'arbnumber' : 136, 'flags' : { 'public' }, 'url' :", "}, 'GL_EXT_texture_cube_map' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/EXT/EXT_texture_cube_map.txt', 'comments'", "'public' }, 'url' : 'extensions/ARB/ARB_texture_filter_minmax.txt', }, 'GL_ARB_texture_float' : { 'arbnumber' : 41, 'flags'", "79, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_attrib_location.txt', }, 'GL_ARB_explicit_uniform_location' : {", ": { 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture_object.txt', }, 'GL_EXT_texture_perturb_normal' :", "}, 'url' : 'extensions/NV/NV_depth_clamp.txt', }, 'GL_NV_depth_nonlinear' : { 'esnumber' : 73, 'flags' :", "'public' }, 'url' : 'extensions/ARB/ARB_shading_language_include.txt', }, 'GL_ARB_shading_language_packing' : { 'arbnumber' : 116, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_elements_base_vertex.txt', }, 'GL_OES_draw_texture' : { 'esnumber'", "}, 'url' : 'extensions/SGIX/SGIX_pixel_texture_bits.txt', }, 'GL_SGIX_pixel_texture_lod' : { 'number' : 128, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_pinned_memory.txt',", "'extensions/ARB/ARB_sample_locations.txt', }, 'GL_ARB_sample_shading' : { 'arbnumber' : 70, 'flags' : { 'public' },", ": 173, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_overflow_query.txt', }, 'GL_ARB_transpose_matrix' :", "}, 'url' : 
'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', }, 'GL_REND_screen_coordinates' : { 'number' : 155, 'flags' :", "'extensions/ARB/ARB_polygon_offset_clamp.txt', }, 'GL_ARB_post_depth_coverage' : { 'arbnumber' : 180, 'flags' : { 'public' },", "'url' : 'extensions/NV/NV_geometry_shader4.txt', }, 'GL_NV_geometry_shader_passthrough' : { 'number' : 470, 'esnumber' : 233,", ": 215, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_border_clamp.txt', }, 'GL_OES_texture_buffer' :", "'GL_ARB_texture_env_crossbar' : { 'arbnumber' : 18, 'flags' : { 'public' }, 'supporters' :", "'GL_AMD_compressed_ATC_texture' : { 'esnumber' : 40, 'flags' : { 'public' }, 'url' :", "number 15 with SGIS_pixel_texture.', }, 'GL_SGIX_pixel_texture_bits' : { 'number' : 127, 'flags' :", "'esnumber' : 68, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_shader_binary.txt', }, 'GL_IMG_texture_compression_pvrtc'", ": 'extensions/ARB/ARB_cull_distance.txt', }, 'GL_ARB_debug_output' : { 'arbnumber' : 104, 'flags' : { 'public'", ": 'extensions/INTEL/INTEL_parallel_arrays.txt', }, 'GL_INTEL_performance_query' : { 'number' : 443, 'esnumber' : 164, 'flags'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_buffer_unified_memory.txt', }, 'GL_NV_vertex_program' :", "'WGL_NV_present_video' }, }, 'GL_NV_primitive_restart' : { 'number' : 285, 'flags' : { 'public'", "{ 'number' : 11, 'flags' : { 'public' }, 'supporters' : { 'INGR',", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2_option.txt', }, 'GL_NV_vertex_program3' : { 'number' : 306,", "335, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'GL_OES_texture_env_crossbar' : { 'esnumber' : 21, 'flags' : { 'public' }, 'url' :", ": { 'IBM' }, 'url' : 'extensions/IBM/IBM_vertex_array_lists.txt', }, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' : { 'flags' :", "}, 'url' : 'extensions/ARB/ARB_get_program_binary.txt', }, 'GL_ARB_get_texture_sub_image' : { 'arbnumber' : 165, 'flags' :", 
": { 'number' : 27, 'flags' : { 'public' }, 'supporters' : {", ": 106, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture.txt', }, 'GL_EXT_multisampled_render_to_texture2' :", "'alias' : { 'GLX_NV_video_capture', 'WGL_NV_video_capture' }, }, 'GLX_NV_video_out' : { 'number' : 348,", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_array_object.txt', },", "'GL_EXT_base_instance' : { 'esnumber' : 203, 'flags' : { 'public' }, 'url' :", "183, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_buffer.txt', }, 'GL_EXT_texture_buffer_object' : {", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', }, 'GLU_EXT_object_space_tess' : {", "'3DL', 'SGI' }, 'url' : 'extensions/EXT/EXT_shared_texture_palette.txt', }, 'GL_EXT_sparse_texture' : { 'esnumber' : 240,", "'arbnumber' : 151, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', }, 'GL_ARB_vertex_type_2_10_10_10_rev'", "'GL_EXT_texture_compression_astc_decode_mode' : { 'esnumber' : 276, 'flags' : { 'public' }, 'url' :", ": 38, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_half_float.txt', }, 'GL_OES_vertex_type_10_10_10_2' :", "}, 'GL_SGIX_texture_phase' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_phase.txt', },", ": { 'number' : 41, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Included with arbnumber 55, WGL_ARB_create_context.', }, 'WGL_ARB_create_context_robustness' : {", "'url' : 'extensions/ARB/ARB_bindless_texture.txt', }, 'GL_ARB_blend_func_extended' : { 'arbnumber' : 78, 'flags' : {", "'GL_QCOM_extended_get' : { 'esnumber' : 62, 'flags' : { 'public' }, 'url' :", "'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', }, 'GL_IGLOO_viewport_offsetXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' :", "96, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url'", "'number' 
: 338, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_R8.txt', }, 'GL_EXT_texture_sRGB_RG8' : { 'esnumber' :", "'url' : 'extensions/NV/NV_bindless_texture.txt', }, 'GL_NV_blend_equation_advanced' : { 'number' : 433, 'esnumber' : 163,", "{ 'number' : 236, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "'extensions/ARM/ARM_shader_framebuffer_fetch.txt', }, 'GL_ARM_shader_framebuffer_fetch_depth_stencil' : { 'esnumber' : 166, 'flags' : { 'public' },", ": { 'esnumber' : 40, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_ATC_texture.txt',", "'GL_EXT_histogram' : { 'number' : 11, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/SGIX/SGIX_line_quality_hint.txt', }, 'GL_SGIX_list_priority' : { 'number' : 80, 'flags' : { 'public'", "}, 'url' : 'extensions/OES/OES_sample_variables.txt', }, 'GL_OES_shader_image_atomic' : { 'esnumber' : 171, 'flags' :", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_viewport_index.txt', }, 'GL_ANDROID_extension_pack_es31a' : {", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_packed_6bytes.txt', },", "154, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_indirect_parameters.txt', }, 'GL_ARB_instanced_arrays' : {", "'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_program_parameters.txt', }, 'GL_EXT_gpu_shader4'", "'extensions/ARB/ARB_pixel_buffer_object.txt', }, 'GL_ARB_point_parameters' : { 'arbnumber' : 14, 'flags' : { 'public' },", "'number' : 433, 'esnumber' : 163, 'flags' : { 'public' }, 'supporters' :", "{ 'GLX_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_robustness_isolation' : { 'arbnumber' : 126, 'flags' : {", "{ 'number' : 487, 'esnumber' : 262, 'flags' : { 'public' }, 'url'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_tiles.txt', }, 'GL_SGIX_polynomial_ffd' : { 'number' : 59, 'flags'", "{ 'APPLE' }, 'url' : 
'extensions/APPLE/APPLE_ycbcr_422.txt', }, 'GL_ARB_ES2_compatibility' : { 'arbnumber' : 95,", "'number' : 31, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_misc_attribute.txt', }, 'GL_EXT_multi_draw_arrays'", "'extensions/NV/NV_explicit_multisample.txt', }, 'GL_NV_fbo_color_attachments' : { 'esnumber' : 92, 'flags' : { 'public' },", ": 'extensions/AMD/AMD_shader_stencil_value_export.txt', }, 'GL_AMD_shader_trinary_minmax' : { 'number' : 428, 'flags' : { 'public'", "}, 'url' : 'extensions/APPLE/APPLE_fence.txt', }, 'GL_APPLE_float_pixels' : { 'number' : 368, 'flags' :", ": { 'public' }, 'url' : 'extensions/NV/NV_pack_subimage.txt', }, 'GL_NV_packed_depth_stencil' : { 'number' :", "{ 'arbnumber' : 77, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_compression_bptc.txt', },", "'extensions/NV/NV_transform_feedback.txt', }, 'GL_NV_transform_feedback2' : { 'number' : 358, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_range.txt',", ": { 'esnumber' : 72, 'flags' : { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_coverage_sample.txt',", ": { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_transform_feedback.txt',", "'esnumber' : 253, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage2.txt', }, 'GL_EXT_shader_texture_lod'", "'public' }, 'supporters' : { 'MESA', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', }, 'GL_EXT_texture_integer'", ": { 'number' : 218, 'flags' : { 'public' }, 'supporters' : {", "126, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'GL_OES_blend_subtract' : { 'esnumber' : 3, 'flags' : { 'public' }, 'url'", "'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture_object.txt', }, 'GL_EXT_texture_perturb_normal' : { 'number' : 147,", ": 'extensions/SGIX/SGIX_texture_lod_bias.txt', }, 'GL_SGIX_texture_mipmap_anisotropic' : { 'flags' : { 'incomplete' }, 'url' :", 
"'extensions/SGIX/SGIX_blend_alpha_minmax.txt', }, 'GL_SGIX_blend_cadd' : { 'number' : 150, 'flags' : { 'incomplete' },", "'url' : 'extensions/EXT/GLX_EXT_import_context.txt', }, 'GL_EXT_index_array_formats' : { 'number' : 96, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_multisample.txt', }, 'GL_ARB_texture_non_power_of_two' : { 'arbnumber'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_compression_vtc.txt',", "}, 'url' : 'extensions/SGIX/SGIX_color_matrix_accuracy.txt', }, 'GL_SGIX_color_table_index_mode' : { 'flags' : { 'incomplete' },", "'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_cull_vertex.txt', }, 'GL_IBM_multimode_draw_arrays' :", "'extensions/SGI/SGI_fft.txt', }, 'GLU_SGI_filter4_parameters' : { 'number' : 85, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pixel_format.txt',", "}, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_dxt1.txt', }, 'GL_EXT_texture_compression_latc' :", "'esnumber' : 178, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_gpu_shader5.txt', }, 'GL_EXT_histogram'", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_line_quality_hint.txt', }, 'GL_SGIX_list_priority' : { 'number' : 80, 'flags'", ": 'extensions/ARB/ARB_shader_group_vote.txt', }, 'GL_ARB_shader_image_load_store' : { 'arbnumber' : 115, 'flags' : { 'public'", "'alias' : { 'GLX_NV_present_video', 'WGL_NV_present_video' }, }, 'GL_NV_primitive_restart' : { 'number' : 285,", ": 'extensions/ARB/ARB_shading_language_packing.txt', }, 'GL_ARB_shadow' : { 'arbnumber' : 23, 'flags' : { 'public'", "}, 'GL_SGIX_subsample' : { 'number' : 202, 'flags' : { 'incomplete' }, 'supporters'", "'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_subsample.txt', }, 'GLX_OML_swap_method' :", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_aux_depth_stencil.txt', }, 'GL_APPLE_client_storage' : 
{ 'number' :", "{ 'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_occlusion_test.txt', }, 'GL_HP_texture_lighting'", "{ 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_texture.txt', }, 'GLX_SGIX_dm_buffer' : { 'number' :", "}, 'GL_ARB_texture_env_combine' : { 'arbnumber' : 17, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 74, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_subtable.txt', },", "'url' : 'extensions/NV/NV_fragment_coverage_to_color.txt', }, 'GL_NV_fragment_program' : { 'number' : 282, 'flags' : {", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_program_binary_formats.txt', }, 'GLX_MESA_query_renderer' :", "'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pixel_format.txt', }, 'WGL_ARB_render_texture' : { 'arbnumber' : 20, 'flags'", "'arbnumber' : 185, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_viewport_layer_array.txt', }, 'GL_ARB_shading_language_100'", ": 105, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB.txt', }, 'GL_EXT_sRGB_write_control' :", "'INGR', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_wrap.txt', }, 'GL_EXT_subtexture' : { 'number' : 9,", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_compression_bptc.txt', }, 'GL_ARB_texture_compression_rgtc' : { 'arbnumber' : 52,", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_present_video.txt',", ": 'extensions/OES/OES_blend_func_separate.txt', }, 'GL_OES_blend_subtract' : { 'esnumber' : 3, 'flags' : { 'public'", "20, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'comments' : 'Draft extension which is referred to by some other vendor extensions,", "'WGL_EXT_colorspace' : { 'number' : 498, 'flags' : { 'public' }, 'supporters' :", "250, 'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' :", ": '../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', }, 'GL_NV_draw_buffers' : { 'esnumber' : 91, 'flags' : { 'public'", "'public' }, 'supporters' : { 'ATI' }, 'url' : 
'extensions/ATI/ATI_draw_buffers.txt', }, 'GL_ATI_element_array' :", "'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_blend_logic_op.txt', }, 'GL_EXT_blend_minmax' : { 'number' :", "'url' : 'extensions/APPLE/APPLE_texture_max_level.txt', }, 'GL_APPLE_texture_packed_float' : { 'esnumber' : 195, 'flags' : {", "'public' }, 'url' : 'extensions/EXT/EXT_external_objects_fd.txt', 'alias' : { 'GL_EXT_semaphore_fd' }, }, 'GL_EXT_memory_object_win32' :", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_float_pixels.txt',", "}, 'url' : 'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias' : { 'GLX_EXT_framebuffer_sRGB', 'WGL_EXT_framebuffer_sRGB' }, }, 'GL_EXT_geometry_shader' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_polygon_offset_clamp.txt', }, 'GL_ARB_post_depth_coverage' : {", ": { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_polygon_offset.txt', }, 'GL_EXT_polygon_offset_clamp'", "name string.', }, 'GL_EXT_separate_specular_color' : { 'number' : 144, 'flags' : { 'public'", "'url' : 'extensions/EXT/EXT_texture_cube_map.txt', 'comments' : 'Extension shipped but was not fully specified. 
Similar", "'alias' : { 'GL_NV_blend_equation_advanced_coherent' }, }, 'GL_NV_blend_minmax_factor' : { 'number' : 510, 'esnumber'", "'alias' : { 'GLX_EXT_framebuffer_sRGB', 'WGL_EXT_framebuffer_sRGB' }, }, 'GL_EXT_geometry_shader' : { 'esnumber' : 177,", "'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias' : { 'GL_ANGLE_texture_compression_dxt1', 'GL_ANGLE_texture_compression_dxt5' }, }, 'GL_ANGLE_texture_usage'", "}, 'url' : 'extensions/ATI/ATI_text_fragment_shader.txt', }, 'GL_ATI_texture_env_combine3' : { 'number' : 279, 'flags' :", "'esnumber' : 206, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_render_snorm.txt', }, 'GL_EXT_rescale_normal'", "81, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sampler_objects.txt', }, 'GL_ARB_seamless_cube_map' : {", "'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_blit.txt', }, 'GL_ANGLE_framebuffer_multisample' : { 'esnumber' : 84, 'flags'", "'esnumber' : 166, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', }, 'GL_ATI_draw_buffers'", "110, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', }, 'GL_ARB_compute_shader' : {", "'public' }, 'url' : 'extensions/EXT/EXT_pvrtc_sRGB.txt', }, 'GL_EXT_raster_multisample' : { 'number' : 462, 'esnumber'", ": 27, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'public' }, 'url' : 'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias' : { 'GL_EXT_shader_framebuffer_fetch_non_coherent' }, }, 'GL_EXT_shader_group_vote' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_occlusion_query.txt', }, 'GL_ARB_occlusion_query2' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_depth_bounds_test.txt', }, 'GL_EXT_direct_state_access' : { 'number'", "{ 'esnumber' : 34, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_3D.txt', },", "'public' }, 'url' : 
'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', }, 'GL_ATI_draw_buffers' : { 'number' : 277, 'flags'", "'url' : 'extensions/ARB/ARB_robust_buffer_access_behavior.txt', }, 'GL_ARB_robustness' : { 'arbnumber' : 105, 'flags' : {", "156, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_draw_parameters.txt', }, 'GL_ARB_shader_group_vote' : {", "'GLX_SGIS_color_range' : { 'number' : 115, 'flags' : { 'public' }, 'supporters' :", ": { 'arbnumber' : 144, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_buffer_storage.txt',", "'extensions/EXT/EXT_texenv_op.txt', 'comments' : 'Evolved into EXT_texture_env_combine.', }, 'GL_EXT_texture' : { 'number' : 4,", "'public' }, 'url' : 'extensions/OES/OES_texture_npot.txt', }, 'GL_OES_texture_stencil8' : { 'esnumber' : 173, 'flags'", "'esnumber' : 49, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA'", ": { 'number' : 189, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/OVR/OVR_multiview2.txt', }, 'GL_OVR_multiview_multisampled_render_to_texture' : { 'esnumber' : 250, 'flags' : { 'public'", "{ 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_provoking_vertex.txt', }, 'GL_EXT_pvrtc_sRGB' : { 'esnumber' :", "'GL_MESAX_texture_stack' : { 'number' : 318, 'flags' : { 'public' }, 'supporters' :", "360, 'esnumber' : 50, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "'url' : 'extensions/NV/NV_vertex_array_range2.txt', }, 'GL_NV_vertex_attrib_integer_64bit' : { 'number' : 392, 'flags' : {", "{ 'arbnumber' : 128, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_uniform_location.txt', },", ": { 'arbnumber' : 51, 'flags' : { 'public' }, 'supporters' : {", "'extensions/NV/NV_query_resource_tag.txt', }, 'GL_NV_read_buffer' : { 'esnumber' : 93, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback.txt', }, 'GL_NV_transform_feedback2' :", "163, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' :", "'esnumber' : 126, 
'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'GL_ARB_multitexture' : { 'arbnumber' : 1, 'flags' : { 'public' }, 'supporters'", "'GL_QCOM_texture_foveated' : { 'esnumber' : 293, 'flags' : { 'public' }, 'url' :", "'extensions/NV/WGL_NV_render_depth_texture.txt', }, 'WGL_NV_render_texture_rectangle' : { 'number' : 264, 'flags' : { 'public' },", "}, 'url' : 'extensions/NV/NV_light_max_exponent.txt', }, 'GL_NV_multisample_coverage' : { 'number' : 393, 'flags' :", ": { 'number' : 441, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_blend_func_extended.txt', }, 'GL_EXT_blend_func_separate' : { 'number'", ": 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', }, 'GL_OES_compressed_paletted_texture' : { 'number' : 294, 'esnumber' : 6, 'flags'", "{ 'public' }, 'url' : 'extensions/OES/OES_tessellation_shader.txt', }, 'GL_OES_texture_3D' : { 'esnumber' : 34,", ": { 'number' : 279, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_pixel_data_range.txt', }, 'GL_NV_platform_binary' : {", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_combine.txt', }, 'GL_ARB_texture_env_crossbar' : { 'arbnumber' : 18, 'flags'", "{ 'number' : 453, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gcn_shader.txt', },", "'url' : 'extensions/FJ/FJ_shader_binary_GCCSO.txt', }, 'GL_GREMEDY_frame_terminator' : { 'number' : 345, 'flags' : {", "}, 'GLX_SGIS_blended_overlay' : { 'number' : 142, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'INGR', 'NVIDIA' }, 'url' :", ": { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc.txt', }, 'GL_EXT_texture_compression_s3tc_srgb' : { 'esnumber'", "'public' }, 'url' : 'extensions/IBM/IBM_static_data.txt', }, 'GL_IBM_texture_mirrored_repeat' : { 'number' : 224, 'flags'", "{ 'number' : 6, 'flags' : { 'public' }, 'supporters' : { 'ES',", "'NVIDIA' }, 'url' : 
'extensions/NV/NV_point_sprite.txt', }, 'GL_NV_polygon_mode' : { 'esnumber' : 238, 'flags'", "'extensions/ARB/ARB_texture_storage.txt', }, 'GL_ARB_texture_storage_multisample' : { 'arbnumber' : 141, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'IBM', 'INGR' }, 'url' : 'extensions/EXT/EXT_blend_func_separate.txt',", "169, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_shading.txt', }, 'GL_OES_sample_variables' : {", ": 'extensions/OES/OES_sample_variables.txt', }, 'GL_OES_shader_image_atomic' : { 'esnumber' : 171, 'flags' : { 'public'", "{ 'number' : 478, 'esnumber' : 241, 'flags' : { 'public' }, 'url'", ": { 'number' : 31, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_misc_attribute.txt',", "'url' : 'extensions/PGI/PGI_misc_hints.txt', }, 'GL_PGI_vertex_hints' : { 'number' : 76, 'flags' : {", ": 'extensions/EXT/EXT_texenv_op.txt', 'comments' : 'Evolved into EXT_texture_env_combine.', }, 'GL_EXT_texture' : { 'number' :", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_swap_method.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_64bit.txt', }, 'GL_ARB_vertex_attrib_binding' : { 'arbnumber'", "}, 'GL_EXT_shadow_funcs' : { 'number' : 267, 'flags' : { 'public' }, 'supporters'", ": 'extensions/MESAX/MESAX_texture_stack.txt', }, 'GLX_MESA_agp_offset' : { 'number' : 308, 'flags' : { 'public'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_minmax_factor.txt', }, 'GL_NV_blend_square' : { 'number' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_video_capture.txt', 'alias' : { 'GLX_NV_video_capture', 'WGL_NV_video_capture' }, },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cylinder_texgen.txt', }, 'GL_SGIX_datapipe' : {", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_object_purgeable.txt', }, 'GL_APPLE_rgb_422' : { 'number'", "{ 'arbnumber' : 38, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "}, 'url' : 
'extensions/KHR/KHR_robust_buffer_access_behavior.txt', }, 'GL_KHR_robustness' : { 'arbnumber' : 170, 'esnumber' :", ": { 'esnumber' : 134, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_pixel_buffer_object.txt',", "'GL_SGIX_complex_polar' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_complex_polar.txt', }, 'GL_SGIX_convolution_accuracy'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_instrument_error.txt', }, 'GL_SGIX_instruments' : { 'number'", ": { 'esnumber' : 78, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_framebuffer_multisample.txt',", "'number' : 253, 'flags' : { 'public' }, 'supporters' : { 'I3D' },", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_streams.txt', }, 'GL_DMP_program_binary' :", "}, 'url' : 'extensions/EXT/EXT_separate_specular_color.txt', }, 'GL_EXT_shader_framebuffer_fetch' : { 'number' : 520, 'esnumber' :", ": 'extensions/IMG/IMG_texture_compression_pvrtc2.txt', }, 'GL_IMG_texture_env_enhanced_fixed_function' : { 'esnumber' : 58, 'flags' : { 'public'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex.txt', }, 'GL_SGI_complex_type' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_texture.txt', }, 'GL_NV_draw_vulkan_image' : { 'number'", "'arbnumber' : 132, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_invalidate_subdata.txt', }, 'GL_ARB_map_buffer_alignment'", "{ 'esnumber' : 238, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_polygon_mode.txt', },", "}, 'url' : 'extensions/NV/NV_geometry_shader4.txt', }, 'GL_NV_geometry_shader_passthrough' : { 'number' : 470, 'esnumber' :", ": 'extensions/ARB/ARB_texture_env_add.txt', }, 'GL_ARB_texture_env_combine' : { 'arbnumber' : 17, 'flags' : { 'public'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_bptc.txt', }, 'GL_EXT_texture_compression_dxt1' : { 'number' : 309, 'esnumber'", "'esnumber' : 141, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_instanced.txt', }, 
'GL_NV_draw_texture'", "'url' : 'extensions/SGIX/SGIX_texture_scale_bias.txt', }, 'GL_SGIX_texture_supersample' : { 'flags' : { 'incomplete' }, 'url'", "'esnumber' : 105, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB.txt', }, 'GL_EXT_sRGB_write_control'", "'extensions/NV/NV_read_depth_stencil.txt', }, 'GL_NV_register_combiners' : { 'number' : 191, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_setXXX.txt', },", "'KHR' }, 'url' : 'extensions/OES/OES_fixed_point.txt', }, 'GL_OES_fragment_precision_high' : { 'esnumber' : 28, 'flags'", "502, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'arbnumber' : 91, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_tessellation_shader.txt', }, 'GL_ARB_texture_barrier'", "'number' : 82, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'GL_ARB_shader_bit_encoding' : { 'arbnumber' : 82, 'flags' : { 'public' }, 'url' :", "{ 'number' : 505, 'esnumber' : 282, 'flags' : { 'public' }, 'url'", "'number' : 170, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI'", "}, 'url' : 'extensions/NV/NV_fragment_program4.txt', }, 'GL_NV_fragment_program_option' : { 'number' : 303, 'flags' :", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_timer_instruments.txt', }, 'GL_SGIX_blend_alpha_minmax' : {", "'GL_EXT_robustness' : { 'esnumber' : 107, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_context_flush_control.txt', 'alias' : { 'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control'", "'extensions/NV/NV_alpha_to_coverage_dither_control.txt', }, 'GL_NV_bgr' : { 'esnumber' : 135, 'flags' : { 'public' },", "'WGL_ARB_create_context_robustness' : { 'arbnumber' : 102, 'flags' : { 'public' }, 'url' :", "{ 'esnumber' : 83, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_blit.txt', },", "'arbnumber' : 166, 'flags' : { 'public' }, 'url' : 
'extensions/ARB/ARB_shader_texture_image_samples.txt', }, 'GL_ARB_shader_texture_lod'", ": { 'esnumber' : 33, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil8.txt',", "}, 'url' : 'extensions/EXT/EXT_convolution.txt', }, 'GL_EXT_coordinate_frame' : { 'number' : 156, 'flags' :", "}, 'GL_EXT_texenv_op' : { 'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_texenv_op.txt', 'comments'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_source.txt', }, 'GLX_SGIX_visual_select_group' : { 'number'", "{ 'esnumber' : 3, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_subtract.txt', },", ": 'extensions/ARB/ARB_stencil_texturing.txt', }, 'GL_ARB_sync' : { 'arbnumber' : 66, 'flags' : { 'public'", "'arbnumber' : 142, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_shader.txt', }, 'GL_ARB_fragment_shader_interlock' : {", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', }, 'GL_SGIX_flush_raster' :", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_bptc.txt', }, 'GL_EXT_texture_compression_dxt1' : { 'number' :", "'NVIDIA' }, 'url' : 'extensions/AMD/AMD_debug_output.txt', }, 'GL_AMD_depth_clamp_separate' : { 'number' : 401, 'flags'", "extension spec with WGL_ARB_create_context_no_error.', 'alias' : { 'WGL_ARB_create_context_no_error' }, }, 'GLX_ARB_create_context_profile' : {", "}, 'url' : 'extensions/EXT/EXT_abgr.txt', }, 'GL_EXT_base_instance' : { 'esnumber' : 203, 'flags' :", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_client_storage.txt', }, 'GL_APPLE_clip_distance'", "}, 'url' : 'extensions/EXT/EXT_buffer_storage.txt', }, 'GL_EXT_clear_texture' : { 'esnumber' : 269, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_transpose_matrix.txt',", "'flags' : { 'public' }, 'supporters' : { 'MESA' 
}, 'url' : 'extensions/MESA/MESA_resize_buffers.txt',", "}, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_texture.txt', }, 'GL_EXT_instanced_arrays' :", ": 370, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", "}, 'url' : 'extensions/ATI/ATI_draw_buffers.txt', }, 'GL_ATI_element_array' : { 'number' : 256, 'flags' :", "'esnumber' : 63, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get2.txt', }, 'GL_QCOM_performance_monitor_global_mode'", "}, 'GL_NV_sRGB_formats' : { 'esnumber' : 148, 'flags' : { 'public' }, 'url'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_list_priority.txt', }, 'GL_SGIX_mpeg1' : { 'flags' : { 'incomplete'", "{ 'public' }, 'url' : 'extensions/NV/NV_platform_binary.txt', }, 'GL_NV_point_sprite' : { 'number' : 262,", "'url' : 'extensions/ATI/ATI_texture_env_combine3.txt', }, 'GL_ATI_texture_float' : { 'number' : 280, 'flags' : {", ": 'extensions/NV/NV_draw_vulkan_image.txt', }, 'GL_NV_evaluators' : { 'number' : 225, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback3.txt', }, 'GL_ARB_transform_feedback_instanced' : { 'arbnumber' :", "'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_secondary_color.txt', }, 'GL_EXT_separate_shader_objects' : { 'number' : 377,", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_lod.txt', }, 'GL_SGIS_texture_select' : { 'number'", ": 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', }, 'GL_IMG_bindless_texture' : { 'esnumber' : 270, 'flags' : { 'public'", ": 260, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multitexture.txt', }, 'GL_SGIS_pixel_texture' : {", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_patchy.txt', }, 'GL_SGIX_fog_scale' : { 'number' : 161,", "'extensions/EXT/EXT_texture3D.txt', }, 'GL_EXT_texture_array' : { 'number' : 329, 'flags' : { 'public' },", "}, 'url' : 'extensions/NV/NV_draw_instanced.txt', }, 'GL_NV_draw_texture' : 
{ 'number' : 430, 'esnumber' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_make_current_read.txt',", ": { 'esnumber' : 56, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_performance_monitor_global_mode.txt',", "{ 'number' : 175, 'flags' : { 'public' }, 'supporters' : { 'INGR'", "{ 'number' : 489, 'esnumber' : 296, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners2.txt', }, 'GL_NV_robustness_video_memory_purge'", "}, 'url' : 'extensions/ARB/ARB_shading_language_420pack.txt', }, 'GL_ARB_shading_language_include' : { 'arbnumber' : 76, 'flags' :", ": { 'esnumber' : 12, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_palette.txt',", "'public' }, 'url' : 'extensions/NV/NV_generate_mipmap_sRGB.txt', }, 'GL_NV_geometry_program4' : { 'number' : 323, 'flags'", ": 'extensions/NV/NV_copy_depth_to_color.txt', }, 'GL_NV_copy_image' : { 'number' : 376, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/OES/OES_blend_func_separate.txt', }, 'GL_OES_blend_subtract' : { 'esnumber' : 3, 'flags'", "'url' : 'extensions/SGIX/SGIX_async_histogram.txt', }, 'GL_SGIX_async_pixel' : { 'number' : 133, 'flags' : {", "'url' : 'extensions/NV/NV_pack_subimage.txt', }, 'GL_NV_packed_depth_stencil' : { 'number' : 226, 'flags' : {", ": 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias' : { 'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' }, }, 'GL_EXT_texture_compression_bptc' : { 'esnumber' :", "'Supported on Visual Workstation 320 / 540 only.', }, 'GL_SGIX_ycrcba' : { 'number'", "'url' : 'extensions/NV/NV_path_rendering_shared_edge.txt', }, 'GL_NV_pixel_buffer_object' : { 'esnumber' : 134, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_usage.txt', }, 'GL_ANGLE_translated_shader_source' : { 'esnumber'", "{ 'SGI' }, 'url' : 'extensions/SGI/SGI_color_matrix.txt', }, 'GL_SGI_color_table' : { 'number' : 14,", 
"'GL_NV_polygon_mode' : { 'esnumber' : 238, 'flags' : { 'public' }, 'url' :", ": 'extensions/EXT/EXT_shader_image_load_formatted.txt', }, 'GL_EXT_shader_image_load_store' : { 'number' : 386, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multisample.txt', 'alias' :", "}, 'supporters' : { 'DEC', 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' :", "'url' : 'extensions/MESA/MESA_tile_raster_order.txt', }, 'GL_MESA_window_pos' : { 'number' : 197, 'flags' : {", "'extensions/NV/NV_shader_buffer_store.txt', }, 'GL_NV_shader_noperspective_interpolation' : { 'esnumber' : 201, 'flags' : { 'public' },", "380, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 'extensions/ATI/ATI_vertex_streams.txt', }, 'GL_DMP_program_binary' : { 'esnumber' : 192, 'flags' : { 'public'", "337, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'number' : 431, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "'GLX_NV_copy_image', 'WGL_NV_copy_image' }, }, 'GL_NV_coverage_sample' : { 'esnumber' : 72, 'flags' : {", "}, 'url' : 'extensions/NV/NV_multisample_filter_hint.txt', }, 'GL_NV_non_square_matrices' : { 'esnumber' : 160, 'flags' :", "'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', }, 'GLU_EXT_object_space_tess' : { 'number' : 75, 'flags'", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_object.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_square.txt', }, 'GL_NV_clip_space_w_scaling'", ": 'extensions/EXT/EXT_polygon_offset.txt', }, 'GL_EXT_polygon_offset_clamp' : { 'number' : 460, 'esnumber' : 252, 'flags'", ": 190, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gl_spirv.txt', }, 'GL_ARB_gpu_shader5' :", "'url' : 'extensions/OES/OES_vertex_type_10_10_10_2.txt', }, 'GL_OES_viewport_array' : { 'esnumber' : 267, 'flags' : {", "64, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_provoking_vertex.txt', }, 
'GL_ARB_query_buffer_object' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_conservative_depth.txt', }, 'GL_EXT_convolution' : { 'number' : 12,", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_vertex.txt', },", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_pixel.txt', }, 'GL_SGIX_bali_g_instruments' : { 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_multisample.txt', 'alias' : { 'GLX_ARB_multisample', 'WGL_ARB_multisample' }, }, 'GL_ARB_multitexture' :", "254, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_group_vote.txt', }, 'GL_EXT_shader_image_load_formatted' : {", ": { 'INGR' }, 'url' : 'extensions/INGR/INGR_color_clamp.txt', }, 'GL_INGR_interlace_read' : { 'number' :", ": 403, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'KHR' }, 'url' : 'extensions/OML/OML_subsample.txt', }, 'GLX_OML_swap_method' : { 'number' : 237,", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_label.txt',", "}, 'GL_EXT_cmyka' : { 'number' : 18, 'flags' : { 'public' }, 'supporters'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow.txt', }, 'GL_ARB_shadow_ambient' : { 'arbnumber' : 24,", "'extensions/ATI/ATI_texture_mirror_once.txt', }, 'GL_ATI_vertex_array_object' : { 'number' : 247, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_conditional_render_inverted.txt', }, 'GL_ARB_conservative_depth' : { 'arbnumber' : 111, 'flags' :", "'url' : 'extensions/NV/NV_platform_binary.txt', }, 'GL_NV_point_sprite' : { 'number' : 262, 'flags' : {", "'number' : 136, 'flags' : { 'public' }, 'supporters' : { 'INTEL' },", "'url' : 'extensions/OES/OES_single_precision.txt', }, 'GL_OES_standard_derivatives' : { 'esnumber' : 45, 'flags' : {", "{ 'APPLE', 'NVIDIA' }, 'url' : 'extensions/ATI/ATI_text_fragment_shader.txt', }, 'GL_ATI_texture_env_combine3' : { 'number' :", "110, 'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' :", "{ 'public' }, 'url' : 
'extensions/APPLE/APPLE_texture_max_level.txt', }, 'GL_APPLE_texture_packed_float' : { 'esnumber' : 195,", "'GL_EXT_EGL_image_external_wrap_modes' : { 'esnumber' : 298, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/NV/NV_texture_expand_normal.txt', }, 'GL_NV_texture_multisample' : { 'number' : 403, 'flags' :", ": 203, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "'esnumber' : 52, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'GL_APPLE_transform_hint' : { 'number' : 160, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_float.txt', }, 'GL_ARB_texture_gather' : { 'arbnumber'", ": { 'esnumber' : 121, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_map_buffer_range.txt',", "}, 'url' : 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', }, 'GL_EXT_shader_pixel_local_storage' : { 'esnumber' : 167, 'flags' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_array.txt', }, 'GL_EXT_texture_border_clamp' : { 'esnumber'", ": 'extensions/IMG/IMG_texture_compression_pvrtc.txt', }, 'GL_IMG_texture_compression_pvrtc2' : { 'esnumber' : 140, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_view.txt', }, 'GL_ARB_timer_query' : { 'arbnumber'", "{ 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external.txt', }, 'GL_OES_EGL_image_external_essl3' : { 'esnumber' : 220,", "'arbnumber' : 175, 'esnumber' : 243, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/SGIX/SGIX_async.txt', }, 'GL_SGIX_async_histogram' : { 'number' : 134, 'flags' : {", "239, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_buffer_storage.txt', }, 'GL_EXT_clear_texture' : {", ": 'extensions/ARB/ARB_ES3_2_compatibility.txt', }, 'GL_ARB_ES3_compatibility' : { 'arbnumber' : 127, 'flags' : { 'public'", "'extensions/NV/NV_texture_multisample.txt', }, 'GL_NV_texture_npot_2D_mipmap' : { 'esnumber' : 96, 'flags' : { 'public' },", "{ 'public' }, 
'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_resample.txt', }, 'GL_OML_subsample'", "}, 'GL_MESA_ycbcr_texture' : { 'number' : 301, 'flags' : { 'public' }, 'supporters'", "188, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_filter_minmax.txt', }, 'GL_ARB_texture_float' : {", "'drafts/MTK/MTK_shader_binary.txt', }, 'GL_NVX_blend_equation_advanced_multi_draw_buffers' : { 'number' : 492, 'esnumber' : 266, 'flags' :", "'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_dxt1.txt', }, 'GL_EXT_texture_compression_latc' : { 'number' : 331,", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program.txt', },", "'esnumber' : 211, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_gpu_shader5.txt', }, 'GL_OES_mapbuffer'", "{ 'number' : 99, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "'url' : 'extensions/OES/OES_draw_elements_base_vertex.txt', }, 'GL_OES_draw_texture' : { 'esnumber' : 7, 'flags' : {", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_icc_texture.txt', }, 'GL_SGIX_igloo_interface' : { 'number' :", ": { 'number' : 407, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/ARB/ARB_separate_shader_objects.txt', }, 'GL_ARB_shader_atomic_counter_ops' : { 'arbnumber' : 182, 'flags'", "}, 'url' : 'extensions/EXT/EXT_vertex_array_set.txt', }, 'GL_EXT_vertex_array_setXXX' : { 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_misc_attribute.txt', }, 'GL_EXT_multi_draw_arrays' : { 'number' : 148,", "'esnumber' : 25, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth32.txt', }, 'GL_OES_depth_texture'", "}, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_mesh_array.txt', }, 'GL_SUN_slice_accum' : {", ": { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_detail_texture.txt', }, 'GL_SGIS_fog_function' : { 'number'", "'GLX_SGIX_color_type' : { 'number' : 89, 'flags' : { 'incomplete' }, 'supporters' :", "'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 'alias' : { 'GLX_EXT_create_context_es_profile' },", "'supporters' : { 'MESA' }, 'url' : 'extensions/EXT/EXT_clip_control.txt', }, 'WGL_I3D_digital_video_control' : { 'number'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_sRGB.txt', }, 'GL_EXT_texture_sRGB_R8' : { 'esnumber'", "136, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_size.txt', }, 'GL_ARB_shader_objects' : {", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_dot3.txt', }, 'GL_ARB_texture_filter_anisotropic' : {", "}, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Included with arbnumber 56, GLX_ARB_create_context.', }, 'GLX_ARB_create_context_robustness'", "'arbnumber' : 109, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_instanced.txt', }, 'GL_ARB_transform_feedback_overflow_query'", ": { 'incomplete', 'obsolete' }, 'url' : 'extensions/EXT/EXT_transform_feedback2.txt', 'comments' : 'Draft extension which", "}, 'url' : 'extensions/EXT/EXT_fog_coord.txt', }, 'GL_EXT_frag_depth' : { 'esnumber' : 86, 'flags' :", ": { 'number' : 319, 'flags' : { 'public' }, 'supporters' : {", "'esnumber' : 155, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_pvrtc_sRGB.txt', }, 'GL_EXT_raster_multisample'", ": 'extensions/EXT/EXT_texture_filter_anisotropic.txt', }, 'GL_EXT_texture_filter_minmax' : { 'number' : 464, 'esnumber' : 227, 'flags'", "}, 'url' : 'extensions/NV/NV_fill_rectangle.txt', }, 'GL_NV_float_buffer' : { 'number' : 281, 'flags' :", ": { 'public' }, 'url' : 'extensions/IMG/IMG_program_binary.txt', }, 'GL_IMG_read_format' : { 'esnumber' :", "'public' }, 'url' : 'extensions/QCOM/QCOM_driver_control.txt', }, 'GL_QCOM_extended_get' : { 'esnumber' : 62, 'flags'", "'extensions/APPLE/APPLE_aux_depth_stencil.txt', }, 'GL_APPLE_client_storage' : { 'number' : 270, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'SUN' }, 'url' : 
'extensions/SUN/SUN_convolution_border_modes.txt', },", "'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_usage.txt',", "{ 'arbnumber' : 14, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'extensions/ARB/ARB_draw_instanced.txt', }, 'GL_ARB_enhanced_layouts' : { 'arbnumber' : 146, 'flags' : { 'public' },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_resample.txt', }, 'GL_SGIX_scalebias_hint' : {", "'GL_ARB_vertex_type_2_10_10_10_rev' : { 'arbnumber' : 86, 'flags' : { 'public' }, 'url' :", "}, 'GL_ATI_texture_mirror_once' : { 'number' : 221, 'flags' : { 'public' }, 'supporters'", "'number' : 53, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "{ 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_float64.txt', }, 'GL_NV_shader_atomic_fp16_vector' : { 'number' : 474,", ": 'extensions/MESA/GLX_MESA_release_buffers.txt', }, 'GL_MESA_resize_buffers' : { 'number' : 196, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counters.txt', }, 'GL_ARB_shader_ballot' : { 'arbnumber' : 183,", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_coverage.txt', }, 'GL_NV_multisample_filter_hint'", ": 53, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program4.txt', }, 'GL_NV_video_capture' : { 'number'", "'arbnumber' : 156, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_draw_parameters.txt', }, 'GL_ARB_shader_group_vote'", ": { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_scale_bias.txt', }, 'GL_SGIX_texture_supersample' : { 'flags'", ": 'extensions/NV/NV_texture_rectangle_compressed.txt', }, 'GL_NV_texture_shader' : { 'number' : 230, 'flags' : { 'public'", "}, 'GL_OES_texture_view' : { 'esnumber' : 218, 'flags' : { 'public' }, 'url'", ": 284, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_buffer.txt', }, 'GL_EXT_EGL_image_array' :", "'supporters' : { 
'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_object_space_tess.txt', }, 'GL_EXT_occlusion_query_boolean' : { 'esnumber'", ": 'extensions/ARB/ARB_geometry_shader4.txt', }, 'GLX_ARB_get_proc_address' : { 'arbnumber' : 2, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap.txt', }, 'GL_NV_conservative_raster_pre_snap_triangles' : { 'number' : 487, 'esnumber'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_filter_anisotropic.txt', }, 'GL_ARB_texture_filter_minmax' : { 'arbnumber' :", ": 311, 'flags' : { 'public' }, 'supporters' : { 'GREMEDY' }, 'url'", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_vector_ops.txt', }, 'GL_SGIX_vertex_array_object' :", "{ 'number' : 13, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "}, 'GL_REND_screen_coordinates' : { 'number' : 155, 'flags' : { 'public' }, 'supporters'", ": 12, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_palette.txt', }, 'GL_OES_packed_depth_stencil' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_elements_base_vertex.txt', }, 'GL_EXT_draw_instanced' : { 'number'", "'url' : 'extensions/ARB/ARB_multi_bind.txt', }, 'GL_ARB_multi_draw_indirect' : { 'arbnumber' : 133, 'flags' : {", "'extensions/ARB/ARB_ES3_1_compatibility.txt', }, 'GL_ARB_ES3_2_compatibility' : { 'arbnumber' : 176, 'flags' : { 'public' },", "'url' : 'extensions/EXT/EXT_shader_image_load_formatted.txt', }, 'GL_EXT_shader_image_load_store' : { 'number' : 386, 'flags' : {", "'esnumber' : 234, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_path_rendering_shared_edge.txt', }, 'GL_NV_pixel_buffer_object'", "'number' : 467, 'esnumber' : 229, 'flags' : { 'public' }, 'url' :", ": 513, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "'number' : 389, 'esnumber' : 260, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'url' : 'extensions/ARB/ARB_invalidate_subdata.txt', }, 'GL_ARB_map_buffer_alignment' : { 'arbnumber' : 113, 'flags'", "}, 'GL_NV_register_combiners2' 
: { 'number' : 227, 'flags' : { 'public' }, 'supporters'", ": 247, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "'GL_APPLE_texture_max_level' : { 'esnumber' : 80, 'flags' : { 'public' }, 'url' :", "}, 'GL_ARB_draw_instanced' : { 'arbnumber' : 44, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/EXT/EXT_blend_subtract.txt', }, 'GLX_EXT_buffer_age' : { 'number' : 427, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_texture_array.txt', }, 'GL_EXT_texture_border_clamp' : { 'esnumber' : 182, 'flags' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_packed_depth_stencil.txt', }, 'GL_NV_packed_float' : { 'esnumber' : 127, 'flags'", ": 'extensions/NV/NV_texture_shader2.txt', }, 'GL_NV_texture_shader3' : { 'number' : 265, 'flags' : { 'public'", "'esnumber' : 14, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_size_array.txt', }, 'GL_OES_point_sprite'", "'number' : 57, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'extensions/EXT/EXT_pixel_transform_color_table.txt', }, 'GL_EXT_point_parameters' : { 'number' : 54, 'flags' : { 'public' },", ": 1, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_copy_image.txt', 'alias' :", ": { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_mixed_samples.txt', }, 'GL_NV_framebuffer_multisample' : { 'esnumber' :", "'url' : 'extensions/EXT/EXT_texture_perturb_normal.txt', }, 'GL_EXT_texture_rg' : { 'esnumber' : 103, 'flags' : {", "'extensions/AMD/AMD_name_gen_delete.txt', }, 'GL_AMD_occlusion_query_event' : { 'number' : 442, 'flags' : { 'public' },", ": 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias' : { 'GLX_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_robustness_isolation' : { 'arbnumber' :", "'url' : 'extensions/GREMEDY/GREMEDY_frame_terminator.txt', }, 'GL_GREMEDY_string_marker' : { 'number' : 311, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conservative_depth.txt', }, 
'GL_ARB_copy_buffer' : { 'arbnumber'", ": { 'obsolete' }, 'url' : 'extensions/EXT/EXT_multiple_textures.txt', }, 'GL_EXT_multisample_compatibility' : { 'esnumber' :", "{ 'incomplete', 'private' }, 'comments' : 'Draft spec location unknown.', }, 'GL_OES_point_size_array' :", "'url' : 'extensions/SGIX/SGIX_blend_cmultiply.txt', }, 'GL_SGIX_calligraphic_fragment' : { 'number' : 82, 'flags' : {", "'esnumber' : 148, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sRGB_formats.txt', }, 'GL_NV_sample_locations'", "'extensions/ARB/ARB_explicit_attrib_location.txt', }, 'GL_ARB_explicit_uniform_location' : { 'arbnumber' : 128, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/OES/OES_stencil_wrap.txt', }, 'GL_OES_surfaceless_context' : { 'esnumber' : 116,", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_filter_minmax.txt', }, 'GL_EXT_texture_format_BGRA8888' : { 'esnumber' :", "7, 'flags' : { 'public' }, 'supporters' : { 'KGC', 'SGI' }, 'url'", "'supporters' : { 'GOOGLE', 'NVIDIA', 'VMware' }, 'url' : 'extensions/EXT/EXT_window_rectangles.txt', }, 'GL_EXT_x11_sync_object' :", "}, 'GL_ARB_point_parameters' : { 'arbnumber' : 14, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/EXT/EXT_vertex_array_setXXX.txt', }, 'GL_EXT_vertex_attrib_64bit' : { 'number' : 387, 'flags' : {", ": { 'number' : 385, 'flags' : { 'public' }, 'supporters' : {", ": 452, 'flags' : { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_stereo_tree.txt', }, 'GL_EXT_gpu_program_parameters' :", "'public' }, 'url' : 'extensions/NV/NV_polygon_mode.txt', }, 'GL_NV_present_video' : { 'number' : 347, 'flags'", "}, 'GL_EXT_texture_storage' : { 'esnumber' : 108, 'flags' : { 'public' }, 'url'", "'number' : 1, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'KGC',", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_buffer_object.txt', }, 'GL_EXT_texture_compression_astc_decode_mode' : { 'esnumber' : 276,", "}, 'GL_NV_vdpau_interop' : { 'number' : 396, 'flags' : { 'public' }, 'supporters'", ": 
199, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'number' : 51, 'flags' : { 'public' }, 'supporters' : { 'SGI'", ": { 'esnumber' : 15, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_sprite.txt',", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_lod_bias.txt',", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multiview_draw_buffers.txt', }, 'GLU_EXT_nurbs_tessellator' : { 'number'", ": { 'arbnumber' : 29, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_blend_logic_op.txt', },", "}, 'GL_NV_copy_image' : { 'number' : 376, 'flags' : { 'public' }, 'supporters'", "'extensions/SGIX/SGIX_datapipe.txt', }, 'GL_SGIX_decimation' : { 'number' : 125, 'flags' : { 'incomplete' },", ": { 'public' }, 'url' : 'extensions/ARB/ARB_compute_shader.txt', }, 'GL_ARB_compute_variable_group_size' : { 'arbnumber' :", "}, 'url' : 'extensions/EXT/EXT_gpu_shader4.txt', }, 'GL_EXT_gpu_shader5' : { 'esnumber' : 178, 'flags' :", "{ 'esnumber' : 107, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_robustness.txt', },", ": 'extensions/SGIX/SGIX_complex_polar.txt', }, 'GL_SGIX_convolution_accuracy' : { 'number' : 211, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_vertex_array_lists.txt',", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_release_buffers.txt', },", "}, 'WGL_NV_swap_group' : { 'number' : 351, 'flags' : { 'public' }, 'supporters'", "'extensions/NV/NV_fog_distance.txt', }, 'GL_NV_fragment_coverage_to_color' : { 'number' : 467, 'esnumber' : 229, 'flags' :", ": 'extensions/SGI/SGI_color_matrix.txt', }, 'GL_SGI_color_table' : { 'number' : 14, 'flags' : { 'public'", ": 79, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', }, 'GL_APPLE_texture_max_level' :", "522, 'esnumber' : 301, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_EGL_image_storage.txt', },", "{ 'number' : 148, 'esnumber' : 69, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/NV/NV_platform_binary.txt', }, 'GL_NV_point_sprite' : { 'number' :", "'public' }, 'url' : 'extensions/NV/NV_shader_atomic_float64.txt', }, 'GL_NV_shader_atomic_fp16_vector' : { 'number' : 474, 'esnumber'", ": { 'KHR' }, 'url' : 'extensions/OES/OES_fixed_point.txt', }, 'GL_OES_fragment_precision_high' : { 'esnumber' :", "}, 'WGL_ARB_create_context' : { 'arbnumber' : 55, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 317, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample.txt',", "'GL_KHR_texture_compression_astc_sliced_3d' : { 'arbnumber' : 189, 'esnumber' : 249, 'flags' : { 'public'", "446, 'flags' : { 'public' }, 'url' : 'extensions/MESA/GLX_MESA_query_renderer.txt', }, 'GLX_MESA_release_buffers' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop2.txt', }, 'WGL_NV_delay_before_swap' : {", "{ 'esnumber' : 253, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage2.txt', },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_fence.txt', }, 'GL_NV_fill_rectangle' : { 'number' : 466,", "'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_buffer_age.txt', }, 'GL_EXT_buffer_storage' : { 'esnumber' : 239, 'flags'", ": { 'esnumber' : 264, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_lod_bias.txt',", "}, 'url' : 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', }, 'GL_SGIX_nurbs_eval' : { 'flags' : { 'incomplete' },", ": 23, 'flags' : { 'public' }, 'supporters' : { 'ES', 'INGR', 'SGI'", "'GL_EXT_depth_bounds_test' : { 'number' : 297, 'flags' : { 'public' }, 'supporters' :", "'extensions/EXT/EXT_texture_compression_s3tc.txt', }, 'GL_EXT_texture_compression_s3tc_srgb' : { 'esnumber' : 289, 'flags' : 
{ 'public' },", ": { 'number' : 312, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_packed_depth_stencil.txt',", "'GL_SGIX_subdiv_patch' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_subdiv_patch.txt', }, 'GL_SGIX_subsample'", ": 'extensions/ARB/ARB_occlusion_query2.txt', }, 'GL_ARB_parallel_shader_compile' : { 'arbnumber' : 179, 'flags' : { 'public'", ": 227, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_filter_minmax.txt', }, 'GL_EXT_texture_format_BGRA8888' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_barrier.txt', }, 'GLX_SGIX_swap_group' : { 'number' : 91,", "}, 'url' : 'extensions/ARB/ARB_texture_border_clamp.txt', }, 'GL_ARB_texture_buffer_object' : { 'arbnumber' : 51, 'flags' :", "'GL_ARB_half_float_pixel' : { 'arbnumber' : 40, 'flags' : { 'public' }, 'supporters' :", "'incomplete' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/SGIX/SGIX_igloo_interface.txt', }, 'GL_SGIX_image_compression' :", "'extensions/ARB/ARB_window_pos.txt', }, 'GL_ARM_mali_program_binary' : { 'esnumber' : 120, 'flags' : { 'public' },", ": { 'number' : 519, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/ARB/ARB_half_float_pixel.txt', }, 'GL_ARB_half_float_vertex' : { 'arbnumber' : 48, 'flags' :", "}, 'url' : 'extensions/ARB/ARB_texture_compression_rgtc.txt', }, 'GL_ARB_texture_cube_map' : { 'arbnumber' : 7, 'flags' :", ": 'extensions/SGIX/SGIX_cylinder_texgen.txt', }, 'GL_SGIX_datapipe' : { 'number' : 152, 'flags' : { 'incomplete'", "'public' }, 'url' : 'extensions/OES/OES_fbo_render_mipmap.txt', }, 'GL_OES_fixed_point' : { 'number' : 292, 'esnumber'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers.txt', }, 'GL_EXT_draw_buffers2' : { 'number' : 340,", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_buffer_region.txt',", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_video_out.txt', }, 'GL_NV_viewport_array' : {", 
"'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', }, 'GL_ARB_texture_buffer_range' : { 'arbnumber' : 139, 'flags' : { 'public' },", ": 'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 'alias' : { 'GLX_EXT_create_context_es_profile' }, }, 'GL_EXT_cull_vertex' : { 'number' :", "'arbnumber' : 24, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'flags' : { 'incomplete', 'public' }, 'url' : 'extensions/SGIS/SGIS_texture_color_mask.txt', }, 'GL_SGIS_texture_edge_clamp' : {", "'extensions/OES/OES_framebuffer_object.txt', }, 'GL_OES_geometry_shader' : { 'esnumber' : 210, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_sync_control.txt', }, 'GL_OVR_multiview'", "}, 'url' : 'extensions/QCOM/QCOM_texture_foveated.txt', }, 'GL_QCOM_shader_framebuffer_fetch_noncoherent' : { 'esnumber' : 277, 'flags' :", "'extensions/APPLE/APPLE_copy_texture_levels.txt', }, 'GL_APPLE_element_array' : { 'number' : 271, 'flags' : { 'public' },", "{ 'arbnumber' : 135, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robust_buffer_access_behavior.txt', },", "'GL_EXT_multiview_draw_buffers' : { 'esnumber' : 125, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/NV/NV_register_combiners2.txt', }, 'GL_NV_robustness_video_memory_purge' : { 'number' : 484, 'flags' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_pbuffer.txt', }, 'GL_SGIX_pixel_texture' : {", "{ 'esnumber' : 92, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fbo_color_attachments.txt', },", ": 'extensions/IBM/IBM_vertex_array_lists.txt', }, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url'", ": { 'arbnumber' : 149, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt',", "'public' }, 'supporters' : { '3DL', 'ATI', 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_object.txt',", "}, 'url' : 'extensions/ARB/ARB_internalformat_query.txt', }, 
'GL_ARB_internalformat_query2' : { 'arbnumber' : 131, 'flags' :", "'extensions/ARB/ARB_shader_texture_lod.txt', }, 'GL_ARB_shader_viewport_layer_array' : { 'arbnumber' : 185, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_border_clamp.txt', }, 'GL_OES_texture_buffer' : { 'esnumber'", ": { 'arbnumber' : 41, 'flags' : { 'public' }, 'supporters' : {", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_filter_anisotropic.txt', }, 'GL_EXT_texture_filter_minmax' : { 'number' : 464, 'esnumber'", "{ 'public' }, 'supporters' : { '3DL', 'ATI', 'INTEL', 'NVIDIA' }, 'url' :", "}, 'GL_ANGLE_framebuffer_blit' : { 'esnumber' : 83, 'flags' : { 'public' }, 'url'", "179, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_implicit_conversions.txt', }, 'GL_EXT_shader_integer_mix' : {", "'extensions/ANGLE/ANGLE_texture_usage.txt', }, 'GL_ANGLE_translated_shader_source' : { 'esnumber' : 113, 'flags' : { 'public' },", "'public' }, 'supporters' : { '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_secondary_color.txt', },", "'extensions/SGIX/SGIX_shadow_ambient.txt', }, 'GL_SGIX_slim' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_slim.txt',", ": { 'number' : 206, 'flags' : { 'public' }, 'supporters' : {", ": 32, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil4.txt', }, 'GL_OES_stencil8' :", "}, 'GL_NV_texture_compression_s3tc' : { 'esnumber' : 128, 'flags' : { 'public' }, 'url'", "{ 'number' : 141, 'flags' : { 'public' }, 'supporters' : { '3DFX',", "'extensions/EXT/EXT_texture_filter_minmax.txt', }, 'GL_EXT_texture_format_BGRA8888' : { 'esnumber' : 51, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_fragment_shader.txt',", "{ 'esnumber' : 239, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_buffer_storage.txt', },", ": 'extensions/AMD/AMD_framebuffer_sample_positions.txt', }, 'GL_AMD_gcn_shader' : { 'number' : 453, 'flags' : { 'public'", "{ 'ATI' }, 
'url' : 'extensions/ATI/ATI_map_object_buffer.txt', }, 'GL_ATI_meminfo' : { 'number' : 359,", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_bindable_uniform.txt', },", ": 251, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_filter_cubic.txt', }, 'GL_IMG_user_clip_plane' :", "198, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'number' : 345, 'flags' : { 'public' }, 'supporters' : { 'GREMEDY'", "'esnumber' : 121, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_map_buffer_range.txt', }, 'GL_EXT_misc_attribute'", "'GL_NV_copy_image' : { 'number' : 376, 'flags' : { 'public' }, 'supporters' :", "}, 'GLX_EXT_create_context_es2_profile' : { 'number' : 399, 'flags' : { 'public' }, 'supporters'", ": 214, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_tessellation_shader.txt', }, 'GL_OES_texture_3D' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_light_max_exponent.txt', }, 'GL_NV_multisample_coverage' : { 'number' :", "354, 'flags' : { 'public' }, 'supporters' : { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming'", ": { 'ARB' }, 'url' : 'extensions/ARB/GLX_ARB_get_proc_address.txt', }, 'GL_ARB_get_program_binary' : { 'arbnumber' :", ": 'extensions/SGIX/SGIX_ir_instrument1.txt', }, 'GL_SGIX_line_quality_hint' : { 'flags' : { 'incomplete' }, 'url' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_cadd.txt', }, 'GL_SGIX_blend_cmultiply' : { 'flags' :", ": 172, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_buffer.txt', }, 'GL_ARB_sparse_texture' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_float_buffer.txt', 'alias' : { 'WGL_NV_float_buffer' },", ": 'extensions/NV/NV_draw_texture.txt', }, 'GL_NV_draw_vulkan_image' : { 'number' : 501, 'esnumber' : 274, 'flags'", "}, 'GL_IMG_bindless_texture' : { 'esnumber' : 270, 'flags' : { 'public' }, 'url'", ": { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_name_gen_delete.txt', }, 'GL_AMD_occlusion_query_event' : { 'number' :", "}, 'url' : 
'extensions/3DFX/3DFX_texture_compression_FXT1.txt', }, 'GL_AMD_blend_minmax_factor' : { 'number' : 404, 'flags' :", "}, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_extensions_string.txt', }, 'WGL_EXT_make_current_read' :", "'GL_SGIX_bali_timer_instruments' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_timer_instruments.txt', }, 'GL_SGIX_blend_alpha_minmax'", "125, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'GL_EXT_color_buffer_float' : { 'esnumber' : 137, 'flags' : { 'public' }, 'url'", "'number' : 259, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'esnumber' : 83, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_blit.txt',", ": 'extensions/ARB/ARB_texture_float.txt', }, 'GL_ARB_texture_gather' : { 'arbnumber' : 72, 'flags' : { 'public'", "arbnumber 75.', }, 'GLX_ARB_create_context_no_error' : { 'arbnumber' : 191, 'flags' : { 'public'", "'extensions/SGIX/SGIX_tag_sample_buffer.txt', }, 'GL_SGIX_texture_add_env' : { 'number' : 69, 'flags' : { 'public' },", "'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Alias to WGL_ARB_create_context_profile not needed - see arbnumber 74.', },", "9, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'ARB' }, 'url' : 'extensions/ARB/GLX_ARB_get_proc_address.txt', }, 'GL_ARB_get_program_binary' : { 'arbnumber' : 96, 'flags'", ": { 'number' : 468, 'esnumber' : 230, 'flags' : { 'public' },", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_specular_vector.txt', }, 'GL_APPLE_sync' : {", "218, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_view.txt', }, 'GL_OES_vertex_array_object' : {", "}, 'GL_ARB_texture_mirrored_repeat' : { 'arbnumber' : 21, 'flags' : { 'public' }, 'supporters'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_tessellation_program5.txt', }, 'GL_NV_texgen_emboss' : {", "{ 'number' : 516, 'esnumber' : 294, 'flags' : { 'public' }, 'supporters'", ": 432, 
'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'GL_DMP_program_binary' : { 'esnumber' : 192, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias' : { 'GL_KHR_texture_compression_astc_ldr' }, }, 'GL_KHR_texture_compression_astc_sliced_3d' :", "350, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_float.txt', }, 'GL_ARB_texture_gather' : { 'arbnumber' :", "'extensions/EXT/EXT_bgra.txt', }, 'GL_EXT_bindable_uniform' : { 'number' : 342, 'flags' : { 'public' },", "'url' : 'extensions/INTEL/INTEL_performance_query.txt', }, 'GLX_INTEL_swap_event' : { 'number' : 384, 'flags' : {", "'extensions/NV/WGL_NV_gpu_affinity.txt', }, 'WGL_NV_render_depth_texture' : { 'number' : 263, 'flags' : { 'public' },", "'arbnumber' : 31, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'public' }, 'url' : 'extensions/QCOM/QCOM_framebuffer_foveated.txt', }, 'GL_QCOM_texture_foveated' : { 'esnumber' :", "182, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' :", ": 'extensions/EXT/EXT_texture_cube_map.txt', 'comments' : 'Extension shipped but was not fully specified. 
Similar to", ": { 'public' }, 'url' : 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', }, 'GL_EXT_conservative_depth' : { 'esnumber' :", "'url' : 'extensions/SGIS/GLX_SGIS_blended_overlay.txt', }, 'GL_SGIS_clip_band_hint' : { 'flags' : { 'incomplete' }, 'url'", ": 'extensions/APPLE/APPLE_texture_max_level.txt', }, 'GL_APPLE_texture_packed_float' : { 'esnumber' : 195, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/IMG/IMG_bindless_texture.txt', }, 'GL_IMG_framebuffer_downsample' : { 'esnumber' : 255, 'flags'", "}, 'GL_ARB_parallel_shader_compile' : { 'arbnumber' : 179, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/NV/NV_texture_compression_latc.txt', }, 'GL_NV_texture_compression_s3tc' : { 'esnumber' : 128, 'flags' :", ": 'extensions/SGIX/SGIX_polynomial_ffd.txt', }, 'GL_SGIX_quad_mesh' : { 'flags' : { 'incomplete' }, 'url' :", ": 'extensions/SUN/SUN_global_alpha.txt', }, 'GL_SUN_mesh_array' : { 'number' : 257, 'flags' : { 'public'", "'GL_SGI_texture_color_table' : { 'number' : 17, 'flags' : { 'public' }, 'supporters' :", "'alias' : { 'GL_KHR_texture_compression_astc_ldr' }, }, 'GL_KHR_texture_compression_astc_sliced_3d' : { 'arbnumber' : 189, 'esnumber'", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_float.txt', },", "}, 'url' : 'extensions/ARB/ARB_texture_env_add.txt', }, 'GL_ARB_texture_env_combine' : { 'arbnumber' : 17, 'flags' :", "'url' : 'extensions/NV/NV_conditional_render.txt', }, 'GL_NV_conservative_raster' : { 'number' : 465, 'esnumber' : 228,", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_npot_2D_mipmap.txt', }, 'GL_NV_texture_rectangle' : { 'number'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_x11_sync_object.txt', }, 'GL_FJ_shader_binary_GCCSO' : {", "}, 'GL_NVX_blend_equation_advanced_multi_draw_buffers' : { 'number' : 492, 'esnumber' : 266, 'flags' : {", ": 93, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_read_buffer.txt', }, 'GL_NV_read_depth_stencil' 
:", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_icc_texture.txt', }, 'GL_SGIX_igloo_interface' : {", ": 'extensions/EXT/EXT_texture_buffer_object.txt', }, 'GL_EXT_texture_compression_astc_decode_mode' : { 'esnumber' : 276, 'flags' : { 'public'", "}, 'url' : 'extensions/NV/NV_texture_border_clamp.txt', }, 'GL_NV_texture_compression_latc' : { 'esnumber' : 130, 'flags' :", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_range.txt', }, 'GL_APPLE_vertex_program_evaluators' : { 'number'", "not fully specified. Similar to ARB_texture_cube_map.', }, 'GL_EXT_texture_cube_map_array' : { 'esnumber' : 184,", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_sRGB_override.txt', }, 'GLX_EXT_texture_from_pixmap' : { 'number' : 344,", "}, 'url' : 'extensions/NV/NV_texture_array.txt', }, 'GL_NV_texture_barrier' : { 'number' : 381, 'esnumber' :", "{ 'number' : 438, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'GL_SGIS_texture4D' : { 'number' : 16, 'flags' : { 'public' }, 'supporters'", ": 'extensions/SGIX/SGIX_sprite.txt', }, 'GL_SGIX_subdiv_patch' : { 'flags' : { 'incomplete' }, 'url' :", "'extensions/ARB/GLX_ARB_create_context_robustness.txt', }, 'GL_ARB_cull_distance' : { 'arbnumber' : 162, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_lod_bias.txt', }, 'GL_SGIX_texture_mipmap_anisotropic'", "}, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture.txt', }, 'GL_EXT_multisampled_render_to_texture2' : { 'esnumber' : 275, 'flags' :", "{ 'esnumber' : 272, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_transform_feedback.txt', },", "130, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_framebuffer_no_attachments.txt', }, 'GL_ARB_framebuffer_object' : {", ": { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform_color_table.txt', }, 'GL_EXT_point_parameters' : { 'number'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_square.txt', }, 
'GL_NV_clip_space_w_scaling' : { 'number' :", "{ 'INGR' }, 'url' : 'extensions/INGR/INGR_interlace_read.txt', }, 'GL_INTEL_conservative_rasterization' : { 'number' : 491,", "'number' : 489, 'esnumber' : 296, 'flags' : { 'public' }, 'url' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_scale.txt', }, 'GL_SGIX_fog_texture' : { 'flags' : { 'public'", "'GL_OES_query_matrix' : { 'number' : 296, 'esnumber' : 16, 'flags' : { 'public'", ": { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_genlock.txt', }, 'WGL_I3D_image_buffer' : { 'number' :", "'public' }, 'supporters' : { 'REND' }, 'url' : 'extensions/REND/REND_screen_coordinates.txt', }, 'GL_S3_s3tc' :", ": 276, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias' : {", ": { 'number' : 378, 'flags' : { 'public' }, 'supporters' : {", "{ 'esnumber' : 155, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_pvrtc_sRGB.txt', },", "{ 'number' : 350, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 4, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR', 'KGC',", "'url' : 'extensions/SGIX/SGIX_fog_scale.txt', }, 'GL_SGIX_fog_texture' : { 'flags' : { 'public' }, 'url'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_framezoom.txt', }, 'GLX_SGIX_hyperpipe' : { 'number' :", ": 192, 'esnumber' : 288, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_parallel_shader_compile.txt',", "'number' : 502, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "}, 'url' : 'extensions/EXT/EXT_geometry_shader.txt', 'alias' : { 'GL_EXT_geometry_point_size' }, }, 'GL_EXT_geometry_shader4' : {", "'flags' : { 'public' }, 'supporters' : { 'HP', 'SUN' }, 'url' :", "same name string.', }, 'GL_EXT_separate_specular_color' : { 'number' : 144, 'flags' : {", "}, 'GL_SUN_vertex' : { 'number' : 166, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', }, 'GL_SGIX_complex_polar' : { 'flags'", "{ 'public' }, 'url' : 
'extensions/ARB/ARB_draw_buffers_blend.txt', }, 'GL_ARB_draw_elements_base_vertex' : { 'arbnumber' : 62,", "}, 'url' : 'extensions/OES/OES_texture_compression_astc.txt', }, 'GL_OES_texture_cube_map' : { 'esnumber' : 20, 'flags' :", "{ 'number' : 142, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "only.', }, 'GL_SGIX_ycrcba' : { 'number' : 203, 'flags' : { 'incomplete' },", "'incomplete', 'private' }, 'comments' : 'Draft spec location unknown.', }, 'GL_OES_point_size_array' : {", ": 72, 'flags' : { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_coverage_sample.txt', }, 'GL_NV_deep_texture3D' :", ": 151, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", ": { 'public' }, 'url' : 'extensions/INTEL/INTEL_framebuffer_CMAA.txt', }, 'GL_INTEL_map_texture' : { 'number' :", "'GL_SGIX_ir_instrument1' : { 'number' : 81, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sampler_objects.txt', }, 'GL_ARB_seamless_cube_map' : { 'arbnumber'", "59, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", ": 'extensions/ARB/ARB_texture_gather.txt', }, 'GL_ARB_texture_mirror_clamp_to_edge' : { 'arbnumber' : 149, 'flags' : { 'public'", "{ 'SGI' }, 'url' : 'extensions/SGI/GLU_SGI_filter4_parameters.txt', }, 'GLX_SGI_make_current_read' : { 'number' : 42,", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_group.txt', }, 'GL_SGIX_tag_sample_buffer'", "'url' : 'extensions/ARB/ARB_map_buffer_alignment.txt', }, 'GL_ARB_map_buffer_range' : { 'arbnumber' : 50, 'flags' : {", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_fragment_lighting.txt', }, 'GL_EXT_framebuffer_blit'", "'ARB' }, 'url' : 'extensions/ARB/ARB_depth_texture.txt', }, 'GL_ARB_derivative_control' : { 'arbnumber' : 163, 'flags'", "{ 'obsolete' }, 'url' : 'extensions/EXT/EXT_multiple_textures.txt', }, 'GL_EXT_multisample_compatibility' : { 'esnumber' : 248,", ": 287, 'flags' : { 'public' }, 'supporters' : { 
'NVIDIA' }, 'url'", "'public' }, 'supporters' : { 'IBM', 'IMG', 'SUN' }, 'url' : 'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias'", ": 'extensions/NV/NV_texture_shader3.txt', }, 'GL_NV_transform_feedback' : { 'number' : 341, 'flags' : { 'public'", "'esnumber' : 250, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', }, 'GL_PGI_misc_hints'", "}, 'supporters' : { 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_border_clamp.txt', }, 'GL_SGIS_texture_color_mask'", "'url' : 'extensions/EXT/EXT_color_subtable.txt', }, 'GL_EXT_compiled_vertex_array' : { 'number' : 97, 'flags' : {", ": 'extensions/NV/NV_viewport_swizzle.txt', }, 'GL_OES_EGL_image' : { 'esnumber' : 23, 'flags' : { 'public'", ": { 'esnumber' : 88, 'flags' : { 'public' }, 'url' : 'extensions/DMP/DMP_shader_binary.txt',", "'GL_EXT_direct_state_access' : { 'number' : 353, 'flags' : { 'public' }, 'supporters' :", "'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_detail_texture.txt', }, 'GL_SGIS_fog_function' : { 'number' : 64,", "'GL_APPLE_object_purgeable' : { 'number' : 371, 'flags' : { 'public' }, 'supporters' :", "'extensions/NV/NV_fbo_color_attachments.txt', }, 'GL_NV_fence' : { 'number' : 222, 'esnumber' : 52, 'flags' :", "'arbnumber' : 136, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_size.txt', }, 'GL_ARB_shader_objects'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_delay_before_swap.txt',", ": 303, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'url' : 'extensions/ARB/ARB_uniform_buffer_object.txt', }, 'GL_ARB_vertex_array_bgra' : { 'arbnumber' : 68, 'flags' : {", "{ 'esnumber' : 53, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_read_format.txt', },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_swap_group.txt', }, 'GL_NV_tessellation_program5' :", "}, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 
'extensions/EXT/EXT_index_array_formats.txt', }, 'GL_EXT_index_func' :", ": { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_usage.txt', }, 'GL_WIN_phong_shading' : { 'number' :", "'GL_SGIX_async' : { 'number' : 132, 'flags' : { 'incomplete', 'public' }, 'supporters'", "130, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_latc.txt', }, 'GL_NV_texture_compression_s3tc' : {", ": { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias' : { 'WGL_ARB_robustness_share_group_isolation' }, },", "'number' : 432, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_conservative_rasterization.txt', }, 'GL_INTEL_fragment_shader_ordering' : { 'number' : 441,", "110, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', }, 'GL_ANGLE_program_binary' : {", "'ATI', 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_object.txt', }, 'GL_EXT_framebuffer_sRGB' : { 'number' :", "{ 'esnumber' : 289, 'flags' : { 'public' }, 'supporters' : { 'ANGLE'", "'extensions/ARB/ARB_tessellation_shader.txt', }, 'GL_ARB_texture_barrier' : { 'arbnumber' : 167, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_pn_triangles.txt',", "}, 'GLX_NV_swap_group' : { 'number' : 350, 'flags' : { 'public' }, 'supporters'", "{ 'esnumber' : 29, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_mapbuffer.txt', },", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/EXT/EXT_clip_control.txt', },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_buffer_float.txt', }, 'GL_NV_depth_clamp' : { 'number' : 260,", "'url' : 'extensions/NV/NV_transform_feedback2.txt', }, 'GL_NV_uniform_buffer_unified_memory' : { 'number' : 459, 'flags' : {", ": { 'ATI' }, 'url' : 'extensions/ATI/WGL_ATI_pixel_format_float.txt', }, 'WGL_EXT_colorspace' : { 'number' :", "}, 'url' : 
'extensions/OES/OES_required_internalformat.txt', }, 'GL_OES_rgb8_rgba8' : { 'esnumber' : 30, 'flags' :", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_window_pos.txt', }, 'GL_MESA_ycbcr_texture' : { 'number'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_store.txt',", ": { 'number' : 466, 'esnumber' : 232, 'flags' : { 'public' },", ": { 'arbnumber' : 1, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', }, 'GL_EXT_conservative_depth' : { 'esnumber' : 268, 'flags' : {", "{ 'number' : 215, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "}, 'GL_SGIS_shared_multisample' : { 'number' : 143, 'flags' : { 'incomplete' }, 'supporters'", "'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_blend_logic_op.txt', }, 'GL_EXT_blend_minmax' : { 'number'", "'NVIDIA' }, 'url' : 'extensions/AMD/AMD_depth_clamp_separate.txt', }, 'GL_AMD_draw_buffers_blend' : { 'number' : 366, 'flags'", "149, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', }, 'GL_ARB_texture_mirrored_repeat' : {", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_multitexture.txt', }, 'GL_SGIS_pixel_texture' : { 'number' : 15, 'flags'", "'extensions/SGIS/SGIS_sharpen_texture.txt', }, 'GL_SGIS_texture4D' : { 'number' : 16, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_clip_volume_hint.txt', }, 'GL_EXT_cmyka' : { 'number' : 18,", "{ 'number' : 226, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 'extensions/ARB/ARB_shader_image_load_store.txt', }, 'GL_ARB_shader_image_size' : { 'arbnumber' : 136, 'flags' : { 'public'", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int16.txt', }, 'GL_AMD_gpu_shader_int64' : { 'number'", "'extensions/NV/NV_shadow_samplers_cube.txt', }, 'GL_NV_stereo_view_rendering' : { 'number' : 489, 'esnumber' : 296, 'flags' :", "'supporters' : { 'MS' }, 'url' : 
'extensions/WIN/WIN_phong_shading.txt', }, 'GL_WIN_scene_markerXXX' : { 'flags'", ": { 'public' }, 'supporters' : { 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_import_context.txt',", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_geometry_shader_passthrough.txt', }, 'GL_NV_gpu_multicast' : { 'number'", ": { 'arbnumber' : 138, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_stencil_texturing.txt',", "'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch.txt', }, 'GL_ARM_shader_framebuffer_fetch_depth_stencil' : { 'esnumber' : 166, 'flags' : {", "191, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'url' : 'extensions/ARB/ARB_gpu_shader_int64.txt', }, 'GL_ARB_half_float_pixel' : { 'arbnumber' : 40, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect.txt', }, 'GL_NV_bindless_multi_draw_indirect_count' : { 'number' : 456, 'flags'", "12, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_palette.txt', }, 'GL_OES_packed_depth_stencil' : {", ": 149, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', }, 'GL_ARB_texture_mirrored_repeat' :", "'extensions/ARB/ARB_sparse_texture_clamp.txt', }, 'GL_ARB_spirv_extensions' : { 'arbnumber' : 194, 'flags' : { 'public' },", "}, 'url' : 'extensions/SUN/SUN_convolution_border_modes.txt', }, 'GLX_SUN_get_transparent_index' : { 'number' : 183, 'flags' :", "}, 'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_string_marker.txt', }, 'GL_HP_convolution_border_modes' : {", "'extensions/EXT/EXT_external_objects_win32.txt', 'alias' : { 'GL_EXT_semaphore_win32' }, }, 'GL_EXT_float_blend' : { 'esnumber' : 224,", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_array_object.txt', }, 'GL_ARB_vertex_attrib_64bit' : { 'arbnumber'", ": { 'number' : 170, 'flags' : { 'public' }, 'supporters' : {", "'GL_EXT_stencil_two_side' : { 'number' : 268, 'flags' : { 'public' }, 'supporters' :", "'url' : 
'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', }, 'GL_SGIX_flush_raster' : { 'number' : 61, 'flags' : {", ": 'extensions/EXT/EXT_swap_control.txt', }, 'GLX_EXT_swap_control_tear' : { 'number' : 414, 'flags' : { 'public'", "}, 'GL_AMD_framebuffer_sample_positions' : { 'number' : 454, 'flags' : { 'public' }, 'url'", "{ 'number' : 32, 'flags' : { 'public' }, 'supporters' : { 'HP',", "}, 'url' : 'extensions/OES/OES_depth_texture.txt', }, 'GL_OES_depth_texture_cube_map' : { 'esnumber' : 136, 'flags' :", ": 151, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', }, 'GL_ARB_vertex_type_2_10_10_10_rev' :", ": { 'number' : 91, 'flags' : { 'public' }, 'supporters' : {", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_x11_sync_object.txt', }, 'GL_FJ_shader_binary_GCCSO' : { 'esnumber' : 114, 'flags'", ": { 'number' : 451, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int64.txt',", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_clear_tag.txt', }, 'GL_EXT_stencil_two_side' : { 'number' :", "{ 'arbnumber' : 96, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_program_binary.txt', },", "'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_conservative_rasterization.txt', }, 'GL_INTEL_fragment_shader_ordering' : { 'number'", ": { 'number' : 384, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 86, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pixel_format.txt', }, 'WGL_EXT_swap_control' :", ": { 'esnumber' : 79, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_format_BGRA8888.txt',", "}, 'url' : 'extensions/SGIX/SGIX_cube_map.txt', }, 'GL_SGIX_cylinder_texgen' : { 'number' : 140, 'flags' :", "310, 'flags' : { 'public' }, 'supporters' : { '3DL', 'ATI', 'INTEL', 'NVIDIA'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_gpu_affinity.txt', }, 'WGL_NV_render_depth_texture' :", "'public' }, 'url' : 
'extensions/OES/OES_EGL_image.txt', }, 'GL_OES_EGL_image_external' : { 'esnumber' : 87, 'flags'", ": { 'arbnumber' : 117, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage.txt',", ": 84, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_swizzle.txt', }, 'GL_ARB_texture_view' :", "'url' : 'extensions/AMD/AMD_framebuffer_sample_positions.txt', }, 'GL_AMD_gcn_shader' : { 'number' : 453, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle_compressed.txt', }, 'GL_NV_texture_shader' :", "{ 'KHR' }, 'url' : 'extensions/OES/OES_fixed_point.txt', }, 'GL_OES_fragment_precision_high' : { 'esnumber' : 28,", "'url' : 'extensions/OES/OES_texture_cube_map.txt', }, 'GL_OES_texture_cube_map_array' : { 'esnumber' : 217, 'flags' : {", "}, 'url' : 'extensions/OES/OES_surfaceless_context.txt', }, 'GL_OES_tessellation_shader' : { 'esnumber' : 214, 'flags' :", "extension number 15 with SGIS_pixel_texture.', }, 'GL_SGIX_pixel_texture_bits' : { 'number' : 127, 'flags'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program.txt', }, 'GL_NV_fragment_program2' : { 'number' : 304,", "'extensions/SGIX/SGIX_resample.txt', }, 'GL_SGIX_scalebias_hint' : { 'number' : 236, 'flags' : { 'incomplete' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_texture_rectangle.txt', }, 'WGL_NV_swap_group'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_filter_anisotropic.txt', }, 'GL_ARB_texture_filter_minmax' : { 'arbnumber' : 188,", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback.txt', }, 'GL_NV_transform_feedback2' : { 'number' : 358,", "'GL_ARB_sync' : { 'arbnumber' : 66, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_depth_bounds_test.txt', }, 'GL_EXT_direct_state_access' : {", ": 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', }, 'GL_IGLOO_viewport_offsetXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url'", ": 
'extensions/ARB/ARB_shader_ballot.txt', }, 'GL_ARB_shader_bit_encoding' : { 'arbnumber' : 82, 'flags' : { 'public'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_parameters.txt', }, 'GL_ARB_point_sprite' : { 'arbnumber'", "18, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": 249, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader.txt',", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_instanced.txt', },", "}, 'GL_ARB_shader_ballot' : { 'arbnumber' : 183, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/EXT/EXT_post_depth_coverage.txt', }, 'GL_EXT_primitive_bounding_box' : { 'esnumber' : 186, 'flags' : {", ": { 'public' }, 'url' : 'extensions/EXT/EXT_multi_draw_indirect.txt', }, 'GL_EXT_multiple_textures' : { 'flags' :", "}, 'url' : 'extensions/ARB/ARB_transform_feedback_instanced.txt', }, 'GL_ARB_transform_feedback_overflow_query' : { 'arbnumber' : 173, 'flags' :", "'extensions/ATI/ATI_pn_triangles.txt', }, 'GL_ATI_separate_stencil' : { 'number' : 289, 'flags' : { 'public' },", "'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', }, 'GL_EXT_texture_integer' : { 'number' : 343, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/INTEL/INTEL_framebuffer_CMAA.txt', }, 'GL_INTEL_map_texture' : { 'number' : 429, 'flags'", "'url' : 'extensions/OES/OES_byte_coordinates.txt', }, 'GL_OES_compressed_ETC1_RGB8_texture' : { 'esnumber' : 5, 'flags' : {", ": 46, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_type_10_10_10_2.txt', }, 'GL_OES_viewport_array' :", "'url' : 'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias' : { 'GL_SGIS_color_range' }, }, 'GL_SGIS_detail_texture' : { 'number'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_array.txt',", "74, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_multisampled_render_to_texture.txt', }, 
'GL_IMG_program_binary' : {", "'url' : 'extensions/OES/OES_EGL_image_external_essl3.txt', }, 'GL_OES_EGL_sync' : { 'esnumber' : 75, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_multi_draw_indirect.txt',", ": { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_subtexture.txt', }, 'GL_EXT_swap_control'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_emboss.txt',", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object.txt', }, 'GL_ARB_texture_buffer_object_rgb32' : { 'arbnumber'", "}, 'GL_SGIS_clip_band_hint' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_clip_band_hint.txt', },", "}, 'url' : 'extensions/OES/OES_shader_image_atomic.txt', }, 'GL_OES_shader_io_blocks' : { 'esnumber' : 213, 'flags' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_pipeline_statistics_query.txt', }, 'GL_ARB_pixel_buffer_object' : { 'arbnumber' : 42,", ": 'extensions/NV/NV_texture_barrier.txt', }, 'GL_NV_texture_border_clamp' : { 'esnumber' : 149, 'flags' : { 'public'", "'extensions/OML/GLX_OML_swap_method.txt', }, 'GLX_OML_sync_control' : { 'number' : 238, 'flags' : { 'public' },", "'drafts/MTK/MTK_program_binary.txt', }, 'GL_MTK_shader_binary' : { 'esnumber' : 244, 'flags' : { 'incomplete', 'private'", "'SUN' }, 'url' : 'extensions/EXT/EXT_abgr.txt', }, 'GL_EXT_base_instance' : { 'esnumber' : 203, 'flags'", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map.txt', }, 'GL_OES_texture_cube_map_array' : { 'esnumber' :", "}, 'url' : 'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', }, 'GL_ARB_vertex_type_2_10_10_10_rev' : { 'arbnumber' : 86, 'flags' :", ": 230, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": 520, 'esnumber' : 122, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_framebuffer_fetch.txt',", "'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_texture_color_table.txt', }, 'GLX_SGI_transparent_pixel' : { 
'number' : 153,", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_sprite.txt',", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vdpau_interop.txt', }, 'GL_NV_vertex_array_range' : {", ": 29, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": 146, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_env.txt', }, 'GL_EXT_texture_env_add' :", ": 'extensions/NV/NV_vertex_array_range2.txt', }, 'GL_NV_vertex_attrib_integer_64bit' : { 'number' : 392, 'flags' : { 'public'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_shader5.txt', }, 'GL_NV_half_float' : { 'number' : 283, 'flags'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_range.txt', }, 'GL_SGIX_texture_scale_bias' :", "}, 'supporters' : { 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_import_context.txt', }, 'GL_EXT_index_array_formats' :", "'GL_PGI_vertex_hints' : { 'number' : 76, 'flags' : { 'public' }, 'supporters' :", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_fragment_shader.txt', }, 'GL_ATI_map_object_buffer' : {", "'HP', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_color.txt', }, 'GL_EXT_blend_equation_separate' : {", "'GL_QCOM_extended_get2' : { 'esnumber' : 63, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_marker.txt', }, 'GL_EXT_depth_bounds_test'", "'url' : 'extensions/EXT/EXT_discard_framebuffer.txt', }, 'GL_EXT_disjoint_timer_query' : { 'esnumber' : 150, 'flags' : {", "{ 'esnumber' : 97, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_half_float.txt', },", "{ 'number' : 91, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "}, 'url' : 'extensions/EXT/WGL_EXT_swap_control.txt', }, 'WGL_EXT_swap_control_tear' : { 'number' : 415, 'flags' :", "'url' : 'extensions/NV/NV_conservative_raster_underestimation.txt', }, 'GLX_NV_copy_buffer' : { 'number' : 457, 'flags' : {", "'url' : 
'extensions/NV/WGL_NV_DX_interop2.txt', }, 'WGL_NV_delay_before_swap' : { 'number' : 436, 'flags' : {", "'esnumber' : 112, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_usage.txt', }, 'GL_ANGLE_translated_shader_source'", ": { 'arbnumber' : 177, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_shader_interlock.txt',", "'url' : 'extensions/ARB/ARB_copy_buffer.txt', }, 'GL_ARB_copy_image' : { 'arbnumber' : 123, 'flags' : {", ": { 'number' : 197, 'flags' : { 'public' }, 'supporters' : {", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_range.txt', }, 'GL_APPLE_vertex_program_evaluators' : { 'number' : 369, 'flags'", "'GL_EXT_shader_image_load_store' : { 'number' : 386, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource.txt', }, 'GL_NV_query_resource_tag' :", ": 'extensions/NV/NV_packed_depth_stencil.txt', }, 'GL_NV_packed_float' : { 'esnumber' : 127, 'flags' : { 'public'", ": { 'arbnumber' : 48, 'flags' : { 'public' }, 'supporters' : {", "448, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_shuffle.txt', }, 'GL_NV_shadow_samplers_array' : {", "'url' : 'extensions/SGIS/SGIS_line_texgen.txt', }, 'GL_SGIS_multisample' : { 'number' : 25, 'flags' : {", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_cube_map.txt', }, 'GL_ARB_texture_cube_map_array' : { 'arbnumber'", "}, 'GL_IMG_texture_compression_pvrtc2' : { 'esnumber' : 140, 'flags' : { 'public' }, 'url'", "{ 'number' : 434, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource.txt', }, 'GL_NV_query_resource_tag' : { 'number' :", "{ 'number' : 46, 'flags' : { 'obsolete' }, 'supporters' : { 'SGI'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_tag_sample_buffer.txt', }, 'GL_SGIX_texture_add_env' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop.txt', }, 
'WGL_NV_DX_interop2'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_buffer_storage.txt', }, 'GL_ARB_cl_event' : { 'arbnumber' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_shader_interlock.txt', }, 'GL_ARB_framebuffer_no_attachments' : { 'arbnumber' :", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_range.txt', }, 'GL_APPLE_vertex_program_evaluators' : { 'number' :", "}, 'url' : 'extensions/OES/OES_primitive_bounding_box.txt', }, 'GL_OES_query_matrix' : { 'number' : 296, 'esnumber' :", "'GL_NV_shader_noperspective_interpolation' : { 'esnumber' : 201, 'flags' : { 'public' }, 'url' :", ": { 'number' : 10, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 55, 'flags' : { 'public' }, 'supporters' : {", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg2.txt', }, 'GL_SGIX_nonlinear_lighting_pervertex' : { 'flags' :", "'GL_3DFX_multisample' : { 'number' : 207, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/NV/NV_conservative_raster_pre_snap.txt', }, 'GL_NV_conservative_raster_pre_snap_triangles' : { 'number' : 487, 'esnumber' : 262,", "'extensions/APPLE/APPLE_flush_buffer_range.txt', }, 'GL_APPLE_framebuffer_multisample' : { 'esnumber' : 78, 'flags' : { 'public' },", "}, 'GL_ARB_fragment_shader_interlock' : { 'arbnumber' : 177, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_envmap_bumpmap.txt', },", "{ 'number' : 316, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_blit.txt', },", "'GL_SGIX_nurbs_eval' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nurbs_eval.txt', }, 'GL_SGIX_occlusion_instrument'", "}, 'supporters' : { 'ES', 'HP', 'IBM', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_texture3D.txt',", "'url' : 'extensions/AMD/AMD_shader_stencil_export.txt', }, 'GL_AMD_shader_stencil_value_export' : { 'number' : 444, 'flags' : {", "}, 'GL_ARB_transform_feedback_overflow_query' : { 'arbnumber' : 173, 'flags' : { 'public' }, 'url'", ": 
{ 'number' : 232, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_R8.txt', }, 'GL_EXT_texture_sRGB_RG8' : { 'esnumber' : 223, 'flags'", "'url' : 'extensions/ARB/ARB_provoking_vertex.txt', }, 'GL_ARB_query_buffer_object' : { 'arbnumber' : 148, 'flags' : {", "'url' : 'extensions/SGI/GLX_SGI_cushion.txt', }, 'GL_SGI_fft' : { 'number' : 99, 'flags' : {", "'GL_ARB_depth_buffer_float' : { 'arbnumber' : 43, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture2.txt', }, 'GL_ARB_sparse_texture_clamp' : { 'arbnumber' : 187,", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_primitive_bounding_box.txt', }, 'GL_OES_query_matrix' : { 'number'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_pbuffer.txt', }, 'GL_SGIX_pixel_texture' :", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_texture4.txt', }, 'GL_AMD_transform_feedback3_lines_triangles' :", "'number' : 425, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_SGIX_color_type' }, }, 'GLX_SGIX_color_typeXXX' : { 'number' : 72, 'flags' : { 'incomplete'", "'number' : 396, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_pbuffer.txt', }, 'GL_SGIX_pixel_texture' : { 'number' :", ": 'extensions/SGIX/SGIX_pixel_texture_bits.txt', }, 'GL_SGIX_pixel_texture_lod' : { 'number' : 128, 'flags' : { 'incomplete'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_add_env.txt', }, 'GL_SGIX_texture_coordinate_clamp'", "'extensions/MESA/GLX_MESA_query_renderer.txt', }, 'GLX_MESA_release_buffers' : { 'number' : 217, 'flags' : { 'public' },", ": { 'esnumber' : 140, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc2.txt',", "}, 'url' : 'extensions/SGI/SGI_complex_type.txt', }, 'GLX_SGI_cushion' : { 'number' : 62, 'flags' :", "{ 'incomplete' }, 'url' : 
'extensions/EXT/EXT_texture_cube_map.txt', 'comments' : 'Extension shipped but was not", "{ 'GL_EXT_geometry_point_size' }, }, 'GL_EXT_geometry_shader4' : { 'number' : 324, 'flags' : {", "'arbnumber' : 89, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_fp64.txt', }, 'GL_ARB_gpu_shader_int64'", ": { 'esnumber' : 247, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_blend_func_extended.txt',", "213, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'public' }, 'url' : 'extensions/ARB/ARB_texture_swizzle.txt', }, 'GL_ARB_texture_view' : { 'arbnumber' : 124, 'flags'", "'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_blend.txt', }, 'GL_ARB_vertex_buffer_object' : { 'arbnumber' : 28, 'flags'", "}, 'GL_EXT_sRGB_write_control' : { 'esnumber' : 153, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'url' : 'extensions/NV/NV_draw_buffers.txt', }, 'GL_NV_draw_instanced' : { 'esnumber' : 141,", "'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', }, 'GL_EXT_shader_pixel_local_storage' : { 'esnumber' : 167, 'flags' : { 'public' },", ": 'extensions/EXT/EXT_fog_coord.txt', }, 'GL_EXT_frag_depth' : { 'esnumber' : 86, 'flags' : { 'public'", "}, 'GL_FJ_shader_binary_GCCSO' : { 'esnumber' : 114, 'flags' : { 'public' }, 'url'", "'extensions/SGIX/SGIX_occlusion_instrument.txt', }, 'GL_SGIX_packed_6bytes' : { 'number' : 162, 'flags' : { 'incomplete' },", ": 'extensions/NV/NV_shadow_samplers_cube.txt', }, 'GL_NV_stereo_view_rendering' : { 'number' : 489, 'esnumber' : 296, 'flags'", "}, 'url' : 'extensions/NV/NV_texture_rectangle.txt', }, 'GL_NV_texture_rectangle_compressed' : { 'number' : 509, 'flags' :", "'number' : 182, 'flags' : { 'public' }, 'supporters' : { 'SUN' },", ": { 'SUN' }, 'url' : 'extensions/SUN/SUN_triangle_list.txt', }, 'GL_SUN_vertex' : { 'number' :", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_icc_texture.txt', },", "'public' }, 'supporters' : { 'SUN' }, 'url' : 
'extensions/SUN/SUN_convolution_border_modes.txt', }, 'GLX_SUN_get_transparent_index' :", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_lod.txt', }, 'GL_SGIS_texture_select'", "'url' : 'extensions/NV/NV_gpu_shader5.txt', }, 'GL_NV_half_float' : { 'number' : 283, 'flags' : {", "{ 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_texture.txt', },", "{ 'public' }, 'url' : 'extensions/DMP/DMP_program_binary.txt', }, 'GL_DMP_shader_binary' : { 'esnumber' : 88,", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_buffer_object.txt', 'alias' : { 'GLX_ARB_vertex_buffer_object' }, },", "}, 'GL_SGIX_framezoom' : { 'number' : 57, 'flags' : { 'public' }, 'supporters'", ": 122, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias' : {", "'GLX_NV_video_capture', 'WGL_NV_video_capture' }, }, 'GLX_NV_video_out' : { 'number' : 348, 'flags' : {", "}, 'url' : 'extensions/IBM/IBM_texture_mirrored_repeat.txt', }, 'GL_IBM_vertex_array_lists' : { 'number' : 201, 'flags' :", ": 216, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", "36, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR', 'SGI' },", "{ 'number' : 214, 'flags' : { 'incomplete', 'public' }, 'url' : 'extensions/SGIS/SGIS_texture_color_mask.txt',", ": 507, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", ": { 'number' : 46, 'flags' : { 'obsolete' }, 'supporters' : {", "}, 'GL_NV_depth_buffer_float' : { 'number' : 334, 'flags' : { 'public' }, 'supporters'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_buffer_float.txt', }, 'GL_NV_depth_clamp' : { 'number' : 260, 'flags'", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_g_instruments.txt', }, 'GL_SGIX_bali_r_instruments' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', }, 'GL_ARB_separate_shader_objects' : { 'arbnumber'", "'arbnumber' : 153, 'flags' : { 'public' }, 'url' : 
'extensions/ARB/ARB_compute_variable_group_size.txt', }, 'GL_ARB_conditional_render_inverted'", "347, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_depth_clamp_separate.txt', }, 'GL_AMD_draw_buffers_blend' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_sample_locations.txt', }, 'GL_ARB_sample_shading' : { 'arbnumber' :", "'esnumber' : 171, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_image_atomic.txt', }, 'GL_OES_shader_io_blocks'", ": { 'esnumber' : 183, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_buffer.txt',", "'url' : '../EGL/extensions/NV/EGL_NV_coverage_sample.txt', }, 'GL_NV_deep_texture3D' : { 'number' : 424, 'flags' : {", "'GL_NV_vertex_array_range2' : { 'number' : 232, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/OES/OES_texture_border_clamp.txt', }, 'GL_OES_texture_buffer' : { 'esnumber' : 216, 'flags' : { 'public'", "'extensions/OES/OES_stencil4.txt', }, 'GL_OES_stencil8' : { 'esnumber' : 33, 'flags' : { 'public' },", ": 12, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "}, 'url' : 'extensions/KHR/KHR_context_flush_control.txt', 'alias' : { 'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control' }, }, 'GL_KHR_debug' :", "'GL_NV_fence' : { 'number' : 222, 'esnumber' : 52, 'flags' : { 'public'", ": 'extensions/SUN/SUN_slice_accum.txt', }, 'GL_SUN_triangle_list' : { 'number' : 165, 'flags' : { 'public'", ": { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_direct_state_access.txt', }, 'GL_EXT_discard_framebuffer' :", "}, 'url' : 'extensions/NV/NV_generate_mipmap_sRGB.txt', }, 'GL_NV_geometry_program4' : { 'number' : 323, 'flags' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_light_max_exponent.txt', },", ": { 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_pixel.txt',", "}, 'url' : 
'extensions/OES/OES_vertex_type_10_10_10_2.txt', }, 'GL_OES_viewport_array' : { 'esnumber' : 267, 'flags' :", "}, 'GL_SGI_complex' : { 'number' : 87, 'flags' : { 'incomplete' }, 'supporters'", "}, 'url' : 'extensions/AMD/AMD_texture_gather_bias_lod.txt', }, 'GL_AMD_texture_texture4' : { 'number' : 362, 'flags' :", ": { 'IBM', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_abgr.txt', }, 'GL_EXT_base_instance' :", ": 167, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_barrier.txt', }, 'GL_ARB_texture_border_clamp' :", ": { 'public' }, 'url' : 'extensions/FJ/FJ_shader_binary_GCCSO.txt', }, 'GL_GREMEDY_frame_terminator' : { 'number' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program4.txt', },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_fog_function.txt', }, 'GL_SGIS_generate_mipmap' : {", "'arbnumber' : 146, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_enhanced_layouts.txt', }, 'GL_ARB_explicit_attrib_location'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_latc.txt', }, 'GL_EXT_texture_compression_rgtc' : {", "}, 'url' : 'extensions/EXT/EXT_base_instance.txt', }, 'GL_EXT_bgra' : { 'number' : 129, 'flags' :", "'url' : 'extensions/NV/NV_packed_depth_stencil.txt', }, 'GL_NV_packed_float' : { 'esnumber' : 127, 'flags' : {", ": { 'number' : 464, 'esnumber' : 227, 'flags' : { 'public' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object2.txt', }, 'GL_NV_path_rendering' : { 'number'", ": { 'number' : 301, 'flags' : { 'public' }, 'supporters' : {", "'esnumber' : 169, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_shading.txt', }, 'GL_OES_sample_variables'", "'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_polygon_offset.txt', }, 'GL_EXT_polygon_offset_clamp' : { 'number' : 460,", "'arbnumber' : 52, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": 28, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "163, 
'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 58, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'extensions/EXT/EXT_texture_sRGB_RG8.txt', }, 'GL_EXT_texture_sRGB_decode' : { 'number' : 402, 'esnumber' : 152, 'flags' :", "'extensions/NV/NV_texture_rectangle_compressed.txt', }, 'GL_NV_texture_shader' : { 'number' : 230, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/EXT/EXT_render_snorm.txt', }, 'GL_EXT_rescale_normal' : { 'number' :", ": { 'number' : 324, 'flags' : { 'public' }, 'supporters' : {", "'extensions/AMD/GLX_AMD_gpu_association.txt', }, 'GL_AMD_gpu_shader_half_float' : { 'number' : 496, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_instanced_arrays.txt', }, 'GLX_EXT_libglvnd' : { 'number' : 482,", ": 282, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_win32.txt', 'alias' : {", ": 'extensions/EXT/EXT_shader_implicit_conversions.txt', }, 'GL_EXT_shader_integer_mix' : { 'number' : 437, 'esnumber' : 161, 'flags'", "}, 'GL_NV_bindless_multi_draw_indirect' : { 'number' : 432, 'flags' : { 'public' }, 'supporters'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program4.txt', }, 'GL_NV_gpu_program5' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_point_parameters.txt', }, 'GL_EXT_polygon_offset' : { 'number'", "'number' : 84, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "{ 'arbnumber' : 176, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_2_compatibility.txt', },", "{ 'number' : 279, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "'GL_EXT_shadow_samplers' : { 'esnumber' : 102, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_vertex.txt', }, 'GL_VIV_shader_binary' : {", "'extensions/AMD/AMD_vertex_shader_tessellator.txt', }, 'GL_AMD_vertex_shader_viewport_index' : { 'number' : 416, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/EXT/EXT_clip_volume_hint.txt', }, 
'GL_EXT_cmyka' : { 'number' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_RG8.txt', }, 'GL_EXT_texture_sRGB_decode' : { 'number' :", "'extensions/SGIX/SGIX_clipmap.txt', }, 'GL_SGIX_color_matrix_accuracy' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_matrix_accuracy.txt',", ": { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop.txt', }, 'WGL_NV_DX_interop2' : { 'number' :", "'url' : 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias' : { 'GLX_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_robustness_isolation' : { 'arbnumber'", "}, 'url' : 'extensions/NV/NV_texture_shader2.txt', }, 'GL_NV_texture_shader3' : { 'number' : 265, 'flags' :", ": { 'number' : 49, 'flags' : { 'public' }, 'supporters' : {", "'arbnumber' : 164, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_direct_state_access.txt', }, 'GL_ARB_draw_buffers'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shading_language_100.txt', }, 'GL_ARB_shading_language_420pack' : {", "'extensions/NV/NV_pixel_buffer_object.txt', }, 'GL_NV_pixel_data_range' : { 'number' : 284, 'flags' : { 'public' },", ": { 'number' : 511, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_mesh_array.txt',", "'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', }, 'GL_NVX_conditional_render' : { 'number' : 425, 'flags' : { 'public' },", ": { 'esnumber' : 57, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_user_clip_plane.txt',", ": { 'IBM' }, 'url' : 'extensions/IBM/IBM_multimode_draw_arrays.txt', }, 'GL_IBM_rasterpos_clip' : { 'number' :", "'GL_EXT_memory_object' : { 'number' : 503, 'esnumber' : 280, 'flags' : { 'public'", "150, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_stencil8.txt', }, 'GL_ARB_texture_storage' : {", ": { 'number' : 438, 'flags' : { 'public' }, 'supporters' : {", "6, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", "}, 
'url' : 'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl' : 'extensions/EXT/EXT_separate_shader_objects.gles.txt', 'comments' : 'Different that the OpenGL", ": 'extensions/EXT/WGL_EXT_colorspace.txt', }, 'WGL_EXT_create_context_es2_profile' : { 'number' : 400, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_shader_ballot.txt', }, 'GL_ARB_shader_bit_encoding' : { 'arbnumber' : 82, 'flags'", "'GL_SGIX_shadow_ambient' : { 'number' : 90, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/ARB/ARB_shader_precision.txt', }, 'GL_ARB_shader_stencil_export' : { 'arbnumber' : 106, 'flags' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_shader_group_vote.txt', }, 'GL_EXT_shader_image_load_formatted' : { 'number' :", "'number' : 86, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "}, 'GL_APPLE_color_buffer_packed_float' : { 'esnumber' : 194, 'flags' : { 'public' }, 'url'", "{ 'number' : 364, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA',", "}, 'GL_IGLOO_viewport_offsetXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt',", "'url' : 'extensions/IMG/IMG_texture_compression_pvrtc2.txt', }, 'GL_IMG_texture_env_enhanced_fixed_function' : { 'esnumber' : 58, 'flags' : {", "'url' : 'extensions/MESA/MESA_ycbcr_texture.txt', }, 'GL_MTK_program_binary' : { 'esnumber' : 245, 'flags' : {", "'number' : 61, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'extensions/NV/NV_fragment_program_option.txt', }, 'GL_NV_fragment_shader_interlock' : { 'number' : 468, 'esnumber' : 230, 'flags' :", ": 351, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'GL_AMD_name_gen_delete' : { 'number' : 394, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 321, 'flags' : { 'public' }, 'supporters' : {", "371, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "}, 'GL_ARB_cl_event' : { 'arbnumber' : 103, 'flags' : { 'public' }, 'url'", "'supporters' : { 'APPLE' }, 'url' : 
'extensions/APPLE/APPLE_rgb_422.txt', }, 'GL_APPLE_row_bytes' : { 'number'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_add.txt', }, 'GL_ARB_texture_env_combine' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners.txt',", "'url' : 'extensions/AMD/AMD_texture_gather_bias_lod.txt', }, 'GL_AMD_texture_texture4' : { 'number' : 362, 'flags' : {", "'alias' : { 'GL_EXT_semaphore_fd' }, }, 'GL_EXT_memory_object_win32' : { 'number' : 505, 'esnumber'", "}, 'GL_NV_clip_space_w_scaling' : { 'number' : 486, 'esnumber' : 295, 'flags' : {", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_polynomial_ffd.txt', }, 'GL_SGIX_quad_mesh'", "{ 'public' }, 'url' : 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias' : { 'GLX_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_robustness_isolation'", "'GL_ARB_draw_instanced' : { 'arbnumber' : 44, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'url' : 'extensions/ARB/ARB_vertex_array_bgra.txt', }, 'GL_ARB_vertex_array_object' : { 'arbnumber' : 54, 'flags'", "}, 'url' : 'extensions/SGIS/SGIS_texture_border_clamp.txt', }, 'GL_SGIS_texture_color_mask' : { 'number' : 214, 'flags' :", "'GL_KHR_parallel_shader_compile' : { 'arbnumber' : 192, 'esnumber' : 288, 'flags' : { 'public'", ": 300, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", "'extensions/FJ/FJ_shader_binary_GCCSO.txt', }, 'GL_GREMEDY_frame_terminator' : { 'number' : 345, 'flags' : { 'public' },", "'extensions/ARB/ARB_gpu_shader5.txt', }, 'GL_ARB_gpu_shader_fp64' : { 'arbnumber' : 89, 'flags' : { 'public' },", "'GL_NV_multisample_coverage' : { 'number' : 393, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/SGIS/SGIS_detail_texture.txt', }, 'GL_SGIS_fog_function' : { 'number' : 64, 'flags' : {", "}, 'url' : 'extensions/ARB/WGL_ARB_extensions_string.txt', }, 'WGL_ARB_make_current_read' : { 'arbnumber' : 10, 'flags' :", 
"'GL_HP_convolution_border_modes' : { 'number' : 67, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/EXT/EXT_texture_integer.txt', }, 'GL_EXT_texture_lod_bias' : { 'number' : 186, 'esnumber' : 60,", "'number' : 252, 'flags' : { 'public' }, 'supporters' : { 'I3D' },", "'supporters' : { 'IBM', 'INGR' }, 'url' : 'extensions/EXT/EXT_blend_func_separate.txt', }, 'GL_EXT_blend_logic_op' : {", "}, 'GL_NV_gpu_program4' : { 'number' : 322, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcba.txt',", "139, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_range.txt', }, 'GL_ARB_texture_compression' : {", "}, 'url' : 'extensions/EXT/EXT_swap_control.txt', }, 'GLX_EXT_swap_control_tear' : { 'number' : 414, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_tessellation_shader.txt', }, 'GL_OES_texture_3D' : { 'esnumber'", "'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', }, 'GL_NV_conservative_raster_underestimation' : { 'number' : 518, 'flags' : { 'public' },", ": 42, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', }, 'GL_EXT_texture_view' :", ": 'extensions/ANGLE/ANGLE_framebuffer_blit.txt', }, 'GL_ANGLE_framebuffer_multisample' : { 'esnumber' : 84, 'flags' : { 'public'", "{ 'esnumber' : 125, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multiview_draw_buffers.txt', },", "}, 'GL_NV_draw_buffers' : { 'esnumber' : 91, 'flags' : { 'public' }, 'url'", "'esnumber' : 174, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_storage_multisample_2d_array.txt', }, 'GL_OES_texture_view'", "'public' }, 'url' : 'extensions/APPLE/APPLE_texture_max_level.txt', }, 'GL_APPLE_texture_packed_float' : { 'esnumber' : 195, 'flags'", "'GL_OES_point_size_array' : { 'esnumber' : 14, 'flags' : { 'public' }, 'url' :", "{ 'number' : 372, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", ": { 'INTEL', 'SGI' }, 'url' : 
'extensions/EXT/EXT_index_material.txt', }, 'GL_EXT_index_texture' : { 'number'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_texture4.txt', }, 'GL_AMD_transform_feedback3_lines_triangles' : { 'number' :", "'url' : 'extensions/EXT/EXT_texture_compression_bptc.txt', }, 'GL_EXT_texture_compression_dxt1' : { 'number' : 309, 'esnumber' : 49,", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_read_format_bgra.txt', }, 'GL_EXT_render_snorm' : { 'esnumber'", "}, 'GL_EXT_vertex_attrib_64bit' : { 'number' : 387, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_pixel_buffer_object.txt', },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multisample.txt', 'alias' : { 'GLX_SGIS_multisample'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback.txt', }, 'GL_NV_transform_feedback2' : { 'number'", "'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_make_current_read.txt', }, 'GLX_SGI_swap_control' : { 'number' : 40, 'flags'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_multisample.txt', }, 'GL_NV_texture_npot_2D_mipmap' : { 'esnumber'", ": { 'public' }, 'supporters' : { 'APPLE', 'CodeWeavers', 'NVIDIA', 'TransGaming' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_env_combine4.txt',", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', }, 'GL_SGIX_nurbs_eval' : {", "'GLX_ARB_create_context_profile' : { 'arbnumber' : 75, 'flags' : { 'public' }, 'url' :", ": { 'number' : 357, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/EXT/EXT_texture_view.txt', }, 'GL_EXT_timer_query' : { 'number' : 319, 'flags' : { 'public'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_coverage.txt', }, 'GL_NV_multisample_filter_hint' : { 'number' : 259, 'flags'", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_subdiv_patch.txt', }, 'GL_SGIX_subsample' : { 'number' : 202,", ": 
'extensions/ARB/ARB_half_float_vertex.txt', }, 'GL_ARB_indirect_parameters' : { 'arbnumber' : 154, 'flags' : { 'public'", ": { 'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_minmax.txt', },", ": 'extensions/ARM/ARM_rgba8.txt', }, 'GL_ARM_shader_framebuffer_fetch' : { 'esnumber' : 165, 'flags' : { 'public'", "}, 'url' : 'extensions/NV/WGL_NV_video_output.txt', }, 'WGL_OML_sync_control' : { 'number' : 242, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_get_program_binary.txt', }, 'GL_OES_gpu_shader5' : { 'esnumber'", "'url' : 'extensions/NV/WGL_NV_render_depth_texture.txt', }, 'WGL_NV_render_texture_rectangle' : { 'number' : 264, 'flags' : {", ": 21, 'flags' : { 'public' }, 'supporters' : { 'KGC', 'SGI' },", "'number' : 69, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'GL_ARB_enhanced_layouts' : { 'arbnumber' : 146, 'flags' : { 'public' }, 'url' :", "'GL_QCOM_binning_control' : { 'esnumber' : 119, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program.txt', }, 'GL_NV_fragment_program2' : {", "{ 'GL_SGIS_color_range' }, }, 'GL_SGIS_detail_texture' : { 'number' : 21, 'flags' : {", "161, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conditional_render_inverted.txt', }, 'GL_ARB_conservative_depth' : {", ": 236, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sample_mask_override_coverage.txt', }, 'GL_NV_shader_atomic_counters' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture.txt', }, 'GL_OES_depth_texture_cube_map' : { 'esnumber'", "'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias' : { 'GL_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_sample_locations' : { 'arbnumber' : 181,", ": 39, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_3DC_texture.txt', }, 'GL_AMD_compressed_ATC_texture' :", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cylinder_texgen.txt',", 
"'url' : 'extensions/EXT/EXT_texture_sRGB_decode.txt', }, 'GL_EXT_texture_shared_exponent' : { 'number' : 333, 'flags' : {", "'url' : 'drafts/MTK/MTK_shader_binary.txt', }, 'GL_NVX_blend_equation_advanced_multi_draw_buffers' : { 'number' : 492, 'esnumber' : 266,", "{ 'number' : 199, 'flags' : { 'public' }, 'supporters' : { 'IBM'", ": { 'number' : 284, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_ANGLE_texture_usage' : { 'esnumber' : 112, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'url' : 'extensions/OES/OES_EGL_image.txt', }, 'GL_OES_EGL_image_external' : { 'esnumber' : 87,", "425, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_texenv_op.txt', 'comments' : 'Evolved into EXT_texture_env_combine.',", "'extensions/ARB/ARB_texture_non_power_of_two.txt', }, 'GL_ARB_texture_query_levels' : { 'arbnumber' : 140, 'flags' : { 'public' },", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', }, 'GL_SGIX_fog_layers' : {", "'number' : 97, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI'", "24, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'extensions/EXT/EXT_external_objects_fd.txt', 'alias' : { 'GL_EXT_semaphore_fd' }, }, 'GL_EXT_memory_object_win32' : { 'number' : 505,", ": 'extensions/NV/WGL_NV_delay_before_swap.txt', }, 'WGL_NV_gpu_affinity' : { 'number' : 355, 'flags' : { 'public'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_map_buffer_range.txt', }, 'GL_ARB_matrix_palette' : {", ": 'extensions/ARB/ARB_robustness.txt', }, 'GLX_ARB_robustness_application_isolation' : { 'arbnumber' : 142, 'flags' : { 'public'", "'extensions/ARB/ARB_depth_texture.txt', }, 'GL_ARB_derivative_control' : { 'arbnumber' : 163, 'flags' : { 'public' },", "{ 'number' : 386, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'url' : 'extensions/SGIX/SGIX_instrument_error.txt', }, 'GL_SGIX_instruments' : { 'number' : 55, 'flags' : {", "'flags' : { 
'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_object_space_tess.txt',", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_env_combine4.txt', }, 'GL_NV_texture_expand_normal' : {", "413, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", ": 'extensions/ARB/ARB_provoking_vertex.txt', }, 'GL_ARB_query_buffer_object' : { 'arbnumber' : 148, 'flags' : { 'public'", "10, 'flags' : { 'public' }, 'supporters' : { 'ES', 'HP', 'SGI' },", "'public' }, 'url' : 'extensions/OES/OES_texture_float.txt', 'alias' : { 'GL_OES_texture_half_float' }, }, 'GL_OES_texture_float_linear' :", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_vector_ops.txt', }, 'GL_SGIX_vertex_array_object' : { 'flags' : {", "'public' }, 'url' : 'extensions/ARB/ARB_shading_language_packing.txt', }, 'GL_ARB_shadow' : { 'arbnumber' : 23, 'flags'", "}, 'url' : 'extensions/ARB/ARB_shader_viewport_layer_array.txt', }, 'GL_ARB_shading_language_100' : { 'arbnumber' : 33, 'flags' :", "}, 'GL_ATI_texture_float' : { 'number' : 280, 'flags' : { 'public' }, 'supporters'", ": 248, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_vertex_shader.txt', }, 'GL_EXT_vertex_weighting' :", "'number' : 495, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture.txt', 'comments' :", ": 165, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url'", "}, 'url' : 'extensions/MESA/GLX_MESA_query_renderer.txt', }, 'GLX_MESA_release_buffers' : { 'number' : 217, 'flags' :", ": 226, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'arbnumber' : 179, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_parallel_shader_compile.txt', },", "}, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_texture.txt', }, 'GLX_SGIX_dm_buffer' :", "}, 'url' : 'extensions/SGIX/SGIX_fragment_lighting_space.txt', }, 'GL_SGIX_fragment_specular_lighting' : { 'flags' : { 'incomplete', 'public'", 
"'GL_APPLE_texture_format_BGRA8888' : { 'esnumber' : 79, 'flags' : { 'public' }, 'url' :", "'GL_ARB_texture_filter_anisotropic' : { 'arbnumber' : 195, 'flags' : { 'public' }, 'supporters' :", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_timer_query.txt', }, 'GL_EXT_transform_feedback' : { 'number' : 352, 'flags'", "}, 'url' : 'extensions/SGIX/SGIX_calligraphic_fragment.txt', }, 'GL_SGIX_clipmap' : { 'number' : 33, 'flags' :", "}, 'GL_EXT_copy_texture' : { 'number' : 10, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_timer_query.txt', }, 'GL_ARB_transform_feedback2' : { 'arbnumber'", ": { 'arbnumber' : 18, 'flags' : { 'public' }, 'supporters' : {", ": 199, 'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5_mem_extended.txt', },", "}, 'url' : 'extensions/EXT/EXT_gpu_shader5.txt', }, 'GL_EXT_histogram' : { 'number' : 11, 'flags' :", "'url' : 'extensions/AMD/AMD_conservative_depth.txt', }, 'GL_AMD_debug_output' : { 'number' : 395, 'flags' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_equation_advanced.txt', 'alias' : { 'GL_NV_blend_equation_advanced_coherent' },", "'extensions/3DL/WGL_3DL_stereo_control.txt', }, 'WGL_AMD_gpu_association' : { 'number' : 361, 'flags' : { 'public' },", ": 'extensions/ARB/ARB_depth_buffer_float.txt', }, 'GL_ARB_depth_clamp' : { 'arbnumber' : 61, 'flags' : { 'public'", ": { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', }, 'GL_IGLOO_toggle_color_and_lightXXX' : { 'flags'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_primitive_restart.txt', }, 'GL_NV_query_resource'", "{ 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_direct_state_access.txt', }, 'GL_EXT_discard_framebuffer' : {", "219, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_elements_base_vertex.txt', }, 
'GL_OES_draw_texture' : {", "'public' }, 'url' : 'extensions/ARM/ARM_mali_shader_binary.txt', }, 'GL_ARM_rgba8' : { 'esnumber' : 82, 'flags'", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_npot.txt', }, 'GL_OES_texture_stencil8' : { 'esnumber' : 173,", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_texture_rectangle.txt', },", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_combine.txt', }, 'GL_ARB_texture_env_crossbar'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow.txt',", "'extensions/EXT/EXT_cull_vertex.txt', }, 'GL_EXT_debug_label' : { 'number' : 439, 'esnumber' : 98, 'flags' :", "'extensions/NV/NV_register_combiners.txt', }, 'GL_NV_register_combiners2' : { 'number' : 227, 'flags' : { 'public' },", "{ 'number' : 431, 'flags' : { 'public' }, 'supporters' : { 'AMD'", ": 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', }, 'GL_ARB_compute_shader' : { 'arbnumber' : 122, 'flags' : { 'public'", "{ 'number' : 97, 'flags' : { 'public' }, 'supporters' : { 'INTEL',", ": { 'TGS' }, 'url' : 'extensions/PGI/PGI_vertex_hints.txt', }, 'GL_QCOM_alpha_test' : { 'esnumber' :", "}, 'url' : 'extensions/EXT/EXT_texture_env_combine.txt', }, 'GL_EXT_texture_env_dot3' : { 'number' : 220, 'flags' :", "{ 'number' : 49, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "}, 'url' : 'extensions/ANGLE/ANGLE_translated_shader_source.txt', }, 'GL_APPLE_aux_depth_stencil' : { 'number' : 370, 'flags' :", "}, 'url' : 'extensions/OES/OES_standard_derivatives.txt', }, 'GL_OES_stencil1' : { 'esnumber' : 31, 'flags' :", "{ 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_lock.txt', }, 'WGL_I3D_swap_frame_usage'", "{ 'arbnumber' : 187, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture_clamp.txt', },", "}, 'url' : 'extensions/NV/NV_shader_storage_buffer_object.txt', }, 'GL_NV_shader_thread_group' : { 'number' : 447, 'flags' :", 
"'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_swap_group.txt', }, 'WGL_NV_video_output' :", "}, 'GL_EXT_packed_float' : { 'number' : 328, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/OES/OES_fixed_point.txt', }, 'GL_OES_fragment_precision_high' : { 'esnumber' : 28, 'flags' :", ": { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/GLX_SUN_get_transparent_index.txt', },", "'number' : 316, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_blit.txt', }, 'GL_EXT_framebuffer_multisample'", "'GLX_ARB_create_context_robustness' : { 'arbnumber' : 101, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 'alias'", "'public' }, 'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_frame_terminator.txt', }, 'GL_GREMEDY_string_marker' :", "'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_occlusion_test.txt', }, 'GL_HP_texture_lighting' : { 'number'", "'MS', 'SGI' }, 'url' : 'extensions/EXT/EXT_paletted_texture.txt', }, 'GL_EXT_pixel_buffer_object' : { 'number' : 302,", ": 'extensions/EXT/EXT_texture3D.txt', }, 'GL_EXT_texture_array' : { 'number' : 329, 'flags' : { 'public'", ": 'extensions/EXT/EXT_texture_buffer.txt', }, 'GL_EXT_texture_buffer_object' : { 'number' : 330, 'flags' : { 'public'", ": { 'number' : 465, 'esnumber' : 228, 'flags' : { 'public' },", ": 14, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": { 'number' : 473, 'esnumber' : 236, 'flags' : { 'public' },", "'extensions/APPLE/APPLE_client_storage.txt', }, 'GL_APPLE_clip_distance' : { 'esnumber' : 193, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/OES/OES_geometry_shader.txt', }, 'GL_OES_get_program_binary' : { 'esnumber' : 47, 'flags'", "}, 'url' : 'extensions/EXT/GLX_EXT_visual_info.txt', }, 'GLX_EXT_visual_rating' : { 'number' : 44, 'flags' :", ": { 'esnumber' : 239, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_buffer_storage.txt',", "'GL_SGIX_texture_range' : { 'number' : 181, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/EXT/EXT_shader_integer_mix.txt', }, 'GL_EXT_shader_io_blocks' :", ": 'extensions/SGIS/SGIS_clip_band_hint.txt', }, 'GLX_SGIS_color_range' : { 'number' : 115, 'flags' : { 'public'", "{ 'number' : 212, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'GL_IMG_program_binary' : { 'esnumber' : 67, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/AMD/AMD_occlusion_query_event.txt', }, 'GL_AMD_performance_monitor' : { 'number' : 360, 'esnumber' :", "'GL_OES_rgb8_rgba8' : { 'esnumber' : 30, 'flags' : { 'public' }, 'url' :", "}, 'GL_ARB_shader_texture_lod' : { 'arbnumber' : 60, 'flags' : { 'public' }, 'url'", "'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc.txt', }, 'GL_EXT_texture_compression_s3tc_srgb' : { 'esnumber' : 289,", ": 188, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_filter_minmax.txt', }, 'GL_ARB_texture_float' :", "'public' }, 'url' : 'extensions/IMG/IMG_user_clip_plane.txt', }, 'GL_INGR_color_clamp' : { 'number' : 174, 'flags'", ": 'extensions/AMD/AMD_depth_clamp_separate.txt', }, 'GL_AMD_draw_buffers_blend' : { 'number' : 366, 'flags' : { 'public'", "'GL_EXT_texture_format_BGRA8888' : { 'esnumber' : 51, 'flags' : { 'public' }, 'url' :", "'extensions/SGIS/SGIS_pixel_texture.txt', }, 'GL_SGIS_point_line_texgen' : { 'number' : 213, 'flags' : { 'public' },", "'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/ATI/ATI_text_fragment_shader.txt', }, 'GL_ATI_texture_env_combine3' : {", "{ 'arbnumber' : 182, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counter_ops.txt', },", "'GL_IMG_user_clip_plane' : { 'esnumber' : 57, 'flags' : { 'public' }, 'url' :", "462, 'esnumber' : 226, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_raster_multisample.txt', },", "{ 'ATI', 'NVIDIA' }, 'url' : 
'extensions/S3/S3_s3tc.txt', }, 'GLX_SGIS_blended_overlay' : { 'number' :", "32, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url'", "'GLX_MESA_swap_control' : { 'number' : 514, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_cushion.txt', }, 'GL_SGI_fft' : { 'number'", ": 129, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_3dvision_settings.txt', }, 'GL_NV_EGL_stream_consumer_external' :", "'flags' : { 'public' }, 'url' : 'extensions/SGIX/SGIX_fog_texture.txt', }, 'GL_SGIX_fragment_lighting_space' : { 'number'", "}, 'GL_EXT_blend_func_extended' : { 'esnumber' : 247, 'flags' : { 'public' }, 'url'", "}, 'GL_EXT_memory_object' : { 'number' : 503, 'esnumber' : 280, 'flags' : {", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_table_index_mode.txt', }, 'GLX_SGIX_color_type' :", "{ 'GL_3DFX_multisample' : { 'number' : 207, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/NV/NV_depth_buffer_float.txt', }, 'GL_NV_depth_clamp' : { 'number' : 260, 'flags' : {", ": { 'arbnumber' : 22, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'supporters' : { 'HP', 'KGC', 'SGI', 'SUN' }, 'url'", ": { 'arbnumber' : 175, 'esnumber' : 243, 'flags' : { 'public' },", "489, 'esnumber' : 296, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_stereo_view_rendering.txt', },", "}, 'supporters' : { 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_histogram.txt', },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_float_buffer.txt', 'alias' : { 'WGL_NV_float_buffer' }, },", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/WGL_AMD_gpu_association.txt', }, 'WGL_ARB_buffer_region' :", "}, 'GL_ARB_compatibility' : { 'arbnumber' : 58, 'flags' : { 'public' }, 'url'", ": 149, 'flags' : { 'public' }, 'supporters' : { '3DFX', 'NVIDIA', 'REND'", "'public' }, 'url' : 'extensions/NV/NV_fill_rectangle.txt', }, 'GL_NV_float_buffer' : { 'number' : 281, 'flags'", "'GL_NV_float_buffer' 
: { 'number' : 281, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/NV/NV_image_formats.txt', }, 'GL_NV_instanced_arrays' : { 'esnumber' : 145,", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESAX/MESAX_texture_stack.txt', }, 'GLX_MESA_agp_offset' : {", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_surfaceless_context.txt', }, 'GL_OES_tessellation_shader' : { 'esnumber'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_g_instruments.txt', }, 'GL_SGIX_bali_r_instruments' : {", "'GL_ATI_fragment_shader' : { 'number' : 245, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 440, 'esnumber' : 99, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_byte_coordinates.txt',", "}, 'GL_APPLE_ycbcr_422' : { 'number' : 275, 'flags' : { 'public' }, 'supporters'", "'extensions/KHR/KHR_parallel_shader_compile.txt', }, 'GL_KHR_robust_buffer_access_behavior' : { 'arbnumber' : 169, 'esnumber' : 189, 'flags' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_texture.txt', }, 'GL_NV_draw_vulkan_image' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt',", "'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_read_format.txt',", ": { 'AMD' }, 'url' : 'extensions/AMD/WGL_AMD_gpu_association.txt', }, 'WGL_ARB_buffer_region' : { 'arbnumber' :", "'number' : 464, 'esnumber' : 227, 'flags' : { 'public' }, 'url' :", "{ 'HP' }, 'url' : 'extensions/HP/HP_convolution_border_modes.txt', }, 'GL_HP_image_transform' : { 'number' : 66,", "{ 'esnumber' : 31, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil1.txt', },", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_point_parameters.txt', },", "'extensions/ARB/ARB_uniform_buffer_object.txt', }, 'GL_ARB_vertex_array_bgra' : { 'arbnumber' : 68, 'flags' : { 'public' },", ": { 'public' }, 'url' : 
'extensions/NV/NV_read_buffer.txt', }, 'GL_NV_read_depth_stencil' : { 'esnumber' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_path_rendering.txt', }, 'GL_NV_path_rendering_shared_edge' : { 'number' : 471, 'esnumber'", "}, 'GL_EXT_draw_instanced' : { 'number' : 327, 'esnumber' : 157, 'flags' : {", "{ 'arbnumber' : 172, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_buffer.txt', },", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragments_instrument.txt', }, 'GL_SGIX_framezoom' : { 'number' :", "'SGI' }, 'url' : 'extensions/EXT/EXT_texture_object.txt', }, 'GL_EXT_texture_perturb_normal' : { 'number' : 147, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_program_interface_query.txt', }, 'GL_ARB_provoking_vertex' : { 'arbnumber'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_element_index_uint.txt', }, 'GL_OES_extended_matrix_palette' : { 'esnumber'", ": 'extensions/SGIS/SGIS_texture_color_mask.txt', }, 'GL_SGIS_texture_edge_clamp' : { 'number' : 35, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_provoking_vertex.txt', }, 'GL_ARB_query_buffer_object' : { 'arbnumber'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_blend_func_extended.txt', }, 'GL_EXT_blend_func_separate' : { 'number' :", ": 356, 'flags' : { 'public' }, 'supporters' : { 'IdSoftware', 'NVIDIA' },", ": 238, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_multisample.txt',", "}, 'GL_MTK_program_binary' : { 'esnumber' : 245, 'flags' : { 'incomplete', 'private' },", "'extensions/NV/NV_path_rendering.txt', }, 'GL_NV_path_rendering_shared_edge' : { 'number' : 471, 'esnumber' : 234, 'flags' :", "{ 'esnumber' : 45, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_standard_derivatives.txt', },", "'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_fixed_point.txt',", "}, 'GL_AMD_texture_texture4' : { 'number' : 362, 'flags' : { 
'public' }, 'supporters'", "'arbnumber' : 113, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_map_buffer_alignment.txt', }, 'GL_ARB_map_buffer_range'", "'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_subtract.txt',", "}, 'url' : 'extensions/AMD/AMD_multi_draw_indirect.txt', }, 'GL_AMD_name_gen_delete' : { 'number' : 394, 'flags' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_array.txt', },", "}, 'GL_EXT_texture_sRGB_decode' : { 'number' : 402, 'esnumber' : 152, 'flags' : {", ": { 'number' : 168, 'flags' : { 'public' }, 'supporters' : {", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_occlusion_query.txt', }, 'GL_ARB_occlusion_query2' : { 'arbnumber' :", "}, 'GL_OES_fixed_point' : { 'number' : 292, 'esnumber' : 9, 'flags' : {", "{ 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_polygon_offset.txt', }, 'GL_EXT_polygon_offset_clamp' :", "427, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url'", "'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_resample.txt', }, 'GL_OML_subsample' : { 'number'", ": 'Included with arbnumber 56, GLX_ARB_create_context.', }, 'GLX_ARB_create_context_robustness' : { 'arbnumber' : 101,", "{ 'public' }, 'url' : 'extensions/OES/OES_blend_subtract.txt', }, 'GL_OES_byte_coordinates' : { 'number' : 291,", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_visual_select_group.txt', }, 'GLX_SGIX_wait_group' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range.txt', }, 'GL_NV_vertex_array_range2' : { 'number' : 232, 'flags'", "{ 'number' : 433, 'esnumber' : 163, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_shader_interlock.txt', }, 'GL_ARB_framebuffer_no_attachments' : { 'arbnumber'", "'arbnumber' : 77, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_compression_bptc.txt', }, 'GL_ARB_texture_compression_rgtc'", "'arbnumber' : 
25, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'extensions/EXT/EXT_multiple_textures.txt', }, 'GL_EXT_multisample_compatibility' : { 'esnumber' : 248, 'flags' : { 'public' },", ": 119, 'esnumber' : 118, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_debug.txt',", "'url' : 'extensions/QCOM/QCOM_framebuffer_foveated.txt', }, 'GL_QCOM_texture_foveated' : { 'esnumber' : 293, 'flags' : {", "}, 'GL_EXT_texture_compression_rgtc' : { 'number' : 332, 'esnumber' : 286, 'flags' : {", "'extensions/NV/NV_pixel_data_range.txt', }, 'GL_NV_platform_binary' : { 'esnumber' : 131, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_edge_clamp.txt',", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_load.txt', }, 'GL_NV_shader_buffer_store' : { 'number'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback2.txt', }, 'GL_NV_uniform_buffer_unified_memory' : { 'number' : 459,", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_R8.txt', }, 'GL_EXT_texture_sRGB_RG8' : { 'esnumber'", "{ 'public' }, 'supporters' : { 'DEC', 'HP', 'IBM', 'INGR', 'KGC', 'SGI' },", "}, 'GL_NV_pixel_data_range' : { 'number' : 284, 'flags' : { 'public' }, 'supporters'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_pass_instrument.txt', }, 'GL_SGIX_depth_texture' : {", "'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_object_space_tess.txt', }, 'GL_EXT_occlusion_query_boolean' : { 'esnumber' : 100, 'flags'", ": 185, 'flags' : { 'public' }, 'supporters' : { 'ATI', 'NVIDIA' },", ": { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_blit.txt', }, 'GL_NV_framebuffer_mixed_samples' : { 'number' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_depth_clamp.txt', }, 'GL_ARB_depth_texture' : { 'arbnumber' :", ": 122, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compute_shader.txt', }, 'GL_ARB_compute_variable_group_size' :", "{ 'number' : 192, 'flags' : { 'public' }, 
'supporters' : { 'NVIDIA'", ": { 'number' : 24, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shading_language_100.txt', }, 'GL_ARB_shading_language_420pack' : { 'arbnumber'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_gather.txt', }, 'GL_ARB_texture_mirror_clamp_to_edge' : { 'arbnumber'", ": { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_array_formats.txt', }, 'GL_EXT_index_func' : { 'number'", "'url' : 'extensions/OES/OES_extended_matrix_palette.txt', }, 'GL_OES_fbo_render_mipmap' : { 'esnumber' : 27, 'flags' : {", "}, 'GL_EXT_shader_texture_lod' : { 'esnumber' : 77, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_lod.txt',", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_g_instruments.txt', }, 'GL_SGIX_bali_r_instruments' : { 'flags' :", "'number' : 224, 'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_texture_mirrored_repeat.txt', }, 'GL_IBM_vertex_array_lists'", ": { 'esnumber' : 63, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get2.txt',", "}, 'GL_SGIX_instrument_error' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_instrument_error.txt', },", "'url' : 'extensions/ARB/ARB_direct_state_access.txt', }, 'GL_ARB_draw_buffers' : { 'arbnumber' : 37, 'flags' : {", "'GL_INGR_color_clamp' : { 'number' : 174, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_minmax_factor.txt', },", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_line_texgen.txt', }, 'GL_SGIS_multisample' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5.txt', }, 'GL_NV_gpu_program5_mem_extended'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_swap_control_tear.txt', }, 'GL_EXT_tessellation_shader' : {", "'GL_SGIX_slim' : { 'flags' : { 'incomplete' }, 'url' : 
'extensions/SGIX/SGIX_slim.txt', }, 'GL_SGIX_spotlight_cutoff'", ": 'extensions/EXT/WGL_EXT_multisample.txt', 'alias' : { 'GL_EXT_multisample' }, }, 'WGL_EXT_pbuffer' : { 'number' :", "'url' : 'extensions/EXT/EXT_external_buffer.txt', }, 'GL_EXT_EGL_image_array' : { 'esnumber' : 278, 'flags' : {", "328, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_multimode_draw_arrays.txt',", "62, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_elements_base_vertex.txt', }, 'GL_ARB_draw_indirect' : {", ": 441, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url'", "'url' : 'extensions/MESA/MESA_program_binary_formats.txt', }, 'GLX_MESA_query_renderer' : { 'number' : 446, 'flags' : {", "'GLX_SGIX_video_resize' : { 'number' : 83, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query2.txt', }, 'GL_ARB_invalidate_subdata' : { 'arbnumber' : 132,", "{ 'number' : 355, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_set.txt', }, 'GL_EXT_vertex_array_setXXX' : { 'flags' : {", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_element_array.txt', }, 'GL_ATI_envmap_bumpmap' : { 'number'", "'number' : 519, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", ": 81, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'url' : 'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', }, 'GL_ARB_vertex_type_2_10_10_10_rev' : { 'arbnumber' : 86, 'flags' : {", "}, 'url' : 'extensions/IBM/IBM_rasterpos_clip.txt', }, 'GL_IBM_static_data' : { 'number' : 223, 'flags' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_overflow_query.txt', }, 'GL_ARB_transpose_matrix' : { 'arbnumber' :", "'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_cube.txt', }, 'GL_NV_stereo_view_rendering' : { 'number' : 489, 'esnumber'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/NV/NV_multisample_coverage.txt', }, 'GL_NV_multisample_filter_hint' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_sync.txt', }, 'GL_ARB_tessellation_shader' : { 'arbnumber' : 91,", "'extensions/EXT/GLX_EXT_visual_info.txt', }, 'GLX_EXT_visual_rating' : { 'number' : 44, 'flags' : { 'public' },", "'arbnumber' : 53, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage_multisample.txt', }, 'GL_ARB_texture_swizzle' : { 'arbnumber' : 84,", "{ 'public' }, 'url' : 'extensions/ARB/ARB_vertex_array_bgra.txt', }, 'GL_ARB_vertex_array_object' : { 'arbnumber' : 54,", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_map_object_buffer.txt', }, 'GL_ATI_meminfo' : {", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_pixel.txt', }, 'GL_SGIX_bali_g_instruments' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_buffer_float.txt', }, 'GL_NV_depth_clamp' : {", "'url' : 'extensions/EXT/EXT_draw_transform_feedback.txt', }, 'GL_EXT_external_buffer' : { 'number' : 508, 'esnumber' : 284,", "}, 'url' : 'extensions/EXT/EXT_draw_buffers_indexed.txt', }, 'GL_EXT_draw_elements_base_vertex' : { 'esnumber' : 204, 'flags' :", "{ 'number' : 169, 'flags' : { 'public' }, 'supporters' : { 'INGR',", "{ 'number' : 44, 'flags' : { 'public' }, 'supporters' : { 'HP',", "'esnumber' : 191, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_context_flush_control.txt', 'alias' :", "'extensions/ATI/ATI_texture_float.txt', }, 'GL_ATI_texture_mirror_once' : { 'number' : 221, 'flags' : { 'public' },", "'arbnumber' : 105, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness.txt', }, 'GLX_ARB_robustness_application_isolation'", "{ 'number' : 244, 'flags' : { 'public' }, 'supporters' : { 'ATI'", ": { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_float64.txt', }, 'GL_NV_shader_atomic_fp16_vector' : { 'number' :", "'extensions/SGIX/SGIX_pixel_texture.txt', 'comments' : 'Previously shared 
extension number 15 with SGIS_pixel_texture.', }, 'GL_SGIX_pixel_texture_bits' :", "'url' : 'extensions/NV/NV_texture_compression_s3tc.txt', }, 'GL_NV_texture_compression_s3tc_update' : { 'esnumber' : 95, 'flags' : {", "{ 'number' : 293, 'esnumber' : 18, 'flags' : { 'public' }, 'supporters'", "173, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_stencil8.txt', }, 'GL_OES_texture_storage_multisample_2d_array' : {", "'extensions/SGIX/SGIX_async.txt', }, 'GL_SGIX_async_histogram' : { 'number' : 134, 'flags' : { 'incomplete', 'public'", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/AMD/AMD_shader_ballot.txt', }, 'GL_AMD_shader_explicit_vertex_parameter' :", "'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', }, 'GL_MESA_shader_integer_functions' : { 'number' : 495, 'flags' : { 'public' },", "}, 'GL_SGIX_pixel_tiles' : { 'number' : 46, 'flags' : { 'obsolete' }, 'supporters'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control_tear.txt', }, 'GL_EXT_clip_control' : { 'esnumber' :", ": { 'arbnumber' : 65, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cube_map.txt',", "'extensions/ARB/ARB_texture_mirrored_repeat.txt', }, 'GL_ARB_texture_multisample' : { 'arbnumber' : 67, 'flags' : { 'public' },", ": 219, 'flags' : { 'incomplete' }, 'supporters' : { 'MESA' }, 'url'", "{ 'public' }, 'url' : 'extensions/OES/OES_framebuffer_object.txt', }, 'GL_OES_geometry_shader' : { 'esnumber' : 210,", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_pass_instrument.txt', }, 'GL_SGIX_depth_texture' : { 'number' :", "'url' : 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', }, 'GL_SGIX_texture_multi_buffer' : { 'number' : 53, 'flags' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_blend_equation_separate.txt', }, 'GL_EXT_blend_func_extended' : {", ": 'extensions/SGIX/SGIX_vertex_array_object.txt', }, 'GL_SGIX_vertex_preclip' : { 'number' : 210, 'flags' : { 'public'", "'public' }, 'url' : 
'extensions/EXT/EXT_texture_format_sRGB_override.txt', }, 'GLX_EXT_texture_from_pixmap' : { 'number' : 344, 'flags'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_path_rendering.txt', }, 'GL_NV_path_rendering_shared_edge' :", ": { 'MESA' }, 'url' : 'extensions/MESA/MESA_ycbcr_texture.txt', }, 'GL_MTK_program_binary' : { 'esnumber' :", "'extensions/NV/NV_blend_square.txt', }, 'GL_NV_clip_space_w_scaling' : { 'number' : 486, 'esnumber' : 295, 'flags' :", "'INGR' }, 'url' : 'extensions/EXT/WGL_EXT_depth_float.txt', }, 'WGL_EXT_display_color_table' : { 'number' : 167, 'flags'", ": { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_texture_compression_FXT1.txt', }, 'GL_AMD_blend_minmax_factor' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_packing.txt', }, 'GL_ARB_shadow' : { 'arbnumber'", ": { 'INGR' }, 'url' : 'extensions/EXT/WGL_EXT_depth_float.txt', }, 'WGL_EXT_display_color_table' : { 'number' :", "'GL_MTK_program_binary' : { 'esnumber' : 245, 'flags' : { 'incomplete', 'private' }, 'url'", ": { 'number' : 413, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/SGIX/SGIX_vector_ops.txt', }, 'GL_SGIX_vertex_array_object' : { 'flags' : { 'obsolete' },", "'GL_EXT_clip_control' : { 'esnumber' : 290, 'flags' : { 'public' }, 'supporters' :", ": { 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_program_binary.txt', }, 'GL_MTK_shader_binary' : { 'esnumber'", ": { 'number' : 193, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/SUN/SUN_convolution_border_modes.txt', }, 'GLX_SUN_get_transparent_index' : { 'number' : 183, 'flags' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_swap_group.txt', }, 'GL_NV_tessellation_program5'", "}, 'GL_ARB_occlusion_query' : { 'arbnumber' : 29, 'flags' : { 'public' }, 'supporters'", ": { 'arbnumber' : 147, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_bind.txt',", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 
'url' : 'extensions/ARB/ARB_texture_filter_anisotropic.txt',", "'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int64.txt', }, 'GL_AMD_interleaved_elements' : { 'number'", "{ 'esnumber' : 138, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_depth_texture.txt', },", "{ 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_mesh_array.txt', }, 'GL_SUN_slice_accum'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_render_texture.txt', },", "}, 'GL_NV_texture_array' : { 'esnumber' : 133, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/NV/GLX_NV_delay_before_swap.txt', }, 'GL_NV_depth_buffer_float' : { 'number' : 334, 'flags'", ": 228, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster.txt', }, 'GL_NV_conservative_raster_dilate' :", "{ 'esnumber' : 144, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_generate_mipmap_sRGB.txt', },", "341, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'url' : 'extensions/ARB/ARB_cl_event.txt', }, 'GL_ARB_clear_buffer_object' : { 'arbnumber' : 121, 'flags' :", "'GL_NV_bgr' : { 'esnumber' : 135, 'flags' : { 'public' }, 'url' :", "'number' : 282, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 65, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR',", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_specular_vector.txt',", "'extensions/NV/NV_sample_mask_override_coverage.txt', }, 'GL_NV_shader_atomic_counters' : { 'number' : 423, 'flags' : { 'public' },", "{ 'number' : 274, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "227, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 'extensions/ARB/ARB_shadow.txt', }, 'GL_ARB_shadow_ambient' : { 'arbnumber' : 24, 'flags' : { 'public'", "66, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sync.txt', }, 'GL_ARB_tessellation_shader' : {", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nurbs_eval.txt', }, 
'GL_SGIX_occlusion_instrument' : { 'number' : 151,", ": 86, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_frag_depth.txt', }, 'GL_EXT_fragment_lighting' :", ": 490, 'esnumber' : 263, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 175, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_image_load_store_lod.txt', }, 'GL_AMD_shader_stencil_export' : { 'number'", ": 77, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_texture_lod.txt', }, 'GL_EXT_shadow_funcs' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_norm16.txt', }, 'GL_EXT_texture_object' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_copy_texture_levels.txt', }, 'GL_APPLE_element_array' : { 'number'", "'url' : 'extensions/3DFX/3DFX_tbuffer.txt', }, 'GL_3DFX_texture_compression_FXT1' : { 'number' : 206, 'flags' : {", "'extensions/ARB/ARB_vertex_array_object.txt', }, 'GL_ARB_vertex_attrib_64bit' : { 'arbnumber' : 99, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_clear_tag.txt', }, 'GL_EXT_stencil_two_side'", "}, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', }, 'GL_EXT_texture_cube_map' : { 'flags' : { 'incomplete' },", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_polygon_offset_clamp.txt', }, 'GL_ARB_post_depth_coverage' : { 'arbnumber' :", ": 'extensions/AMD/AMD_vertex_shader_viewport_index.txt', }, 'GL_ANDROID_extension_pack_es31a' : { 'esnumber' : 187, 'flags' : { 'public'", "'flags' : { 'obsolete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_tiles.txt',", "120, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_scene_marker.txt', 'alias' : { 'GLX_EXT_scene_marker'", "'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', }, 'GL_EXT_framebuffer_object' : {", ": 277, 'flags' : { 'public' }, 'url' : 
'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', }, 'GL_REND_screen_coordinates' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_lod_bias.txt', }, 'GL_SGIX_texture_mipmap_anisotropic' : { 'flags' :", "{ 'number' : 280, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_gather.txt', }, 'GL_ARB_texture_mirror_clamp_to_edge' : { 'arbnumber' : 149,", "'esnumber' : 227, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_filter_minmax.txt', }, 'GL_EXT_texture_format_BGRA8888'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rectangle.txt', }, 'GL_ARB_texture_rg' : { 'arbnumber' : 53,", "{ 'number' : 202, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "}, 'GL_OES_depth_texture_cube_map' : { 'esnumber' : 136, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_linked_gpu_multicast.txt', },", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_polygon_offset_clamp.txt',", "'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', }, 'GL_ARB_viewport_array' : { 'arbnumber' : 100, 'flags'", "{ 'number' : 412, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'GL_ATI_meminfo' : { 'number' : 359, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_SGIX_pixel_texture_lod' : { 'number' : 128, 'flags' : { 'incomplete' }, 'supporters'", "}, 'GL_OES_single_precision' : { 'number' : 293, 'esnumber' : 18, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_name_gen_delete.txt', }, 'GL_AMD_occlusion_query_event' :", "'url' : 'extensions/ARB/ARB_blend_func_extended.txt', }, 'GL_ARB_buffer_storage' : { 'arbnumber' : 144, 'flags' : {", "}, 'url' : 'extensions/AMD/AMD_texture_texture4.txt', }, 'GL_AMD_transform_feedback3_lines_triangles' : { 'number' : 397, 'flags' :", ": { 'esnumber' : 44, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_packed_depth_stencil.txt',", ": 
'extensions/NV/NV_fence.txt', }, 'GL_NV_fill_rectangle' : { 'number' : 466, 'esnumber' : 232, 'flags'", "}, 'url' : 'extensions/3DL/WGL_3DL_stereo_control.txt', }, 'WGL_AMD_gpu_association' : { 'number' : 361, 'flags' :", ": 'extensions/ARB/ARB_gl_spirv.txt', }, 'GL_ARB_gpu_shader5' : { 'arbnumber' : 88, 'flags' : { 'public'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_filter_hint.txt', }, 'GL_NV_non_square_matrices' : { 'esnumber'", ": { 'public' }, 'url' : 'extensions/NV/NV_pixel_buffer_object.txt', }, 'GL_NV_pixel_data_range' : { 'number' :", "'obsolete' }, 'url' : 'extensions/EXT/EXT_transform_feedback2.txt', 'comments' : 'Draft extension which is referred to", "'url' : 'extensions/EXT/EXT_vertex_array_set.txt', }, 'GL_EXT_vertex_array_setXXX' : { 'flags' : { 'public' }, 'supporters'", ": 'extensions/PGI/PGI_vertex_hints.txt', }, 'GL_QCOM_alpha_test' : { 'esnumber' : 89, 'flags' : { 'public'", ": 60, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'GL_SGIX_color_table_index_mode' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_table_index_mode.txt', }, 'GLX_SGIX_color_type'", ": { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow.txt', }, 'GL_SGIX_shadow_ambient' : { 'number'", "{ 'number' : 287, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 'extensions/ARB/ARB_shader_stencil_export.txt', }, 'GL_ARB_shader_storage_buffer_object' : { 'arbnumber' : 137, 'flags' : { 'public'", "}, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow.txt', }, 'GL_SGIX_shadow_ambient' :", ": 'extensions/SGIX/SGIX_shadow_ambient.txt', }, 'GL_SGIX_slim' : { 'flags' : { 'incomplete' }, 'url' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader3.txt', }, 'GL_NV_transform_feedback'", ": 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias' : { 'WGL_EXT_create_context_es_profile' }, }, 'WGL_EXT_depth_float' : { 'number' :", ": 
'extensions/I3D/WGL_I3D_swap_frame_usage.txt', }, 'GL_WIN_phong_shading' : { 'number' : 113, 'flags' : { 'public'", "}, 'GL_AMD_shader_trinary_minmax' : { 'number' : 428, 'flags' : { 'public' }, 'supporters'", "'GL_NV_sRGB_formats' : { 'esnumber' : 148, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_int64.txt', }, 'GL_ARB_half_float_pixel' : { 'arbnumber' : 40,", "}, 'GL_IMG_framebuffer_downsample' : { 'esnumber' : 255, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/KHR/KHR_no_error.txt', }, 'GL_KHR_parallel_shader_compile' : { 'arbnumber' : 192, 'esnumber'", "'public' }, 'url' : 'extensions/NV/NV_texture_compression_latc.txt', }, 'GL_NV_texture_compression_s3tc' : { 'esnumber' : 128, 'flags'", "'number' : 169, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI'", ": 'extensions/NV/NV_texture_border_clamp.txt', }, 'GL_NV_texture_compression_latc' : { 'esnumber' : 130, 'flags' : { 'public'", "{ 'number' : 323, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'url' : 'extensions/EXT/EXT_index_texture.txt', }, 'GL_EXT_instanced_arrays' : { 'esnumber' : 156, 'flags' : {", "'esnumber' : 184, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_cube_map_array.txt', }, 'GL_EXT_texture_env'", "}, 'url' : 'extensions/SGIX/GLX_SGIX_visual_select_group.txt', }, 'GLX_SGIX_wait_group' : { 'flags' : { 'incomplete' },", "295, 'esnumber' : 17, 'flags' : { 'public' }, 'supporters' : { 'KHR'", ": { 'public' }, 'supporters' : { 'ANGLE' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', },", "49, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url'", "{ 'number' : 239, 'flags' : { 'public' }, 'supporters' : { 'KHR'", "89, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'WGL_EXT_swap_control_tear' : { 'number' : 415, 'flags' : { 'public' }, 'supporters'", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_depth_bounds_test.txt', }, 'GL_EXT_direct_state_access' : { 
'number' : 353,", ": 82, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "'url' : 'extensions/EXT/EXT_texture_env_add.txt', }, 'GL_EXT_texture_env_combine' : { 'number' : 158, 'flags' : {", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_sample_positions.txt', }, 'GL_AMD_seamless_cubemap_per_texture' : { 'number' :", "92, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fbo_color_attachments.txt', }, 'GL_NV_fence' : {", "}, 'url' : 'extensions/NV/NV_sRGB_formats.txt', }, 'GL_NV_sample_locations' : { 'number' : 472, 'esnumber' :", ": 'extensions/EXT/EXT_multisampled_render_to_texture.txt', }, 'GL_EXT_multisampled_render_to_texture2' : { 'esnumber' : 275, 'flags' : { 'public'", "'url' : 'extensions/ARM/ARM_rgba8.txt', }, 'GL_ARM_shader_framebuffer_fetch' : { 'esnumber' : 165, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_attrib_array_object.txt',", "{ 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_swap_group.txt', }, 'GL_NV_tessellation_program5' : { 'number' : 391,", "{ 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_extensions_string.txt', }, 'WGL_ARB_make_current_read' : { 'arbnumber' : 10,", "}, 'url' : 'extensions/ARB/ARB_bindless_texture.txt', }, 'GL_ARB_blend_func_extended' : { 'arbnumber' : 78, 'flags' :", ": { 'arbnumber' : 187, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture_clamp.txt',", ": 24, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "{ 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_lod.txt', }, 'GL_SGIS_texture_select' : { 'number' : 51,", "}, 'url' : 'extensions/ARB/ARB_shading_language_packing.txt', }, 'GL_ARB_shadow' : { 'arbnumber' : 23, 'flags' :", "'esnumber' : 154, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA'", ": 172, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_multisample_interpolation.txt', }, 'GL_OES_single_precision' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_impact_pixel_texture.txt', }, 
'GL_SGIX_instrument_error' : { 'flags'", "}, 'GL_SGIX_fog_texture' : { 'flags' : { 'public' }, 'url' : 'extensions/SGIX/SGIX_fog_texture.txt', },", ": 268, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' },", "'WGL_ARB_create_context_profile' : { 'arbnumber' : 74, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/NV/GLX_NV_swap_group.txt', }, 'GL_NV_tessellation_program5' : { 'number' : 391, 'flags' : {", "261, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_fp16_vector.txt', }, 'GL_NV_shader_atomic_int64' : {", "'esnumber' : 30, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_rgb8_rgba8.txt', }, 'GL_OES_sample_shading'", "'extensions/EXT/EXT_EGL_image_array.txt', }, 'GL_EXT_EGL_image_external_wrap_modes' : { 'esnumber' : 298, 'flags' : { 'public' },", "'esnumber' : 22, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_mirrored_repeat.txt', }, 'GL_OES_texture_npot'", "'number' : 240, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", "}, 'GL_WIN_scene_markerXXX' : { 'flags' : { 'obsolete' }, 'url' : 'extensions/WIN/WIN_scene_markerXXX.txt', },", "}, 'url' : 'extensions/NV/NV_occlusion_query.txt', }, 'GL_NV_pack_subimage' : { 'esnumber' : 132, 'flags' :", ": 'extensions/EXT/EXT_EGL_image_array.txt', }, 'GL_EXT_EGL_image_external_wrap_modes' : { 'esnumber' : 298, 'flags' : { 'public'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_add_env.txt', }, 'GL_SGIX_texture_coordinate_clamp' : { 'number'", ": 'extensions/APPLE/APPLE_color_buffer_packed_float.txt', }, 'GL_APPLE_copy_texture_levels' : { 'esnumber' : 123, 'flags' : { 'public'", "160, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_non_square_matrices.txt', }, 'GL_NV_occlusion_query' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_shader.txt',", "{ 'number' : 455, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'public' }, 'url' : 'extensions/QCOM/QCOM_texture_foveated.txt', }, 
'GL_QCOM_shader_framebuffer_fetch_noncoherent' : { 'esnumber' : 277, 'flags'", "127, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_compatibility.txt', }, 'GL_ARB_arrays_of_arrays' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_image_load_formatted.txt', }, 'GL_EXT_shader_image_load_store' : { 'number'", "}, 'GL_EXT_texture_mirror_clamp' : { 'number' : 298, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 383, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_element_array.txt', }, 'GL_APPLE_fence' : {", ": 158, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_buffer.txt', }, 'GL_NV_copy_depth_to_color' :", "'MESA' }, 'url' : 'extensions/MESA/MESA_shader_integer_functions.txt', }, 'GLX_MESA_swap_control' : { 'number' : 514, 'flags'", ": 'extensions/NV/NV_present_video.txt', 'alias' : { 'GLX_NV_present_video', 'WGL_NV_present_video' }, }, 'GL_NV_primitive_restart' : { 'number'", "}, 'GL_AMD_blend_minmax_factor' : { 'number' : 404, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', }, 'GL_EXT_EGL_image_storage' : { 'number' : 522,", "}, 'GL_NV_geometry_shader_passthrough' : { 'number' : 470, 'esnumber' : 233, 'flags' : {", ": { 'number' : 405, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_performance_monitor.txt', },", "'GL_EXT_sRGB' : { 'esnumber' : 105, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/SGIX/SGIX_bali_timer_instruments.txt', }, 'GL_SGIX_blend_alpha_minmax' : { 'number' : 119, 'flags' :", "'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', }, 'GL_IMG_texture_filter_cubic' : { 'esnumber' : 251, 'flags' : { 'public' },", "'url' : 'extensions/NV/NV_present_video.txt', 'alias' : { 'GLX_NV_present_video', 'WGL_NV_present_video' }, }, 'GL_NV_primitive_restart' : {", "{ 'NVIDIA' }, 'url' : 
'extensions/NV/NV_half_float.txt', }, 'GL_NV_image_formats' : { 'esnumber' : 200,", "'GL_SGIX_subsample' : { 'number' : 202, 'flags' : { 'incomplete' }, 'supporters' :", ": { 'esnumber' : 104, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_EGL_stream_consumer_external.txt',", "138, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_depth_texture.txt', }, 'GL_ANGLE_framebuffer_blit' : {", ": { 'number' : 93, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESAX/MESAX_texture_stack.txt',", "54, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc.txt', }, 'GL_IMG_texture_compression_pvrtc2' : {", "'extensions/QCOM/QCOM_driver_control.txt', }, 'GL_QCOM_extended_get' : { 'esnumber' : 62, 'flags' : { 'public' },", "'GL_NV_EGL_stream_consumer_external' : { 'esnumber' : 104, 'flags' : { 'public' }, 'url' :", "'GL_ARB_half_float_vertex' : { 'arbnumber' : 48, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_load.txt', }, 'GL_NV_shader_buffer_store'", "}, 'GL_EXT_texture_type_2_10_10_10_REV' : { 'esnumber' : 42, 'flags' : { 'public' }, 'url'", "'number' : 295, 'esnumber' : 17, 'flags' : { 'public' }, 'supporters' :", "'arbnumber' : 163, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_derivative_control.txt', }, 'GL_ARB_direct_state_access'", ": 'extensions/ARB/ARB_draw_buffers_blend.txt', }, 'GL_ARB_draw_elements_base_vertex' : { 'arbnumber' : 62, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_copy_buffer.txt', },", ": { 'esnumber' : 45, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_standard_derivatives.txt',", "{ 'GL_EXT_semaphore' }, }, 'GL_EXT_memory_object_fd' : { 'number' : 504, 'esnumber' : 281,", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_point_line_texgen.txt', }, 'GL_SGIS_shared_multisample'", ": { 
'NVIDIA' }, 'url' : 'extensions/NV/NV_occlusion_query.txt', }, 'GL_NV_pack_subimage' : { 'esnumber' :", "{ 'public' }, 'supporters' : { 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_import_context.txt', },", "1, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_equation_separate.txt', }, 'GL_OES_blend_func_separate' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2.txt', }, 'GL_NV_vertex_program2_option' :", "'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_subsample.txt',", "'extensions/ARB/ARB_shadow_ambient.txt', }, 'GL_ARB_sparse_buffer' : { 'arbnumber' : 172, 'flags' : { 'public' },", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_vertex.txt', }, 'GL_ARB_indirect_parameters' : { 'arbnumber' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_spirv_extensions.txt', }, 'GL_ARB_stencil_texturing'", "}, 'GL_EXT_texture_array' : { 'number' : 329, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/SGIS/SGIS_shared_multisample.txt', 'alias' : { 'GLX_SGIS_shared_multisample' }, }, 'GL_SGIS_sharpen_texture' : { 'number'", ": { 'number' : 253, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_EXT_point_parameters' : { 'number' : 54, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/ARB/ARB_sparse_texture2.txt', }, 'GL_ARB_sparse_texture_clamp' : { 'arbnumber' : 187, 'flags' :", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_attrib_array_object.txt', }, 'GL_ATI_vertex_streams' : {", "'extensions/MESA/MESA_ycbcr_texture.txt', }, 'GL_MTK_program_binary' : { 'esnumber' : 245, 'flags' : { 'incomplete', 'private'", ": { 'esnumber' : 25, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth32.txt',", "'arbnumber' : 34, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "{ 'number' : 112, 'flags' : { 'public' }, 'supporters' : { 'MS'", "{ 'public' }, 'url' : 'extensions/INTEL/INTEL_performance_query.txt', }, 
'GLX_INTEL_swap_event' : { 'number' : 384,", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_fence.txt', }, 'GL_APPLE_float_pixels'", "'GL_ARB_derivative_control' : { 'arbnumber' : 163, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/ARB/WGL_ARB_buffer_region.txt', }, 'WGL_ARB_create_context' : { 'arbnumber' : 55, 'flags' :", ": 209, 'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_multisample.txt', 'alias' : {", "'extensions/SGI/GLX_SGI_transparent_pixel.txt', }, 'GLX_SGI_video_sync' : { 'number' : 41, 'flags' : { 'public' },", ": 108, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_storage.txt', }, 'GL_EXT_texture_swizzle' :", "{ 'esnumber' : 207, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_norm16.txt', },", "{ 'ES', 'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_texture_color_table.txt', }, 'GLX_SGI_transparent_pixel' : {", "}, 'WGL_I3D_genlock' : { 'number' : 252, 'flags' : { 'public' }, 'supporters'", ": 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', }, 'GL_ARB_separate_shader_objects' : { 'arbnumber' : 97, 'flags' : { 'public'", "'number' : 54, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_compute_program5.txt',", ": { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_usage.txt', },", "'public' }, 'url' : 'extensions/OES/OES_primitive_bounding_box.txt', }, 'GL_OES_query_matrix' : { 'number' : 296, 'esnumber'", "'number' : 504, 'esnumber' : 281, 'flags' : { 'public' }, 'url' :", ": 'extensions/NV/NV_conservative_raster_pre_snap.txt', }, 'GL_NV_conservative_raster_pre_snap_triangles' : { 'number' : 487, 'esnumber' : 262, 'flags'", "'public' }, 'url' : 'extensions/ARB/ARB_fragment_coord_conventions.txt', }, 'GL_ARB_fragment_layer_viewport' : { 'arbnumber' : 129, 'flags'", "'number' : 12, 'flags' : { 'public' }, 'supporters' : { 'HP', 'KGC',", 
"'extensions/NV/NV_texture_border_clamp.txt', }, 'GL_NV_texture_compression_latc' : { 'esnumber' : 130, 'flags' : { 'public' },", "{ 'number' : 292, 'esnumber' : 9, 'flags' : { 'public' }, 'supporters'", "'extensions/OES/OES_viewport_array.txt', }, 'GL_OML_interlace' : { 'number' : 239, 'flags' : { 'public' },", ": 297, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_spirv_extensions.txt', },", "}, 'url' : 'extensions/NV/NV_shader_atomic_int64.txt', }, 'GL_NV_shader_buffer_load' : { 'number' : 379, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_copy_buffer.txt',", ": { 'number' : 137, 'flags' : { 'public' }, 'supporters' : {", "271, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'GL_SGIX_texture_supersample' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_supersample.txt', },", "}, 'GL_NV_shader_thread_group' : { 'number' : 447, 'flags' : { 'public' }, 'url'", "13, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "180, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_io_blocks.txt', }, 'GL_EXT_shader_non_constant_global_initializers' : {", "'extensions/IMG/IMG_bindless_texture.txt', }, 'GL_IMG_framebuffer_downsample' : { 'esnumber' : 255, 'flags' : { 'public' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_emboss.txt', }, 'GL_NV_texgen_reflection' : { 'number'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_delay_before_swap.txt', },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback2.txt', }, 'GL_NV_uniform_buffer_unified_memory' :", "'GL_OES_vertex_type_10_10_10_2' : { 'esnumber' : 46, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/NV/NV_pixel_data_range.txt', }, 'GL_NV_platform_binary' : { 'esnumber' : 131, 'flags' : {", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 
'extensions/SGIX/GLX_SGIX_pbuffer.txt', }, 'GL_SGIX_pixel_texture'", "'ARB' }, 'url' : 'extensions/ARB/ARB_spirv_extensions.txt', }, 'GL_ARB_stencil_texturing' : { 'arbnumber' : 138, 'flags'", "203, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_base_instance.txt', }, 'GL_EXT_bgra' : {", "}, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_genlock.txt', }, 'WGL_I3D_image_buffer' : {", "}, 'url' : 'extensions/OES/OES_draw_elements_base_vertex.txt', }, 'GL_OES_draw_texture' : { 'esnumber' : 7, 'flags' :", ": { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_texture_scissor.txt', }, 'GL_KHR_blend_equation_advanced' : { 'arbnumber' :", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_bits.txt',", "'alias' : { 'GL_SGIS_color_range' }, }, 'GL_SGIS_detail_texture' : { 'number' : 21, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_lock.txt',", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_filter_anisotropic.txt', }, 'GL_ARB_texture_filter_minmax'", ": { 'AMD' }, 'url' : 'extensions/INTEL/GLX_INTEL_swap_event.txt', }, 'GL_INTEL_texture_scissor' : { 'number' :", "}, 'GL_SGIX_texture_mipmap_anisotropic' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_swap_control_tear.txt', }, 'GL_EXT_tessellation_shader' :", "}, 'url' : 'extensions/OES/OES_extended_matrix_palette.txt', }, 'GL_OES_fbo_render_mipmap' : { 'esnumber' : 27, 'flags' :", "'esnumber' : 89, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_alpha_test.txt', }, 'GL_QCOM_binning_control'", "290, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "{ 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_triangle_list.txt', }, 'GL_SUN_vertex'", "'public' }, 'url' : 'extensions/OES/OES_shader_image_atomic.txt', }, 
'GL_OES_shader_io_blocks' : { 'esnumber' : 213, 'flags'", "'url' : 'extensions/ARB/ARB_shader_ballot.txt', }, 'GL_ARB_shader_bit_encoding' : { 'arbnumber' : 82, 'flags' : {", ": 'extensions/AMD/AMD_multi_draw_indirect.txt', }, 'GL_AMD_name_gen_delete' : { 'number' : 394, 'flags' : { 'public'", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_occlusion_query_event.txt', }, 'GL_AMD_performance_monitor' : { 'number' : 360,", ": 'extensions/ATI/ATI_meminfo.txt', }, 'GL_ATI_pn_triangles' : { 'number' : 246, 'flags' : { 'public'", "}, 'GL_NV_explicit_attrib_location' : { 'esnumber' : 159, 'flags' : { 'public' }, 'url'", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_separate_stencil.txt', }, 'GL_ATI_text_fragment_shader' : { 'number'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_table_index_mode.txt', }, 'GLX_SGIX_color_type' : {", "'number' : 99, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'url' : 'extensions/I3D/WGL_I3D_image_buffer.txt', }, 'WGL_I3D_swap_frame_lock' : { 'number' : 254, 'flags' : {", "}, 'GL_NV_stereo_view_rendering' : { 'number' : 489, 'esnumber' : 296, 'flags' : {", ": 88, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "}, 'url' : 'extensions/EXT/WGL_EXT_pbuffer.txt', }, 'WGL_EXT_pixel_format' : { 'number' : 170, 'flags' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', }, 'GL_ARB_vertex_type_2_10_10_10_rev' : { 'arbnumber' :", ": 'extensions/OES/OES_required_internalformat.txt', }, 'GL_OES_rgb8_rgba8' : { 'esnumber' : 30, 'flags' : { 'public'", "356, 'flags' : { 'public' }, 'supporters' : { 'IdSoftware', 'NVIDIA' }, 'url'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_bits.txt', }, 'GL_SGIX_pixel_texture_lod' : {", ": { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow_ambient.txt', }, 'GL_SGIX_slim' : { 'flags'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_packed_depth_stencil.txt',", 
"'extensions/ARB/ARB_shader_atomic_counters.txt', }, 'GL_ARB_shader_ballot' : { 'arbnumber' : 183, 'flags' : { 'public' },", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_calligraphic_fragment.txt', },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_levels.txt', }, 'GL_ARB_texture_query_lod' : { 'arbnumber' : 73,", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cull_distance.txt', }, 'GL_ARB_debug_output' : { 'arbnumber'", "}, 'GL_ARB_texture_compression_rgtc' : { 'arbnumber' : 52, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 3, 'flags' : { 'public' }, 'supporters' : { 'HP',", "'esnumber' : 210, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_geometry_shader.txt', }, 'GL_OES_get_program_binary'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_gpu_memory_info.txt', }, 'GL_NVX_linked_gpu_multicast' :", "'GL_OES_copy_image' : { 'esnumber' : 208, 'flags' : { 'public' }, 'url' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners2.txt', }, 'GL_NV_robustness_video_memory_purge' : { 'number' : 484,", ": 'extensions/AMD/AMD_gpu_shader_int16.txt', }, 'GL_AMD_gpu_shader_int64' : { 'number' : 451, 'flags' : { 'public'", "'extensions/SGIX/SGIX_mpeg1.txt', }, 'GL_SGIX_mpeg2' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg2.txt',", "'GL_AMD_occlusion_query_event' : { 'number' : 442, 'flags' : { 'public' }, 'supporters' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_lod.txt', }, 'GL_SGIX_pixel_tiles' : { 'number' : 46,", "'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture2.txt', }, 'GL_ARB_sparse_texture_clamp' : { 'arbnumber' : 187, 'flags'", "'flags' : { 'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_tbuffer.txt',", ": 'extensions/OES/OES_draw_buffers_indexed.txt', }, 'GL_OES_draw_elements_base_vertex' : { 'esnumber' : 219, 'flags' : { 'public'", "}, 'url' : 'extensions/I3D/WGL_I3D_image_buffer.txt', }, 'WGL_I3D_swap_frame_lock' 
: { 'number' : 254, 'flags' :", "'number' : 231, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'public' }, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform_color_table.txt', },", ": 64, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "{ 'number' : 250, 'flags' : { 'public' }, 'supporters' : { 'I3D'", "'public' }, 'url' : 'extensions/AMD/AMD_shader_stencil_value_export.txt', }, 'GL_AMD_shader_trinary_minmax' : { 'number' : 428, 'flags'", "}, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' :", ": { 'number' : 155, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multitexture.txt', }, 'GL_ARB_occlusion_query' :", "'GL_EXT_gpu_program_parameters' : { 'number' : 320, 'flags' : { 'public' }, 'supporters' :", ": { 'arbnumber' : 183, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_ballot.txt',", ": { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_uniform_location.txt', }, 'GL_ARB_fragment_coord_conventions' : { 'arbnumber' :", "'GL_OES_texture_half_float' }, }, 'GL_OES_texture_float_linear' : { 'esnumber' : 35, 'flags' : { 'public'", ": 174, 'esnumber' : 168, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_blend_equation_advanced.txt',", ": { 'esnumber' : 3, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_subtract.txt',", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_bits.txt', }, 'GL_SGIX_pixel_texture_lod' : { 'number' :", "{ 'public' }, 'supporters' : { 'ES', 'INGR', 'SGI' }, 'url' : 'extensions/EXT/EXT_packed_pixels.txt',", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_dot3.txt',", "{ 'number' : 426, 'flags' : { 'public' }, 'supporters' : { 'AMD'", ": 'extensions/HP/HP_image_transform.txt', }, 'GL_HP_occlusion_test' : { 'number' : 137, 'flags' : { 'public'", "'url' : 'extensions/NV/NV_viewport_array2.txt', }, 'GL_NV_viewport_swizzle' : { 'number' : 483, 
'esnumber' : 258,", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', }, 'GL_AMD_shader_atomic_counter_ops' : { 'number'", "'url' : 'extensions/ARB/ARB_vertex_buffer_object.txt', 'alias' : { 'GLX_ARB_vertex_buffer_object' }, }, 'GL_ARB_vertex_program' : { 'arbnumber'", "{ 'KHR' }, 'url' : 'extensions/OES/OES_single_precision.txt', }, 'GL_OES_standard_derivatives' : { 'esnumber' : 45,", ": 280, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "'url' : 'extensions/SGIS/SGIS_multitexture.txt', }, 'GL_SGIS_pixel_texture' : { 'number' : 15, 'flags' : {", "}, 'url' : 'extensions/EXT/EXT_secondary_color.txt', }, 'GL_EXT_separate_shader_objects' : { 'number' : 377, 'esnumber' :", "}, 'GL_SGIS_generate_mipmap' : { 'number' : 32, 'flags' : { 'public' }, 'supporters'", "}, 'WGL_EXT_swap_control' : { 'number' : 172, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_implicit_conversions.txt', }, 'GL_EXT_shader_integer_mix' : { 'number'", ": 195, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_filter_anisotropic.txt', }, 'GL_ARB_texture_filter_minmax' : { 'arbnumber'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_geometry_shader.txt', }, 'GL_OES_get_program_binary' : { 'esnumber'", "'url' : 'extensions/OES/OES_element_index_uint.txt', }, 'GL_OES_extended_matrix_palette' : { 'esnumber' : 8, 'flags' : {", ": 'extensions/OES/OES_get_program_binary.txt', }, 'GL_OES_gpu_shader5' : { 'esnumber' : 211, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_stencil8.txt', }, 'GL_OES_texture_storage_multisample_2d_array' : { 'esnumber' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_minmax_factor.txt', }, 'GL_NV_blend_square' :", "}, 'url' : 'extensions/EXT/EXT_copy_image.txt', }, 'GL_EXT_copy_texture' : { 'number' : 10, 'flags' :", ": 255, 'flags' : { 
'public' }, 'supporters' : { 'I3D' }, 'url'", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/WGL_OML_sync_control.txt', },", ": { 'public' }, 'url' : 'extensions/ARB/ARB_map_buffer_alignment.txt', }, 'GL_ARB_map_buffer_range' : { 'arbnumber' :", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_fog_function.txt', }, 'GL_SGIS_generate_mipmap'", ": 230, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_shader_interlock.txt', }, 'GL_NV_framebuffer_blit' :", "'extensions/DMP/DMP_program_binary.txt', }, 'GL_DMP_shader_binary' : { 'esnumber' : 88, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap.txt', }, 'GL_NV_conservative_raster_pre_snap_triangles' : { 'number' : 487,", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Alias to GLX_ARB_create_context_profile", "}, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control.txt', }, 'WGL_EXT_swap_control_tear' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_instanced_arrays.txt', },", "}, 'GL_ARB_post_depth_coverage' : { 'arbnumber' : 180, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/EXT/EXT_discard_framebuffer.txt', }, 'GL_EXT_disjoint_timer_query' : { 'esnumber' : 150, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_integer.txt',", "'extensions/NV/NV_sRGB_formats.txt', }, 'GL_NV_sample_locations' : { 'number' : 472, 'esnumber' : 235, 'flags' :", "{ 'ATI' }, 'url' : 'extensions/ATI/ATI_pn_triangles.txt', }, 'GL_ATI_separate_stencil' : { 'number' : 289,", ": 31, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil1.txt', }, 'GL_OES_stencil4' :", "'number' : 445, 'flags' : { 'public' }, 'url' : 'extensions/NV/GLX_NV_delay_before_swap.txt', }, 
'GL_NV_depth_buffer_float'", "'extensions/MESA/MESA_tile_raster_order.txt', }, 'GL_MESA_window_pos' : { 'number' : 197, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/ARB/ARB_robustness.txt', }, 'GLX_ARB_robustness_application_isolation' : { 'arbnumber' : 142, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'APPLE', 'CodeWeavers', 'NVIDIA', 'TransGaming' },", "to by some other vendor extensions, but shipped as ARB_transform_feedback2.', }, 'GL_EXT_unpack_subimage' :", "491, 'esnumber' : 265, 'flags' : { 'public' }, 'supporters' : { 'INTEL'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conditional_render_inverted.txt', }, 'GL_ARB_conservative_depth' : { 'arbnumber'", "{ 'arbnumber' : 166, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_image_samples.txt', },", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_mirrored_repeat.txt',", "{ 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_convolution_border_modes.txt', }, 'GLX_SUN_get_transparent_index'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_half_float.txt', }, 'GL_NV_image_formats' : { 'esnumber'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_robustness.txt', }, 'GLX_ARB_robustness_application_isolation' : { 'arbnumber' : 142,", ": { 'number' : 37, 'esnumber' : 65, 'flags' : { 'public' },", "'esnumber' : 29, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_mapbuffer.txt', }, 'GL_OES_matrix_get'", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', },", "'number' : 342, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'extensions/AMD/AMD_texture_texture4.txt', }, 'GL_AMD_transform_feedback3_lines_triangles' : { 'number' : 397, 'flags' : { 'public' },", "}, 'url' : 'extensions/EXT/EXT_direct_state_access.txt', }, 'GL_EXT_discard_framebuffer' : { 'esnumber' : 64, 'flags' :", ": { 'public' }, 'url' : 'extensions/NV/NV_fill_rectangle.txt', }, 
'GL_NV_float_buffer' : { 'number' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_attrib_integer_64bit.txt', }, 'GL_NV_vertex_buffer_unified_memory' : { 'number' :", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_pack_invert.txt', },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_clamp.txt', }, 'GL_NV_depth_nonlinear' :", "}, 'url' : 'extensions/ATI/ATI_envmap_bumpmap.txt', }, 'GL_ATI_fragment_shader' : { 'number' : 245, 'flags' :", ": 'extensions/NV/NV_shader_thread_shuffle.txt', }, 'GL_NV_shadow_samplers_array' : { 'esnumber' : 146, 'flags' : { 'public'", "}, 'GL_KHR_context_flush_control' : { 'arbnumber' : 168, 'esnumber' : 191, 'flags' : {", "'GL_EXT_framebuffer_sRGB' : { 'number' : 337, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_shader4.txt',", "}, 'url' : 'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', }, 'GL_SGIX_fog_layers' : { 'flags' : { 'incomplete' },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_rg.txt', }, 'GL_EXT_texture_sRGB' : { 'number' : 315,", "{ 'public' }, 'url' : 'extensions/APPLE/APPLE_sync.txt', }, 'GL_APPLE_texture_2D_limited_npot' : { 'esnumber' : 59,", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_point_parameters.txt', }, 'GL_EXT_polygon_offset' :", "'esnumber' : 297, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap.txt', }, 'GL_NV_conservative_raster_pre_snap_triangles'", "}, 'url' : 'extensions/EXT/EXT_shader_image_load_store.txt', }, 'GL_EXT_shader_implicit_conversions' : { 'esnumber' : 179, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt',", "'GL_NV_generate_mipmap_sRGB' : { 'esnumber' : 144, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_bptc.txt', }, 
'GL_EXT_texture_compression_dxt1'", ": 'extensions/ANGLE/ANGLE_translated_shader_source.txt', }, 'GL_APPLE_aux_depth_stencil' : { 'number' : 370, 'flags' : { 'public'", "'number' : 87, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'ATI' }, 'url' : 'extensions/ATI/ATI_map_object_buffer.txt', }, 'GL_ATI_meminfo' : { 'number' : 359, 'flags'", "{ 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_combine.txt', }, 'GL_EXT_texture_env_dot3' : { 'number' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_clipmap.txt', }, 'GL_SGIX_color_matrix_accuracy' : { 'flags' :", "'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/GLX_SUN_get_transparent_index.txt', }, 'GL_SUN_global_alpha' :", "}, 'GL_NV_texture_shader2' : { 'number' : 231, 'flags' : { 'public' }, 'supporters'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_emboss.txt', }, 'GL_NV_texgen_reflection' : { 'number' : 179,", ": 378, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'url' : 'extensions/AMD/AMD_sample_positions.txt', }, 'GL_AMD_seamless_cubemap_per_texture' : { 'number' : 383, 'flags' : {", "'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_interlace_read.txt', }, 'GL_INTEL_conservative_rasterization' :", "{ 'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_multiple_textures.txt', }, 'GL_EXT_multisample_compatibility' : {", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_add.txt', }, 'GL_EXT_texture_env_combine' : { 'number' : 158, 'flags'", ": { 'public' }, 'url' : 'extensions/NV/NV_polygon_mode.txt', }, 'GL_NV_present_video' : { 'number' :", "363, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", ": 196, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", "'public' }, 'url' : 'extensions/OVR/OVR_multiview.txt', }, 'GL_OVR_multiview2' : { 'number' : 479, 'esnumber'", "}, 'url' : 'extensions/OES/OES_read_format.txt', }, 'GL_OES_required_internalformat' : { 'esnumber' : 115, 'flags' :", "'GL_SGIX_reference_plane' : { 
'number' : 60, 'flags' : { 'public' }, 'supporters' :", "}, 'supporters' : { '3DL' }, 'url' : 'extensions/3DL/WGL_3DL_stereo_control.txt', }, 'WGL_AMD_gpu_association' : {", ": 267, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_viewport_array.txt', }, 'GL_OML_interlace' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop.txt', }, 'WGL_NV_DX_interop2' : { 'number' : 412,", "'supporters' : { 'ES', 'HP', 'IBM', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_texture3D.txt', },", "'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_abgr.txt', }, 'GL_EXT_base_instance' : { 'esnumber' :", "{ 'number' : 499, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_scalebias_hint.txt', }, 'GL_SGIX_shadow'", "'url' : 'extensions/NV/NV_blend_square.txt', }, 'GL_NV_clip_space_w_scaling' : { 'number' : 486, 'esnumber' : 295,", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program1_1.txt', }, 'GL_NV_vertex_program2' : {", "'arbnumber' : 116, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_packing.txt', }, 'GL_ARB_shadow'", ": 'extensions/ARB/ARB_matrix_palette.txt', }, 'GL_ARB_multi_bind' : { 'arbnumber' : 147, 'flags' : { 'public'", "'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_wait_group.txt', }, 'GL_SGIX_ycrcb' : { 'number' : 101, 'flags'", "'GL_SGIS_color_range' }, }, 'GL_SGIS_detail_texture' : { 'number' : 21, 'flags' : { 'public'", "398, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "}, 'GL_NV_fragment_program_option' : { 'number' : 303, 'flags' : { 'public' }, 'supporters'", "'extensions/NV/GLX_NV_video_out.txt', }, 'GL_NV_viewport_array' : { 'esnumber' : 202, 'flags' : { 'public' },", "14, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_size_array.txt', }, 'GL_OES_point_sprite' : {", ": { 'arbnumber' : 98, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_precision.txt',", ": { 'public' }, 'url' : 
'extensions/NV/NV_bgr.txt', }, 'GL_NV_bindless_multi_draw_indirect' : { 'number' :", ": 33, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil8.txt', }, 'GL_OES_stencil_wrap' :", ": { 'public' }, 'url' : 'extensions/NV/NV_texture_npot_2D_mipmap.txt', }, 'GL_NV_texture_rectangle' : { 'number' :", "{ 'GL_SUN_multi_draw_arrays' }, }, 'GL_EXT_multi_draw_indirect' : { 'esnumber' : 205, 'flags' : {", "arbnumber 56, GLX_ARB_create_context.', }, 'GLX_ARB_create_context_robustness' : { 'arbnumber' : 101, 'flags' : {", ": 497, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_geometry_shader.txt', 'alias' : { 'GL_EXT_geometry_point_size' },", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_subdiv_patch.txt', }, 'GL_SGIX_subsample' : { 'number'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_base_instance.txt', }, 'GL_EXT_bgra' : { 'number'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_tessellation_program5.txt', }, 'GL_NV_texgen_emboss'", "'extensions/ARB/ARB_viewport_array.txt', }, 'GL_ARB_window_pos' : { 'arbnumber' : 25, 'flags' : { 'public' },", "'url' : 'extensions/ARB/ARB_texture_stencil8.txt', }, 'GL_ARB_texture_storage' : { 'arbnumber' : 117, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_float_buffer.txt',", "'GL_NV_sample_locations' : { 'number' : 472, 'esnumber' : 235, 'flags' : { 'public'", ": 'extensions/OES/OES_texture_cube_map_array.txt', }, 'GL_OES_texture_env_crossbar' : { 'esnumber' : 21, 'flags' : { 'public'", "}, 'GL_EXT_vertex_weighting' : { 'number' : 188, 'flags' : { 'public' }, 'supporters'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_lod.txt', }, 'GL_SGIS_texture_select' :", "'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_array_object.txt', }, 'GL_ATI_vertex_attrib_array_object' : { 'number' : 290, 'flags'", "345, 'flags' : { 'public' }, 'supporters' : { 
'GREMEDY' }, 'url' :", "41, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc.txt', }, 'GL_NV_texture_compression_s3tc_update' : { 'esnumber'", ": { 'public' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int64.txt', }, 'GL_AMD_interleaved_elements' : { 'number' :", "}, 'GL_EXT_multi_draw_arrays' : { 'number' : 148, 'esnumber' : 69, 'flags' : {", "'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Included with arbnumber 56, GLX_ARB_create_context.', },", "'extensions/OES/OES_extended_matrix_palette.txt', }, 'GL_OES_fbo_render_mipmap' : { 'esnumber' : 27, 'flags' : { 'public' },", "134, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_multi_buffer.txt',", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_array.txt', }, 'GL_EXT_texture_border_clamp' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_fog_function.txt', }, 'GL_SGIS_generate_mipmap' :", "{ 'number' : 454, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_framebuffer_sample_positions.txt', },", ": 'extensions/ARB/ARB_shader_texture_lod.txt', }, 'GL_ARB_shader_viewport_layer_array' : { 'arbnumber' : 185, 'flags' : { 'public'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_copy_buffer.txt', }, 'GL_NV_copy_buffer' : { 'esnumber' : 158,", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader2.txt',", "'number' : 128, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_half_float.txt', }, 'GL_OES_vertex_type_10_10_10_2' : { 'esnumber'", "{ '3DL' }, 'url' : 'extensions/3DL/WGL_3DL_stereo_control.txt', }, 'WGL_AMD_gpu_association' : { 'number' : 361,", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_r_instruments.txt', 
}, 'GL_SGIX_bali_timer_instruments' :", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sparse_texture.txt', }, 'GL_AMD_stencil_operation_extended' : {", ": 117, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_parallel_arrays.txt',", ": { 'number' : 300, 'flags' : { 'public' }, 'supporters' : {", ": { 'esnumber' : 94, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_read_depth_stencil.txt',", "}, 'supporters' : { 'ES', 'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_texture_color_table.txt', },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2_option.txt', },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource.txt', }, 'GL_NV_query_resource_tag' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', }, 'GL_NVX_conditional_render' : {", "'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI' },", ": 207, 'flags' : { 'public' }, 'supporters' : { '3DFX' }, 'url'", "'HP' }, 'url' : 'extensions/HP/HP_convolution_border_modes.txt', }, 'GL_HP_image_transform' : { 'number' : 66, 'flags'", ": { 'number' : 229, 'flags' : { 'public' }, 'supporters' : {", ": { 'incomplete', 'private' }, 'comments' : 'Draft spec location unknown.', }, 'GL_OES_point_size_array'", ": { 'esnumber' : 172, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_multisample_interpolation.txt',", ": 'extensions/OES/OES_texture_cube_map.txt', }, 'GL_OES_texture_cube_map_array' : { 'esnumber' : 217, 'flags' : { 'public'", "}, 'url' : 'extensions/EXT/EXT_vertex_array_bgra.txt', }, 'GL_EXT_vertex_array_set' : { 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_present_video.txt', 'alias' :", ": { 'number' : 516, 'esnumber' : 294, 'flags' : { 'public' },", "}, 'GL_NV_conservative_raster_underestimation' : { 'number' 
: 518, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_buffers2.txt', },", "'url' : 'extensions/NV/NV_depth_clamp.txt', }, 'GL_NV_depth_nonlinear' : { 'esnumber' : 73, 'flags' : {", "'extensions/SGIX/SGIX_fragments_instrument.txt', }, 'GL_SGIX_framezoom' : { 'number' : 57, 'flags' : { 'public' },", "{ 'SUN' }, 'url' : 'extensions/SUN/SUN_triangle_list.txt', }, 'GL_SUN_vertex' : { 'number' : 166,", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_resample.txt', }, 'GL_SGIX_scalebias_hint' : { 'number' :", ": { 'ES', 'SGI' }, 'url' : 'extensions/EXT/EXT_cmyka.txt', }, 'GL_EXT_color_buffer_float' : { 'esnumber'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_pvrtc_sRGB.txt', }, 'GL_EXT_raster_multisample' : { 'number' : 462,", "}, 'url' : 'extensions/NV/NV_texture_barrier.txt', }, 'GL_NV_texture_border_clamp' : { 'esnumber' : 149, 'flags' :", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_array_object.txt', }, 'GL_ATI_vertex_attrib_array_object' : {", "'url' : 'extensions/NV/NV_draw_instanced.txt', }, 'GL_NV_draw_texture' : { 'number' : 430, 'esnumber' : 126,", "}, 'url' : 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', }, 'GL_IGLOO_viewport_offsetXXX' : { 'flags' : { 'incomplete', 'obsolete'", "}, 'url' : 'extensions/SGIX/SGIX_flush_raster.txt', }, 'GL_SGIX_fog_blend' : { 'flags' : { 'incomplete' },", ": { 'SUN' }, 'url' : 'extensions/SUNX/SUNX_constant_data.txt', }, 'GL_SUN_convolution_border_modes' : { 'number' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias' : {", "}, 'url' : 'extensions/ARB/ARB_shading_language_include.txt', }, 'GL_ARB_shading_language_packing' : { 'arbnumber' : 116, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_multisampled_render_to_texture.txt', }, 'GL_IMG_program_binary' : { 'esnumber'", ": 'extensions/ARB/ARB_buffer_storage.txt', }, 'GL_ARB_cl_event' : { 'arbnumber' : 103, 'flags' : { 
'public'", ": { 'arbnumber' : 191, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_create_context_no_error.txt',", "'public' }, 'url' : 'extensions/ARB/ARB_conditional_render_inverted.txt', }, 'GL_ARB_conservative_depth' : { 'arbnumber' : 111, 'flags'", ": { 'number' : 316, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_blit.txt',", ": 'extensions/EXT/EXT_robustness.txt', }, 'GL_EXT_sRGB' : { 'esnumber' : 105, 'flags' : { 'public'", ": 375, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'obsolete' }, 'url' : 'extensions/EXT/EXT_texenv_op.txt', 'comments' : 'Evolved into EXT_texture_env_combine.', },", "'GL_ARB_program_interface_query' : { 'arbnumber' : 134, 'flags' : { 'public' }, 'url' :", "'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture_object.txt', }, 'GL_EXT_texture_perturb_normal' : { 'number'", "'url' : 'extensions/EXT/EXT_vertex_weighting.txt', }, 'GLX_EXT_visual_info' : { 'number' : 28, 'flags' : {", "'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_convolution_border_modes.txt', }, 'GL_HP_image_transform' : { 'number'", "'url' : 'extensions/NV/NV_shader_atomic_float64.txt', }, 'GL_NV_shader_atomic_fp16_vector' : { 'number' : 474, 'esnumber' : 261,", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_video_out.txt', },", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_visual_select_group.txt',", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_range.txt', }, 'GL_ARB_texture_compression' : { 'arbnumber' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLU_SGI_filter4_parameters.txt', }, 'GLX_SGI_make_current_read' : {", "'GL_AMD_gpu_shader_half_float' : { 'number' : 496, 'flags' : { 'public' }, 'supporters' :", "{ 'MESA' }, 'url' : 'extensions/EXT/EXT_clip_control.txt', }, 'WGL_I3D_digital_video_control' : { 'number' : 250,", ": 139, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_program_binary.txt', }, 
'GL_ANGLE_texture_compression_dxt3' :", "'url' : 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', }, 'GL_ANGLE_program_binary' : { 'esnumber' : 139, 'flags' : {", "}, 'GL_OES_fbo_render_mipmap' : { 'esnumber' : 27, 'flags' : { 'public' }, 'url'", ": 'extensions/NV/NV_viewport_array2.txt', }, 'GL_NV_viewport_swizzle' : { 'number' : 483, 'esnumber' : 258, 'flags'", "{ 'arbnumber' : 86, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', },", "}, 'url' : 'extensions/EXT/EXT_texture_object.txt', }, 'GL_EXT_texture_perturb_normal' : { 'number' : 147, 'flags' :", "'incomplete' }, 'url' : 'extensions/EXT/EXT_texture_cube_map.txt', 'comments' : 'Extension shipped but was not fully", ": { 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow_ambient.txt',", "417, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", ": 171, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_pipeline_statistics_query.txt', }, 'GL_ARB_pixel_buffer_object' :", "'url' : 'extensions/SGIX/SGIX_ycrcb.txt', }, 'GL_SGIX_ycrcb_subsample' : { 'number' : 204, 'flags' : {", "'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float' }, }, 'GL_ARB_compatibility' : { 'arbnumber' : 58, 'flags' : {", "'GLX_SGIX_fbconfig' : { 'number' : 49, 'flags' : { 'public' }, 'supporters' :", "'number' : 383, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "'extensions/MESA/MESA_resize_buffers.txt', }, 'GLX_MESA_set_3dfx_mode' : { 'number' : 218, 'flags' : { 'public' },", "486, 'esnumber' : 295, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_clip_space_w_scaling.txt', },", "2, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI',", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_pixmap_colormap.txt', }, 'GL_MESA_program_binary_formats' :", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_tile_raster_order.txt',", "'extensions/SUNX/SUNX_constant_data.txt', }, 
'GL_SUN_convolution_border_modes' : { 'number' : 182, 'flags' : { 'public' },", ": { 'arbnumber' : 182, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counter_ops.txt',", "{ 'MS', 'SGI' }, 'url' : 'extensions/EXT/EXT_paletted_texture.txt', }, 'GL_EXT_pixel_buffer_object' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_texture_lod.txt', }, 'GL_EXT_shadow_funcs' : { 'number'", "'supporters' : { 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_border_clamp.txt', }, 'GL_SGIS_texture_color_mask' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_select.txt', }, 'GL_SGIX_async' : { 'number'", "'extensions/ANGLE/ANGLE_depth_texture.txt', }, 'GL_ANGLE_framebuffer_blit' : { 'esnumber' : 83, 'flags' : { 'public' },", ": { 'esnumber' : 129, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_3dvision_settings.txt',", "'extensions/OES/OES_fragment_precision_high.txt', }, 'GL_OES_framebuffer_object' : { 'esnumber' : 10, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_buffer.txt', }, 'GL_OES_texture_compression_astc' : { 'esnumber'", "{ 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/AMD/AMD_draw_buffers_blend.txt', }, 'GL_AMD_framebuffer_sample_positions' : { 'number' :", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_swap_control.txt', },", ": 'extensions/ATI/ATI_separate_stencil.txt', }, 'GL_ATI_text_fragment_shader' : { 'number' : 269, 'flags' : { 'public'", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_pass_instrument.txt', },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_blend_equation_separate.txt', }, 'GL_EXT_blend_func_extended' :", ": { 'number' : 219, 'flags' : { 'incomplete' }, 'supporters' : {", ": 225, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'public' }, 'url' : 'extensions/NV/NV_draw_instanced.txt', }, 'GL_NV_draw_texture' : { 'number' 
:", "'GL_SGIX_datapipe' : { 'number' : 152, 'flags' : { 'incomplete' }, 'url' :", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_blend_minmax_factor.txt', }, 'GL_AMD_compressed_3DC_texture' :", "}, 'GL_QCOM_texture_foveated' : { 'esnumber' : 293, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_element_array.txt', }, 'GL_ATI_envmap_bumpmap'", "'extensions/ARB/ARB_gl_spirv.txt', }, 'GL_ARB_gpu_shader5' : { 'arbnumber' : 88, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', },", "}, 'GL_ARB_seamless_cubemap_per_texture' : { 'arbnumber' : 155, 'flags' : { 'public' }, 'url'", "}, 'GLX_SGIX_wait_group' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_wait_group.txt', },", "'esnumber' : 16, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_occlusion_query2.txt', }, 'GL_ARB_parallel_shader_compile' : { 'arbnumber' : 179,", ": 502, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "'url' : 'extensions/AMD/GLX_AMD_gpu_association.txt', }, 'GL_AMD_gpu_shader_half_float' : { 'number' : 496, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_blend.txt',", ": 52, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_object.txt', }, 'GL_APPLE_vertex_array_range'", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', }, 'GL_SGIX_nurbs_eval' :", ": { 'GL_EXT_semaphore_fd' }, }, 'GL_EXT_memory_object_win32' : { 'number' : 505, 'esnumber' :", "'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_texture.txt', }, 'GLX_SGIX_dm_buffer'", "}, 'GL_ARB_multisample' : { 'arbnumber' : 5, 'flags' : { 'public' }, 'supporters'", "29, 'flags' : { 'public' }, 
'supporters' : { 'ARB' }, 'url' :", "{ 'number' : 488, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_float64.txt', },", ": { 'esnumber' : 142, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_blit.txt',", ": 177, 'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url'", "{ 'arbnumber' : 174, 'esnumber' : 168, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/APPLE/APPLE_client_storage.txt', }, 'GL_APPLE_clip_distance' : { 'esnumber' : 193, 'flags' :", "}, 'url' : 'extensions/ARM/ARM_mali_program_binary.txt', }, 'GL_ARM_mali_shader_binary' : { 'esnumber' : 81, 'flags' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_emboss.txt', }, 'GL_NV_texgen_reflection' :", ": 'extensions/EXT/EXT_depth_bounds_test.txt', }, 'GL_EXT_direct_state_access' : { 'number' : 353, 'flags' : { 'public'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle.txt', }, 'GL_NV_texture_rectangle_compressed' : { 'number' : 509,", "{ 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Included with arbnumber 55, WGL_ARB_create_context.',", "'esnumber' : 98, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range2.txt', }, 'GL_NV_vertex_attrib_integer_64bit' : { 'number'", ": { 'number' : 404, 'flags' : { 'public' }, 'supporters' : {", "'SUN' }, 'url' : 'extensions/SGI/SGI_color_table.txt', 'comments' : 'Partial HP support.', }, 'GL_SGI_complex' :", "{ 'number' : 222, 'esnumber' : 52, 'flags' : { 'public' }, 'supporters'", "{ 'esnumber' : 82, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_rgba8.txt', },", "{ 'arbnumber' : 97, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_separate_shader_objects.txt', },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_stencil_texturing.txt', }, 'GL_ARB_sync' : { 'arbnumber' : 66,", "'url' : 'extensions/ARM/ARM_mali_shader_binary.txt', }, 'GL_ARM_rgba8' : { 'esnumber' : 82, 'flags' : {", 
"'flags' : { 'public' }, 'supporters' : { 'ANGLE' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt',", "'number' : 419, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'number' : 510, 'esnumber' : 285, 'flags' : { 'public' },", "{ 'number' : 269, 'flags' : { 'public' }, 'supporters' : { 'APPLE',", "{ 'public' }, 'url' : 'extensions/ANGLE/ANGLE_program_binary.txt', }, 'GL_ANGLE_texture_compression_dxt3' : { 'esnumber' : 111,", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_vertex_shader.txt', }, 'GL_EXT_vertex_weighting' : { 'number'", "}, 'GL_EXT_texture_filter_minmax' : { 'number' : 464, 'esnumber' : 227, 'flags' : {", "'extensions/SGIX/GLX_SGIX_video_resize.txt', }, 'GLX_SGIX_video_resize_float' : { 'number' : 184, 'flags' : { 'incomplete', 'public'", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESAX/MESAX_texture_stack.txt', }, 'GLX_MESA_agp_offset' : { 'number'", "'url' : 'extensions/MESA/MESA_window_pos.txt', }, 'GL_MESA_ycbcr_texture' : { 'number' : 301, 'flags' : {", "'extensions/NV/NV_3dvision_settings.txt', }, 'GL_NV_EGL_stream_consumer_external' : { 'esnumber' : 104, 'flags' : { 'public' },", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_packed_6bytes.txt', }, 'GLX_SGIX_pbuffer' : { 'number' : 50,", "'url' : 'extensions/NV/NV_fragment_program.txt', }, 'GL_NV_fragment_program2' : { 'number' : 304, 'flags' : {", ": 114, 'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url'", "}, 'GL_AMD_compressed_ATC_texture' : { 'esnumber' : 40, 'flags' : { 'public' }, 'url'", "'extensions/AMD/AMD_vertex_shader_layer.txt', }, 'GL_AMD_vertex_shader_tessellator' : { 'number' : 363, 'flags' : { 'public' },", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcba.txt', }, 'GL_SGI_color_matrix'", "{ 'number' : 178, 'flags' : { 'public' }, 'supporters' : { 'INGR'", ": { 'number' : 419, 'flags' : { 'public' }, 'supporters' : {", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_tag_sample_buffer.txt', }, 
'GL_SGIX_texture_add_env' : { 'number' : 69, 'flags'", ": 50, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'GL_NV_gpu_program4' : { 'number' : 322, 'flags' : { 'public' }, 'supporters' :", "'extensions/EXT/EXT_texture_buffer_object.txt', }, 'GL_EXT_texture_compression_astc_decode_mode' : { 'esnumber' : 276, 'flags' : { 'public' },", ": { 'esnumber' : 251, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_filter_cubic.txt',", "{ 'number' : 210, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "}, 'url' : 'extensions/NV/NV_multisample_coverage.txt', }, 'GL_NV_multisample_filter_hint' : { 'number' : 259, 'flags' :", ": 479, 'esnumber' : 242, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview2.txt',", "'GL_ARB_blend_func_extended' : { 'arbnumber' : 78, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_streams.txt', }, 'GL_DMP_program_binary'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control_tear.txt', }, 'GL_EXT_clip_control' :", "'url' : 'extensions/EXT/EXT_read_format_bgra.txt', }, 'GL_EXT_render_snorm' : { 'esnumber' : 206, 'flags' : {", ": { 'number' : 327, 'esnumber' : 157, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/S3/S3_s3tc.txt', }, 'GLX_SGIS_blended_overlay' : { 'number' : 142, 'flags'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_int64.txt', }, 'GL_NV_shader_buffer_load' : { 'number' :", ": 162, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_compression_astc.txt', }, 'GL_OES_texture_cube_map' :", "}, 'url' : 'extensions/ARB/ARB_draw_buffers_blend.txt', }, 'GL_ARB_draw_elements_base_vertex' : { 'arbnumber' : 62, 'flags' :", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_attrib_64bit.txt', }, 'GL_EXT_vertex_shader' : { 'number' : 248, 'flags'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_g_instruments.txt', }, 'GL_SGIX_bali_r_instruments' : { 'flags'", "'esnumber' : 
212, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_primitive_bounding_box.txt', }, 'GL_OES_query_matrix'", ": 'extensions/ARB/ARB_draw_instanced.txt', }, 'GL_ARB_enhanced_layouts' : { 'arbnumber' : 146, 'flags' : { 'public'", "{ 'esnumber' : 104, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_EGL_stream_consumer_external.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_snorm.txt', },", "'url' : 'extensions/AMD/AMD_gpu_shader_int64.txt', }, 'GL_AMD_interleaved_elements' : { 'number' : 431, 'flags' : {", "'GL_SGIX_ycrcb' : { 'number' : 101, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI' }, 'url'", "}, 'GL_EXT_texture_compression_s3tc_srgb' : { 'esnumber' : 289, 'flags' : { 'public' }, 'supporters'", "'public' }, 'url' : 'extensions/OES/OES_draw_buffers_indexed.txt', }, 'GL_OES_draw_elements_base_vertex' : { 'esnumber' : 219, 'flags'", "'extensions/APPLE/APPLE_clip_distance.txt', }, 'GL_APPLE_color_buffer_packed_float' : { 'esnumber' : 194, 'flags' : { 'public' },", ": { 'number' : 260, 'flags' : { 'public' }, 'supporters' : {", ": 55, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "{ 'GLX_NV_present_video', 'WGL_NV_present_video' }, }, 'GL_NV_primitive_restart' : { 'number' : 285, 'flags' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_clipmap.txt', },", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float.txt', }, 'GL_AMD_gpu_shader_half_float_fetch'", "28, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fragment_precision_high.txt', }, 'GL_OES_framebuffer_object' : {", ": { '3DL', 'ATI', 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_object.txt', }, 'GL_EXT_framebuffer_sRGB' :", "'extensions/NV/NV_gpu_shader5.txt', }, 'GL_NV_half_float' : { 'number' : 283, 'flags' : { 'public' },", "{ 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', 
}, 'GL_MESA_pack_invert' : { 'number' : 300,", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragment_lighting_space.txt', }, 'GL_SGIX_fragment_specular_lighting' : { 'flags' :", "'extensions/EXT/EXT_texture_env_combine.txt', }, 'GL_EXT_texture_env_dot3' : { 'number' : 220, 'flags' : { 'public' },", ": { 'number' : 143, 'flags' : { 'incomplete' }, 'supporters' : {", "208, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_copy_image.txt', }, 'GL_OES_depth24' : {", "'GL_OES_texture_buffer' : { 'esnumber' : 216, 'flags' : { 'public' }, 'url' :", "158, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_buffer.txt', }, 'GL_NV_copy_depth_to_color' : {", "}, 'url' : 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', }, 'GL_EXT_texture_integer' : { 'number' : 343, 'flags' :", "507, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'extensions/SUN/SUN_triangle_list.txt', }, 'GL_SUN_vertex' : { 'number' : 166, 'flags' : { 'public' },", "'WGL_NV_render_depth_texture' : { 'number' : 263, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query.txt', }, 'GL_ARB_internalformat_query2' : { 'arbnumber'", "{ 'public' }, 'url' : 'extensions/OES/OES_depth32.txt', }, 'GL_OES_depth_texture' : { 'esnumber' : 43,", "'url' : 'extensions/VIV/VIV_shader_binary.txt', }, 'WGL_3DL_stereo_control' : { 'number' : 313, 'flags' : {", "'url' : 'extensions/ARB/ARB_shader_objects.txt', }, 'GL_ARB_shader_precision' : { 'arbnumber' : 98, 'flags' : {", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_select.txt', },", "}, 'url' : 'extensions/OES/OES_texture_view.txt', }, 'GL_OES_vertex_array_object' : { 'esnumber' : 71, 'flags' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_impact_pixel_texture.txt', }, 'GL_SGIX_instrument_error' : {", ": { 'number' : 386, 'flags' : { 'public' }, 'supporters' : {", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_polygon_offset_clamp.txt', }, 
'GL_ARB_post_depth_coverage' : { 'arbnumber' : 180,", ": { 'WGL_NV_float_buffer' }, }, 'GL_NV_fog_distance' : { 'number' : 192, 'flags' :", ": { 'SGI' }, 'url' : 'extensions/SGI/SGI_color_matrix.txt', }, 'GL_SGI_color_table' : { 'number' :", ": { 'public' }, 'url' : 'extensions/APPLE/APPLE_framebuffer_multisample.txt', }, 'GL_APPLE_object_purgeable' : { 'number' :", "{ 'public' }, 'url' : 'extensions/QCOM/QCOM_writeonly_rendering.txt', }, 'GL_QCOM_framebuffer_foveated' : { 'esnumber' : 273,", "'url' : 'extensions/EXT/EXT_texture_storage.txt', }, 'GL_EXT_texture_swizzle' : { 'number' : 356, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_occlusion_query2.txt', }, 'GL_ARB_parallel_shader_compile' : { 'arbnumber' : 179, 'flags' :", ": { 'public' }, 'url' : 'extensions/QCOM/QCOM_binning_control.txt', }, 'GL_QCOM_driver_control' : { 'esnumber' :", "'number' : 451, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int64.txt', }, 'GL_AMD_interleaved_elements'", "'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_digital_video_control.txt', }, 'WGL_I3D_gamma' : { 'number' : 251, 'flags'", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_env_combine3.txt', }, 'GL_ATI_texture_float' : {", "'GL_OES_stencil1' : { 'esnumber' : 31, 'flags' : { 'public' }, 'url' :", "128, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "'url' : 'extensions/OES/OES_stencil8.txt', }, 'GL_OES_stencil_wrap' : { 'esnumber' : 19, 'flags' : {", "}, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture2.txt', }, 'GL_EXT_multiview_draw_buffers' : { 'esnumber' : 125, 'flags' :", "'esnumber' : 97, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_half_float.txt', }, 'GL_EXT_color_subtable'", "'url' : 'extensions/NVX/NVX_linked_gpu_multicast.txt', }, 'GL_NV_3dvision_settings' : { 'esnumber' : 129, 'flags' : {", "{ 'public' }, 'url' : 'extensions/OES/OES_primitive_bounding_box.txt', }, 'GL_OES_query_matrix' : { 'number' : 296,", "{ 'public' }, 'url' : 
'extensions/EXT/EXT_texture_perturb_normal.txt', }, 'GL_EXT_texture_rg' : { 'esnumber' : 103,", "'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_minmax.txt', }, 'GL_EXT_blend_subtract' : {", "'number' : 500, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_alpha_to_coverage_dither_control.txt', }, 'GL_NV_bgr'", ": { 'public' }, 'url' : 'extensions/OES/OES_framebuffer_object.txt', }, 'GL_OES_geometry_shader' : { 'esnumber' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_clamp.txt', }, 'GL_NV_depth_nonlinear' : { 'esnumber'", "}, 'url' : 'extensions/PGI/PGI_vertex_hints.txt', }, 'GL_QCOM_alpha_test' : { 'esnumber' : 89, 'flags' :", ": 168, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI' },", "'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/WGL_OML_sync_control.txt',", "{ 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_group.txt', }, 'GL_SGIX_tag_sample_buffer' : { 'number' : 58,", "'public' }, 'url' : 'extensions/ARB/ARB_ES2_compatibility.txt', }, 'GL_ARB_ES3_1_compatibility' : { 'arbnumber' : 159, 'flags'", "'arbnumber' : 64, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_provoking_vertex.txt', }, 'GL_ARB_query_buffer_object'", ": 11, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_get.txt', }, 'GL_OES_matrix_palette' :", ": { 'number' : 264, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_swap_group.txt', }, 'GL_NV_tessellation_program5' : { 'number'", "see arbnumber 75.', }, 'GLX_ARB_create_context_no_error' : { 'arbnumber' : 191, 'flags' : {", "}, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_conservative_rasterization.txt', }, 'GL_INTEL_fragment_shader_ordering' : {", ": { 'esnumber' : 143, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_multisample.txt',", ": 'extensions/MESA/GLX_MESA_query_renderer.txt', }, 'GLX_MESA_release_buffers' : { 'number' : 217, 'flags' : { 'public'", "'flags' : { 
'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_buffer_object.txt',", "'GL_NV_conservative_raster_pre_snap_triangles' : { 'number' : 487, 'esnumber' : 262, 'flags' : { 'public'", "'public' }, 'supporters' : { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_detail_texture.txt', }, 'GL_SGIS_fog_function'", "{ 'number' : 126, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", ": 9, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', }, 'GL_ARB_viewport_array' : { 'arbnumber' : 100,", ": 'extensions/ARB/ARB_shading_language_420pack.txt', }, 'GL_ARB_shading_language_include' : { 'arbnumber' : 76, 'flags' : { 'public'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_vulkan_image.txt', }, 'GL_NV_evaluators' : {", ": 148, 'esnumber' : 69, 'flags' : { 'public' }, 'supporters' : {", "{ 'number' : 131, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "'number' : 312, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_packed_depth_stencil.txt', }, 'GL_EXT_packed_float'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_square.txt', }, 'GL_NV_clip_space_w_scaling' : { 'number' : 486,", ": { 'arbnumber' : 154, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_indirect_parameters.txt',", "}, 'url' : 'extensions/EXT/EXT_EGL_image_array.txt', }, 'GL_EXT_EGL_image_external_wrap_modes' : { 'esnumber' : 298, 'flags' :", ": { 'arbnumber' : 124, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_view.txt',", "'GL_EXT_index_texture' : { 'number' : 93, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_combine.txt', }, 'GL_ARB_texture_env_crossbar' : { 'arbnumber'", "'number' : 222, 'esnumber' : 52, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_seamless_cubemap_per_texture.txt',", "'GL_NV_texture_array' : { 'esnumber' : 
133, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_indirect.txt', }, 'GL_ARB_draw_instanced' : { 'arbnumber'", "'GL_ATI_envmap_bumpmap' : { 'number' : 244, 'flags' : { 'public' }, 'supporters' :", "{ 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_transform_feedback.txt', }, 'GL_EXT_transform_feedback2' : { 'flags' :", "'public' }, 'url' : 'extensions/OES/OES_standard_derivatives.txt', }, 'GL_OES_stencil1' : { 'esnumber' : 31, 'flags'", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_gather_bias_lod.txt', }, 'GL_AMD_texture_texture4' : {", "'GL_ARB_shader_atomic_counters' : { 'arbnumber' : 114, 'flags' : { 'public' }, 'url' :", "}, 'GL_ARB_compute_variable_group_size' : { 'arbnumber' : 153, 'flags' : { 'public' }, 'url'", ": { 'number' : 225, 'flags' : { 'public' }, 'supporters' : {", "{ 'number' : 47, 'flags' : { 'public' }, 'supporters' : { 'IBM',", "'url' : 'extensions/NV/NV_vertex_program.txt', }, 'GL_NV_vertex_program1_1' : { 'number' : 266, 'flags' : {", "'url' : 'extensions/OES/OES_sample_variables.txt', }, 'GL_OES_shader_image_atomic' : { 'esnumber' : 171, 'flags' : {", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_complex_polar.txt', }, 'GL_SGIX_convolution_accuracy' : { 'number' :", "'WGL_NV_swap_group' : { 'number' : 351, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', }, 'GL_AMD_shader_image_load_store_lod' : { 'number' : 513, 'flags' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_latc.txt', }, 'GL_EXT_texture_compression_rgtc'", ": 'extensions/EXT/EXT_coordinate_frame.txt', }, 'GL_EXT_copy_image' : { 'esnumber' : 175, 'flags' : { 'public'", "'number' : 435, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "}, 'GL_ARB_depth_clamp' : { 'arbnumber' : 61, 'flags' : { 'public' }, 'url'", "'GL_ARB_texture_mirror_clamp_to_edge' : { 'arbnumber' : 149, 'flags' : { 'public' }, 
'url' :", "'url' : 'extensions/NV/NV_geometry_program4.txt', }, 'GL_NV_geometry_shader4' : { 'number' : 338, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_video_out.txt', }, 'GL_NV_viewport_array' :", "{ 'KHR' }, 'url' : 'extensions/OES/OES_query_matrix.txt', }, 'GL_OES_read_format' : { 'number' : 295,", "{ 'esnumber' : 147, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_cube.txt', },", "}, 'GL_ARB_uniform_buffer_object' : { 'arbnumber' : 57, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_blackhole_render.txt', }, 'GL_INTEL_parallel_arrays' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_external_objects.txt', 'alias' : { 'GL_EXT_semaphore' }, }, 'GL_EXT_memory_object_fd'", ": 'extensions/ARB/ARB_post_depth_coverage.txt', }, 'GL_ARB_program_interface_query' : { 'arbnumber' : 134, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_separate_stencil.txt',", "'extensions/SGIX/SGIX_cylinder_texgen.txt', }, 'GL_SGIX_datapipe' : { 'number' : 152, 'flags' : { 'incomplete' },", "'arbnumber' : 51, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'GL_SGIX_mpeg1' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg1.txt', }, 'GL_SGIX_mpeg2'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners.txt', }, 'GL_NV_register_combiners2' : { 'number' :", "'extensions/SGIX/GLX_SGIX_visual_select_group.txt', }, 'GLX_SGIX_wait_group' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_wait_group.txt',", "{ 'number' : 443, 'esnumber' : 164, 'flags' : { 'public' }, 'url'", "106, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_stencil_export.txt', }, 'GL_ARB_shader_storage_buffer_object' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', }, 'GL_EXT_conservative_depth' : { 'esnumber' : 268,", "{ 'NVIDIA' }, 'url' : 
'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias' : { 'GLX_EXT_framebuffer_sRGB', 'WGL_EXT_framebuffer_sRGB' }, },", "{ 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_subsample.txt', }, 'GLX_OML_swap_method'", ": { 'number' : 184, 'flags' : { 'incomplete', 'public' }, 'supporters' :", ": 308, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", "{ 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', }, 'GL_IGLOO_viewport_offsetXXX' : { 'flags' :", "{ 'number' : 407, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'GL_NV_packed_float' : { 'esnumber' : 127, 'flags' : { 'public' }, 'url' :", "313, 'flags' : { 'public' }, 'supporters' : { '3DL' }, 'url' :", "'GLX_MESA_pixmap_colormap' : { 'number' : 216, 'flags' : { 'public' }, 'supporters' :", "306, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_derivative_control.txt', }, 'GL_ARB_direct_state_access' : { 'arbnumber' : 164,", ": 'extensions/SGIS/SGIS_texture_lod.txt', }, 'GL_SGIS_texture_select' : { 'number' : 51, 'flags' : { 'public'", ": { 'number' : 144, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_separate_specular_color.txt',", ": { 'public' }, 'url' : 'extensions/OES/OES_surfaceless_context.txt', }, 'GL_OES_tessellation_shader' : { 'esnumber' :", "18, 'flags' : { 'public' }, 'supporters' : { 'ES', 'SGI' }, 'url'", "91, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_buffers.txt', }, 'GL_NV_draw_instanced' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_pixel.txt',", ": { 'number' : 392, 'flags' : { 'public' }, 'supporters' : {", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_bits.txt', }, 'GL_SGIX_pixel_texture_lod' : { 'number' : 128, 'flags'", "'supporters' : { '3DFX', '3DL', 'SGI' }, 'url' : 'extensions/EXT/EXT_shared_texture_palette.txt', }, 'GL_EXT_sparse_texture' :", "}, 'supporters' : { 'MESA' }, 'url' : 
'extensions/MESA/GLX_MESA_release_buffers.txt', }, 'GL_MESA_resize_buffers' : {", "{ 'number' : 235, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_equation_advanced.txt', 'alias' :", "26, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'number' : 305, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 62, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get.txt', }, 'GL_QCOM_extended_get2' :", ": 'extensions/AMD/AMD_occlusion_query_event.txt', }, 'GL_AMD_performance_monitor' : { 'number' : 360, 'esnumber' : 50, 'flags'", "'extensions/NV/NV_pack_subimage.txt', }, 'GL_NV_packed_depth_stencil' : { 'number' : 226, 'flags' : { 'public' },", "'AMD' }, 'url' : 'extensions/AMD/AMD_shader_image_load_store_lod.txt', }, 'GL_AMD_shader_stencil_export' : { 'number' : 382, 'flags'", "'GL_SGIS_clip_band_hint' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_clip_band_hint.txt', }, 'GLX_SGIS_color_range'", "188, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', }, 'GL_EXT_conservative_depth' : {", "}, 'GL_AMD_transform_feedback4' : { 'number' : 450, 'flags' : { 'public' }, 'url'", ": { 'number' : 474, 'esnumber' : 261, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'QCOM' }, 'url' : 'extensions/QCOM/QCOM_tiled_rendering.txt', }, 'GL_QCOM_writeonly_rendering'", "'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_global_alpha.txt',", "{ 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_swap_group.txt', }, 'WGL_NV_video_output' : { 'number' : 349,", "'public' }, 'url' : 'extensions/EXT/EXT_geometry_shader.txt', 'alias' : { 'GL_EXT_geometry_point_size' }, }, 'GL_EXT_geometry_shader4' :", "'extensions/NV/NV_shader_atomic_fp16_vector.txt', }, 'GL_NV_shader_atomic_int64' : { 'number' : 455, 'flags' : { 'public' },", "}, 'url' : 'extensions/SUN/SUN_triangle_list.txt', }, 'GL_SUN_vertex' : { 
'number' : 166, 'flags' :", "121, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_buffer_object.txt', }, 'GL_ARB_clear_texture' : {", "'GL_ARB_texture_barrier' : { 'arbnumber' : 167, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_copy_sub_buffer.txt',", "{ 'public' }, 'url' : 'extensions/NV/NV_bgr.txt', }, 'GL_NV_bindless_multi_draw_indirect' : { 'number' : 432,", "'url' : 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', }, 'GL_EXT_texture_norm16' : { 'esnumber' : 207, 'flags' : {", "}, 'url' : 'extensions/EXT/EXT_texture_sRGB.txt', }, 'GL_EXT_texture_sRGB_R8' : { 'esnumber' : 221, 'flags' :", "'NVIDIA' }, 'url' : 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', }, 'GL_NVX_conditional_render' : { 'number' : 425, 'flags'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_texture.txt', }, 'GL_NV_blend_equation_advanced' : {", "'ARB' }, 'url' : 'extensions/ARB/ARB_shadow.txt', }, 'GL_ARB_shadow_ambient' : { 'arbnumber' : 24, 'flags'", "}, 'GL_NV_vertex_program' : { 'number' : 233, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_border_clamp.txt', }, 'GL_OES_texture_buffer' : { 'esnumber' : 216,", "}, 'GL_SGIX_fragment_lighting_space' : { 'number' : 118, 'flags' : { 'incomplete' }, 'supporters'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_present_video.txt', 'alias' : {", "'public' }, 'url' : 'extensions/NV/NV_shader_thread_group.txt', }, 'GL_NV_shader_thread_shuffle' : { 'number' : 448, 'flags'", ": { 'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_texture_lighting.txt', },", "}, 'GL_NV_texture_compression_vtc' : { 'number' : 228, 'flags' : { 'public' }, 'supporters'", "'HP' }, 'url' : 'extensions/HP/HP_occlusion_test.txt', }, 'GL_HP_texture_lighting' : { 'number' : 111, 'flags'", "'GL_EXT_draw_range_elements' : { 'number' : 112, 'flags' : { 'public' }, 
'supporters' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_compute_program5.txt', }, 'GL_NV_conditional_render' : { 'number' : 346,", ": 115, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": { 'esnumber' : 64, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_discard_framebuffer.txt',", ": 'extensions/OML/OML_interlace.txt', }, 'GL_OML_resample' : { 'number' : 241, 'flags' : { 'public'", "'url' : 'extensions/EXT/EXT_multisampled_render_to_texture2.txt', }, 'GL_EXT_multiview_draw_buffers' : { 'esnumber' : 125, 'flags' : {", "'url' : 'extensions/NV/NV_deep_texture3D.txt', }, 'GLX_NV_delay_before_swap' : { 'number' : 445, 'flags' : {", "'extensions/I3D/WGL_I3D_swap_frame_lock.txt', }, 'WGL_I3D_swap_frame_usage' : { 'number' : 255, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get.txt', }, 'GL_QCOM_extended_get2' : { 'esnumber' :", "'WGL_NV_DX_interop2' : { 'number' : 412, 'flags' : { 'public' }, 'supporters' :", "'GL_APPLE_copy_texture_levels' : { 'esnumber' : 123, 'flags' : { 'public' }, 'url' :", ": { 'IBM', 'SUN' }, 'url' : 'extensions/EXT/EXT_rescale_normal.txt', }, 'GL_EXT_robustness' : { 'esnumber'", ": 414, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_color_clamp.txt', }, 'GL_INGR_interlace_read' : {", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_dvc.txt', }, 'GLX_SGIX_fbconfig' :", "'esnumber' : 213, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_io_blocks.txt', }, 'GL_OES_shader_multisample_interpolation'", "}, 'GL_AMD_vertex_shader_viewport_index' : { 'number' : 416, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/HP/HP_image_transform.txt', }, 'GL_HP_occlusion_test' : { 'number' : 137, 'flags' : {", ": { 'number' : 345, 'flags' : { 'public' }, 'supporters' : {", "'extensions/EXT/EXT_abgr.txt', }, 'GL_EXT_base_instance' : { 'esnumber' : 203, 'flags' : { 'public' },", "'public' }, 'supporters' 
: { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_point_line_texgen.txt', }, 'GL_SGIS_shared_multisample' :", ": { 'arbnumber' : 133, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_draw_indirect.txt',", "139, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_program_binary.txt', }, 'GL_ANGLE_texture_compression_dxt3' : {", "}, 'GL_ARB_texture_cube_map' : { 'arbnumber' : 7, 'flags' : { 'public' }, 'supporters'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_buffer_unified_memory.txt', }, 'GL_NV_vertex_program' : { 'number' :", "'SGI' }, 'url' : 'extensions/SGI/SGI_complex.txt', }, 'GL_SGI_complex_type' : { 'number' : 88, 'flags'", ": 189, 'esnumber' : 249, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt',", "'GL_EXT_x11_sync_object' : { 'number' : 406, 'flags' : { 'public' }, 'supporters' :", "'number' : 216, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_multisample.txt', 'alias' : { 'GLX_SGIS_multisample' }, }, 'GL_SGIS_multitexture' :", "}, 'GL_PGI_misc_hints' : { 'number' : 77, 'flags' : { 'public' }, 'supporters'", "'arbnumber' : 179, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_parallel_shader_compile.txt', }, 'GL_ARB_pipeline_statistics_query'", "{ 'IBM', 'SUN' }, 'url' : 'extensions/EXT/EXT_rescale_normal.txt', }, 'GL_EXT_robustness' : { 'esnumber' :", "'GL_NV_blend_square' : { 'number' : 194, 'flags' : { 'public' }, 'supporters' :", "'GLX_ARB_robustness_application_isolation' : { 'arbnumber' : 142, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/EXT/EXT_debug_label.txt', }, 'GL_EXT_debug_marker' : { 'number' : 440, 'esnumber' : 99,", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_pixel_buffer_object.txt',", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression_rgtc.txt', },", "}, 'GL_NV_copy_depth_to_color' : { 'number' : 243, 'flags' : { 'public' }, 'url'", "{ 'public' 
}, 'url' : 'extensions/EXT/EXT_clear_texture.txt', }, 'GL_EXT_clip_cull_distance' : { 'esnumber' : 257,", "'url' : 'extensions/SGIX/GLX_SGIX_video_source.txt', }, 'GLX_SGIX_visual_select_group' : { 'number' : 234, 'flags' : {", "}, 'GL_SGIX_fog_scale' : { 'number' : 161, 'flags' : { 'incomplete' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_tessellator.txt',", "}, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', }, 'GL_EXT_framebuffer_object' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_histogram.txt', }, 'GL_SGIX_async_pixel' : { 'number'", ": 505, 'esnumber' : 282, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_win32.txt',", ": 'extensions/EXT/EXT_histogram.txt', }, 'GLX_EXT_import_context' : { 'number' : 47, 'flags' : { 'public'", ": 460, 'esnumber' : 252, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_polygon_offset_clamp.txt',", "'number' : 330, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'public' }, 'url' : 'extensions/OES/OES_point_size_array.txt', }, 'GL_OES_point_sprite' : { 'esnumber' : 15, 'flags'", "'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_make_current_read.txt', }, 'WGL_EXT_multisample' : {", "}, 'url' : 'extensions/SGIS/SGIS_texture_filter4.txt', }, 'GL_SGIS_texture_lod' : { 'number' : 24, 'flags' :", "'GL_EXT_framebuffer_object' : { 'number' : 310, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 285, 'flags' : { 'public' }, 'supporters' : {", ": 138, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SUN' },", ": 'extensions/NV/NV_gpu_program4.txt', }, 'GL_NV_gpu_program5' : { 'number' : 388, 'flags' : { 'public'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_instanced.txt', }, 'GL_EXT_draw_range_elements' : {", "{ 'number' : 37, 'esnumber' : 65, 'flags' : { 'public' }, 'supporters'", 
"'extensions/EXT/EXT_debug_marker.txt', }, 'GL_EXT_depth_bounds_test' : { 'number' : 297, 'flags' : { 'public' },", "'GL_EXT_shader_texture_lod' : { 'esnumber' : 77, 'flags' : { 'public' }, 'url' :", "'extensions/NV/NV_geometry_program4.txt', }, 'GL_NV_geometry_shader4' : { 'number' : 338, 'flags' : { 'public' },", "}, 'WGL_NV_gpu_affinity' : { 'number' : 355, 'flags' : { 'public' }, 'supporters'", "145, 'flags' : { 'public' }, 'supporters' : { '3DFX', 'NVIDIA', 'REND' },", "{ 'MESA' }, 'url' : 'extensions/MESA/MESA_program_binary_formats.txt', }, 'GLX_MESA_query_renderer' : { 'number' : 446,", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_parameters.txt',", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', }, 'GL_AMD_shader_atomic_counter_ops' : {", ": { 'esnumber' : 206, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_render_snorm.txt',", ": { 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control.txt',", ": 296, 'esnumber' : 16, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', }, 'GL_NV_conservative_raster_underestimation' : { 'number'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_convolution_accuracy.txt', }, 'GL_SGIX_cube_map'", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_tessellator.txt', }, 'GL_AMD_vertex_shader_viewport_index' :", "122, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compute_shader.txt', }, 'GL_ARB_compute_variable_group_size' : {", "{ 'number' : 510, 'esnumber' : 285, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_visual_select_group.txt', }, 'GLX_SGIX_wait_group' : { 'flags'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop2.txt',", "}, 'url' : 
'extensions/ARB/ARB_explicit_attrib_location.txt', }, 'GL_ARB_explicit_uniform_location' : { 'arbnumber' : 128, 'flags' :", "'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', }, 'GL_IGLOO_viewport_offsetXXX' : { 'flags' : { 'incomplete',", "{ 'incomplete', 'obsolete' }, 'url' : 'extensions/EXT/EXT_transform_feedback2.txt', 'comments' : 'Draft extension which is", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader3.txt', }, 'GL_NV_transform_feedback' : { 'number' : 341,", "{ 'arbnumber' : 84, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_swizzle.txt', },", ": { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_extensions_string.txt', }, 'WGL_ARB_make_current_read' : { 'arbnumber' :", ": { 'arbnumber' : 184, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_clock.txt',", "'GL_QCOM_driver_control' : { 'esnumber' : 55, 'flags' : { 'public' }, 'url' :", ": { 'esnumber' : 47, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_get_program_binary.txt',", "}, 'url' : 'extensions/OES/OES_texture_3D.txt', }, 'GL_OES_texture_border_clamp' : { 'esnumber' : 215, 'flags' :", ": 'extensions/AMD/AMD_draw_buffers_blend.txt', }, 'GL_AMD_framebuffer_sample_positions' : { 'number' : 454, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_int64.txt', },", "'url' : 'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl' : 'extensions/EXT/EXT_separate_shader_objects.gles.txt', 'comments' : 'Different that the OpenGL extension", "not needed - see arbnumber 75.', }, 'GLX_ARB_create_context_no_error' : { 'arbnumber' : 191,", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/INTEL/GLX_INTEL_swap_event.txt', }, 'GL_INTEL_texture_scissor'", "244, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", ": 'extensions/EXT/EXT_disjoint_timer_query.txt', }, 'GL_EXT_draw_buffers' : { 'esnumber' : 151, 'flags' : { 'public'", "{ 'NVIDIA' }, 'url' : 
'extensions/NV/NV_evaluators.txt', }, 'GL_NV_explicit_attrib_location' : { 'esnumber' : 159,", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_shader4.txt', }, 'GL_EXT_gpu_shader5' : { 'esnumber' :", "'number' : 35, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR',", "{ 'esnumber' : 254, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_group_vote.txt', },", ": 508, 'esnumber' : 284, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_buffer.txt',", "}, 'url' : 'extensions/APPLE/APPLE_specular_vector.txt', }, 'GL_APPLE_sync' : { 'esnumber' : 124, 'flags' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_store.txt', },", ": { 'HP', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_color.txt', }, 'GL_EXT_blend_equation_separate'", "'GL_SGIS_pixel_texture' : { 'number' : 15, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/QCOM/QCOM_extended_get.txt', }, 'GL_QCOM_extended_get2' : { 'esnumber' : 63, 'flags' : {", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_impact_pixel_texture.txt', }, 'GL_SGIX_instrument_error' : { 'flags' : {", "'public' }, 'url' : 'extensions/NV/NV_geometry_shader_passthrough.txt', }, 'GL_NV_gpu_multicast' : { 'number' : 494, 'flags'", "346, 'esnumber' : 198, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 'extensions/EXT/EXT_texture_compression_bptc.txt', }, 'GL_EXT_texture_compression_dxt1' : { 'number' : 309, 'esnumber' : 49, 'flags'", "'public' }, 'url' : 'extensions/EXT/EXT_clear_texture.txt', }, 'GL_EXT_clip_cull_distance' : { 'esnumber' : 257, 'flags'", ": { 'number' : 277, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', }, 'GLU_EXT_object_space_tess' : { 'number'", "'GLX_NV_swap_group' : { 'number' : 350, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_trinary_minmax.txt', }, 'GL_AMD_sparse_texture' : { 'number'", 
": { 'public' }, 'url' : 'extensions/KHR/KHR_parallel_shader_compile.txt', }, 'GL_KHR_robust_buffer_access_behavior' : { 'arbnumber' :", "}, 'supporters' : { 'QCOM' }, 'url' : 'extensions/QCOM/QCOM_tiled_rendering.txt', }, 'GL_QCOM_writeonly_rendering' : {", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_quad_mesh.txt', }, 'GL_SGIX_reference_plane' : { 'number'", "}, 'url' : 'extensions/ARB/ARB_geometry_shader4.txt', }, 'GLX_ARB_get_proc_address' : { 'arbnumber' : 2, 'flags' :", ": { 'arbnumber' : 77, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_compression_bptc.txt',", "'GL_EXT_provoking_vertex' : { 'number' : 364, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/NV/NV_sample_mask_override_coverage.txt', }, 'GL_NV_shader_atomic_counters' : { 'number' : 423, 'flags' : {", "'esnumber' : 219, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_elements_base_vertex.txt', }, 'GL_OES_draw_texture'", "'HP', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_generate_mipmap.txt', }, 'GL_SGIS_line_texgen' : { 'flags' : {", ": 'extensions/I3D/WGL_I3D_swap_frame_lock.txt', }, 'WGL_I3D_swap_frame_usage' : { 'number' : 255, 'flags' : { 'public'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle.txt', }, 'GL_NV_texture_rectangle_compressed' : { 'number'", "}, 'GL_NV_texture_expand_normal' : { 'number' : 286, 'flags' : { 'public' }, 'supporters'", "'number' : 374, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'number' : 162, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", ": 'extensions/OES/OES_texture_buffer.txt', }, 'GL_OES_texture_compression_astc' : { 'esnumber' : 162, 'flags' : { 'public'", "}, 'url' : 'extensions/SGIX/SGIX_pixel_texture_lod.txt', }, 'GL_SGIX_pixel_tiles' : { 'number' : 46, 'flags' :", "209, 'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_multisample.txt', 'alias' : { 'GL_EXT_multisample'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/EXT/EXT_texture_filter_anisotropic.txt', },", "'WGL_ARB_robustness_share_group_isolation' }, }, 'WGL_ATI_pixel_format_float' : { 'number' : 278, 'flags' : { 'public'", "}, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', }, 'GL_ANGLE_instanced_arrays' : { 'esnumber' : 109, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback.txt',", "'extensions/EXT/EXT_misc_attribute.txt', }, 'GL_EXT_multi_draw_arrays' : { 'number' : 148, 'esnumber' : 69, 'flags' :", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_client_storage.txt', }, 'GL_APPLE_clip_distance' : { 'esnumber'", "'GL_AMD_query_buffer_object' : { 'number' : 420, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_tile_raster_order.txt', },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_square.txt', },", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_blend_minmax_factor.txt',", "}, 'url' : 'extensions/ARB/ARB_shader_subroutine.txt', }, 'GL_ARB_shader_texture_image_samples' : { 'arbnumber' : 166, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners2.txt',", ": { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_libglvnd.txt', }, 'GL_EXT_light_texture' : { 'number' :", "{ 'esnumber' : 245, 'flags' : { 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_program_binary.txt',", "'url' : 'extensions/OES/OES_depth_texture_cube_map.txt', }, 'GL_OES_draw_buffers_indexed' : { 'esnumber' : 209, 'flags' : {", "'WGL_NV_video_output' : { 'number' : 349, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_NV_gpu_program5' : { 'number' : 388, 'flags' : { 'public' }, 'supporters'", ": 142, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_blit.txt', }, 'GL_NV_framebuffer_mixed_samples' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_deep_texture3D.txt', }, 
'GLX_NV_delay_before_swap' : { 'number' : 445, 'flags'", ": 'extensions/OES/OES_matrix_palette.txt', }, 'GL_OES_packed_depth_stencil' : { 'esnumber' : 44, 'flags' : { 'public'", "92, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "142, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'arbnumber' : 63, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_coord_conventions.txt', },", "{ 'esnumber' : 211, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_gpu_shader5.txt', },", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_add.txt',", "{ 'number' : 444, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_shader_stencil_value_export.txt', },", ": 'extensions/SGIX/SGIX_nurbs_eval.txt', }, 'GL_SGIX_occlusion_instrument' : { 'number' : 151, 'flags' : { 'incomplete'", "'GL_NV_fragment_program_option' : { 'number' : 303, 'flags' : { 'public' }, 'supporters' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/GLX_ARB_get_proc_address.txt', }, 'GL_ARB_get_program_binary' : {", ": 'extensions/NV/NV_texture_compression_latc.txt', }, 'GL_NV_texture_compression_s3tc' : { 'esnumber' : 128, 'flags' : { 'public'", "'url' : 'extensions/ARB/ARB_texture_border_clamp.txt', }, 'GL_ARB_texture_buffer_object' : { 'arbnumber' : 51, 'flags' : {", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_combine.txt', },", "}, 'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_misc_hints.txt', }, 'GL_PGI_vertex_hints' : {", "'number' : 80, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias' : { 'GL_SGIS_color_range'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_buffers_blend.txt', }, 'GL_ARB_draw_elements_base_vertex' : { 'arbnumber'", "56, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Alias to", ": 300, 'flags' : { 'public' }, 
'supporters' : { 'INTEL' }, 'url'", "}, 'url' : 'extensions/SGIX/SGIX_clipmap.txt', }, 'GL_SGIX_color_matrix_accuracy' : { 'flags' : { 'incomplete' },", "'GL_NV_clip_space_w_scaling' : { 'number' : 486, 'esnumber' : 295, 'flags' : { 'public'", "'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow_ambient.txt', }, 'GL_SGIX_slim'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_win32.txt', 'alias' : { 'GL_EXT_semaphore_win32' }, },", "326, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "251, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_filter_cubic.txt', }, 'GL_IMG_user_clip_plane' : {", "}, 'url' : 'extensions/OES/OES_mapbuffer.txt', }, 'GL_OES_matrix_get' : { 'esnumber' : 11, 'flags' :", ": 374, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'GL_ARB_sparse_buffer' : { 'arbnumber' : 172, 'flags' : { 'public' }, 'url'", "'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float' }, }, 'GL_EXT_packed_pixels' : { 'number' : 23, 'flags' : {", "'url' : 'extensions/NV/NV_gpu_program4.txt', }, 'GL_NV_gpu_program5' : { 'number' : 388, 'flags' : {", ": { 'public' }, 'url' : 'extensions/EXT/EXT_instanced_arrays.txt', }, 'GLX_EXT_libglvnd' : { 'number' :", "}, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc.txt', }, 'GL_EXT_texture_compression_s3tc_srgb' :", "'extensions/EXT/EXT_blend_func_separate.txt', }, 'GL_EXT_blend_logic_op' : { 'number' : 39, 'flags' : { 'public' },", "'GL_EXT_vertex_array_set' : { 'flags' : { 'public' }, 'supporters' : { 'IBM' },", "'GL_ARB_post_depth_coverage' : { 'arbnumber' : 180, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_conditional_render.txt',", "'public' }, 'url' : 'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', }, 'GL_QCOM_tiled_rendering' : { 'esnumber' : 70, 'flags'", ": 
'extensions/ARB/ARB_shader_storage_buffer_object.txt', }, 'GL_ARB_shader_subroutine' : { 'arbnumber' : 90, 'flags' : { 'public'", "'esnumber' : 200, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_image_formats.txt', }, 'GL_NV_instanced_arrays'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vdpau_interop.txt', }, 'GL_NV_vertex_array_range' :", "'GL_AMD_texture_texture4' : { 'number' : 362, 'flags' : { 'public' }, 'supporters' :", "'GL_NV_half_float' : { 'number' : 283, 'flags' : { 'public' }, 'supporters' :", "'extensions/ARB/ARB_explicit_uniform_location.txt', }, 'GL_ARB_fragment_coord_conventions' : { 'arbnumber' : 63, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/SGIX/SGIX_fragment_specular_lighting.txt', }, 'GL_SGIX_fragments_instrument' : { 'number' : 180, 'flags'", "'extensions/SGIX/SGIX_instrument_error.txt', }, 'GL_SGIX_instruments' : { 'number' : 55, 'flags' : { 'public' },", "114, 'flags' : { 'public' }, 'url' : 'extensions/FJ/FJ_shader_binary_GCCSO.txt', }, 'GL_GREMEDY_frame_terminator' : {", "{ 'public' }, 'url' : 'extensions/NV/NV_shader_thread_group.txt', }, 'GL_NV_shader_thread_shuffle' : { 'number' : 448,", ": 'extensions/EXT/EXT_external_buffer.txt', }, 'GL_EXT_EGL_image_array' : { 'esnumber' : 278, 'flags' : { 'public'", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_pn_triangles.txt', }, 'GL_ATI_separate_stencil' : {", ": 333, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_coordinate_frame.txt', }, 'GL_EXT_copy_image' : { 'esnumber' :", "{ 'public' }, 'supporters' : { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_detail_texture.txt', },", "{ 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', }, 'GL_IGLOO_toggle_color_and_lightXXX' : { 'flags' :", ": { 'esnumber' : 219, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_elements_base_vertex.txt',", "}, 'url' : 
'extensions/ARB/ARB_vertex_array_object.txt', }, 'GL_ARB_vertex_attrib_64bit' : { 'arbnumber' : 99, 'flags' :", "}, 'GL_INTEL_conservative_rasterization' : { 'number' : 491, 'esnumber' : 265, 'flags' : {", "}, 'GL_EXT_compressed_ETC1_RGB8_sub_texture' : { 'esnumber' : 188, 'flags' : { 'public' }, 'url'", "'number' : 277, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", "}, 'GL_NV_vertex_program4' : { 'number' : 325, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/NV/NV_polygon_mode.txt', }, 'GL_NV_present_video' : { 'number' : 347,", "{ 'INTEL' }, 'url' : 'extensions/EXT/EXT_shader_integer_mix.txt', }, 'GL_EXT_shader_io_blocks' : { 'esnumber' : 180,", "}, 'url' : 'extensions/ARB/ARB_parallel_shader_compile.txt', }, 'GL_ARB_pipeline_statistics_query' : { 'arbnumber' : 171, 'flags' :", "'extensions/ARB/ARB_texture_buffer_range.txt', }, 'GL_ARB_texture_compression' : { 'arbnumber' : 12, 'flags' : { 'public' },", "'GL_IBM_multimode_draw_arrays' : { 'number' : 200, 'flags' : { 'public' }, 'supporters' :", "'esnumber' : 150, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_disjoint_timer_query.txt', }, 'GL_EXT_draw_buffers'", ": { 'public' }, 'url' : 'extensions/NV/NV_texture_border_clamp.txt', }, 'GL_NV_texture_compression_latc' : { 'esnumber' :", ": { 'number' : 283, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/EXT/EXT_fragment_lighting.txt', }, 'GL_EXT_framebuffer_blit' : { 'number' : 316, 'flags' : { 'public'", "'url' : 'extensions/OES/OES_texture_cube_map_array.txt', }, 'GL_OES_texture_env_crossbar' : { 'esnumber' : 21, 'flags' : {", "}, 'GL_EXT_texture_sRGB_R8' : { 'esnumber' : 221, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/SGIX/GLX_SGIX_visual_select_group.txt', }, 'GLX_SGIX_wait_group' : { 'flags' : { 'incomplete' }, 'url'", "'extensions/ARB/ARB_ES3_compatibility.txt', }, 'GL_ARB_arrays_of_arrays' : { 'arbnumber' : 120, 'flags' : { 'public' },", "35, 'flags' : { 'public' }, 'url' : 
'extensions/OES/OES_texture_float_linear.txt', 'alias' : { 'GL_OES_texture_half_float_linear'", "'GL_ARB_multitexture' : { 'arbnumber' : 1, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/HP/HP_occlusion_test.txt', }, 'GL_HP_texture_lighting' : { 'number' : 111, 'flags' : { 'public'", "}, 'GL_SGIS_texture_border_clamp' : { 'number' : 36, 'flags' : { 'public' }, 'supporters'", "210, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'extensions/EXT/EXT_multisample_compatibility.txt', }, 'GL_EXT_multisampled_render_to_texture' : { 'esnumber' : 106, 'flags' : { 'public' },", ": 290, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "{ 'esnumber' : 27, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fbo_render_mipmap.txt', },", "}, 'url' : 'extensions/ARB/ARB_texture_buffer_range.txt', }, 'GL_ARB_texture_compression' : { 'arbnumber' : 12, 'flags' :", "'3DFX' }, 'url' : 'extensions/3DFX/3DFX_texture_compression_FXT1.txt', }, 'GL_AMD_blend_minmax_factor' : { 'number' : 404, 'flags'", "}, 'GL_ARB_fragment_layer_viewport' : { 'arbnumber' : 129, 'flags' : { 'public' }, 'url'", "'GL_SGIX_occlusion_instrument' : { 'number' : 151, 'flags' : { 'incomplete' }, 'supporters' :", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', }, 'GL_AMD_shader_atomic_counter_ops' : { 'number' : 435,", ": { 'esnumber' : 209, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_buffers_indexed.txt',", ": 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', }, 'GL_SGIX_nurbs_eval' : { 'flags' : { 'incomplete' }, 'url' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_interlace.txt', }, 'GL_SGIX_ir_instrument1' : { 'number'", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_viewport_index.txt', },", "'GL_EXT_render_snorm' : { 'esnumber' : 206, 'flags' : { 'public' }, 'url' :", "{ 'number' : 303, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'url' : 
'extensions/NV/NV_video_capture.txt', 'alias' : { 'GLX_NV_video_capture', 'WGL_NV_video_capture' }, }, 'GLX_NV_video_out' : {", ": { 'number' : 486, 'esnumber' : 295, 'flags' : { 'public' },", ": 65, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cube_map.txt', }, 'GL_ARB_seamless_cubemap_per_texture' :", ": 'extensions/ARB/ARB_gpu_shader_int64.txt', }, 'GL_ARB_half_float_pixel' : { 'arbnumber' : 40, 'flags' : { 'public'", "{ 'arbnumber' : 52, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "{ 'incomplete', 'public' }, 'url' : 'extensions/SGIS/SGIS_texture_color_mask.txt', }, 'GL_SGIS_texture_edge_clamp' : { 'number' :", "159, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_explicit_attrib_location.txt', }, 'GL_NV_explicit_multisample' : {", "'number' : 78, 'flags' : { 'public' }, 'supporters' : { 'MS', 'SGI'", "{ 'public' }, 'url' : 'extensions/ARM/ARM_mali_program_binary.txt', }, 'GL_ARM_mali_shader_binary' : { 'esnumber' : 81,", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_underestimation.txt', }, 'GLX_NV_copy_buffer' : { 'number'", ": 'extensions/ARB/ARB_multisample.txt', 'alias' : { 'GLX_ARB_multisample', 'WGL_ARB_multisample' }, }, 'GL_ARB_multitexture' : { 'arbnumber'", "{ 'public' }, 'url' : 'extensions/IBM/IBM_static_data.txt', }, 'GL_IBM_texture_mirrored_repeat' : { 'number' : 224,", ": 376, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_rgtc.txt', }, 'GL_EXT_texture_compression_s3tc'", "85, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_timer_query.txt', }, 'GL_ARB_transform_feedback2' : {", "}, 'GL_NV_vertex_attrib_integer_64bit' : { 'number' : 392, 'flags' : { 'public' }, 'supporters'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader3.txt', }, 'GL_NV_transform_feedback' : { 'number' :", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_rgb_422.txt', }, 
'GL_APPLE_row_bytes'", "'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl' : 'extensions/EXT/EXT_separate_shader_objects.gles.txt', 'comments'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fence.txt', }, 'GL_NV_fill_rectangle' : { 'number'", ": 87, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_indirect.txt', }, 'GL_ARB_draw_instanced' :", "{ 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_array_object.txt', }, 'GL_ATI_vertex_attrib_array_object' : { 'number' : 290,", "{ 'DEC', 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_vertex_array.txt', }, 'GL_EXT_vertex_array_bgra'", "}, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_color.txt',", "'public' }, 'url' : 'extensions/NV/NV_conservative_raster_underestimation.txt', }, 'GLX_NV_copy_buffer' : { 'number' : 457, 'flags'", "}, 'url' : 'extensions/SGIX/SGIX_framezoom.txt', }, 'GLX_SGIX_hyperpipe' : { 'number' : 307, 'flags' :", "71, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_array_object.txt', }, 'GL_OES_vertex_half_float' : {", "'extensions/NV/NV_gpu_multicast.txt', }, 'GL_NV_gpu_program4' : { 'number' : 322, 'flags' : { 'public' },", ": { 'number' : 370, 'flags' : { 'public' }, 'supporters' : {", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_lod.txt', }, 'GL_SGIX_pixel_tiles' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_packed_float.txt', 'alias' : { 'GLX_EXT_fbconfig_packed_float',", "'arbnumber' : 11, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": 166, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_image_samples.txt', }, 'GL_ARB_shader_texture_lod' :", "}, 'url' : 'extensions/NV/WGL_NV_render_depth_texture.txt', }, 'WGL_NV_render_texture_rectangle' : { 'number' : 264, 'flags' :", "{ 'number' : 168, 'flags' : { 'public' }, 'supporters' : { 'INGR',", "}, 
'GL_EXT_conservative_depth' : { 'esnumber' : 268, 'flags' : { 'public' }, 'url'", "'extensions/AMD/AMD_shader_stencil_value_export.txt', }, 'GL_AMD_shader_trinary_minmax' : { 'number' : 428, 'flags' : { 'public' },", "'url' : 'extensions/ARB/ARB_texture_compression_rgtc.txt', }, 'GL_ARB_texture_cube_map' : { 'arbnumber' : 7, 'flags' : {", ": { 'public' }, 'url' : 'extensions/OES/OES_rgb8_rgba8.txt', }, 'GL_OES_sample_shading' : { 'esnumber' :", "}, 'GL_EXT_render_snorm' : { 'esnumber' : 206, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_bindless_texture.txt', }, 'GL_ARB_blend_func_extended' : { 'arbnumber'", "'url' : 'extensions/ARB/ARB_multi_draw_indirect.txt', }, 'GL_ARB_multisample' : { 'arbnumber' : 5, 'flags' : {", "'public' }, 'url' : 'extensions/AMD/AMD_transform_feedback4.txt', }, 'GL_AMD_vertex_shader_layer' : { 'number' : 417, 'flags'", ": { 'number' : 194, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLU_SGI_filter4_parameters.txt', }, 'GLX_SGI_make_current_read' :", ": 'extensions/APPLE/APPLE_sync.txt', }, 'GL_APPLE_texture_2D_limited_npot' : { 'esnumber' : 59, 'flags' : { 'public'", "'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_genlock.txt', }, 'WGL_I3D_image_buffer' : { 'number' : 253, 'flags'", "{ 'public' }, 'url' : 'extensions/AMD/AMD_program_binary_Z400.txt', }, 'GL_AMD_query_buffer_object' : { 'number' : 420,", "by some other vendor extensions, but shipped as ARB_transform_feedback2.', }, 'GL_EXT_unpack_subimage' : {", "44, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'SGI' },", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragments_instrument.txt', }, 'GL_SGIX_framezoom' : { 'number'", "151, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "'public' }, 'url' : 'extensions/ARB/ARB_get_program_binary.txt', }, 'GL_ARB_get_texture_sub_image' : { 'arbnumber' : 165, 'flags'", "'flags' : { 'public' }, 'supporters' : { 
'SGI' }, 'url' : 'extensions/SGIX/SGIX_ir_instrument1.txt',", "'WGL_NV_float_buffer' }, }, 'GL_NV_fog_distance' : { 'number' : 192, 'flags' : { 'public'", ": 'extensions/SGIX/SGIX_fog_blend.txt', }, 'GL_SGIX_fog_factor_to_alpha' : { 'flags' : { 'incomplete' }, 'url' :", "}, 'WGL_OML_sync_control' : { 'number' : 242, 'flags' : { 'public' }, 'supporters'", "{ 'arbnumber' : 159, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_1_compatibility.txt', },", ": 153, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compute_variable_group_size.txt', }, 'GL_ARB_conditional_render_inverted' :", "'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_direct_state_access.txt', }, 'GL_EXT_discard_framebuffer' : { 'esnumber' :", "}, 'url' : 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', }, 'GL_SGIX_complex_polar' : { 'flags' : { 'incomplete' },", ": { 'arbnumber' : 72, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_gather.txt',", ": 'extensions/EXT/EXT_texture.txt', }, 'GL_EXT_texture3D' : { 'number' : 6, 'flags' : { 'public'", ": { 'number' : 95, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_3D.txt', }, 'GL_OES_texture_border_clamp' : { 'esnumber' :", "401, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'GL_ARB_draw_buffers_blend' : { 'arbnumber' : 69, 'flags' : { 'public' }, 'url'", "242, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt',", ": { 'GL_SGIX_color_type' }, }, 'GLX_SGIX_color_typeXXX' : { 'number' : 72, 'flags' :", "'extensions/EXT/EXT_shadow_samplers.txt', }, 'GL_EXT_shared_texture_palette' : { 'number' : 141, 'flags' : { 'public' },", ": 'extensions/ARB/ARB_conditional_render_inverted.txt', }, 'GL_ARB_conservative_depth' : { 'arbnumber' : 111, 'flags' : { 'public'", ": 'extensions/SGIX/SGIX_slim.txt', }, 'GL_SGIX_spotlight_cutoff' 
: { 'number' : 131, 'flags' : { 'incomplete'", "'GL_ARB_texture_query_lod' : { 'arbnumber' : 73, 'flags' : { 'public' }, 'url' :", ": { 'arbnumber' : 100, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_viewport_array.txt',", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_barrier.txt', }, 'GL_NV_texture_border_clamp' : { 'esnumber' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_swap_control.txt', },", "'number' : 446, 'flags' : { 'public' }, 'url' : 'extensions/MESA/GLX_MESA_query_renderer.txt', }, 'GLX_MESA_release_buffers'", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_object_purgeable.txt', }, 'GL_APPLE_rgb_422' : {", "{ 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/WIN/WIN_specular_fog.txt', }, 'WGL_NV_DX_interop'", ": { 'arbnumber' : 131, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query2.txt',", "'extensions/NVX/NVX_linked_gpu_multicast.txt', }, 'GL_NV_3dvision_settings' : { 'esnumber' : 129, 'flags' : { 'public' },", "'url' : 'extensions/EXT/EXT_occlusion_query_boolean.txt', }, 'GL_EXT_packed_depth_stencil' : { 'number' : 312, 'flags' : {", "{ 'public' }, 'url' : 'extensions/ARB/ARB_indirect_parameters.txt', }, 'GL_ARB_instanced_arrays' : { 'arbnumber' : 49,", "103, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_rg.txt', }, 'GL_EXT_texture_sRGB' : {", "102, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shadow_samplers.txt', }, 'GL_EXT_shared_texture_palette' : {", "{ 'number' : 324, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'GL_NV_fragment_coverage_to_color' : { 'number' : 467, 'esnumber' : 229, 'flags' : { 'public'", "'number' : 208, 'flags' : { 'public' }, 'supporters' : { '3DFX' },", "'SUN' }, 'url' : 'extensions/EXT/EXT_blend_color.txt', }, 'GL_EXT_blend_equation_separate' : { 'number' : 299, 'flags'", ": 299, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_spotlight_cutoff.txt', 
}, 'GL_SGIX_sprite' : { 'number' : 52,", "'extensions/NV/NV_shader_atomic_float64.txt', }, 'GL_NV_shader_atomic_fp16_vector' : { 'number' : 474, 'esnumber' : 261, 'flags' :", "}, 'url' : 'extensions/NV/WGL_NV_DX_interop.txt', }, 'WGL_NV_DX_interop2' : { 'number' : 412, 'flags' :", "'extensions/SGIS/SGIS_point_line_texgen.txt', }, 'GL_SGIS_shared_multisample' : { 'number' : 143, 'flags' : { 'incomplete' },", "{ 'number' : 401, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "103, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cl_event.txt', }, 'GL_ARB_clear_buffer_object' : {", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program.txt', }, 'GL_ARB_fragment_program_shadow' :", "'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_debug.txt', }, 'GL_KHR_no_error' : { 'arbnumber'", "'number' : 217, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_polygon_offset_clamp.txt', },", "'GL_EXT_multisampled_render_to_texture' : { 'esnumber' : 106, 'flags' : { 'public' }, 'url' :", "{ 'HP', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_generate_mipmap.txt', }, 'GL_SGIS_line_texgen' : { 'flags' :", ": { 'esnumber' : 114, 'flags' : { 'public' }, 'url' : 'extensions/FJ/FJ_shader_binary_GCCSO.txt',", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cube_map.txt', }, 'GL_SGIX_cylinder_texgen' : {", "'number' : 449, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_image_load_formatted.txt', }, 'GL_EXT_shader_image_load_store'", "'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_swap_control.txt', }, 'GL_SGI_texture_color_table' : { 'number' : 17, 'flags'", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rectangle.txt', }, 'GL_ARB_texture_rg' : { 'arbnumber' : 53, 'flags'", "}, 'url' : 'extensions/MESA/MESA_window_pos.txt', }, 'GL_MESA_ycbcr_texture' : { 'number' : 301, 'flags' :", "'extensions/NV/NV_vertex_program1_1.txt', }, 'GL_NV_vertex_program2' : { 'number' : 287, 
'flags' : { 'public' },", "'public' }, 'url' : 'extensions/OES/OES_texture_cube_map_array.txt', }, 'GL_OES_texture_env_crossbar' : { 'esnumber' : 21, 'flags'", "'url' : 'extensions/SGI/GLX_SGI_swap_control.txt', }, 'GL_SGI_texture_color_table' : { 'number' : 17, 'flags' : {", "'extensions/EXT/EXT_x11_sync_object.txt', }, 'GL_FJ_shader_binary_GCCSO' : { 'esnumber' : 114, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_resample.txt', },", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', }, 'GL_SGIX_nurbs_eval' : { 'flags' : { 'incomplete'", ": { 'public' }, 'url' : 'extensions/KHR/KHR_robust_buffer_access_behavior.txt', }, 'GL_KHR_robustness' : { 'arbnumber' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_point_line_texgen.txt', }, 'GL_SGIS_shared_multisample' : { 'number'", ": 'extensions/ARB/ARB_fragment_shader_interlock.txt', }, 'GL_ARB_framebuffer_no_attachments' : { 'arbnumber' : 130, 'flags' : { 'public'", "}, 'GL_MESA_tile_raster_order' : { 'number' : 515, 'esnumber' : 292, 'flags' : {", ": 'extensions/ARB/ARB_texture_filter_minmax.txt', }, 'GL_ARB_texture_float' : { 'arbnumber' : 41, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_parallel_shader_compile.txt', }, 'GL_ARB_pipeline_statistics_query' : { 'arbnumber' : 171, 'flags'", "{ 'esnumber' : 132, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_pack_subimage.txt', },", "'arbnumber' : 33, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "{ 'public' }, 'url' : 'extensions/OES/OES_element_index_uint.txt', }, 'GL_OES_extended_matrix_palette' : { 'esnumber' : 8,", "'GL_EXT_sparse_texture2' : { 'number' : 463, 'esnumber' : 259, 'flags' : { 'public'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_multisample.txt', }, 'GL_NV_texture_npot_2D_mipmap' :", "'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_genlock.txt',", "{ 'arbnumber' : 193, 
'flags' : { 'public' }, 'supporters' : { 'ARB'", "'GL_SUN_slice_accum' : { 'number' : 258, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 393, 'flags' : { 'public' }, 'supporters' : {", "260, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/S3/S3_s3tc.txt',", ": { 'public' }, 'url' : 'extensions/ARB/ARB_debug_output.txt', }, 'GL_ARB_depth_buffer_float' : { 'arbnumber' :", "{ 'public' }, 'url' : 'extensions/EXT/GLX_EXT_libglvnd.txt', }, 'GL_EXT_light_texture' : { 'number' : 117,", "'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', }, 'GLU_EXT_object_space_tess' : { 'number' : 75, 'flags' : { 'public' },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragments_instrument.txt', }, 'GL_SGIX_framezoom' : {", ": { 'arbnumber' : 68, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_array_bgra.txt',", ": { 'arbnumber' : 6, 'flags' : { 'public' }, 'supporters' : {", ": 203, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_base_instance.txt', }, 'GL_EXT_bgra' :", "'arbnumber' : 145, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_texture.txt', }, 'GL_ARB_clip_control'", ": 'extensions/KHR/KHR_context_flush_control.txt', 'alias' : { 'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control' }, }, 'GL_KHR_debug' : { 'arbnumber'", ": 142, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias' : {", "'supporters' : { 'MESA' }, 'url' : 'extensions/SGIX/SGIX_igloo_interface.txt', }, 'GL_SGIX_image_compression' : { 'flags'", "'url' : 'extensions/IBM/IBM_texture_mirrored_repeat.txt', }, 'GL_IBM_vertex_array_lists' : { 'number' : 201, 'flags' : {", "'number' : 413, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", ": 'extensions/EXT/EXT_texture_array.txt', }, 'GL_EXT_texture_border_clamp' : { 'esnumber' : 182, 'flags' : { 'public'", "'number' : 350, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' 
},", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_sprite.txt', }, 'GL_OES_primitive_bounding_box' : { 'esnumber'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pbuffer.txt', }, 'WGL_ARB_pixel_format' : {", ": { 'arbnumber' : 150, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_stencil8.txt',", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_supersample.txt', }, 'GL_SGIX_vector_ops' : { 'flags' : { 'incomplete'", "'extensions/ARB/ARB_base_instance.txt', }, 'GL_ARB_bindless_texture' : { 'arbnumber' : 152, 'flags' : { 'public' },", ": { 'arbnumber' : 152, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_bindless_texture.txt',", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_elements_base_vertex.txt', }, 'GL_ARB_draw_indirect' : { 'arbnumber'", "}, 'url' : 'extensions/ARB/ARB_texture_query_levels.txt', }, 'GL_ARB_texture_query_lod' : { 'arbnumber' : 73, 'flags' :", ": 298, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'number' : 69, 'flags' : { 'public' }, 'supporters' : {", "'GL_AMD_stencil_operation_extended' : { 'number' : 413, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/ARB/ARB_create_context_no_error.txt', 'comments' : 'Shares extension spec with WGL_ARB_create_context_no_error.', 'alias' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_map_buffer_range.txt', }, 'GL_EXT_misc_attribute' : { 'number' : 31,", "'number' : 336, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_path_rendering.txt', }, 'GL_NV_path_rendering_shared_edge' : { 'number'", "}, 'url' : 'extensions/ARB/ARB_texture_cube_map_array.txt', }, 'GL_ARB_texture_env_add' : { 'arbnumber' : 6, 'flags' :", "'GL_ARB_texture_float' : { 'arbnumber' : 41, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_explicit_multisample.txt', },", "'GL_EXT_texture_compression_s3tc' : { 'number' : 
198, 'esnumber' : 154, 'flags' : { 'public'", ": 494, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias' : { 'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB'", "'url' : 'extensions/SGIX/SGIX_mpeg2.txt', }, 'GL_SGIX_nonlinear_lighting_pervertex' : { 'flags' : { 'incomplete' }, 'url'", "'number' : 254, 'flags' : { 'public' }, 'supporters' : { 'I3D' },", "}, 'url' : 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', }, 'GL_ARB_compute_shader' : { 'arbnumber' : 122, 'flags' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_integer.txt', },", "'url' : 'extensions/SGI/GLX_SGI_transparent_pixel.txt', }, 'GLX_SGI_video_sync' : { 'number' : 41, 'flags' : {", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_x11_sync_object.txt', }, 'GL_FJ_shader_binary_GCCSO' : { 'esnumber' :", "265, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'GLX_EXT_create_context_es_profile' }, }, 'GL_EXT_cull_vertex' : { 'number' : 98, 'flags' :", ": 253, 'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_buffer_object.txt', }, 'GL_EXT_texture_compression_astc_decode_mode' : { 'esnumber' :", "{ 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/EXT/EXT_transform_feedback2.txt', 'comments' : 'Draft", ": 82, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_rgba8.txt', }, 'GL_ARM_shader_framebuffer_fetch' :", "'alias' : { 'GL_EXT_semaphore' }, }, 'GL_EXT_memory_object_fd' : { 'number' : 504, 'esnumber'", "}, 'GL_SGIX_calligraphic_fragment' : { 'number' : 82, 'flags' : { 'incomplete' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/NV/NV_3dvision_settings.txt', }, 'GL_NV_EGL_stream_consumer_external' : { 'esnumber' :", "'number' : 454, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_framebuffer_sample_positions.txt', }, 'GL_AMD_gcn_shader'", ": { 
'GLX_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_robustness_isolation' : { 'arbnumber' : 126, 'flags' :", "{ 'arbnumber' : 98, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_precision.txt', },", "'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform_color_table.txt', }, 'GL_EXT_point_parameters' : {", "99, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'GL_NV_bindless_multi_draw_indirect_count' : { 'number' : 456, 'flags' : { 'public' }, 'supporters'", "'number' : 368, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "{ 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_conservative_rasterization.txt', }, 'GL_INTEL_fragment_shader_ordering'", "{ 'public' }, 'url' : 'extensions/NV/NV_framebuffer_mixed_samples.txt', }, 'GL_NV_framebuffer_multisample' : { 'esnumber' : 143,", "'url' : 'extensions/EXT/EXT_clip_control.txt', }, 'WGL_I3D_digital_video_control' : { 'number' : 250, 'flags' : {", "{ 'number' : 206, 'flags' : { 'public' }, 'supporters' : { '3DFX'", "'url' : 'extensions/ARB/ARB_vertex_blend.txt', }, 'GL_ARB_vertex_buffer_object' : { 'arbnumber' : 28, 'flags' : {", ": 222, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_YUV_target.txt', }, 'GL_EXT_abgr' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture.txt', }, 'GL_ARB_sparse_texture2' : { 'arbnumber'", "'GL_KHR_blend_equation_advanced_coherent' }, }, 'GL_KHR_context_flush_control' : { 'arbnumber' : 168, 'esnumber' : 191, 'flags'", "'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_swap_control.txt', }, 'GL_MESA_tile_raster_order' : { 'number' : 515, 'esnumber'", "{ 'arbnumber' : 152, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_bindless_texture.txt', },", ": 'extensions/SGIX/GLX_SGIX_dm_buffer.txt', }, 'GL_SGIX_dvc' : { 'flags' : { 'incomplete' }, 'url' :", "}, 'GL_NV_texture_compression_latc' : { 'esnumber' : 130, 'flags' : { 'public' }, 'url'", "}, 'GL_EXT_fragment_lighting' : { 'number' : 102, 
'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_scene_marker.txt', 'alias' : { 'GLX_EXT_scene_marker' },", ": { 'arbnumber' : 14, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias' : { 'GLX_ARB_robustness_share_group_isolation' }, },", "'GL_NV_video_capture' : { 'number' : 374, 'flags' : { 'public' }, 'supporters' :", "147, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_cube.txt', }, 'GL_NV_stereo_view_rendering' : {", ": 77, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_compression_bptc.txt', }, 'GL_ARB_texture_compression_rgtc' :", "214, 'flags' : { 'incomplete', 'public' }, 'url' : 'extensions/SGIS/SGIS_texture_color_mask.txt', }, 'GL_SGIS_texture_edge_clamp' :", "'INGR' }, 'url' : 'extensions/INGR/INGR_interlace_read.txt', }, 'GL_INTEL_conservative_rasterization' : { 'number' : 491, 'esnumber'", "31, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil1.txt', }, 'GL_OES_stencil4' : {", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_supersample.txt', }, 'GL_SGIX_vector_ops' : {", "{ 'arbnumber' : 64, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_provoking_vertex.txt', },", "'GLX_OML_sync_control' : { 'number' : 238, 'flags' : { 'public' }, 'supporters' :", "166, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', }, 'GL_ATI_draw_buffers' : {", ": { 'public' }, 'url' : 'extensions/EXT/EXT_discard_framebuffer.txt', }, 'GL_EXT_disjoint_timer_query' : { 'esnumber' :", "{ 'number' : 20, 'flags' : { 'public' }, 'supporters' : { 'IBM',", ": 'extensions/NV/NV_vertex_program2_option.txt', }, 'GL_NV_vertex_program3' : { 'number' : 306, 'flags' : { 'public'", ": 'extensions/NV/NV_robustness_video_memory_purge.txt', }, 'GL_NV_sRGB_formats' : { 'esnumber' : 148, 'flags' : { 'public'", ": 'extensions/EXT/EXT_protected_textures.txt', }, 
'GL_EXT_provoking_vertex' : { 'number' : 364, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', }, 'GL_AMD_shader_image_load_store_lod' : { 'number' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_histogram.txt', }, 'GL_SGIX_async_pixel' : { 'number' :", "'extensions/NV/NV_non_square_matrices.txt', }, 'GL_NV_occlusion_query' : { 'number' : 261, 'flags' : { 'public' },", "4, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'esnumber' : 201, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_noperspective_interpolation.txt', },", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_weighting.txt', }, 'GLX_EXT_visual_info' : { 'number' : 28, 'flags'", "}, 'GL_SGIX_vertex_array_object' : { 'flags' : { 'obsolete' }, 'url' : 'extensions/SGIX/SGIX_vertex_array_object.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_coord_conventions.txt', }, 'GL_ARB_fragment_layer_viewport' : { 'arbnumber'", "'number' : 444, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_shader_stencil_value_export.txt', }, 'GL_AMD_shader_trinary_minmax'", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sample_positions.txt', },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', }, 'GL_NV_bindless_texture' : {", "{ 'esnumber' : 128, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc.txt', },", "'url' : 'extensions/OES/OES_fragment_precision_high.txt', }, 'GL_OES_framebuffer_object' : { 'esnumber' : 10, 'flags' : {", "275, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture2.txt', }, 'GL_EXT_multiview_draw_buffers' : {", "}, 'url' : 'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', }, 'GL_QCOM_tiled_rendering' : { 'esnumber' : 70, 'flags' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_flush_raster.txt', }, 'GL_SGIX_fog_blend' :", 
"'arbnumber' : 133, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_draw_indirect.txt', }, 'GL_ARB_multisample'", "'extensions/ARB/ARB_texture_stencil8.txt', }, 'GL_ARB_texture_storage' : { 'arbnumber' : 117, 'flags' : { 'public' },", "'extensions/NV/NV_viewport_array2.txt', }, 'GL_NV_viewport_swizzle' : { 'number' : 483, 'esnumber' : 258, 'flags' :", "'arbnumber' : 161, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conditional_render_inverted.txt', }, 'GL_ARB_conservative_depth'", "60, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'QCOM' }, 'url' : 'extensions/QCOM/QCOM_tiled_rendering.txt', }, 'GL_QCOM_writeonly_rendering' : { 'esnumber' : 61,", "}, 'GLX_SGIX_color_type' : { 'number' : 89, 'flags' : { 'incomplete' }, 'supporters'", "}, 'GL_EXT_primitive_bounding_box' : { 'esnumber' : 186, 'flags' : { 'public' }, 'url'", "510, 'esnumber' : 285, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'number' : 145, 'flags' : { 'public' }, 'supporters' : { '3DFX',", "'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_streams.txt', }, 'GL_DMP_program_binary' : { 'esnumber' : 192, 'flags'", "'url' : 'extensions/ARB/ARB_sample_locations.txt', }, 'GL_ARB_sample_shading' : { 'arbnumber' : 70, 'flags' : {", "'GL_SGIX_igloo_interface' : { 'number' : 219, 'flags' : { 'incomplete' }, 'supporters' :", "'arbnumber' : 97, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_separate_shader_objects.txt', }, 'GL_ARB_shader_atomic_counter_ops'", "{ 'number' : 173, 'flags' : { 'public' }, 'supporters' : { 'IBM',", "'url' : 'extensions/AMD/AMD_query_buffer_object.txt', }, 'GL_AMD_sample_positions' : { 'number' : 405, 'flags' : {", ": 'extensions/EXT/EXT_external_objects_fd.txt', 'alias' : { 'GL_EXT_semaphore_fd' }, }, 'GL_EXT_memory_object_win32' : { 'number' :", "{ 'REND' }, 'url' : 'extensions/REND/REND_screen_coordinates.txt', }, 'GL_S3_s3tc' : { 'number' : 276,", "{ 'number' : 359, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "'public' }, 'url' : 
'extensions/EXT/EXT_multisampled_render_to_texture2.txt', }, 'GL_EXT_multiview_draw_buffers' : { 'esnumber' : 125, 'flags'", "'IBM' }, 'url' : 'extensions/IBM/IBM_vertex_array_lists.txt', }, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' : { 'flags' : { 'incomplete',", "'extensions/AMD/AMD_shader_image_load_store_lod.txt', }, 'GL_AMD_shader_stencil_export' : { 'number' : 382, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_program.txt', }, 'GL_ARB_vertex_shader'", "'GL_OES_draw_buffers_indexed' : { 'esnumber' : 209, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'url' : 'extensions/IBM/IBM_texture_mirrored_repeat.txt', }, 'GL_IBM_vertex_array_lists' : { 'number' :", "'GL_ARB_framebuffer_no_attachments' : { 'arbnumber' : 130, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'url' : 'extensions/OES/OES_blend_subtract.txt', }, 'GL_OES_byte_coordinates' : { 'number' :", ": { 'arbnumber' : 4, 'flags' : { 'public' }, 'supporters' : {", "'esnumber' : 42, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', }, 'GL_EXT_texture_view'", "{ 'number' : 404, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "}, 'url' : 'extensions/ARB/ARB_sparse_texture.txt', }, 'GL_ARB_sparse_texture2' : { 'arbnumber' : 186, 'flags' :", ": 'extensions/EXT/EXT_light_texture.txt', }, 'GL_EXT_map_buffer_range' : { 'esnumber' : 121, 'flags' : { 'public'", ": 'extensions/OES/OES_matrix_get.txt', }, 'GL_OES_matrix_palette' : { 'esnumber' : 12, 'flags' : { 'public'", "'AMD' }, 'url' : 'extensions/AMD/AMD_conservative_depth.txt', }, 'GL_AMD_debug_output' : { 'number' : 395, 'flags'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_group.txt', }, 'GL_SGIX_tag_sample_buffer' : {", ": 'extensions/EXT/EXT_tessellation_shader.txt', 'alias' : { 'GL_EXT_tessellation_point_size' }, }, 'GL_EXT_texenv_op' : { 'flags' :", "39, 'flags' : { 'public' }, 'url' : 
'extensions/AMD/AMD_compressed_3DC_texture.txt', }, 'GL_AMD_compressed_ATC_texture' : {", "'url' : 'extensions/EXT/EXT_pixel_transform_color_table.txt', }, 'GL_EXT_point_parameters' : { 'number' : 54, 'flags' : {", "}, 'GL_EXT_vertex_array' : { 'number' : 30, 'flags' : { 'public' }, 'supporters'", "430, 'esnumber' : 126, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "116, 'flags' : { 'obsolete' }, 'supporters' : { 'SGI' }, 'url' :", "'GL_SGIX_depth_texture' : { 'number' : 63, 'flags' : { 'public' }, 'supporters' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_linked_gpu_multicast.txt', }, 'GL_NV_3dvision_settings' : {", "'extensions/ARB/ARB_texture_query_levels.txt', }, 'GL_ARB_texture_query_lod' : { 'arbnumber' : 73, 'flags' : { 'public' },", "156, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_coordinate_frame.txt', }, 'GL_EXT_copy_image' : {", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_3D.txt', }, 'GL_OES_texture_border_clamp' : { 'esnumber'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', }, 'GL_SGIX_texture_lod_bias' : { 'number' :", "to ARB_texture_cube_map.', }, 'GL_EXT_texture_cube_map_array' : { 'esnumber' : 184, 'flags' : { 'public'", "}, 'url' : 'extensions/SGIX/GLX_SGIX_pbuffer.txt', }, 'GL_SGIX_pixel_texture' : { 'number' : 499, 'flags' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_include.txt', }, 'GL_ARB_shading_language_packing' : { 'arbnumber' : 116,", "}, 'url' : 'extensions/EXT/EXT_pixel_transform.txt', }, 'GL_EXT_pixel_transform_color_table' : { 'number' : 139, 'flags' :", "'public' }, 'url' : 'extensions/VIV/VIV_shader_binary.txt', }, 'WGL_3DL_stereo_control' : { 'number' : 313, 'flags'", "273, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "'extensions/ARB/ARB_texture_gather.txt', }, 'GL_ARB_texture_mirror_clamp_to_edge' : { 'arbnumber' : 149, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/NV/NV_fence.txt', }, 'GL_NV_fill_rectangle' :", "}, }, 'WGL_ATI_pixel_format_float' : { 'number' : 278, 'flags' : { 'public' },", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_env_combine3.txt', }, 'GL_ATI_texture_float' : { 'number'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_resample.txt', }, 'GL_SGIX_scalebias_hint'", "'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', }, 'GL_APPLE_texture_format_BGRA8888' : { 'esnumber'", "{ 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_gamma.txt', }, 'WGL_I3D_genlock' : { 'number' : 252,", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program3.txt', }, 'GL_NV_vertex_program4' : { 'number'", "}, 'GL_WIN_phong_shading' : { 'number' : 113, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/NV/NV_fragment_program_option.txt', }, 'GL_NV_fragment_shader_interlock' : { 'number' : 468, 'esnumber' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle.txt', },", ": 492, 'esnumber' : 266, 'flags' : { 'public' }, 'supporters' : {", ": { 'MESA' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float.txt', }, 'GL_AMD_gpu_shader_half_float_fetch' : { 'number' :", "'GL_ARB_shader_group_vote' : { 'arbnumber' : 157, 'flags' : { 'public' }, 'url' :", "}, 'GL_ARB_shader_storage_buffer_object' : { 'arbnumber' : 137, 'flags' : { 'public' }, 'url'", "{ 'number' : 189, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'number' : 128, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "'GL_ARB_transform_feedback2' : { 'arbnumber' : 93, 'flags' : { 'public' }, 'url' :", ": { 'MESA' }, 'url' : 'extensions/AMD/AMD_shader_ballot.txt', }, 'GL_AMD_shader_explicit_vertex_parameter' : { 'number' :", "'url' : 'extensions/ARB/ARB_multitexture.txt', }, 'GL_ARB_occlusion_query' : { 'arbnumber' : 29, 'flags' : {", "'GL_ANGLE_pack_reverse_row_order' : { 'esnumber' : 110, 'flags' : { 'public' }, 'url' :", 
"}, 'supporters' : { 'ATI' }, 'url' : 'extensions/EXT/EXT_texture_env_dot3.txt', }, 'GL_EXT_texture_filter_anisotropic' : {", "'number' : 91, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'extensions/ARB/ARB_shader_storage_buffer_object.txt', }, 'GL_ARB_shader_subroutine' : { 'arbnumber' : 90, 'flags' : { 'public' },", ": 346, 'esnumber' : 198, 'flags' : { 'public' }, 'supporters' : {", ": { 'arbnumber' : 54, 'flags' : { 'public' }, 'supporters' : {", "{ 'IBM', 'INGR' }, 'url' : 'extensions/EXT/EXT_blend_func_separate.txt', }, 'GL_EXT_blend_logic_op' : { 'number' :", "'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_setXXX.txt', }, 'GL_EXT_vertex_attrib_64bit' : { 'number' : 387, 'flags'", "{ 'number' : 335, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'arbnumber' : 167, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_barrier.txt', }, 'GL_ARB_texture_border_clamp'", "'esnumber' : 40, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_ATC_texture.txt', }, 'GL_AMD_conservative_depth'", "'public' }, 'url' : 'extensions/OES/OES_fragment_precision_high.txt', }, 'GL_OES_framebuffer_object' : { 'esnumber' : 10, 'flags'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_latc.txt', }, 'GL_EXT_texture_compression_rgtc' : { 'number'", "'extensions/OES/OES_texture_view.txt', }, 'GL_OES_vertex_array_object' : { 'esnumber' : 71, 'flags' : { 'public' },", "'url' : 'extensions/ARB/WGL_ARB_buffer_region.txt', }, 'WGL_ARB_create_context' : { 'arbnumber' : 55, 'flags' : {", "}, 'WGL_NV_video_output' : { 'number' : 349, 'flags' : { 'public' }, 'supporters'", "240, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture.txt', }, 'GL_EXT_sparse_texture2' : {", "}, 'url' : 'extensions/ARB/ARB_shader_draw_parameters.txt', }, 'GL_ARB_shader_group_vote' : { 'arbnumber' : 157, 'flags' :", "}, 'GL_EXT_sRGB' : { 'esnumber' : 105, 'flags' : { 'public' }, 'url'", "{ 'number' : 133, 'flags' : { 'incomplete', 'public' 
}, 'supporters' : {", ": 'extensions/ARB/ARB_explicit_attrib_location.txt', }, 'GL_ARB_explicit_uniform_location' : { 'arbnumber' : 128, 'flags' : { 'public'", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_quad_mesh.txt', }, 'GL_SGIX_reference_plane' : { 'number' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_vertex.txt',", "'GL_ARB_base_instance' : { 'arbnumber' : 107, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI', 'SUN' }, 'url' :", "'extensions/EXT/EXT_shader_integer_mix.txt', }, 'GL_EXT_shader_io_blocks' : { 'esnumber' : 180, 'flags' : { 'public' },", "}, 'url' : 'extensions/OES/OES_fragment_precision_high.txt', }, 'GL_OES_framebuffer_object' : { 'esnumber' : 10, 'flags' :", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_streams.txt', }, 'GL_DMP_program_binary' : {", "{ 'esnumber' : 37, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_npot.txt', },", ": { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_digital_video_control.txt', }, 'WGL_I3D_gamma' : { 'number' :", ": { 'arbnumber' : 190, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gl_spirv.txt',", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_win32.txt', 'alias' : { 'GL_EXT_semaphore_win32' },", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_pixmap_colormap.txt', },", ": { 'number' : 477, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_command_list.txt',", ": 366, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' },", ": { 'number' : 417, 'flags' : { 'public' }, 'supporters' : {", "'GL_NV_fbo_color_attachments' : { 'esnumber' : 92, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/NV/NV_register_combiners.txt', }, 'GL_NV_register_combiners2' : { 'number' : 227, 'flags' :", "}, 'GL_SGIX_texture_multi_buffer' : { 'number' : 53, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/EXT/GLX_EXT_swap_control_tear.txt', }, 'GL_EXT_tessellation_shader' : { 'esnumber'", "}, 'url' : 'extensions/SGIS/SGIS_generate_mipmap.txt', }, 'GL_SGIS_line_texgen' : { 'flags' : { 'incomplete' },", "'public' }, 'url' : 'extensions/OES/OES_texture_compression_astc.txt', }, 'GL_OES_texture_cube_map' : { 'esnumber' : 20, 'flags'", "{ 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_label.txt', }, 'GL_EXT_debug_marker' : { 'number' : 440,", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_element_array.txt',", "}, 'GLX_SGIX_pbuffer' : { 'number' : 50, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/EXT/EXT_texture_env_dot3.txt', }, 'GL_EXT_texture_filter_anisotropic' : { 'number' : 187, 'esnumber' : 41,", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_add_env.txt', }, 'GL_SGIX_texture_coordinate_clamp' : {", "'number' : 179, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'url' : 'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', }, 'GL_ARB_viewport_array' : { 'arbnumber' : 100, 'flags' : {", "}, 'url' : 'extensions/SGIS/GLX_SGIS_blended_overlay.txt', }, 'GL_SGIS_clip_band_hint' : { 'flags' : { 'incomplete' },", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_interlace.txt', },", ": 252, 'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_buffer_float.txt', }, 'GL_ARB_depth_clamp' : { 'arbnumber' :", "'url' : 'extensions/NV/NV_vertex_program2.txt', }, 'GL_NV_vertex_program2_option' : { 'number' : 305, 'flags' : {", "'esnumber' : 79, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', }, 'GL_APPLE_texture_max_level'", "}, 'GL_SUN_triangle_list' : { 'number' : 165, 'flags' : { 'public' }, 'supporters'", ": 184, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_clock.txt', }, 'GL_ARB_shader_draw_parameters' :", ": 445, 'flags' : { 'public' }, 'url' : 
'extensions/NV/GLX_NV_delay_before_swap.txt', }, 'GL_NV_depth_buffer_float' :", "'url' : 'extensions/SGIX/SGIX_color_table_index_mode.txt', }, 'GLX_SGIX_color_type' : { 'number' : 89, 'flags' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_bits.txt', }, 'GL_SGIX_pixel_texture_lod' : { 'number'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_scalebias_hint.txt', }, 'GL_SGIX_shadow' : {", "'flags' : { 'obsolete' }, 'url' : 'extensions/WIN/WIN_scene_markerXXX.txt', }, 'GL_WIN_specular_fog' : { 'number'", ": 'extensions/ARB/ARB_shader_atomic_counters.txt', }, 'GL_ARB_shader_ballot' : { 'arbnumber' : 183, 'flags' : { 'public'", "{ 'arbnumber' : 173, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_overflow_query.txt', },", "192, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'esnumber' : 269, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clear_texture.txt', }, 'GL_EXT_clip_cull_distance'", ": { 'esnumber' : 139, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_program_binary.txt',", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_add.txt', }, 'GL_ARB_texture_env_combine' : {", "'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', }, 'GL_EXT_texture_integer' : { 'number' : 343, 'flags'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture.txt', 'comments' : 'Previously shared extension number 15", "}, 'GL_NV_sample_mask_override_coverage' : { 'number' : 473, 'esnumber' : 236, 'flags' : {", "'url' : 'extensions/AMD/AMD_gcn_shader.txt', }, 'GLX_AMD_gpu_association' : { 'number' : 398, 'flags' : {", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_alpha_minmax.txt', }, 'GL_SGIX_blend_cadd' : { 'number' : 150,", ": { 'public' }, 'url' : 'extensions/NV/NV_explicit_attrib_location.txt', }, 'GL_NV_explicit_multisample' : { 'number' :", "'extensions/MESA/MESA_program_binary_formats.txt', }, 'GLX_MESA_query_renderer' : { 'number' : 446, 'flags' 
: { 'public' },", "90, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url'", "'number' : 337, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'flags' : { 'public' }, 'supporters' : { 'IBM', 'SGI' }, 'url' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_geometry_shader.txt', 'alias' : { 'GL_EXT_geometry_point_size' }, },", ": 12, 'flags' : { 'public' }, 'supporters' : { 'HP', 'KGC', 'SGI',", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_store.txt', }, 'GL_NV_shader_noperspective_interpolation'", "'url' : 'extensions/AMD/AMD_texture_texture4.txt', }, 'GL_AMD_transform_feedback3_lines_triangles' : { 'number' : 397, 'flags' : {", ": { 'esnumber' : 131, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_platform_binary.txt',", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_cadd.txt',", "{ 'number' : 497, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_map_buffer_range.txt', }, 'GL_EXT_misc_attribute' : { 'number'", ": 'extensions/NV/NV_shader_atomic_float64.txt', }, 'GL_NV_shader_atomic_fp16_vector' : { 'number' : 474, 'esnumber' : 261, 'flags'", "50, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "{ 'arbnumber' : 6, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource_tag.txt', },", ": { 'SUN' }, 'url' : 'extensions/SUN/SUN_slice_accum.txt', }, 'GL_SUN_triangle_list' : { 'number' :", "'extensions/INGR/INGR_color_clamp.txt', }, 'GL_INGR_interlace_read' : { 'number' : 175, 'flags' : { 'public' },", "'url' : 'extensions/SGIX/SGIX_texture_phase.txt', }, 'GL_SGIX_texture_range' : { 'number' : 181, 'flags' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_colorspace.txt', }, 'WGL_EXT_create_context_es2_profile' : { 'number'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/AMD/AMD_name_gen_delete.txt', }, 'GL_AMD_occlusion_query_event'", ": 'extensions/IBM/IBM_texture_mirrored_repeat.txt', }, 'GL_IBM_vertex_array_lists' : { 'number' : 201, 'flags' : { 'public'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize.txt', }, 'GLX_SGIX_video_resize_float' : { 'number'", ": { 'public' }, 'url' : 'extensions/OES/OES_copy_image.txt', }, 'GL_OES_depth24' : { 'esnumber' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_shader4.txt', },", "'GL_SGIX_spotlight_cutoff' : { 'number' : 131, 'flags' : { 'incomplete' }, 'supporters' :", "3, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_subtract.txt', }, 'GL_OES_byte_coordinates' : {", "'extensions/EXT/EXT_texture_sRGB.txt', }, 'GL_EXT_texture_sRGB_R8' : { 'esnumber' : 221, 'flags' : { 'public' },", "'extensions/OES/OES_texture_stencil8.txt', }, 'GL_OES_texture_storage_multisample_2d_array' : { 'esnumber' : 174, 'flags' : { 'public' },", "{ 'arbnumber' : 162, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cull_distance.txt', },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_compute_program5.txt', }, 'GL_NV_conditional_render' : { 'number' :", ": { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_multisample.txt', }, 'GL_3DFX_tbuffer' : { 'number' :", ": 'extensions/NV/NV_texture_npot_2D_mipmap.txt', }, 'GL_NV_texture_rectangle' : { 'number' : 229, 'flags' : { 'public'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_sprite.txt', }, 'GL_SGIX_subdiv_patch' : { 'flags' : {", ": { 'esnumber' : 153, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB_write_control.txt',", "'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_color_table.txt', 'comments' : 'Partial HP support.', },", "'url' : 'extensions/EXT/EXT_pixel_transform.txt', }, 'GL_EXT_pixel_transform_color_table' : { 'number' : 139, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multi_draw_indirect.txt', }, 'GL_EXT_multiple_textures' : { 'flags'", 
"'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_texture_range.txt', }, 'GL_APPLE_transform_hint' : { 'number'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_matrix_palette.txt', }, 'GL_ARB_multi_bind' : { 'arbnumber' : 147,", "'public' }, 'url' : 'extensions/EXT/GLX_EXT_stereo_tree.txt', }, 'GL_EXT_gpu_program_parameters' : { 'number' : 320, 'flags'", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_rgb_422.txt', }, 'GL_APPLE_row_bytes' : { 'number' : 372,", ": { 'public' }, 'url' : 'extensions/OES/OES_primitive_bounding_box.txt', }, 'GL_OES_query_matrix' : { 'number' :", ": { 'number' : 64, 'flags' : { 'public' }, 'supporters' : {", ": 47, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": { 'public' }, 'url' : 'extensions/NV/NV_draw_buffers.txt', }, 'GL_NV_draw_instanced' : { 'esnumber' :", ": 'extensions/DMP/DMP_shader_binary.txt', }, 'GL_EXT_422_pixels' : { 'number' : 178, 'flags' : { 'public'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_tessellation_program5.txt', }, 'GL_NV_texgen_emboss' : { 'number'", "'flags' : { 'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_multisample.txt',", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_dot3.txt', }, 'GL_ARB_texture_filter_anisotropic' : { 'arbnumber' :", "{ 'public' }, 'url' : 'extensions/INTEL/INTEL_framebuffer_CMAA.txt', }, 'GL_INTEL_map_texture' : { 'number' : 429,", "{ 'esnumber' : 11, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_get.txt', },", "{ 'number' : 211, 'flags' : { 'public' }, 'supporters' : { 'SGI'", ": 114, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counters.txt', }, 'GL_ARB_shader_ballot' :", ": 480, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_dilate.txt', }, 'GL_NV_conservative_raster_pre_snap' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_packed_6bytes.txt', }, 'GLX_SGIX_pbuffer' : { 'number' : 50, 'flags'", "'GL_ANGLE_instanced_arrays' : { 'esnumber' : 109, 'flags' : { 'public' }, 
'url' :", "}, 'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/EXT_422_pixels.txt', }, 'GL_EXT_YUV_target' : {", ": { 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_import_context.txt', }, 'GL_EXT_index_array_formats' : { 'number'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_occlusion_instrument.txt', }, 'GL_SGIX_packed_6bytes' : { 'number' :", ": 123, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_image.txt', }, 'GLX_ARB_create_context' :", "3, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC',", "{ 'INGR' }, 'url' : 'extensions/INGR/INGR_color_clamp.txt', }, 'GL_INGR_interlace_read' : { 'number' : 175,", ": 'extensions/SGIS/SGIS_texture_border_clamp.txt', }, 'GL_SGIS_texture_color_mask' : { 'number' : 214, 'flags' : { 'incomplete',", ": { 'esnumber' : 116, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_surfaceless_context.txt',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle.txt',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_light_max_exponent.txt',", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_depth_bounds_test.txt', },", ": 'extensions/ARB/ARB_fragment_program.txt', }, 'GL_ARB_fragment_program_shadow' : { 'arbnumber' : 36, 'flags' : { 'public'", "}, 'url' : 'extensions/NV/WGL_NV_gpu_affinity.txt', }, 'WGL_NV_render_depth_texture' : { 'number' : 263, 'flags' :", "{ 'number' : 180, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", ": { 'esnumber' : 46, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_type_10_10_10_2.txt',", "'number' : 385, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", ": 'extensions/SGIX/SGIX_fog_offset.txt', }, 'GL_SGIX_fog_patchy' : { 'flags' : { 'incomplete' }, 'url' :", "}, 'url' : 'extensions/EXT/EXT_point_parameters.txt', }, 'GL_EXT_polygon_offset' : { 'number' : 3, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 
'extensions/SGI/SGI_color_matrix.txt',", "'extensions/NV/WGL_NV_swap_group.txt', }, 'WGL_NV_video_output' : { 'number' : 349, 'flags' : { 'public' },", ": 'extensions/SGIX/SGIX_depth_texture.txt', }, 'GLX_SGIX_dm_buffer' : { 'number' : 86, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_light_texture.txt',", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_pass_instrument.txt',", "'flags' : { 'public' }, 'supporters' : { '3DFX', 'NVIDIA', 'REND' }, 'url'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_fd.txt', 'alias' : { 'GL_EXT_semaphore_fd' }, },", "'extensions/EXT/EXT_histogram.txt', }, 'GLX_EXT_import_context' : { 'number' : 47, 'flags' : { 'public' },", "465, 'esnumber' : 228, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster.txt', },", "'extensions/SGIX/SGIX_mpeg2.txt', }, 'GL_SGIX_nonlinear_lighting_pervertex' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt',", ": 'extensions/SGIX/GLX_SGIX_video_resize.txt', }, 'GLX_SGIX_video_resize_float' : { 'number' : 184, 'flags' : { 'incomplete',", "'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control.txt', }, 'WGL_EXT_swap_control_tear'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_border_clamp.txt', }, 'GL_EXT_texture_buffer' : { 'esnumber'", "517, 'esnumber' : 297, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap.txt', },", "'APPLE', 'NVIDIA' }, 'url' : 'extensions/ATI/ATI_text_fragment_shader.txt', }, 'GL_ATI_texture_env_combine3' : { 'number' : 279,", "'SGI' }, 'url' : 'extensions/EXT/EXT_copy_texture.txt', }, 'GLX_EXT_create_context_es2_profile' : { 'number' : 399, 'flags'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_instanced.txt', }, 'GL_EXT_draw_range_elements' : { 'number' :", "316, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_framebuffer_blit.txt', }, 'GL_EXT_framebuffer_multisample' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_texture.txt',", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_interlace.txt', }, 'GL_SGIX_ir_instrument1' : { 'number' : 81, 'flags'", "115, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_required_internalformat.txt', }, 'GL_OES_rgb8_rgba8' : {", "'flags' : { 'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI', 'SUN' },", "'extensions/NV/NV_light_max_exponent.txt', }, 'GL_NV_multisample_coverage' : { 'number' : 393, 'flags' : { 'public' },", "167, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage.txt', }, 'GL_EXT_shader_pixel_local_storage2' : {", "56, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url'", "{ 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control.txt', }, 'WGL_EXT_swap_control_tear' : { 'number' :", "24, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_program_binary.txt', }, 'GL_ARB_get_texture_sub_image' : { 'arbnumber'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_viewport_index.txt', }, 'GL_ANDROID_extension_pack_es31a' : { 'esnumber' :", "'esnumber' : 151, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers.txt', }, 'GL_EXT_draw_buffers2'", "'extensions/NV/NV_parameter_buffer_object.txt', }, 'GL_NV_parameter_buffer_object2' : { 'number' : 378, 'flags' : { 'public' },", "'number' : 62, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", ": 242, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", "}, 'GL_OES_fragment_precision_high' : { 'esnumber' : 28, 'flags' : { 'public' }, 'url'", "'number' : 459, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_uniform_buffer_unified_memory.txt', }, 'GL_NV_vdpau_interop'", "}, 'GL_ARB_shadow_ambient' : { 'arbnumber' : 24, 'flags' : { 'public' }, 'supporters'", "{ 'ARB' }, 'url' : 
'extensions/ARB/ARB_map_buffer_range.txt', }, 'GL_ARB_matrix_palette' : { 'arbnumber' : 16,", "'number' : 38, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM',", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_occlusion_query.txt', }, 'GL_NV_pack_subimage'", "'GL_EXT_protected_textures' : { 'esnumber' : 256, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow_ambient.txt', }, 'GL_ARB_sparse_buffer'", ": 95, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES2_compatibility.txt', }, 'GL_ARB_ES3_1_compatibility' :", "'extensions/ARB/ARB_texture_env_dot3.txt', }, 'GL_ARB_texture_filter_anisotropic' : { 'arbnumber' : 195, 'flags' : { 'public' },", "'GL_OES_single_precision' : { 'number' : 293, 'esnumber' : 18, 'flags' : { 'public'", "}, 'GL_APPLE_vertex_array_range' : { 'number' : 274, 'flags' : { 'public' }, 'supporters'", "'extensions/KHR/KHR_robustness.txt', }, 'GL_KHR_texture_compression_astc_hdr' : { 'arbnumber' : 118, 'esnumber' : 117, 'flags' :", "}, 'url' : 'extensions/ARB/ARB_vertex_program.txt', }, 'GL_ARB_vertex_shader' : { 'arbnumber' : 31, 'flags' :", "'extensions/AMD/AMD_stencil_operation_extended.txt', }, 'GL_AMD_texture_gather_bias_lod' : { 'number' : 502, 'flags' : { 'public' },", ": 455, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'url' : 'extensions/NV/WGL_NV_render_texture_rectangle.txt', }, 'WGL_NV_swap_group' : { 'number' : 351, 'flags' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression.txt', }, 'GL_ARB_texture_compression_bptc' : { 'arbnumber'", "'esnumber' : 266, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_packed_float.txt', 'alias' : { 'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float' }, },", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_dot3.txt', },", ": 483, 'esnumber' : 258, 'flags' : { 'public' }, 
'url' : 'extensions/NV/NV_viewport_swizzle.txt',", "'GL_ARB_vertex_type_10f_11f_11f_rev' : { 'arbnumber' : 151, 'flags' : { 'public' }, 'url' :", ": 'extensions/EXT/EXT_packed_float.txt', 'alias' : { 'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float' }, }, 'GL_EXT_packed_pixels' : { 'number'", ": 'extensions/NV/NV_shader_thread_group.txt', }, 'GL_NV_shader_thread_shuffle' : { 'number' : 448, 'flags' : { 'public'", ": 183, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_buffer.txt', }, 'GL_EXT_texture_buffer_object' :", "'url' : 'extensions/INTEL/GLX_INTEL_swap_event.txt', }, 'GL_INTEL_texture_scissor' : { 'number' : 135, 'flags' : {", ": { 'arbnumber' : 11, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/SGIX/SGIX_nurbs_eval.txt', }, 'GL_SGIX_occlusion_instrument' : { 'number' : 151, 'flags' : {", "{ 'public' }, 'url' : 'extensions/OES/OES_packed_depth_stencil.txt', }, 'GL_OES_paletted_texture' : { 'esnumber' : 13,", ": 'extensions/ARB/ARB_texture_storage.txt', }, 'GL_ARB_texture_storage_multisample' : { 'arbnumber' : 141, 'flags' : { 'public'", ": { 'arbnumber' : 114, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counters.txt',", "'GL_NV_coverage_sample' : { 'esnumber' : 72, 'flags' : { 'public' }, 'url' :", "{ 'number' : 27, 'flags' : { 'public' }, 'supporters' : { 'IBM',", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_instanced.txt', }, 'GL_ARB_enhanced_layouts' : { 'arbnumber' :", "'supporters' : { 'ES', 'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_texture_color_table.txt', }, 'GLX_SGI_transparent_pixel'", "'number' : 415, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'url' : 'extensions/INGR/INGR_interlace_read.txt', }, 'GL_INTEL_conservative_rasterization' : { 'number' : 491, 'esnumber' : 265,", "'url' : 'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias' : { 'GL_SGIX_vertex_preclip_hint' }, }, 'GLX_SGIX_video_resize' : { 'number'", ": 
'extensions/EXT/EXT_polygon_offset_clamp.txt', }, 'GL_EXT_post_depth_coverage' : { 'number' : 461, 'esnumber' : 225, 'flags'", ": { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_two_side.txt',", ": { 'number' : 337, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_copy_image.txt',", "}, 'GL_ARB_texture_filter_anisotropic' : { 'arbnumber' : 195, 'flags' : { 'public' }, 'supporters'", ": 79, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_attrib_location.txt', }, 'GL_ARB_explicit_uniform_location' :", "'public' }, 'url' : 'extensions/ARB/ARB_texture_cube_map_array.txt', }, 'GL_ARB_texture_env_add' : { 'arbnumber' : 6, 'flags'", "{ 'number' : 450, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_transform_feedback4.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_compute_program5.txt', }, 'GL_NV_conditional_render'", "'url' : 'extensions/ARB/ARB_sparse_texture.txt', }, 'GL_ARB_sparse_texture2' : { 'arbnumber' : 186, 'flags' : {", "}, 'url' : 'extensions/SGIX/SGIX_cylinder_texgen.txt', }, 'GL_SGIX_datapipe' : { 'number' : 152, 'flags' :", ": { 'arbnumber' : 7, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_multisample.txt', }, 'GL_NV_framebuffer_multisample_coverage' : { 'number' :", "}, 'url' : 'extensions/QCOM/QCOM_binning_control.txt', }, 'GL_QCOM_driver_control' : { 'esnumber' : 55, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_subroutine.txt', }, 'GL_ARB_shader_texture_image_samples' : { 'arbnumber'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage.txt', }, 'GL_EXT_shader_pixel_local_storage2' : { 'esnumber'", "}, 'url' : 'extensions/NV/NV_shadow_samplers_cube.txt', }, 'GL_NV_stereo_view_rendering' : { 'number' : 489, 'esnumber' :", "}, 'url' : 'extensions/ARB/ARB_framebuffer_no_attachments.txt', 
}, 'GL_ARB_framebuffer_object' : { 'arbnumber' : 45, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_clip_control.txt', }, 'WGL_I3D_digital_video_control' : { 'number' : 250, 'flags' :", "'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pixel_format.txt', }, 'WGL_EXT_swap_control' : {", "}, 'GL_EXT_shader_framebuffer_fetch' : { 'number' : 520, 'esnumber' : 122, 'flags' : {", "'url' : 'extensions/INTEL/INTEL_map_texture.txt', }, 'GL_INTEL_blackhole_render' : { 'number' : 521, 'esnumber' : 300,", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_stencil_operation_extended.txt', }, 'GL_AMD_texture_gather_bias_lod' : {", "64, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'public' }, 'supporters' : { 'ANGLE' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', }, 'GL_EXT_texture_cube_map'", "'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_agp_offset.txt', }, 'GLX_MESA_copy_sub_buffer' : { 'number' : 215, 'flags'", "'esnumber' : 61, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_writeonly_rendering.txt', }, 'GL_QCOM_framebuffer_foveated'", "}, 'GL_OES_copy_image' : { 'esnumber' : 208, 'flags' : { 'public' }, 'url'", "'number' : 227, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'GL_EXT_vertex_array_set' : { 'flags' : { 'public' }, 'supporters' : { 'IBM'", ": 112, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_usage.txt', }, 'GL_ANGLE_translated_shader_source' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_texture.txt', }, 'GL_OES_element_index_uint' : { 'esnumber'", "'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_vertex.txt', }, 'GL_VIV_shader_binary' : { 'esnumber'", "}, 'GL_ATI_element_array' : { 'number' : 256, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_point_parameters.txt',", ": 'extensions/SGIX/SGIX_texture_add_env.txt', }, 'GL_SGIX_texture_coordinate_clamp' : { 'number' : 235, 'flags' 
: { 'public'", "'AMD' }, 'url' : 'extensions/AMD/AMD_interleaved_elements.txt', }, 'GL_AMD_multi_draw_indirect' : { 'number' : 408, 'flags'", "'extensions/EXT/EXT_polygon_offset_clamp.txt', }, 'GL_EXT_post_depth_coverage' : { 'number' : 461, 'esnumber' : 225, 'flags' :", "'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_abgr.txt', }, 'GL_EXT_base_instance' : { 'esnumber' : 203,", "}, 'WGL_EXT_multisample' : { 'number' : 209, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_bgra.txt', }, 'GL_EXT_bindable_uniform'", "}, 'GL_NV_register_combiners' : { 'number' : 191, 'flags' : { 'public' }, 'supporters'", "'extensions/OES/OES_stencil1.txt', }, 'GL_OES_stencil4' : { 'esnumber' : 32, 'flags' : { 'public' },", ": 100, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_occlusion_query_boolean.txt', }, 'GL_EXT_packed_depth_stencil' :", "193, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": { 'number' : 215, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_filter_anisotropic.txt', }, 'GL_EXT_texture_filter_minmax'", "80, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_max_level.txt', }, 'GL_APPLE_texture_packed_float' : {", "}, 'url' : 'extensions/ATI/ATI_map_object_buffer.txt', }, 'GL_ATI_meminfo' : { 'number' : 359, 'flags' :", "}, 'GL_NV_vertex_buffer_unified_memory' : { 'number' : 380, 'flags' : { 'public' }, 'supporters'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_float.txt', }, 'GL_ARB_texture_gather' : { 'arbnumber' : 72,", ": { 'number' : 187, 'esnumber' : 41, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_copy_buffer.txt', }, 'GL_ARB_copy_image' : { 'arbnumber' : 123,", "'public' }, 'url' : 'extensions/ARB/ARB_conservative_depth.txt', }, 'GL_ARB_copy_buffer' : { 'arbnumber' : 59, 'flags'", "'arbnumber' : 48, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'url' : 
'extensions/EXT/EXT_subtexture.txt', }, 'GL_EXT_swap_control' : { 'number' : 375, 'flags' : {", ": 66, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_read_format_bgra.txt', }, 'GL_EXT_render_snorm' :", ": 'extensions/SGIX/SGIX_async.txt', }, 'GL_SGIX_async_histogram' : { 'number' : 134, 'flags' : { 'incomplete',", "'url' : 'extensions/OES/OES_blend_equation_separate.txt', }, 'GL_OES_blend_func_separate' : { 'esnumber' : 2, 'flags' : {", "'extensions/WIN/WIN_phong_shading.txt', }, 'GL_WIN_scene_markerXXX' : { 'flags' : { 'obsolete' }, 'url' : 'extensions/WIN/WIN_scene_markerXXX.txt',", ": { 'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control' }, }, 'GL_KHR_debug' : { 'arbnumber' : 119, 'esnumber'", ": { 'esnumber' : 194, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_color_buffer_packed_float.txt',", ": 487, 'esnumber' : 262, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt',", "'number' : 100, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'esnumber' : 170, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_variables.txt', }, 'GL_OES_shader_image_atomic'", ": { 'arbnumber' : 127, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_compatibility.txt',", "'public' }, 'url' : 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', }, 'GL_ANGLE_depth_texture' : { 'esnumber' : 138, 'flags'", "}, 'GL_NV_shader_noperspective_interpolation' : { 'esnumber' : 201, 'flags' : { 'public' }, 'url'", "'number' : 177, 'flags' : { 'public' }, 'supporters' : { 'INGR' },", "}, 'url' : 'extensions/SGIX/SGIX_impact_pixel_texture.txt', }, 'GL_SGIX_instrument_error' : { 'flags' : { 'incomplete' },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program4.txt',", ": 273, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_framebuffer_foveated.txt', }, 'GL_QCOM_texture_foveated' :", "'extensions/SGIX/SGIX_icc_texture.txt', }, 'GL_SGIX_igloo_interface' : { 
'number' : 219, 'flags' : { 'incomplete' },", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_layer.txt', }, 'GL_AMD_vertex_shader_tessellator' : {", "297, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'esnumber' : 156, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_instanced_arrays.txt', },", "'url' : 'extensions/EXT/EXT_shadow_samplers.txt', }, 'GL_EXT_shared_texture_palette' : { 'number' : 141, 'flags' : {", ": { 'esnumber' : 287, 'flags' : { 'public' }, 'supporters' : {", "'esnumber' : 32, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil4.txt', }, 'GL_OES_stencil8'", "'public' }, 'supporters' : { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_filter4.txt', }, 'GL_SGIS_texture_lod'", "'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_cull_vertex.txt', }, 'GL_EXT_debug_label' : {", "455, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage.txt', }, 'GL_EXT_shader_pixel_local_storage2' : { 'esnumber' : 253, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_draw_range_elements.txt', }, 'GL_EXT_draw_transform_feedback' : { 'esnumber' : 272, 'flags' :", "120, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_program_binary.txt', }, 'GL_ARM_mali_shader_binary' : {", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_flush_buffer_range.txt', },", ": 148, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sRGB_formats.txt', }, 'GL_NV_sample_locations' :", ": 'extensions/NV/NV_shader_atomic_float.txt', }, 'GL_NV_shader_atomic_float64' : { 'number' : 488, 'flags' : { 'public'", ": 'extensions/OES/OES_texture_3D.txt', }, 'GL_OES_texture_border_clamp' : { 'esnumber' : 215, 'flags' : { 'public'", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_float.txt', }, 'GL_ATI_texture_mirror_once' : {", "'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' :", 
"'extensions/SGIX/SGIX_ycrcb.txt', }, 'GL_SGIX_ycrcb_subsample' : { 'number' : 204, 'flags' : { 'incomplete' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program_option.txt', }, 'GL_NV_fragment_shader_interlock'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program_shadow.txt', }, 'GL_ARB_fragment_shader' :", "'url' : 'extensions/NV/NV_framebuffer_mixed_samples.txt', }, 'GL_NV_framebuffer_multisample' : { 'esnumber' : 143, 'flags' : {", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_instruments.txt', }, 'GL_SGIX_interlace' : { 'number' : 45,", "}, 'url' : 'extensions/EXT/EXT_read_format_bgra.txt', }, 'GL_EXT_render_snorm' : { 'esnumber' : 206, 'flags' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_geometry_shader4.txt', }, 'GLX_ARB_get_proc_address'", "{ 'arbnumber' : 171, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_pipeline_statistics_query.txt', },", "}, 'GL_EXT_texture_env_add' : { 'number' : 185, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 93, 'flags' : { 'public' }, 'supporters' : { 'INTEL',", "'GL_EXT_instanced_arrays' : { 'esnumber' : 156, 'flags' : { 'public' }, 'url' :", "'extensions/EXT/EXT_base_instance.txt', }, 'GL_EXT_bgra' : { 'number' : 129, 'flags' : { 'public' },", "'GL_ARB_fragment_shader_interlock' : { 'arbnumber' : 177, 'flags' : { 'public' }, 'url' :", "'extensions/ARB/ARB_vertex_attrib_64bit.txt', }, 'GL_ARB_vertex_attrib_binding' : { 'arbnumber' : 125, 'flags' : { 'public' },", "}, 'GL_EXT_index_texture' : { 'number' : 93, 'flags' : { 'public' }, 'supporters'", "}, 'GL_OES_vertex_type_10_10_10_2' : { 'esnumber' : 46, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_equation_separate.txt', }, 'GL_OES_blend_func_separate' : { 'esnumber'", "{ 'number' : 87, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "'Included with arbnumber 56, GLX_ARB_create_context.', }, 
'GLX_ARB_create_context_robustness' : { 'arbnumber' : 101, 'flags'", "'url' : 'extensions/EXT/EXT_bindable_uniform.txt', }, 'GL_EXT_blend_color' : { 'number' : 2, 'flags' : {", "'extensions/EXT/EXT_discard_framebuffer.txt', }, 'GL_EXT_disjoint_timer_query' : { 'esnumber' : 150, 'flags' : { 'public' },", "{ 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async.txt', },", ": 1, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_equation_separate.txt', }, 'GL_OES_blend_func_separate' :", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_release_buffers.txt',", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_reference_plane.txt', }, 'GL_SGIX_resample' : { 'number' :", ": { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_fragment_shader_ordering.txt', },", "'url' : 'extensions/EXT/EXT_clip_cull_distance.txt', }, 'GL_EXT_clip_volume_hint' : { 'number' : 79, 'flags' : {", "'url' : 'extensions/ARB/ARB_fragment_program.txt', }, 'GL_ARB_fragment_program_shadow' : { 'arbnumber' : 36, 'flags' : {", "'arbnumber' : 154, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_indirect_parameters.txt', }, 'GL_ARB_instanced_arrays'", "'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_decode.txt', }, 'GL_EXT_texture_shared_exponent' : { 'number' : 333, 'flags'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_matrix_palette.txt', }, 'GL_ARB_multi_bind'", "'GL_EXT_texture_shared_exponent' : { 'number' : 333, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_EXT_provoking_vertex' : { 'number' : 364, 'flags' : { 'public' }, 'supporters'", ": 'extensions/EXT/EXT_conservative_depth.txt', }, 'GL_EXT_convolution' : { 'number' : 12, 'flags' : { 'public'", "'url' : 'extensions/ARB/ARB_compatibility.txt', }, 'GL_ARB_compressed_texture_pixel_storage' : { 'arbnumber' : 110, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compute_variable_group_size.txt', }, 
'GL_ARB_conditional_render_inverted' : { 'arbnumber'", ": { 'arbnumber' : 126, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness_application_isolation.txt',", "{ 'esnumber' : 275, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture2.txt', },", ": 131, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query2.txt', }, 'GL_ARB_invalidate_subdata' :", "'url' : 'extensions/OES/OES_texture_buffer.txt', }, 'GL_OES_texture_compression_astc' : { 'esnumber' : 162, 'flags' : {", "{ 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_draw_range_elements.txt', }, 'GL_EXT_draw_transform_feedback'", "'extensions/SGI/SGI_complex_type.txt', }, 'GLX_SGI_cushion' : { 'number' : 62, 'flags' : { 'public' },", ": 'extensions/INTEL/INTEL_texture_scissor.txt', }, 'GL_KHR_blend_equation_advanced' : { 'arbnumber' : 174, 'esnumber' : 168, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robust_buffer_access_behavior.txt', }, 'GL_KHR_robustness' : { 'arbnumber'", "}, 'GLX_MESA_release_buffers' : { 'number' : 217, 'flags' : { 'public' }, 'supporters'", "472, 'esnumber' : 235, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sample_locations.txt', },", "}, 'url' : 'extensions/NV/NV_texture_shader.txt', }, 'GL_NV_texture_shader2' : { 'number' : 231, 'flags' :", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_agp_offset.txt', }, 'GLX_MESA_copy_sub_buffer'", "'url' : 'extensions/SUN/SUN_mesh_array.txt', }, 'GL_SUN_slice_accum' : { 'number' : 258, 'flags' : {", ": { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external_essl3.txt', }, 'GL_OES_EGL_sync' : { 'esnumber' :", ": { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc2.txt', }, 'GL_IMG_texture_env_enhanced_fixed_function' : { 'esnumber' :", ": { 'arbnumber' : 86, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt',", "'supporters' : { 'MESA' }, 'url' : 
'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', }, 'GL_MESA_pack_invert' : { 'number'", "'comments' : 'Included with arbnumber 55, WGL_ARB_create_context.', }, 'WGL_ARB_create_context_robustness' : { 'arbnumber' :", "'GL_ARB_shading_language_100' : { 'arbnumber' : 33, 'flags' : { 'public' }, 'supporters' :", ": 165, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch.txt', }, 'GL_ARM_shader_framebuffer_fetch_depth_stencil' :", "'GL_NV_command_list' : { 'number' : 477, 'flags' : { 'public' }, 'url' :", "'url' : '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', }, 'GL_OES_blend_equation_separate' : { 'esnumber' : 1, 'flags' : {", ": 'extensions/NV/NV_primitive_restart.txt', }, 'GL_NV_query_resource' : { 'number' : 511, 'flags' : { 'public'", "{ 'arbnumber' : 10, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "{ 'obsolete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_tiles.txt', }, 'GL_SGIX_polynomial_ffd'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_ballot.txt', }, 'GL_ARB_shader_bit_encoding' : { 'arbnumber' : 82,", "'extensions/EXT/EXT_draw_instanced.txt', }, 'GL_EXT_draw_range_elements' : { 'number' : 112, 'flags' : { 'public' },", ": 'extensions/NV/NV_shader_storage_buffer_object.txt', }, 'GL_NV_shader_thread_group' : { 'number' : 447, 'flags' : { 'public'", ": 321, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_rgb10_a2ui.txt', }, 'GL_ARB_texture_stencil8' : { 'arbnumber' :", "276, 'flags' : { 'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url'", "'esnumber' : 286, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_render_texture.txt', }, 'WGL_ARB_robustness_application_isolation' : { 'arbnumber' : 143,", ": { 'arbnumber' : 109, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_instanced.txt',", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 
'extensions/SGIX/GLX_SGIX_visual_select_group.txt', }, 'GLX_SGIX_wait_group'", "{ 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_usage.txt', }, 'GL_WIN_phong_shading' : { 'number' : 113,", "'GL_IMG_framebuffer_downsample' : { 'esnumber' : 255, 'flags' : { 'public' }, 'url' :", "'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_minmax.txt', }, 'GL_EXT_blend_subtract' : { 'number' :", ": { 'QCOM' }, 'url' : 'extensions/QCOM/QCOM_tiled_rendering.txt', }, 'GL_QCOM_writeonly_rendering' : { 'esnumber' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_multisample.txt', 'alias' : { 'GLX_ARB_multisample', 'WGL_ARB_multisample' },", ": { 'esnumber' : 216, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_buffer.txt',", "288, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_parallel_shader_compile.txt', }, 'GL_KHR_robust_buffer_access_behavior' : {", ": { 'number' : 439, 'esnumber' : 98, 'flags' : { 'public' },", "'GLU_EXT_object_space_tess' : { 'number' : 75, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 161, 'flags' : { 'incomplete' }, 'supporters' : {", "{ 'arbnumber' : 68, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_array_bgra.txt', },", "{ 'number' : 115, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_int64.txt', }, 'GL_NV_shader_buffer_load'", ": 388, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'url' : 'extensions/EXT/EXT_window_rectangles.txt', }, 'GL_EXT_x11_sync_object' : { 'number' : 406, 'flags' : {", "'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_program_binary.txt', }, 'GL_MTK_shader_binary' : { 'esnumber' : 244,", "192, 'flags' : { 'public' }, 'url' : 'extensions/DMP/DMP_program_binary.txt', }, 'GL_DMP_shader_binary' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_fp16_vector.txt', }, 'GL_NV_shader_atomic_int64' : { 'number'", ": { 'number' : 35, 'flags' : { 'public' }, 'supporters' : 
{", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_depth_texture.txt', }, 'WGL_NV_render_texture_rectangle' : { 'number'", ": { 'number' : 139, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/ARB/ARB_pixel_buffer_object.txt', }, 'GL_ARB_point_parameters' : { 'arbnumber' : 14, 'flags' : { 'public'", "'number' : 267, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 'extensions/NV/NV_draw_buffers.txt', }, 'GL_NV_draw_instanced' : { 'esnumber' : 141, 'flags' : { 'public'", ": { 'arbnumber' : 71, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_cube_map_array.txt',", "}, 'GL_ARB_window_pos' : { 'arbnumber' : 25, 'flags' : { 'public' }, 'supporters'", ": 'extensions/NV/NV_gpu_program5_mem_extended.txt', }, 'GL_NV_gpu_shader5' : { 'number' : 389, 'esnumber' : 260, 'flags'", "{ 'public' }, 'url' : 'extensions/OES/OES_stencil4.txt', }, 'GL_OES_stencil8' : { 'esnumber' : 33,", ": 56, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Alias", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragments_instrument.txt', }, 'GL_SGIX_framezoom' : { 'number' : 57, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_array.txt', }, 'GL_EXT_EGL_image_external_wrap_modes' : { 'esnumber'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_shader_image_load_formatted.txt', }, 'GL_EXT_shader_image_load_store' : { 'number' : 386,", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_occlusion_query_event.txt',", ": 7, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_texture.txt', }, 'GL_OES_element_index_uint' :", ": 171, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_image_atomic.txt', }, 'GL_OES_shader_io_blocks' :", "{ 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_setXXX.txt', }, 'GL_EXT_vertex_attrib_64bit'", "'number' : 115, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'alias' : { 
'GL_SUN_multi_draw_arrays' }, }, 'GL_EXT_multi_draw_indirect' : { 'esnumber' : 205, 'flags'", "needed - see arbnumber 75.', }, 'GLX_ARB_create_context_no_error' : { 'arbnumber' : 191, 'flags'", ": 'extensions/NV/NV_point_sprite.txt', }, 'GL_NV_polygon_mode' : { 'esnumber' : 238, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2.txt',", "'url' : 'extensions/SGIX/SGIX_shadow_ambient.txt', }, 'GL_SGIX_slim' : { 'flags' : { 'incomplete' }, 'url'", "}, 'url' : 'extensions/SGIS/SGIS_line_texgen.txt', }, 'GL_SGIS_multisample' : { 'number' : 25, 'flags' :", "'arbnumber' : 92, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', }, 'GL_ARB_texture_buffer_range'", "}, 'url' : 'extensions/ARB/ARB_window_pos.txt', }, 'GL_ARM_mali_program_binary' : { 'esnumber' : 120, 'flags' :", ": { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch.txt', }, 'GL_ARM_shader_framebuffer_fetch_depth_stencil' : { 'esnumber' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_emboss.txt', }, 'GL_NV_texgen_reflection' : { 'number' : 179, 'flags'", ": { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_combine.txt', }, 'GL_EXT_texture_env_dot3' : { 'number'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_instruments.txt', }, 'GL_SGIX_interlace' : { 'number' :", "'GL_EXT_shader_framebuffer_fetch_non_coherent' }, }, 'GL_EXT_shader_group_vote' : { 'esnumber' : 254, 'flags' : { 'public'", ": 129, 'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_attrib_integer_64bit.txt', }, 'GL_NV_vertex_buffer_unified_memory' : {", "'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_provoking_vertex.txt', }, 'GL_EXT_pvrtc_sRGB'", "'public' }, 'url' : 'extensions/EXT/EXT_blend_func_extended.txt', }, 'GL_EXT_blend_func_separate' : { 'number' : 173, 'flags'", "'esnumber' : 245, 'flags' : { 
'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_program_binary.txt', },", "}, 'url' : 'extensions/ARB/WGL_ARB_render_texture.txt', }, 'WGL_ARB_robustness_application_isolation' : { 'arbnumber' : 143, 'flags' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', }, 'GL_SGIX_texture_lod_bias' :", "}, }, 'GL_EXT_texenv_op' : { 'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_texenv_op.txt',", "}, 'GL_EXT_occlusion_query_boolean' : { 'esnumber' : 100, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/SGIX/SGIX_texture_scale_bias.txt', }, 'GL_SGIX_texture_supersample' : { 'flags' : { 'incomplete' },", "{ 'number' : 428, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "}, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc.txt', }, 'GL_EXT_texture_compression_s3tc_srgb' : { 'esnumber' : 289, 'flags' :", ": { 'public' }, 'url' : 'extensions/NV/NV_fragment_coverage_to_color.txt', }, 'GL_NV_fragment_program' : { 'number' :", "'GL_OES_texture_compression_astc' : { 'esnumber' : 162, 'flags' : { 'public' }, 'url' :", "87, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'esnumber' : 36, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_float.txt', 'alias'", ": { 'public' }, 'url' : 'extensions/IMG/IMG_user_clip_plane.txt', }, 'GL_INGR_color_clamp' : { 'number' :", "}, 'GL_ARB_shader_draw_parameters' : { 'arbnumber' : 156, 'flags' : { 'public' }, 'url'", ": { 'number' : 110, 'flags' : { 'public' }, 'supporters' : {", "'esnumber' : 140, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc2.txt', }, 'GL_IMG_texture_env_enhanced_fixed_function'", ": { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_display_color_table.txt', }, 'WGL_EXT_extensions_string' : { 'number' :", "'esnumber' : 233, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_geometry_shader_passthrough.txt', }, 'GL_NV_gpu_multicast'", ": 'extensions/ARB/ARB_indirect_parameters.txt', }, 
'GL_ARB_instanced_arrays' : { 'arbnumber' : 49, 'flags' : { 'public'", "{ 'esnumber' : 244, 'flags' : { 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_shader_binary.txt',", "'GL_SGIX_sprite' : { 'number' : 52, 'flags' : { 'public' }, 'supporters' :", "'esnumber' : 177, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_geometry_shader.txt', 'alias' :", "{ 'public' }, 'url' : 'extensions/NV/NV_path_rendering_shared_edge.txt', }, 'GL_NV_pixel_buffer_object' : { 'esnumber' : 134,", "'GL_OES_surfaceless_context' : { 'esnumber' : 116, 'flags' : { 'public' }, 'url' :", ": 'extensions/HP/HP_convolution_border_modes.txt', }, 'GL_HP_image_transform' : { 'number' : 66, 'flags' : { 'public'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', }, 'GL_EXT_framebuffer_object' : { 'number' : 310, 'flags'", "'public' }, 'url' : 'extensions/NV/NV_framebuffer_multisample.txt', }, 'GL_NV_framebuffer_multisample_coverage' : { 'number' : 336, 'flags'", "'GL_SGIX_ycrcb_subsample' : { 'number' : 204, 'flags' : { 'incomplete' }, 'supporters' :", "'public' }, 'url' : 'extensions/EXT/EXT_occlusion_query_boolean.txt', }, 'GL_EXT_packed_depth_stencil' : { 'number' : 312, 'flags'", ": 'extensions/SUN/GLX_SUN_get_transparent_index.txt', }, 'GL_SUN_global_alpha' : { 'number' : 164, 'flags' : { 'public'", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_program_evaluators.txt', }, 'GL_APPLE_ycbcr_422' : { 'number' : 275,", "'arbnumber' : 183, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_ballot.txt', }, 'GL_ARB_shader_bit_encoding'", ": { 'number' : 227, 'flags' : { 'public' }, 'supporters' : {", "'extensions/OES/OES_blend_equation_separate.txt', }, 'GL_OES_blend_func_separate' : { 'esnumber' : 2, 'flags' : { 'public' },", "'url' : 'extensions/AMD/AMD_transform_feedback4.txt', }, 'GL_AMD_vertex_shader_layer' : { 'number' : 417, 'flags' : {", "}, 'GL_3DFX_texture_compression_FXT1' : { 'number' : 206, 'flags' : { 'public' }, 'supporters'", 
"'GL_SGI_complex_type' : { 'number' : 88, 'flags' : { 'incomplete' }, 'supporters' :", ": { 'arbnumber' : 32, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { '3DL', 'ATI', 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_object.txt', },", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_occlusion_query_event.txt', }, 'GL_AMD_performance_monitor'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_mirrored_repeat.txt', }, 'GL_OES_texture_npot' : { 'esnumber'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_buffer_storage.txt', }, 'GL_ARB_cl_event' : { 'arbnumber' : 103,", "'url' : 'extensions/ARB/ARB_texture_float.txt', }, 'GL_ARB_texture_gather' : { 'arbnumber' : 72, 'flags' : {", ": 189, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robust_buffer_access_behavior.txt', }, 'GL_KHR_robustness' :", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_sync_control.txt', },", "'url' : 'extensions/SGIX/SGIX_resample.txt', }, 'GL_SGIX_scalebias_hint' : { 'number' : 236, 'flags' : {", ": 518, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_underestimation.txt', }, 'GLX_NV_copy_buffer' :", "'number' : 258, 'flags' : { 'public' }, 'supporters' : { 'SUN' },", "'GL_ARB_parallel_shader_compile' : { 'arbnumber' : 179, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/EXT/EXT_debug_marker.txt', }, 'GL_EXT_depth_bounds_test' : { 'number' : 297, 'flags' :", ": { 'MESA' }, 'url' : 'extensions/MESAX/MESAX_texture_stack.txt', }, 'GLX_MESA_agp_offset' : { 'number' :", "47, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_get_program_binary.txt', }, 'GL_OES_gpu_shader5' : {", "'url' : 'extensions/ARB/GLX_ARB_get_proc_address.txt', }, 'GL_ARB_get_program_binary' : { 'arbnumber' : 96, 'flags' : {", "81, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'url' : 'extensions/APPLE/APPLE_texture_max_level.txt', }, 'GL_APPLE_texture_packed_float' : { 'esnumber' : 
195, 'flags' :", "}, 'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_interlace_read.txt', }, 'GL_INTEL_conservative_rasterization' : {", "}, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/GLX_SUN_get_transparent_index.txt', }, 'GL_SUN_global_alpha' : {", ": { 'number' : 114, 'flags' : { 'public' }, 'supporters' : {", ": 102, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": 352, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' },", "}, 'GL_NV_alpha_to_coverage_dither_control' : { 'number' : 500, 'flags' : { 'public' }, 'url'", "}, 'GL_SGIS_multisample' : { 'number' : 25, 'flags' : { 'public' }, 'supporters'", "'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_histogram.txt', }, 'GL_SGIX_async_pixel'", ": 72, 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', }, 'GL_SGIX_complex_polar' :", ": 263, 'flags' : { 'public' }, 'supporters' : { 'GOOGLE', 'NVIDIA', 'VMware'", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_float_linear.txt', 'alias' : { 'GL_OES_texture_half_float_linear' }, },", "187, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture_clamp.txt', }, 'GL_ARB_spirv_extensions' : {", "'supporters' : { 'IBM', 'IMG', 'SUN' }, 'url' : 'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias' : {", "}, 'GL_ARB_seamless_cube_map' : { 'arbnumber' : 65, 'flags' : { 'public' }, 'url'", ": { 'esnumber' : 238, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_polygon_mode.txt',", "}, 'GL_NV_texture_shader3' : { 'number' : 265, 'flags' : { 'public' }, 'supporters'", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_vector_ops.txt', }, 'GL_SGIX_vertex_array_object' : { 'flags' : { 'obsolete'", ": 264, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'number' : 99, 'flags' : { 'incomplete' }, 'supporters' : {", "{ 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_mirror_once.txt', }, 'GL_ATI_vertex_array_object' : { 'number' : 247,", "'alias' : { 
'WGL_NV_float_buffer' }, }, 'GL_NV_fog_distance' : { 'number' : 192, 'flags'", ": 'extensions/ARB/ARB_vertex_program.txt', }, 'GL_ARB_vertex_shader' : { 'arbnumber' : 31, 'flags' : { 'public'", "{ 'number' : 456, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'public' }, 'supporters' : { 'ES', 'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_texture_color_table.txt',", "'GL_APPLE_ycbcr_422' : { 'number' : 275, 'flags' : { 'public' }, 'supporters' :", ": 62, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_elements_base_vertex.txt', }, 'GL_ARB_draw_indirect' :", "'extensions/ARB/ARB_transform_feedback2.txt', }, 'GL_ARB_transform_feedback3' : { 'arbnumber' : 94, 'flags' : { 'public' },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_gpu_affinity.txt', }, 'WGL_NV_render_depth_texture' : { 'number' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_conditional_render.txt', }, 'GL_NV_conservative_raster' : { 'number' :", "{ 'IBM', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_abgr.txt', }, 'GL_EXT_base_instance' : {", "'url' : 'extensions/EXT/EXT_multi_draw_indirect.txt', }, 'GL_EXT_multiple_textures' : { 'flags' : { 'obsolete' }, 'url'", ": 'extensions/NV/NV_framebuffer_multisample_coverage.txt', }, 'GL_NV_generate_mipmap_sRGB' : { 'esnumber' : 144, 'flags' : { 'public'", ": 'extensions/ARB/ARB_fragment_shader.txt', }, 'GL_ARB_fragment_shader_interlock' : { 'arbnumber' : 177, 'flags' : { 'public'", "'extensions/OES/OES_matrix_get.txt', }, 'GL_OES_matrix_palette' : { 'esnumber' : 12, 'flags' : { 'public' },", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_blend.txt', }, 'GL_SGIX_fog_factor_to_alpha' : { 'flags'", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_performance_monitor.txt',", ": 'extensions/EXT/EXT_clip_volume_hint.txt', }, 'GL_EXT_cmyka' : { 'number' : 18, 'flags' : { 'public'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias' : { 'GL_SGIX_vertex_preclip_hint' }, }, 
'GLX_SGIX_video_resize'", ": 313, 'flags' : { 'public' }, 'supporters' : { '3DL' }, 'url'", "}, 'GL_OES_texture_float_linear' : { 'esnumber' : 35, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_marker.txt', }, 'GL_EXT_depth_bounds_test' : {", "'GL_NV_texture_rectangle_compressed' : { 'number' : 509, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_bindless_texture.txt', }, 'GL_IMG_framebuffer_downsample' : { 'esnumber'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_sprite.txt', }, 'GL_ARB_polygon_offset_clamp' : { 'arbnumber' :", "'esnumber' : 194, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_color_buffer_packed_float.txt', }, 'GL_APPLE_copy_texture_levels'", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_cube_map.txt', }, 'GL_ARB_texture_cube_map_array' : { 'arbnumber' : 71, 'flags'", "'url' : 'extensions/EXT/EXT_texture_swizzle.txt', }, 'GL_EXT_texture_type_2_10_10_10_REV' : { 'esnumber' : 42, 'flags' : {", "'esnumber' : 127, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_packed_float.txt', }, 'GL_NV_parameter_buffer_object'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias' : { 'GL_SGIS_color_range' },", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_float.txt', }, 'GL_ARB_texture_gather'", ": { 'number' : 314, 'flags' : { 'public' }, 'supporters' : {", "{ 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pixel_format.txt', }, 'WGL_EXT_swap_control' : { 'number' :", "'extensions/I3D/WGL_I3D_gamma.txt', }, 'WGL_I3D_genlock' : { 'number' : 252, 'flags' : { 'public' },", ": 'extensions/ARB/ARB_compute_shader.txt', }, 'GL_ARB_compute_variable_group_size' : { 'arbnumber' : 153, 'flags' : { 'public'", "'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_minmax.txt', }, 'GL_EXT_blend_subtract' : { 'number' : 38,", "'public' }, 'url' : 'extensions/OES/OES_EGL_image_external.txt', }, 
'GL_OES_EGL_image_external_essl3' : { 'esnumber' : 220, 'flags'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_storage.txt', }, 'GL_EXT_texture_swizzle' : { 'number' : 356,", "'GL_NV_bindless_multi_draw_indirect_count' : { 'number' : 456, 'flags' : { 'public' }, 'supporters' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_minmax_factor.txt', }, 'GL_NV_blend_square' : { 'number' : 194, 'flags'", ": 389, 'esnumber' : 260, 'flags' : { 'public' }, 'supporters' : {", "381, 'esnumber' : 271, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLU_SGIX_icc_compress.txt', }, 'GL_SGIX_icc_texture' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program.txt', }, 'GL_ARB_fragment_program_shadow' : { 'arbnumber' :", "}, 'GL_INGR_interlace_read' : { 'number' : 175, 'flags' : { 'public' }, 'supporters'", "'extensions/INGR/INGR_interlace_read.txt', }, 'GL_INTEL_conservative_rasterization' : { 'number' : 491, 'esnumber' : 265, 'flags' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_transpose_matrix.txt', }, 'GL_ARB_uniform_buffer_object' : { 'arbnumber'", "{ 'number' : 193, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'number' : 101, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'public' }, 'url' : 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', }, 'GL_ANGLE_program_binary' : { 'esnumber' : 139, 'flags'", "'GL_AMD_vertex_shader_layer' : { 'number' : 417, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_latc.txt', }, 'GL_NV_texture_compression_s3tc' : { 'esnumber' :", "{ 'public' }, 'supporters' : { '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_secondary_color.txt',", "'GL_OES_viewport_array' : { 'esnumber' : 267, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'drafts/MTK/MTK_program_binary.txt', }, 'GL_MTK_shader_binary' : { 'esnumber' : 244, 'flags' :", "'GL_OES_EGL_image' : { 'esnumber' : 23, 'flags' : { 'public' }, 
'url' :", ": 205, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "'GL_MESA_window_pos' : { 'number' : 197, 'flags' : { 'public' }, 'supporters' :", "86, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_viewport_array.txt', }, 'GL_OML_interlace' : { 'number'", "'GL_INTEL_framebuffer_CMAA' : { 'number' : 481, 'esnumber' : 246, 'flags' : { 'public'", "'GL_OES_depth24' : { 'esnumber' : 24, 'flags' : { 'public' }, 'url' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle.txt', }, 'GL_NV_texture_rectangle_compressed' : { 'number' :", "}, 'url' : 'extensions/ARB/ARB_invalidate_subdata.txt', }, 'GL_ARB_map_buffer_alignment' : { 'arbnumber' : 113, 'flags' :", "'url' : 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', }, 'GL_IGLOO_viewport_offsetXXX' : { 'flags' : { 'incomplete', 'obsolete' },", ": 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', }, 'GL_EXT_texture_cube_map' : { 'flags' : { 'incomplete' }, 'url' :", "'public' }, 'url' : 'extensions/KHR/KHR_context_flush_control.txt', 'alias' : { 'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control' }, }, 'GL_KHR_debug'", ": 213, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "}, }, 'GL_EXT_geometry_shader4' : { 'number' : 324, 'flags' : { 'public' },", ": { 'number' : 78, 'flags' : { 'public' }, 'supporters' : {", "'WGL_NV_DX_interop' : { 'number' : 407, 'flags' : { 'public' }, 'supporters' :", ": 153, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB_write_control.txt', }, 'GL_EXT_scene_marker' :", ": { 'number' : 266, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/ARB/ARB_texture_env_combine.txt', }, 'GL_ARB_texture_env_crossbar' : { 'arbnumber' : 18, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_static_vertex_array.txt', }, 'GL_EXT_stencil_clear_tag' : { 'number' : 314, 'flags' :", ": 96, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_npot_2D_mipmap.txt', }, 
'GL_NV_texture_rectangle' :", "{ 'number' : 298, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_separate_stencil.txt', }, 'GL_ATI_text_fragment_shader' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_swap_control.txt', }, 'GLX_EXT_swap_control_tear' :", "'public' }, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/HP/HP_image_transform.txt', }, 'GL_HP_occlusion_test'", "'url' : 'extensions/OES/OES_get_program_binary.txt', }, 'GL_OES_gpu_shader5' : { 'esnumber' : 211, 'flags' : {", "}, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_cull_vertex.txt', }, 'GL_EXT_debug_label' :", ": { 'MS', 'SGI' }, 'url' : 'extensions/EXT/EXT_paletted_texture.txt', }, 'GL_EXT_pixel_buffer_object' : { 'number'", ": 344, 'flags' : { 'public' }, 'supporters' : { 'MESA', 'NVIDIA' },", "'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_info.txt', },", "'number' : 202, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'GL_EXT_secondary_color' : { 'number' : 145, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/SGIX/GLX_SGIX_video_source.txt', }, 'GLX_SGIX_visual_select_group' : { 'number' : 234, 'flags' : { 'public'", "'url' : 'extensions/NV/NV_copy_image.txt', 'alias' : { 'GLX_NV_copy_image', 'WGL_NV_copy_image' }, }, 'GL_NV_coverage_sample' : {", ": 'extensions/OES/OES_point_sprite.txt', }, 'GL_OES_primitive_bounding_box' : { 'esnumber' : 212, 'flags' : { 'public'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_multi_buffer.txt', }, 'GL_SGIX_texture_phase' :", "'public' }, 'url' : 'extensions/ARB/ARB_blend_func_extended.txt', }, 'GL_ARB_buffer_storage' : { 'arbnumber' : 144, 'flags'", "'esnumber' : 128, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc.txt', }, 'GL_NV_texture_compression_s3tc_update'", ": 'extensions/NV/NV_evaluators.txt', }, 
'GL_NV_explicit_attrib_location' : { 'esnumber' : 159, 'flags' : { 'public'", "}, 'url' : 'extensions/SGI/SGI_texture_color_table.txt', }, 'GLX_SGI_transparent_pixel' : { 'number' : 153, 'flags' :", "}, 'GL_SGI_complex_type' : { 'number' : 88, 'flags' : { 'incomplete' }, 'supporters'", "253, 'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' :", "'arbnumber' : 186, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture2.txt', }, 'GL_ARB_sparse_texture_clamp'", "}, 'url' : 'extensions/INTEL/INTEL_map_texture.txt', }, 'GL_INTEL_blackhole_render' : { 'number' : 521, 'esnumber' :", "'esnumber' : 235, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sample_locations.txt', }, 'GL_NV_sample_mask_override_coverage'", "'url' : 'extensions/SGIX/GLX_SGIX_hyperpipe.txt', }, 'GLU_SGIX_icc_compress' : { 'flags' : { 'incomplete' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_framebuffer_sample_positions.txt', }, 'GL_AMD_gcn_shader' : { 'number'", "}, 'url' : 'extensions/EXT/EXT_texture_storage.txt', }, 'GL_EXT_texture_swizzle' : { 'number' : 356, 'flags' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_program4.txt', }, 'GL_NV_geometry_shader4' :", "{ 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_fp16_vector.txt', }, 'GL_NV_shader_atomic_int64' : { 'number' : 455,", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_x11_sync_object.txt',", "{ '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_fog_coord.txt', }, 'GL_EXT_frag_depth' : { 'esnumber'", "{ 'number' : 127, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'number' : 264, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'public' }, 'url' : 'extensions/ARB/ARB_copy_image.txt', }, 'GLX_ARB_create_context' : { 'arbnumber' : 56, 'flags'", "'extensions/ARB/ARB_compatibility.txt', }, 'GL_ARB_compressed_texture_pixel_storage' : { 'arbnumber' : 110, 'flags' : { 'public' },", "'GL_ARB_fragment_coord_conventions' : { 
'arbnumber' : 63, 'flags' : { 'public' }, 'url' :", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_query_buffer_object.txt', }, 'GL_AMD_sample_positions' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object.txt', }, 'GL_NV_parameter_buffer_object2' : { 'number' : 378, 'flags'", "'GL_ARB_point_parameters' : { 'arbnumber' : 14, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 67, 'flags' : { 'public' }, 'supporters' : { 'HP'", "'GL_NV_texture_compression_s3tc_update' : { 'esnumber' : 95, 'flags' : { 'public' }, 'url' :", "}, 'GL_ARB_robustness' : { 'arbnumber' : 105, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/SUN/GLX_SUN_get_transparent_index.txt', }, 'GL_SUN_global_alpha' : { 'number' : 164, 'flags' :", "{ 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_buffer_age.txt', },", ": 'extensions/APPLE/APPLE_element_array.txt', }, 'GL_APPLE_fence' : { 'number' : 272, 'flags' : { 'public'", "'WGL_ARB_multisample' }, }, 'GL_ARB_multitexture' : { 'arbnumber' : 1, 'flags' : { 'public'", ": 'extensions/NV/NV_geometry_shader_passthrough.txt', }, 'GL_NV_gpu_multicast' : { 'number' : 494, 'flags' : { 'public'", "{ 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', }, 'GL_SGIX_complex_polar' : { 'flags' : {", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_float.txt', }, 'GL_NV_shader_atomic_float64' : { 'number' :", "'arbnumber' : 66, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sync.txt', }, 'GL_ARB_tessellation_shader'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression_rgtc.txt', }, 'GL_ARB_texture_cube_map' : { 'arbnumber' :", "'MESA' }, 'url' : 'extensions/MESA/MESA_ycbcr_texture.txt', }, 'GL_MTK_program_binary' : { 'esnumber' : 245, 'flags'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_shader.txt', }, 'GL_ARB_vertex_type_10f_11f_11f_rev' : { 'arbnumber' : 151,", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_spirv_extensions.txt', }, 
'GL_ARB_stencil_texturing' : { 'arbnumber' :", ": 'extensions/EXT/EXT_vertex_array_set.txt', }, 'GL_EXT_vertex_array_setXXX' : { 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_envmap_bumpmap.txt', }, 'GL_ATI_fragment_shader'", "'GL_AMD_gpu_shader_half_float_fetch' : { 'number' : 519, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/MESA/MESA_pack_invert.txt', }, 'GLX_MESA_pixmap_colormap' : { 'number' : 216, 'flags' :", "'flags' : { 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_program_binary.txt', }, 'GL_MTK_shader_binary' : {", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_supersample.txt', }, 'GL_SGIX_vector_ops' : { 'flags' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader.txt', }, 'GL_NV_texture_shader2' : { 'number' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb.txt', }, 'GL_SGIX_ycrcb_subsample' : { 'number' :", "}, 'GL_NV_internalformat_sample_query' : { 'number' : 475, 'esnumber' : 196, 'flags' : {", "}, 'GL_NV_framebuffer_multisample' : { 'esnumber' : 143, 'flags' : { 'public' }, 'url'", "'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_cull_vertex.txt', }, 'GL_EXT_debug_label' : { 'number' : 439,", "}, 'url' : 'extensions/ATI/ATI_pn_triangles.txt', }, 'GL_ATI_separate_stencil' : { 'number' : 289, 'flags' :", "'GL_ARM_shader_framebuffer_fetch_depth_stencil' : { 'esnumber' : 166, 'flags' : { 'public' }, 'url' :", "'alias' : { 'GLX_EXT_create_context_es_profile' }, }, 'GL_EXT_cull_vertex' : { 'number' : 98, 'flags'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragment_lighting_space.txt', }, 'GL_SGIX_fragment_specular_lighting' : {", "/ 540 only.', }, 'GL_SGIX_ycrcba' : { 'number' : 203, 'flags' : {", "'number' : 494, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', }, 'GL_AMD_gpu_shader_int16' : { 'number' : 507,", "}, 'GLX_SGI_make_current_read' : { 
'number' : 42, 'flags' : { 'public' }, 'supporters'", ": { 'arbnumber' : 5, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_window_pos.txt', }, 'GL_MESA_ycbcr_texture' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_tessellation_program5.txt',", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_sRGB.txt',", "'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_shader_binary.txt', }, 'GL_IMG_texture_compression_pvrtc' : { 'esnumber'", "'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', }, 'GL_MESA_pack_invert' : { 'number' : 300, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_transform_feedback4.txt', }, 'GL_AMD_vertex_shader_layer' : { 'number'", "'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_generate_mipmap.txt', }, 'GL_SGIS_line_texgen'", "{ 'number' : 371, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', }, 'GL_IGLOO_toggle_color_and_lightXXX' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', },", ": 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', }, 'GL_EXT_conservative_depth' : { 'esnumber' : 268, 'flags' : { 'public'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_histogram.txt', }, 'GL_SGIX_async_pixel' : { 'number' : 133, 'flags'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_64bit.txt', }, 'GL_ARB_vertex_attrib_binding' : { 'arbnumber' : 125,", "'GL_EXT_blend_func_extended' : { 'esnumber' : 247, 'flags' : { 'public' }, 'url' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_cube_map.txt', }, 'GL_ARB_texture_cube_map_array' :", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_agp_offset.txt', }, 
'GLX_MESA_copy_sub_buffer' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_shadow_samplers.txt', }, 'GL_EXT_shared_texture_palette' : { 'number' : 141,", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_shared_exponent.txt', },", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_sRGB.txt', }, 'GL_EXT_texture_sRGB_R8' : { 'esnumber' : 221, 'flags'", ": 'extensions/SGIX/SGIX_blend_cmultiply.txt', }, 'GL_SGIX_calligraphic_fragment' : { 'number' : 82, 'flags' : { 'incomplete'", "'extensions/ARB/ARB_vertex_buffer_object.txt', 'alias' : { 'GLX_ARB_vertex_buffer_object' }, }, 'GL_ARB_vertex_program' : { 'arbnumber' : 26,", "'public' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', }, 'GL_EXT_texture_norm16' : { 'esnumber' : 207, 'flags'", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_hyperpipe.txt', },", "'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_tbuffer.txt', }, 'GL_3DFX_texture_compression_FXT1' : { 'number'", "'number' : 215, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "}, 'GL_ARB_base_instance' : { 'arbnumber' : 107, 'flags' : { 'public' }, 'url'", ": 92, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fbo_color_attachments.txt', }, 'GL_NV_fence' :", "'comments' : 'Shares extension spec with WGL_ARB_create_context_no_error.', 'alias' : { 'WGL_ARB_create_context_no_error' }, },", "}, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch.txt', }, 'GL_ARM_shader_framebuffer_fetch_depth_stencil' : { 'esnumber' : 166, 'flags' :", "}, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_combine.txt', }, 'GL_EXT_texture_env_dot3' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader2.txt', }, 'GL_NV_texture_shader3' : { 'number' : 265,", "'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_fixed_point.txt', }, 'GL_OES_fragment_precision_high' : { 'esnumber'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/NV/NV_fog_distance.txt', },", "'MS' }, 'url' : 'extensions/WIN/WIN_specular_fog.txt', }, 'WGL_NV_DX_interop' : { 'number' : 407, 'flags'", "'public' }, 'url' : 'extensions/EXT/EXT_shader_texture_lod.txt', }, 'GL_EXT_shadow_funcs' : { 'number' : 267, 'flags'", ": { '3DFX', '3DL', 'SGI' }, 'url' : 'extensions/EXT/EXT_shared_texture_palette.txt', }, 'GL_EXT_sparse_texture' : {", "'flags' : { 'public' }, 'supporters' : { 'IBM', 'INGR', 'KGC', 'SGI' },", "'GL_NV_shadow_samplers_array' : { 'esnumber' : 146, 'flags' : { 'public' }, 'url' :", "{ 'number' : 231, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'GL_SGIX_flush_raster' : { 'number' : 61, 'flags' : { 'public' }, 'supporters'", ": 124, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_view.txt', }, 'GL_ARB_timer_query' :", ": { 'esnumber' : 120, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_program_binary.txt',", "'IBM', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_abgr.txt', }, 'GL_EXT_base_instance' : { 'esnumber'", "}, 'url' : 'extensions/OES/OES_texture_cube_map_array.txt', }, 'GL_OES_texture_env_crossbar' : { 'esnumber' : 21, 'flags' :", "'esnumber' : 21, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_env_crossbar.txt', }, 'GL_OES_texture_float'", "217, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "'GL_EXT_light_texture' : { 'number' : 117, 'flags' : { 'public' }, 'supporters' :", ": 67, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_program_binary.txt', }, 'GL_IMG_read_format' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_fog_function.txt', }, 'GL_SGIS_generate_mipmap' : { 'number'", "'url' : 'extensions/SGIX/GLU_SGIX_icc_compress.txt', }, 'GL_SGIX_icc_texture' : { 'number' : 154, 'flags' : {", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_cadd.txt', }, 'GL_SGIX_blend_cmultiply' :", "{ 'AMD' }, 'url' : 'extensions/ATI/ATI_meminfo.txt', }, 'GL_ATI_pn_triangles' : { 'number' : 
246,", "{ 'number' : 366, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA',", "'public' }, 'url' : 'extensions/NV/NV_image_formats.txt', }, 'GL_NV_instanced_arrays' : { 'esnumber' : 145, 'flags'", "{ 'esnumber' : 32, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil4.txt', },", "'url' : 'extensions/QCOM/QCOM_binning_control.txt', }, 'GL_QCOM_driver_control' : { 'esnumber' : 55, 'flags' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_sRGB_write_control.txt', }, 'GL_EXT_scene_marker' : { 'number' : 120,", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_subsample.txt', }, 'GLX_SGIX_swap_barrier' : { 'number' : 92, 'flags'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_object_space_tess.txt', }, 'GL_EXT_occlusion_query_boolean' : {", "}, 'GL_NV_transform_feedback' : { 'number' : 341, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'ES', 'INGR', 'SGI' }, 'url' : 'extensions/EXT/EXT_packed_pixels.txt', }, 'GL_EXT_paletted_texture' :", "'GL_OES_read_format' : { 'number' : 295, 'esnumber' : 17, 'flags' : { 'public'", "'extensions/SGIX/SGIX_fog_texture.txt', }, 'GL_SGIX_fragment_lighting_space' : { 'number' : 118, 'flags' : { 'incomplete' },", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_source.txt', }, 'GLX_SGIX_visual_select_group'", "361, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_user_clip_plane.txt', }, 'GL_INGR_color_clamp' : { 'number'", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_conservative_depth.txt', }, 'GL_AMD_debug_output' : { 'number'", "{ 'esnumber' : 120, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_program_binary.txt', },", "'arbnumber' : 124, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_view.txt', }, 'GL_ARB_timer_query'", "'esnumber' : 28, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fragment_precision_high.txt', }, 'GL_OES_framebuffer_object'", 
"'extensions/OES/OES_packed_depth_stencil.txt', }, 'GL_OES_paletted_texture' : { 'esnumber' : 13, 'flags' : { 'incomplete', 'private'", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb_subsample.txt',", "{ 'number' : 88, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range.txt', }, 'GL_NV_vertex_array_range2' : {", ": 437, 'esnumber' : 161, 'flags' : { 'public' }, 'supporters' : {", ": { 'KHR' }, 'url' : 'extensions/OES/OES_byte_coordinates.txt', }, 'GL_OES_compressed_ETC1_RGB8_texture' : { 'esnumber' :", "'public' }, 'url' : 'extensions/DMP/DMP_shader_binary.txt', }, 'GL_EXT_422_pixels' : { 'number' : 178, 'flags'", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_query_buffer_object.txt', }, 'GL_AMD_sample_positions' : { 'number'", "'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_scale_bias.txt', }, 'GL_SGIX_texture_supersample'", "'esnumber' : 113, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_translated_shader_source.txt', }, 'GL_APPLE_aux_depth_stencil'", "172, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_buffer.txt', }, 'GL_ARB_sparse_texture' : {", ": { 'arbnumber' : 141, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage_multisample.txt',", "106, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture.txt', }, 'GL_EXT_multisampled_render_to_texture2' : {", "}, 'url' : 'extensions/SGIX/SGIX_texture_phase.txt', }, 'GL_SGIX_texture_range' : { 'number' : 181, 'flags' :", "{ 'ES', 'SGI' }, 'url' : 'extensions/EXT/EXT_cmyka.txt', }, 'GL_EXT_color_buffer_float' : { 'esnumber' :", ": { 'number' : 331, 'flags' : { 'public' }, 'supporters' : {", ": { 'arbnumber' : 173, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_overflow_query.txt',", "{ 'esnumber' : 73, 'flags' : { 'public' }, 'url' : 
'../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', },", ": { 'number' : 18, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_SGIX_blend_cadd' : { 'number' : 150, 'flags' : { 'incomplete' }, 'supporters'", ": { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt',", ": { 'public' }, 'supporters' : { 'ES', 'HP', 'IBM', 'SGI', 'SUN' },", "{ 'number' : 144, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_separate_specular_color.txt', },", "'number' : 89, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'esnumber' : 18, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_flush_raster.txt', },", "'GL_APPLE_flush_buffer_range' : { 'number' : 321, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/OES/OES_texture_env_crossbar.txt', }, 'GL_OES_texture_float' : { 'esnumber' : 36, 'flags' :", "}, 'url' : 'extensions/NV/NV_vertex_program4.txt', }, 'GL_NV_video_capture' : { 'number' : 374, 'flags' :", "{ 'esnumber' : 270, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_bindless_texture.txt', },", "}, 'url' : 'extensions/EXT/GLX_EXT_libglvnd.txt', }, 'GL_EXT_light_texture' : { 'number' : 117, 'flags' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_make_current_read.txt', },", "}, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform.txt', }, 'GL_EXT_pixel_transform_color_table' :", "'GL_SGIX_pixel_tiles' : { 'number' : 46, 'flags' : { 'obsolete' }, 'supporters' :", "}, 'GL_ARB_internalformat_query' : { 'arbnumber' : 112, 'flags' : { 'public' }, 'url'", "}, 'GL_KHR_texture_compression_astc_hdr' : { 'arbnumber' : 118, 'esnumber' : 117, 'flags' : {", ": 'extensions/SGIX/SGIX_interlace.txt', }, 'GL_SGIX_ir_instrument1' : { 'number' : 81, 'flags' : { 'public'", "27, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fbo_render_mipmap.txt', }, 'GL_OES_fixed_point' 
: {", "'url' : 'extensions/AMD/AMD_occlusion_query_event.txt', }, 'GL_AMD_performance_monitor' : { 'number' : 360, 'esnumber' : 50,", ": { 'number' : 76, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/SGIX/SGIX_fragment_lighting_space.txt', }, 'GL_SGIX_fragment_specular_lighting' : { 'flags' : { 'incomplete', 'public' }, 'url'", "'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', }, 'GL_SGIX_complex_polar' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_complex_polar.txt',", "'GL_EXT_packed_float' : { 'number' : 328, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_mirror_once.txt',", ": { 'public' }, 'url' : 'extensions/DMP/DMP_shader_binary.txt', }, 'GL_EXT_422_pixels' : { 'number' :", ": 393, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'url' : 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', }, 'GL_MESAX_texture_stack' : { 'number' : 318, 'flags' : {", "'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_pbuffer.txt', }, 'GL_SGIX_pixel_texture' : { 'number' : 499, 'flags'", "179, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'GL_ARB_bindless_texture' : { 'arbnumber' : 152, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' :", "}, 'url' : 'extensions/ARB/GLX_ARB_get_proc_address.txt', }, 'GL_ARB_get_program_binary' : { 'arbnumber' : 96, 'flags' :", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_lod.txt',", "'WGL_EXT_pixel_format' : { 'number' : 170, 'flags' : { 'public' }, 'supporters' :", "'extensions/ARB/ARB_bindless_texture.txt', }, 'GL_ARB_blend_func_extended' : { 'arbnumber' : 78, 'flags' : { 'public' },", "'url' : 'extensions/MESA/GLX_MESA_swap_control.txt', }, 'GL_MESA_tile_raster_order' : { 'number' : 515, 'esnumber' : 292,", "{ 'public' }, 'url' : 'extensions/OES/OES_blend_equation_separate.txt', }, 
'GL_OES_blend_func_separate' : { 'esnumber' : 2,", "'flags' : { 'public' }, 'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_vertex_hints.txt',", "}, 'GL_OES_shader_io_blocks' : { 'esnumber' : 213, 'flags' : { 'public' }, 'url'", "}, 'GL_ATI_texture_env_combine3' : { 'number' : 279, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 424, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'number' : 361, 'flags' : { 'public' }, 'supporters' : {", "'MS' }, 'url' : 'extensions/EXT/EXT_draw_range_elements.txt', }, 'GL_EXT_draw_transform_feedback' : { 'esnumber' : 272, 'flags'", "'number' : 291, 'esnumber' : 4, 'flags' : { 'public' }, 'supporters' :", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_scale.txt', }, 'GL_SGIX_fog_texture'", "'GOOGLE', 'NVIDIA', 'VMware' }, 'url' : 'extensions/EXT/EXT_window_rectangles.txt', }, 'GL_EXT_x11_sync_object' : { 'number' :", "'extensions/NV/NV_gpu_program5_mem_extended.txt', }, 'GL_NV_gpu_shader5' : { 'number' : 389, 'esnumber' : 260, 'flags' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_buffer_storage.txt', }, 'GL_EXT_clear_texture' : { 'esnumber' : 269,", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_bits.txt', }, 'GL_SGIX_pixel_texture_lod' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_colorspace.txt', }, 'WGL_EXT_create_context_es2_profile' : {", "'GLX_AMD_gpu_association' : { 'number' : 398, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 358, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_interleaved_elements.txt', }, 'GL_AMD_multi_draw_indirect' :", "'extensions/NV/NV_point_sprite.txt', }, 'GL_NV_polygon_mode' : { 'esnumber' : 238, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_pack_invert.txt', }, 'GLX_MESA_pixmap_colormap'", "'url' : 'extensions/SGIX/SGIX_fragment_specular_lighting.txt', }, 
'GL_SGIX_fragments_instrument' : { 'number' : 180, 'flags' : {", "89, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_fp64.txt', }, 'GL_ARB_gpu_shader_int64' : {", ": 'extensions/OES/OES_fixed_point.txt', }, 'GL_OES_fragment_precision_high' : { 'esnumber' : 28, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource.txt', }, 'GL_NV_query_resource_tag'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_matrix_accuracy.txt', }, 'GL_SGIX_color_table_index_mode' : { 'flags'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_clamp.txt', }, 'GL_NV_depth_nonlinear' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_lod_bias.txt', }, 'GL_EXT_texture_mirror_clamp' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow_ambient.txt', }, 'GL_ARB_sparse_buffer' : {", "'url' : 'extensions/ARB/WGL_ARB_pixel_format.txt', }, 'WGL_ARB_render_texture' : { 'arbnumber' : 20, 'flags' : {", "'extensions/EXT/EXT_YUV_target.txt', }, 'GL_EXT_abgr' : { 'number' : 1, 'flags' : { 'public' },", ": 326, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'GL_NV_blend_square' : { 'number' : 194, 'flags' : { 'public' }, 'supporters'", "'esnumber' : 172, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_multisample_interpolation.txt', }, 'GL_OES_single_precision'", ": 194, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_shader_io_blocks.txt', }, 'GL_EXT_shader_non_constant_global_initializers' : { 'esnumber' : 264,", "'url' : 'extensions/ARB/ARB_framebuffer_no_attachments.txt', }, 'GL_ARB_framebuffer_object' : { 'arbnumber' : 45, 'flags' : {", "}, 'GL_EXT_tessellation_shader' : { 'esnumber' : 181, 'flags' : { 'public' }, 'url'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_load.txt', }, 'GL_NV_shader_buffer_store' : { 'number' : 390, 'flags'", "'number' : 155, 'flags' : 
{ 'public' }, 'supporters' : { 'REND' },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_path_rendering.txt', }, 'GL_NV_path_rendering_shared_edge' : { 'number' : 471,", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_lod_bias.txt', }, 'GL_EXT_texture_mirror_clamp' : { 'number' : 298, 'flags'", ": { 'public' }, 'url' : 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', }, 'GL_OES_compressed_paletted_texture' : { 'number' :", ": { 'esnumber' : 37, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_npot.txt',", "{ 'number' : 262, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_sync_control.txt', }, 'GL_OVR_multiview' : {", "{ 'esnumber' : 204, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_elements_base_vertex.txt', },", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_point_sprite.txt', }, 'GL_ARB_polygon_offset_clamp' : { 'arbnumber' : 193,", ": { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias' : { 'GL_ANGLE_texture_compression_dxt1', 'GL_ANGLE_texture_compression_dxt5' },", "'extensions/ARB/ARB_shadow.txt', }, 'GL_ARB_shadow_ambient' : { 'arbnumber' : 24, 'flags' : { 'public' },", "184, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_buffer_object.txt',", ": 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias' : { 'GL_ANGLE_texture_compression_dxt1', 'GL_ANGLE_texture_compression_dxt5' }, }, 'GL_ANGLE_texture_usage' : { 'esnumber'", "114, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counters.txt', }, 'GL_ARB_shader_ballot' : {", "}, 'GL_ARB_sparse_texture_clamp' : { 'arbnumber' : 187, 'flags' : { 'public' }, 'url'", "{ 'number' : 233, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'GLU_EXT_nurbs_tessellator' : { 'number' : 100, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 
'extensions/EXT/EXT_YUV_target.txt', }, 'GL_EXT_abgr' : { 'number' :", "}, 'url' : 'extensions/GREMEDY/GREMEDY_frame_terminator.txt', }, 'GL_GREMEDY_string_marker' : { 'number' : 311, 'flags' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig.txt', }, 'GLX_SGIX_fbconfig_float' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_shader.txt', }, 'GL_ARB_fragment_shader_interlock' : { 'arbnumber' :", "'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', }, 'GL_EXT_EGL_image_storage' : { 'number' : 522, 'esnumber' : 301, 'flags' :", "'GL_EXT_texture_storage' : { 'esnumber' : 108, 'flags' : { 'public' }, 'url' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range.txt', }, 'GL_NV_vertex_array_range2' : { 'number'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize.txt', }, 'GLX_SGIX_video_resize_float'", "}, 'WGL_ARB_pbuffer' : { 'arbnumber' : 11, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/AMD/AMD_sparse_texture.txt', }, 'GL_AMD_stencil_operation_extended' : { 'number' : 413, 'flags' : {", ": 182, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url'", "{ 'public' }, 'supporters' : { 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' :", "'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_texture.txt', }, 'GL_EXT_instanced_arrays' : {", "'number' : 261, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'arbnumber' : 94, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback3.txt',", ": 'extensions/EXT/EXT_win32_keyed_mutex.txt', }, 'GL_EXT_window_rectangles' : { 'number' : 490, 'esnumber' : 263, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_generate_mipmap_sRGB.txt', }, 'GL_NV_geometry_program4' : { 'number'", "'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_vertex.txt', }, 'GL_ARB_indirect_parameters' : { 'arbnumber' : 154, 'flags'", "}, 'GL_SGIX_quad_mesh' : { 'flags' : { 'incomplete' }, 'url' : 
'extensions/SGIX/SGIX_quad_mesh.txt', },", "'GL_SGIX_cylinder_texgen' : { 'number' : 140, 'flags' : { 'incomplete' }, 'supporters' :", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_flush_buffer_range.txt', }, 'GL_APPLE_framebuffer_multisample' : { 'esnumber' : 78,", "'extensions/SGIX/SGIX_bali_timer_instruments.txt', }, 'GL_SGIX_blend_alpha_minmax' : { 'number' : 119, 'flags' : { 'public' },", "'extensions/SGIX/SGIX_calligraphic_fragment.txt', }, 'GL_SGIX_clipmap' : { 'number' : 33, 'flags' : { 'public' },", "'url' : 'extensions/APPLE/APPLE_color_buffer_packed_float.txt', }, 'GL_APPLE_copy_texture_levels' : { 'esnumber' : 123, 'flags' : {", "}, 'GL_OML_interlace' : { 'number' : 239, 'flags' : { 'public' }, 'supporters'", ": 250, 'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url'", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_env_combine3.txt', },", ": { 'number' : 174, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/OES/OES_point_sprite.txt', }, 'GL_OES_primitive_bounding_box' : { 'esnumber' : 212, 'flags'", "}, 'url' : 'extensions/SGIX/SGIX_pixel_texture.txt', 'comments' : 'Previously shared extension number 15 with SGIS_pixel_texture.',", "}, 'url' : 'extensions/AMD/AMD_compressed_ATC_texture.txt', }, 'GL_AMD_conservative_depth' : { 'number' : 385, 'flags' :", "'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_filter4.txt', }, 'GL_SGIS_texture_lod' : { 'number' : 24,", ": 18, 'flags' : { 'public' }, 'supporters' : { 'ES', 'SGI' },", ": 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', }, 'GL_MESA_shader_integer_functions' : { 'number' : 495, 'flags' : { 'public'", ": 98, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_precision.txt', }, 'GL_ARB_shader_stencil_export' :", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_performance_monitor.txt', }, 'GL_AMD_pinned_memory' : { 'number'", "}, 'GL_ARB_copy_image' : { 'arbnumber' : 123, 'flags' : { 'public' }, 'url'", ": 
'extensions/QCOM/QCOM_extended_get.txt', }, 'GL_QCOM_extended_get2' : { 'esnumber' : 63, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage2.txt', }, 'GL_EXT_shader_texture_lod' : { 'esnumber' : 77, 'flags'", ": { 'number' : 136, 'flags' : { 'public' }, 'supporters' : {", "}, 'WGL_NV_DX_interop' : { 'number' : 407, 'flags' : { 'public' }, 'supporters'", "'number' : 279, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", ": { 'number' : 62, 'flags' : { 'public' }, 'supporters' : {", "'number' : 508, 'esnumber' : 284, 'flags' : { 'public' }, 'url' :", ": { 'number' : 240, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/NV/NV_platform_binary.txt', }, 'GL_NV_point_sprite' : { 'number' : 262, 'flags' : { 'public'", "'number' : 443, 'esnumber' : 164, 'flags' : { 'public' }, 'url' :", "42, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'GL_AMD_shader_stencil_value_export' : { 'number' : 444, 'flags' : { 'public' }, 'url'", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_matrix_accuracy.txt', }, 'GL_SGIX_color_table_index_mode' :", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_env_combine3.txt', }, 'GL_ATI_texture_float' :", "}, 'url' : 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias' : { 'GL_ANGLE_texture_compression_dxt1', 'GL_ANGLE_texture_compression_dxt5' }, }, 'GL_ANGLE_texture_usage' :", "'arbnumber' : 117, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage.txt', }, 'GL_ARB_texture_storage_multisample'", "'url' : 'extensions/APPLE/APPLE_vertex_array_range.txt', }, 'GL_APPLE_vertex_program_evaluators' : { 'number' : 369, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_square.txt', }, 'GL_NV_clip_space_w_scaling' : { 'number' : 486, 'esnumber'", ": 'extensions/AMD/AMD_vertex_shader_layer.txt', }, 'GL_AMD_vertex_shader_tessellator' : { 'number' : 363, 'flags' : { 'public'", "{ 'arbnumber' : 73, 'flags' : { 
'public' }, 'url' : 'extensions/ARB/ARB_texture_query_lod.txt', },", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_flush_raster.txt', }, 'GL_SGIX_fog_blend' : { 'flags' : {", "}, 'GL_NV_draw_texture' : { 'number' : 430, 'esnumber' : 126, 'flags' : {", "{ 'public' }, 'url' : 'extensions/ARB/ARB_compatibility.txt', }, 'GL_ARB_compressed_texture_pixel_storage' : { 'arbnumber' : 110,", "'number' : 281, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'GL_APPLE_vertex_program_evaluators' : { 'number' : 369, 'flags' : { 'public' }, 'supporters'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_square.txt', }, 'GL_NV_clip_space_w_scaling' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_reference_plane.txt', }, 'GL_SGIX_resample' : { 'number'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', }, 'GL_SGIX_texture_multi_buffer' : {", ": { 'public' }, 'supporters' : { 'HP', 'SGI', 'SUN' }, 'url' :", "'extensions/3DFX/3DFX_multisample.txt', }, 'GL_3DFX_tbuffer' : { 'number' : 208, 'flags' : { 'public' },", ": 504, 'esnumber' : 281, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_fd.txt',", "'arbnumber' : 176, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_2_compatibility.txt', }, 'GL_ARB_ES3_compatibility'", "'public' }, 'url' : 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias' : { 'WGL_ARB_robustness_share_group_isolation' }, }, 'WGL_ATI_pixel_format_float' :", "}, 'GL_SGIX_instruments' : { 'number' : 55, 'flags' : { 'public' }, 'supporters'", ": 'extensions/NV/NV_vertex_program1_1.txt', }, 'GL_NV_vertex_program2' : { 'number' : 287, 'flags' : { 'public'", "516, 'esnumber' : 294, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "{ '3DL', 'ATI', 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_object.txt', }, 'GL_EXT_framebuffer_sRGB' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object2.txt', }, 
'GL_NV_path_rendering' : { 'number' : 410, 'esnumber'", "'esnumber' : 271, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program1_1.txt', }, 'GL_NV_vertex_program2'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_reflection.txt', }, 'GL_NV_texture_array' : { 'esnumber' :", "237, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", ": 'extensions/ARB/ARB_arrays_of_arrays.txt', }, 'GL_ARB_base_instance' : { 'arbnumber' : 107, 'flags' : { 'public'", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_g_instruments.txt', }, 'GL_SGIX_bali_r_instruments' : { 'flags' : {", "'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_vertex_array.txt', }, 'GL_EXT_vertex_array_bgra' : { 'number'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_multisample.txt', }, 'GL_NV_texture_npot_2D_mipmap' : { 'esnumber' : 96, 'flags'", "'extensions/AMD/AMD_blend_minmax_factor.txt', }, 'GL_AMD_compressed_3DC_texture' : { 'esnumber' : 39, 'flags' : { 'public' },", "'esnumber' : 81, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_shader_binary.txt', }, 'GL_ARM_rgba8'", "168, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias' : { 'GL_KHR_blend_equation_advanced_coherent'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture4D.txt', }, 'GL_SGIS_texture_border_clamp' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader2.txt', }, 'GL_NV_texture_shader3' :", "'url' : 'extensions/SGIX/SGIX_slim.txt', }, 'GL_SGIX_spotlight_cutoff' : { 'number' : 131, 'flags' : {", "}, 'GL_ARB_ES3_compatibility' : { 'arbnumber' : 127, 'flags' : { 'public' }, 'url'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pbuffer.txt', }, 'WGL_ARB_pixel_format' : { 'arbnumber'", ": 359, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", ": { 'public' }, 'url' : 
'extensions/EXT/EXT_post_depth_coverage.txt', }, 'GL_EXT_primitive_bounding_box' : { 'esnumber' :", ": 'extensions/NV/NV_framebuffer_blit.txt', }, 'GL_NV_framebuffer_mixed_samples' : { 'number' : 469, 'esnumber' : 231, 'flags'", "152, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_bindless_texture.txt', }, 'GL_ARB_blend_func_extended' : {", "{ 'GL_NV_blend_equation_advanced_coherent' }, }, 'GL_NV_blend_minmax_factor' : { 'number' : 510, 'esnumber' : 285,", "'public' }, 'url' : 'extensions/FJ/FJ_shader_binary_GCCSO.txt', }, 'GL_GREMEDY_frame_terminator' : { 'number' : 345, 'flags'", ": 'extensions/SGIX/SGIX_framezoom.txt', }, 'GLX_SGIX_hyperpipe' : { 'number' : 307, 'flags' : { 'public'", "}, 'url' : 'extensions/ARB/ARB_occlusion_query.txt', }, 'GL_ARB_occlusion_query2' : { 'arbnumber' : 80, 'flags' :", "'extensions/ARB/ARB_shading_language_packing.txt', }, 'GL_ARB_shadow' : { 'arbnumber' : 23, 'flags' : { 'public' },", "'url' : 'extensions/QCOM/QCOM_texture_foveated.txt', }, 'GL_QCOM_shader_framebuffer_fetch_noncoherent' : { 'esnumber' : 277, 'flags' : {", "{ 'number' : 272, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "'extensions/EXT/EXT_texture_mirror_clamp.txt', }, 'GL_EXT_texture_mirror_clamp_to_edge' : { 'esnumber' : 291, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_no_error.txt', }, 'GL_KHR_parallel_shader_compile' : { 'arbnumber'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_polygon_offset_clamp.txt', }, 'GL_EXT_post_depth_coverage' : { 'number' : 461,", "'url' : 'extensions/SGIX/SGIX_scalebias_hint.txt', }, 'GL_SGIX_shadow' : { 'number' : 34, 'flags' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_half_float.txt', },", "'esnumber' : 134, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_pixel_buffer_object.txt', }, 'GL_NV_pixel_data_range'", "'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_static_vertex_array.txt', }, 'GL_EXT_stencil_clear_tag' : { 'number'", ": { 'NVIDIA' }, 
'url' : 'extensions/NV/NV_bindless_multi_draw_indirect.txt', }, 'GL_NV_bindless_multi_draw_indirect_count' : { 'number' :", "'url' : 'extensions/NV/NV_fragment_program2.txt', }, 'GL_NV_fragment_program4' : { 'number' : 335, 'flags' : {", ": { 'esnumber' : 268, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_conservative_depth.txt',", "'ARB' }, 'url' : 'extensions/ARB/ARB_depth_buffer_float.txt', }, 'GL_ARB_depth_clamp' : { 'arbnumber' : 61, 'flags'", ": 'extensions/NV/NV_multisample_filter_hint.txt', }, 'GL_NV_non_square_matrices' : { 'esnumber' : 160, 'flags' : { 'public'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_equation_advanced.txt', 'alias' : {", "{ 'number' : 342, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_object.txt', }, 'GL_ARB_framebuffer_sRGB' : {", "'supporters' : { 'IdSoftware', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_swizzle.txt', }, 'GL_EXT_texture_type_2_10_10_10_REV' : {", ": { 'arbnumber' : 16, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_element_array.txt', }, 'GL_ATI_envmap_bumpmap' :", ": 'extensions/APPLE/APPLE_client_storage.txt', }, 'GL_APPLE_clip_distance' : { 'esnumber' : 193, 'flags' : { 'public'", ": 'extensions/NV/NV_texgen_emboss.txt', }, 'GL_NV_texgen_reflection' : { 'number' : 179, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_interlace.txt', }, 'GL_SGIX_ir_instrument1'", "'GL_EXT_map_buffer_range' : { 'esnumber' : 121, 'flags' : { 'public' }, 'url' :", "'GLX_MESA_set_3dfx_mode' : { 'number' : 218, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/SGIS/SGIS_texture_select.txt', }, 'GL_SGIX_async' : { 'number' : 132, 'flags' : {", ": { 'esnumber' : 148, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sRGB_formats.txt',", "178, 'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url' :", 
"}, 'GL_ARB_occlusion_query2' : { 'arbnumber' : 80, 'flags' : { 'public' }, 'url'", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_separate_stencil.txt', }, 'GL_ATI_text_fragment_shader' : { 'number' :", "'GL_AMD_performance_monitor' : { 'number' : 360, 'esnumber' : 50, 'flags' : { 'public'", "'GL_ARB_texture_mirrored_repeat' : { 'arbnumber' : 21, 'flags' : { 'public' }, 'supporters' :", "'esnumber' : 6, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_subsample.txt',", "}, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/S3/S3_s3tc.txt', }, 'GLX_SGIS_blended_overlay' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_cube_map.txt', }, 'GL_ARB_texture_cube_map_array' : {", "'GL_SGIX_vertex_preclip' : { 'number' : 210, 'flags' : { 'public' }, 'supporters' :", "'extensions/SGIS/SGIS_texture_select.txt', }, 'GL_SGIX_async' : { 'number' : 132, 'flags' : { 'incomplete', 'public'", "'GL_IMG_read_format' : { 'esnumber' : 53, 'flags' : { 'public' }, 'url' :", "'public' }, 'url' : 'extensions/OES/OES_matrix_get.txt', }, 'GL_OES_matrix_palette' : { 'esnumber' : 12, 'flags'", "'GL_AMD_sample_positions' : { 'number' : 405, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/NV/WGL_NV_DX_interop2.txt', }, 'WGL_NV_delay_before_swap' : { 'number' : 436, 'flags' :", ": 295, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_clip_space_w_scaling.txt', }, 'GL_NV_command_list' :", "'esnumber' : 125, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multiview_draw_buffers.txt', }, 'GLU_EXT_nurbs_tessellator'", "}, 'GL_OES_EGL_image' : { 'esnumber' : 23, 'flags' : { 'public' }, 'url'", "49, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'GL_AMD_multi_draw_indirect' : { 'number' : 408, 'flags' : { 'public' }, 'supporters'", "'extensions/IMG/IMG_texture_compression_pvrtc2.txt', }, 'GL_IMG_texture_env_enhanced_fixed_function' : { 'esnumber' : 58, 
'flags' : { 'public' },", "'extensions/NV/NV_explicit_attrib_location.txt', }, 'GL_NV_explicit_multisample' : { 'number' : 357, 'flags' : { 'public' },", "{ 'WGL_NV_float_buffer' }, }, 'GL_NV_fog_distance' : { 'number' : 192, 'flags' : {", "'url' : 'extensions/NV/NV_EGL_stream_consumer_external.txt', }, 'GL_NV_alpha_to_coverage_dither_control' : { 'number' : 500, 'flags' : {", "'esnumber' : 118, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_debug.txt', }, 'GL_KHR_no_error'", ": { 'number' : 406, 'flags' : { 'public' }, 'supporters' : {", "'number' : 378, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_expand_normal.txt', },", ": { 'number' : 382, 'flags' : { 'public' }, 'supporters' : {", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_vector_ops.txt', }, 'GL_SGIX_vertex_array_object' : {", "'url' : 'extensions/EXT/EXT_external_objects_win32.txt', 'alias' : { 'GL_EXT_semaphore_win32' }, }, 'GL_EXT_float_blend' : { 'esnumber'", "'number' : 377, 'esnumber' : 101, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/ARB/ARB_texture_env_combine.txt', }, 'GL_ARB_texture_env_crossbar' : { 'arbnumber' : 18, 'flags' : {", "'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_compiled_vertex_array.txt', }, 'GL_EXT_compressed_ETC1_RGB8_sub_texture' : {", "'extensions/SGIX/SGIX_async_histogram.txt', }, 'GL_SGIX_async_pixel' : { 'number' : 133, 'flags' : { 'incomplete', 'public'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_robust_buffer_access_behavior.txt', }, 'GL_ARB_robustness' : { 'arbnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_command_list.txt', }, 'GL_NV_compute_program5' : { 'number'", "28, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI' },", ": 'extensions/NV/NV_uniform_buffer_unified_memory.txt', }, 'GL_NV_vdpau_interop' : { 'number' : 396, 'flags' : { 'public'", ": { 'number' : 190, 'flags' : { 'public' }, 
'supporters' : {", "{ 'public' }, 'url' : 'extensions/OES/OES_stencil1.txt', }, 'GL_OES_stencil4' : { 'esnumber' : 32,", "{ 'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_texture_lighting.txt', }, 'GL_IBM_cull_vertex'", ": 'extensions/SGI/GLX_SGI_video_sync.txt', }, 'GL_SUNX_constant_data' : { 'number' : 163, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', }, 'GL_PGI_misc_hints' : { 'number' : 77, 'flags'", ": { 'ATI', 'NVIDIA' }, 'url' : 'extensions/S3/S3_s3tc.txt', }, 'GLX_SGIS_blended_overlay' : { 'number'", "}, 'url' : 'extensions/QCOM/QCOM_extended_get.txt', }, 'GL_QCOM_extended_get2' : { 'esnumber' : 63, 'flags' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_video_sync.txt', }, 'GL_SUNX_constant_data' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_compatibility.txt', }, 'GL_ARB_compressed_texture_pixel_storage' : { 'arbnumber' :", "'extensions/ARB/ARB_direct_state_access.txt', }, 'GL_ARB_draw_buffers' : { 'arbnumber' : 37, 'flags' : { 'public' },", ": 30, 'flags' : { 'public' }, 'supporters' : { 'DEC', 'HP', 'IBM',", ": 144, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_separate_specular_color.txt', }, 'GL_EXT_shader_framebuffer_fetch' :", ": { 'esnumber' : 13, 'flags' : { 'incomplete', 'private' }, 'comments' :", ": { 'MESA' }, 'url' : 'extensions/MESA/MESA_tile_raster_order.txt', }, 'GL_MESA_window_pos' : { 'number' :", "'url' : 'extensions/EXT/EXT_texture_buffer.txt', }, 'GL_EXT_texture_buffer_object' : { 'number' : 330, 'flags' : {", "'GL_APPLE_client_storage' : { 'number' : 270, 'flags' : { 'public' }, 'supporters' :", ": { 'arbnumber' : 21, 'flags' : { 'public' }, 'supporters' : {", "'WGL_ARB_framebuffer_sRGB' }, }, 'GL_ARB_geometry_shader4' : { 'arbnumber' : 47, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/QCOM/QCOM_texture_foveated.txt', }, 'GL_QCOM_shader_framebuffer_fetch_noncoherent' : { 'esnumber' :", ": { 'ARB' }, 'url' : 
'extensions/ARB/ARB_vertex_shader.txt', }, 'GL_ARB_vertex_type_10f_11f_11f_rev' : { 'arbnumber' :", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', }, 'GL_SGIX_nurbs_eval' : { 'flags' :", ": { 'number' : 281, 'flags' : { 'public' }, 'supporters' : {", "{ 'esnumber' : 173, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_stencil8.txt', },", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_gather_bias_lod.txt', },", "'extensions/EXT/EXT_paletted_texture.txt', }, 'GL_EXT_pixel_buffer_object' : { 'number' : 302, 'flags' : { 'public' },", ": 125, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multiview_draw_buffers.txt', }, 'GLU_EXT_nurbs_tessellator' :", ": 6, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_disjoint_timer_query.txt', }, 'GL_EXT_draw_buffers' : { 'esnumber'", "}, 'url' : 'extensions/OES/OES_point_sprite.txt', }, 'GL_OES_primitive_bounding_box' : { 'esnumber' : 212, 'flags' :", "'esnumber' : 162, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_compression_astc.txt', }, 'GL_OES_texture_cube_map'", ": { 'esnumber' : 256, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_protected_textures.txt',", "'supporters' : { 'REND' }, 'url' : 'extensions/REND/REND_screen_coordinates.txt', }, 'GL_S3_s3tc' : { 'number'", "'GL_NV_framebuffer_mixed_samples' : { 'number' : 469, 'esnumber' : 231, 'flags' : { 'public'", "{ 'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' }, }, 'GL_EXT_texture_compression_bptc' : { 'esnumber' : 287, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_gpu_shader5.txt', }, 'GL_ARB_gpu_shader_fp64' : { 'arbnumber' : 89, 'flags' :", "'extensions/ARB/ARB_shading_language_include.txt', }, 'GL_ARB_shading_language_packing' : { 'arbnumber' : 116, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vdpau_interop.txt',", "277, 'flags' : { 
'public' }, 'url' : 'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', }, 'GL_REND_screen_coordinates' : {", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_blend.txt', }, 'GL_SGIX_fog_factor_to_alpha' : { 'flags' : {", "'number' : 273, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "'url' : 'extensions/EXT/EXT_texture_sRGB_RG8.txt', }, 'GL_EXT_texture_sRGB_decode' : { 'number' : 402, 'esnumber' : 152,", "{ 'esnumber' : 194, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_color_buffer_packed_float.txt', },", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_spotlight_cutoff.txt',", "'url' : 'extensions/EXT/WGL_EXT_display_color_table.txt', }, 'WGL_EXT_extensions_string' : { 'number' : 168, 'flags' : {", "'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform.txt', }, 'GL_EXT_pixel_transform_color_table' : {", ": 'extensions/ARB/ARB_pipeline_statistics_query.txt', }, 'GL_ARB_pixel_buffer_object' : { 'arbnumber' : 42, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_instruments.txt', }, 'GL_SGIX_interlace'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_instanced_arrays.txt', }, 'GL_NV_internalformat_sample_query' : { 'number'", "'url' : 'extensions/ARB/GLX_ARB_create_context_robustness.txt', }, 'GL_ARB_cull_distance' : { 'arbnumber' : 162, 'flags' : {", "'number' : 314, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_EXT_texture_mirror_clamp_to_edge' : { 'esnumber' : 291, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/ARB/ARB_clear_texture.txt', }, 'GL_ARB_clip_control' : { 'arbnumber' : 160, 'flags' : {", "'public' }, 'supporters' : { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_vertex_array_bgra.txt',", "'public' }, 'url' : 'extensions/EXT/EXT_texture_perturb_normal.txt', }, 'GL_EXT_texture_rg' : { 'esnumber' : 103, 'flags'", "{ 'number' : 390, 'flags' : { 'public' }, 
'supporters' : { 'NVIDIA'", "'extensions/SGIS/GLX_SGIS_blended_overlay.txt', }, 'GL_SGIS_clip_band_hint' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_clip_band_hint.txt',", "'esnumber' : 294, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "}, 'url' : 'extensions/APPLE/APPLE_transform_hint.txt', }, 'GL_APPLE_vertex_array_object' : { 'number' : 273, 'flags' :", ": { 'number' : 461, 'esnumber' : 225, 'flags' : { 'public' },", "}, 'url' : 'extensions/AMD/AMD_framebuffer_sample_positions.txt', }, 'GL_AMD_gcn_shader' : { 'number' : 453, 'flags' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_float_blend.txt', }, 'GL_EXT_fog_coord' : { 'number' : 149,", "'url' : 'extensions/IMG/IMG_framebuffer_downsample.txt', }, 'GL_IMG_multisampled_render_to_texture' : { 'esnumber' : 74, 'flags' : {", "'esnumber' : 39, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_3DC_texture.txt', }, 'GL_AMD_compressed_ATC_texture'", ": 45, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'url' : 'extensions/OES/OES_gpu_shader5.txt', }, 'GL_OES_mapbuffer' : { 'esnumber' : 29, 'flags' : {", "'extensions/NV/NV_texgen_reflection.txt', }, 'GL_NV_texture_array' : { 'esnumber' : 133, 'flags' : { 'public' },", "'GL_OES_required_internalformat' : { 'esnumber' : 115, 'flags' : { 'public' }, 'url' :", "'extensions/EXT/EXT_texture_integer.txt', }, 'GL_EXT_texture_lod_bias' : { 'number' : 186, 'esnumber' : 60, 'flags' :", "'public' }, 'url' : 'extensions/EXT/EXT_instanced_arrays.txt', }, 'GLX_EXT_libglvnd' : { 'number' : 482, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external_essl3.txt', }, 'GL_OES_EGL_sync' : { 'esnumber'", "}, 'GL_KHR_no_error' : { 'arbnumber' : 175, 'esnumber' : 243, 'flags' : {", "'number' : 3, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM',", "}, 'url' : 'extensions/ARB/ARB_copy_buffer.txt', }, 'GL_ARB_copy_image' : { 'arbnumber' : 123, 'flags' :", ": { 'public' }, 'url' : 
'extensions/EXT/EXT_protected_textures.txt', }, 'GL_EXT_provoking_vertex' : { 'number' :", ": { 'public' }, 'supporters' : { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_filter4.txt',", ": 'extensions/IBM/IBM_rasterpos_clip.txt', }, 'GL_IBM_static_data' : { 'number' : 223, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/EXT/EXT_clip_volume_hint.txt', }, 'GL_EXT_cmyka' : { 'number' : 18, 'flags'", "{ 'arbnumber' : 12, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_bptc.txt', }, 'GL_EXT_texture_compression_dxt1' :", "402, 'esnumber' : 152, 'flags' : { 'public' }, 'supporters' : { 'APPLE',", "{ 'esnumber' : 54, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc.txt', },", "{ 'public' }, 'url' : 'extensions/NV/NV_shader_noperspective_interpolation.txt', }, 'GL_NV_shader_storage_buffer_object' : { 'number' : 422,", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_sprite.txt', }, 'GL_SGIX_subdiv_patch' :", "165, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch.txt', }, 'GL_ARM_shader_framebuffer_fetch_depth_stencil' : {", "'esnumber' : 91, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_buffers.txt', }, 'GL_NV_draw_instanced'", "'url' : 'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', }, 'GL_NV_conservative_raster_underestimation' : { 'number' : 518, 'flags' : {", "'url' : 'extensions/OES/OES_mapbuffer.txt', }, 'GL_OES_matrix_get' : { 'esnumber' : 11, 'flags' : {", "{ 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_coverage_sample.txt', }, 'GL_NV_deep_texture3D' : { 'number' : 424,", "282, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "129, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_3dvision_settings.txt', }, 'GL_NV_EGL_stream_consumer_external' : {", ": 'extensions/EXT/EXT_bindable_uniform.txt', }, 'GL_EXT_blend_color' : { 'number' : 2, 'flags' 
: { 'public'", ": 132, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI' },", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_filter4.txt', }, 'GL_SGIS_texture_lod' : { 'number' : 24, 'flags'", "'extensions/OES/OES_shader_image_atomic.txt', }, 'GL_OES_shader_io_blocks' : { 'esnumber' : 213, 'flags' : { 'public' },", "'esnumber' : 273, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_framebuffer_foveated.txt', }, 'GL_QCOM_texture_foveated'", "'url' : 'extensions/EXT/EXT_raster_multisample.txt', }, 'GL_EXT_read_format_bgra' : { 'esnumber' : 66, 'flags' : {", ": 'extensions/ARB/ARB_sync.txt', }, 'GL_ARB_tessellation_shader' : { 'arbnumber' : 91, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_levels.txt', }, 'GL_ARB_texture_query_lod' : { 'arbnumber' :", "'public' }, 'url' : 'extensions/NV/NV_uniform_buffer_unified_memory.txt', }, 'GL_NV_vdpau_interop' : { 'number' : 396, 'flags'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_int64.txt', }, 'GL_NV_shader_buffer_load' : {", ": 380, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'public' }, 'url' : 'extensions/OES/OES_draw_buffers_indexed.txt', }, 'GL_OES_draw_elements_base_vertex' : { 'esnumber' :", "'number' : 76, 'flags' : { 'public' }, 'supporters' : { 'TGS' },", "'GL_APPLE_float_pixels' : { 'number' : 368, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/APPLE/APPLE_fence.txt', }, 'GL_APPLE_float_pixels' : { 'number' : 368, 'flags' : {", "41, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'number' : 79, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clip_volume_hint.txt', }, 'GL_EXT_cmyka'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_texture.txt', }, 'GL_ARB_derivative_control' : { 'arbnumber'", "'extensions/OES/OES_required_internalformat.txt', }, 'GL_OES_rgb8_rgba8' : { 'esnumber' : 30, 'flags' : { 'public' },", "'extensions/EXT/EXT_external_buffer.txt', }, 
'GL_EXT_EGL_image_array' : { 'esnumber' : 278, 'flags' : { 'public' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_packed_float.txt', 'alias' : { 'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float'", "372, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "'arbnumber' : 159, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_1_compatibility.txt', }, 'GL_ARB_ES3_2_compatibility'", "'GL_EXT_memory_object_win32' : { 'number' : 505, 'esnumber' : 282, 'flags' : { 'public'", "}, 'GL_ARB_enhanced_layouts' : { 'arbnumber' : 146, 'flags' : { 'public' }, 'url'", ": 'extensions/EXT/EXT_geometry_shader4.txt', }, 'GLX_EXT_stereo_tree' : { 'number' : 452, 'flags' : { 'public'", ": { 'esnumber' : 141, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_instanced.txt',", "'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_video_out.txt', }, 'GL_NV_viewport_array' : { 'esnumber' : 202, 'flags'", ": 'extensions/ARB/ARB_clear_texture.txt', }, 'GL_ARB_clip_control' : { 'arbnumber' : 160, 'flags' : { 'public'", "'extensions/ARB/ARB_color_buffer_float.txt', 'alias' : { 'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float' }, }, 'GL_ARB_compatibility' : { 'arbnumber' :", "'esnumber' : 90, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_unpack_subimage.txt', }, 'GL_EXT_vertex_array'", "'extensions/SGIX/SGIX_fog_patchy.txt', }, 'GL_SGIX_fog_scale' : { 'number' : 161, 'flags' : { 'incomplete' },", "}, 'GL_EXT_draw_transform_feedback' : { 'esnumber' : 272, 'flags' : { 'public' }, 'url'", "}, 'GLX_MESA_copy_sub_buffer' : { 'number' : 215, 'flags' : { 'public' }, 'supporters'", "'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow_ambient.txt', }, 'GL_SGIX_slim' : { 'flags' : {", "195, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_packed_float.txt', }, 'GL_APPLE_texture_range' : {", "'url' : 'extensions/ARB/ARB_gl_spirv.txt', }, 'GL_ARB_gpu_shader5' : { 'arbnumber' : 88, 'flags' : {", 
"'extensions/EXT/EXT_shader_texture_lod.txt', }, 'GL_EXT_shadow_funcs' : { 'number' : 267, 'flags' : { 'public' },", "'extensions/MESA/MESA_pack_invert.txt', }, 'GLX_MESA_pixmap_colormap' : { 'number' : 216, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_mesh_array.txt', }, 'GL_SUN_slice_accum' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_latc.txt', },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_swap_group.txt', }, 'GL_NV_tessellation_program5' : { 'number' :", ": { 'esnumber' : 167, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage.txt',", ": { 'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_color_table.txt', 'comments' : 'Partial HP", "{ 'arbnumber' : 111, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conservative_depth.txt', },", "'number' : 132, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI'", ": 25, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_precision.txt', }, 'GL_ARB_shader_stencil_export' : { 'arbnumber' :", ": { 'number' : 436, 'flags' : { 'public' }, 'supporters' : {", ": 291, 'esnumber' : 4, 'flags' : { 'public' }, 'supporters' : {", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_blend_equation_separate.txt', }, 'GL_EXT_blend_func_extended' : { 'esnumber' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop2.txt', }, 'WGL_NV_delay_before_swap' :", "'url' : 'extensions/ARB/ARB_transform_feedback2.txt', }, 'GL_ARB_transform_feedback3' : { 'arbnumber' : 94, 'flags' : {", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_buffer_region.txt', }, 'WGL_ARB_create_context' :", "}, 'url' : 'extensions/EXT/EXT_draw_instanced.txt', }, 'GL_EXT_draw_range_elements' : { 'number' : 112, 'flags' :", ": { 'number' : 162, 'flags' : { 'incomplete' }, 'supporters' : {", ": 
'extensions/NV/GLX_NV_copy_buffer.txt', }, 'GL_NV_copy_buffer' : { 'esnumber' : 158, 'flags' : { 'public'", "}, 'GL_EXT_swap_control' : { 'number' : 375, 'flags' : { 'public' }, 'supporters'", ": 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', }, 'GL_EXT_texture_view' : { 'esnumber' : 185, 'flags' : { 'public'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5.txt', }, 'GL_NV_gpu_program5_mem_extended' : {", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multisample.txt', 'alias' :", ": 180, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", ": { 'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_convolution_border_modes.txt', },", ": 79, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clip_volume_hint.txt', }, 'GL_EXT_cmyka' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_pixel_buffer_object.txt', }, 'GL_ARB_point_parameters'", "{ 'esnumber' : 105, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB.txt', },", "'GL_ARB_shader_subroutine' : { 'arbnumber' : 90, 'flags' : { 'public' }, 'url' :", "}, 'GL_AMD_stencil_operation_extended' : { 'number' : 413, 'flags' : { 'public' }, 'supporters'", ": 64, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_provoking_vertex.txt', }, 'GL_ARB_query_buffer_object' :", "'public' }, 'url' : 'extensions/ARB/ARB_texture_barrier.txt', }, 'GL_ARB_texture_border_clamp' : { 'arbnumber' : 13, 'flags'", ": { 'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_occlusion_test.txt', },", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async.txt', }, 'GL_SGIX_async_histogram' : { 'number'", "{ 'number' : 474, 'esnumber' : 261, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/I3D/WGL_I3D_swap_frame_usage.txt', }, 'GL_WIN_phong_shading' : { 'number' : 113, 'flags' : {", "'MESA' }, 'url' : 'extensions/EXT/EXT_clip_control.txt', }, 'WGL_I3D_digital_video_control' : { 'number' : 250, 'flags'", "'public' }, 'url' : 
'extensions/OES/OES_depth_texture_cube_map.txt', }, 'GL_OES_draw_buffers_indexed' : { 'esnumber' : 209, 'flags'", "'url' : 'extensions/NV/NV_stereo_view_rendering.txt', }, 'GLX_NV_swap_group' : { 'number' : 350, 'flags' : {", "'public' }, 'url' : 'extensions/QCOM/QCOM_writeonly_rendering.txt', }, 'GL_QCOM_framebuffer_foveated' : { 'esnumber' : 273, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_float_blend.txt', }, 'GL_EXT_fog_coord' : { 'number'", ": 'extensions/ARM/ARM_mali_program_binary.txt', }, 'GL_ARM_mali_shader_binary' : { 'esnumber' : 81, 'flags' : { 'public'", "'url' : 'extensions/APPLE/APPLE_specular_vector.txt', }, 'GL_APPLE_sync' : { 'esnumber' : 124, 'flags' : {", "{ 'public' }, 'url' : 'extensions/ARM/ARM_rgba8.txt', }, 'GL_ARM_shader_framebuffer_fetch' : { 'esnumber' : 165,", "'url' : 'extensions/SGIX/SGIX_spotlight_cutoff.txt', }, 'GL_SGIX_sprite' : { 'number' : 52, 'flags' : {", ": { 'number' : 273, 'flags' : { 'public' }, 'supporters' : {", ": { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_gpu_memory_info.txt', }, 'GL_NVX_linked_gpu_multicast' : { 'number' :", ": { 'number' : 100, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/ARB/ARB_texture_swizzle.txt', }, 'GL_ARB_texture_view' : { 'arbnumber' : 124, 'flags' :", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_aux_depth_stencil.txt', }, 'GL_APPLE_client_storage' : { 'number' : 270,", "{ 'arbnumber' : 120, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_arrays_of_arrays.txt', },", "97, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 'alias' : {", "'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_cull_vertex.txt', }, 'GL_IBM_multimode_draw_arrays' : { 'number'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_conditional_render.txt',", "72, 'flags' : { 'public' }, 'url' : 
'../EGL/extensions/NV/EGL_NV_coverage_sample.txt', }, 'GL_NV_deep_texture3D' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_shuffle.txt', }, 'GL_NV_shadow_samplers_array' : { 'esnumber'", "{ 'public' }, 'url' : 'extensions/NV/NV_texture_array.txt', }, 'GL_NV_texture_barrier' : { 'number' : 381,", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vdpau_interop.txt', },", ": { 'APPLE', 'CodeWeavers', 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_decode.txt', }, 'GL_EXT_texture_shared_exponent' :", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_decimation.txt', },", "{ 'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control' }, }, 'GL_KHR_debug' : { 'arbnumber' : 119, 'esnumber' :", "}, 'GL_OES_surfaceless_context' : { 'esnumber' : 116, 'flags' : { 'public' }, 'url'", ": { 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_edge_clamp.txt', }, 'GL_SGIS_texture_filter4' : {", "'extensions/APPLE/APPLE_texture_max_level.txt', }, 'GL_APPLE_texture_packed_float' : { 'esnumber' : 195, 'flags' : { 'public' },", "}, 'url' : 'extensions/NV/NV_EGL_stream_consumer_external.txt', }, 'GL_NV_alpha_to_coverage_dither_control' : { 'number' : 500, 'flags' :", "'url' : 'extensions/EXT/GLX_EXT_stereo_tree.txt', }, 'GL_EXT_gpu_program_parameters' : { 'number' : 320, 'flags' : {", "'arbnumber' : 5, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'public' }, 'url' : 'extensions/DMP/DMP_program_binary.txt', }, 'GL_DMP_shader_binary' : { 'esnumber' :", ": { 'GL_ANGLE_texture_compression_dxt1', 'GL_ANGLE_texture_compression_dxt5' }, }, 'GL_ANGLE_texture_usage' : { 'esnumber' : 112, 'flags'", "'GL_APPLE_vertex_array_object' : { 'number' : 273, 'flags' : { 'public' }, 'supporters' :", "'GL_NV_parameter_buffer_object2' : { 'number' : 378, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/EXT/EXT_instanced_arrays.txt', }, 'GLX_EXT_libglvnd' : { 'number' : 482, 
'flags' :", "}, 'GL_ARB_draw_elements_base_vertex' : { 'arbnumber' : 62, 'flags' : { 'public' }, 'url'", "{ 'number' : 490, 'esnumber' : 263, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/SGIX/SGIX_async_pixel.txt', }, 'GL_SGIX_bali_g_instruments' : { 'flags' : { 'incomplete' },", "'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_query_matrix.txt', }, 'GL_OES_read_format' :", "'GL_AMD_transform_feedback3_lines_triangles' : { 'number' : 397, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/NV/NV_gpu_program5_mem_extended.txt', }, 'GL_NV_gpu_shader5' : { 'number' : 389, 'esnumber' : 260,", "}, 'url' : 'extensions/ARB/ARB_draw_elements_base_vertex.txt', }, 'GL_ARB_draw_indirect' : { 'arbnumber' : 87, 'flags' :", "{ 'esnumber' : 78, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_framebuffer_multisample.txt', },", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_marker.txt',", "488, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_float64.txt', }, 'GL_NV_shader_atomic_fp16_vector' : {", ": { 'number' : 11, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get2.txt', }, 'GL_QCOM_performance_monitor_global_mode' : { 'esnumber'", "{ 'GL_SGIX_vertex_preclip_hint' }, }, 'GLX_SGIX_video_resize' : { 'number' : 83, 'flags' : {", "{ 'SGI' }, 'url' : 'extensions/SGI/SGI_fft.txt', }, 'GLU_SGI_filter4_parameters' : { 'number' : 85,", "{ 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_linked_gpu_multicast.txt', }, 'GL_NV_3dvision_settings' : { 'esnumber' : 129,", "'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', }, 'GL_EXT_texture_cube_map' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/EXT/EXT_texture_cube_map.txt',", ": 'extensions/NV/NV_half_float.txt', }, 'GL_NV_image_formats' : { 'esnumber' : 200, 'flags' : { 'public'", "'GL_APPLE_color_buffer_packed_float' : { 'esnumber' : 194, 'flags' : { 'public' }, 'url' 
:", "}, 'url' : 'extensions/EXT/EXT_window_rectangles.txt', }, 'GL_EXT_x11_sync_object' : { 'number' : 406, 'flags' :", ": 119, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": { 'public' }, 'url' : 'extensions/NV/NV_viewport_array2.txt', }, 'GL_NV_viewport_swizzle' : { 'number' :", "'extensions/EXT/WGL_EXT_pixel_format.txt', }, 'WGL_EXT_swap_control' : { 'number' : 172, 'flags' : { 'public' },", "}, 'GL_ARB_texture_storage' : { 'arbnumber' : 117, 'flags' : { 'public' }, 'url'", "{ 'esnumber' : 158, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_buffer.txt', },", ": 'extensions/NV/NV_multisample_coverage.txt', }, 'GL_NV_multisample_filter_hint' : { 'number' : 259, 'flags' : { 'public'", "}, 'GL_OES_framebuffer_object' : { 'esnumber' : 10, 'flags' : { 'public' }, 'url'", ": 57, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_user_clip_plane.txt', }, 'GL_INGR_color_clamp' :", "}, 'GL_EXT_texture_snorm' : { 'number' : 365, 'flags' : { 'public' }, 'supporters'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_convolution_accuracy.txt', }, 'GL_SGIX_cube_map' : { 'number' : 130, 'flags'", "'extensions/AMD/AMD_program_binary_Z400.txt', }, 'GL_AMD_query_buffer_object' : { 'number' : 420, 'flags' : { 'public' },", "'GL_IMG_texture_compression_pvrtc2' : { 'esnumber' : 140, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/DMP/DMP_shader_binary.txt', }, 'GL_EXT_422_pixels' : { 'number'", "}, 'url' : 'extensions/OES/OES_draw_texture.txt', }, 'GL_OES_element_index_uint' : { 'esnumber' : 26, 'flags' :", "}, 'url' : 'extensions/AMD/AMD_gpu_shader_int64.txt', }, 'GL_AMD_interleaved_elements' : { 'number' : 431, 'flags' :", "'GL_SGIX_tag_sample_buffer' : { 'number' : 58, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_translated_shader_source.txt', }, 'GL_APPLE_aux_depth_stencil' : { 'number'", "{ 'public' }, 'supporters' : { 'INTEL' }, 'url' : 
'extensions/INTEL/INTEL_fragment_shader_ordering.txt', }, 'GL_INTEL_framebuffer_CMAA'", "'extensions/ARB/ARB_spirv_extensions.txt', }, 'GL_ARB_stencil_texturing' : { 'arbnumber' : 138, 'flags' : { 'public' },", "'extensions/EXT/EXT_sRGB.txt', }, 'GL_EXT_sRGB_write_control' : { 'esnumber' : 153, 'flags' : { 'public' },", "{ 'esnumber' : 179, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_implicit_conversions.txt', },", ": { 'esnumber' : 135, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_bgr.txt',", "{ 'number' : 191, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'incomplete' }, 'url' : 'extensions/SGIS/SGIS_clip_band_hint.txt', }, 'GLX_SGIS_color_range' : { 'number' : 115, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_pixel_data_range.txt',", ": { 'number' : 437, 'esnumber' : 161, 'flags' : { 'public' },", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/WGL_ATI_pixel_format_float.txt', }, 'WGL_EXT_colorspace' : { 'number'", "158, 'flags' : { 'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url'", "}, 'GL_NV_shader_atomic_fp16_vector' : { 'number' : 474, 'esnumber' : 261, 'flags' : {", "230, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_shader_interlock.txt', }, 'GL_NV_framebuffer_blit' : {", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_pinned_memory.txt', }, 'GL_AMD_program_binary_Z400' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_sprite.txt', }, 'GL_ARB_polygon_offset_clamp' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object2.txt', }, 'GL_NV_path_rendering'", ": 241, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview.txt', }, 'GL_OVR_multiview2' :", ": { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_depth_clamp_separate.txt', }, 'GL_AMD_draw_buffers_blend' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_array_object.txt', }, 'GL_OES_vertex_half_float' : { 
'esnumber'", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_element_array.txt', }, 'GL_ATI_envmap_bumpmap' : {", "}, 'GL_ARB_vertex_type_10f_11f_11f_rev' : { 'arbnumber' : 151, 'flags' : { 'public' }, 'url'", "'number' : 184, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI'", "HP support.', }, 'GL_SGI_complex' : { 'number' : 87, 'flags' : { 'incomplete'", "'public' }, 'url' : 'extensions/EXT/EXT_robustness.txt', }, 'GL_EXT_sRGB' : { 'esnumber' : 105, 'flags'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_texture.txt', }, 'GL_NV_draw_vulkan_image' : {", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_instanced.txt', }, 'GL_EXT_draw_range_elements' : { 'number' : 112,", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_instruments.txt',", ": { 'number' : 502, 'flags' : { 'public' }, 'supporters' : {", ": 31, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2_option.txt', }, 'GL_NV_vertex_program3' : { 'number'", "'number' : 220, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", "}, 'url' : 'extensions/IMG/IMG_user_clip_plane.txt', }, 'GL_INGR_color_clamp' : { 'number' : 174, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_palette.txt', }, 'GL_OES_packed_depth_stencil' : { 'esnumber'", "'TransGaming' }, 'url' : 'extensions/EXT/EXT_vertex_array_bgra.txt', }, 'GL_EXT_vertex_array_set' : { 'flags' : { 'public'", "'url' : 'extensions/SGIX/SGIX_texture_range.txt', }, 'GL_SGIX_texture_scale_bias' : { 'number' : 56, 'flags' : {", "{ 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_dm_buffer.txt', }, 'GL_SGIX_dvc' : { 'flags' : {", "'public' }, 'url' : 'extensions/AMD/AMD_gcn_shader.txt', }, 'GLX_AMD_gpu_association' : { 'number' : 398, 'flags'", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_crossbar.txt', }, 'GL_ARB_texture_env_dot3' : { 'arbnumber' : 19, 'flags'", "}, 'GL_EXT_shader_group_vote' : { 
'esnumber' : 254, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/HP/HP_convolution_border_modes.txt', }, 'GL_HP_image_transform' : { 'number' : 66, 'flags' :", "'url' : 'extensions/ARB/WGL_ARB_create_context_robustness.txt', }, 'WGL_ARB_extensions_string' : { 'arbnumber' : 8, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_program_binary.txt', }, 'GL_ARM_mali_shader_binary' : { 'esnumber'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_debug_output.txt', },", ": 'extensions/SGIX/GLX_SGIX_video_resize_float.txt', }, 'GLX_SGIX_video_source' : { 'number' : 43, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage2.txt', }, 'GL_EXT_shader_texture_lod' : { 'esnumber' : 77,", "'GLX_SGIX_swap_barrier' : { 'number' : 92, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'url' : 'extensions/MESA/GLX_MESA_query_renderer.txt', }, 'GLX_MESA_release_buffers' : { 'number' : 217, 'flags'", ": 342, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'GL_NV_vertex_program1_1' : { 'number' : 266, 'flags' : { 'public' }, 'supporters'", "'GL_EXT_cull_vertex' : { 'number' : 98, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias' : { 'GL_KHR_texture_compression_astc_ldr' }, }, 'GL_KHR_texture_compression_astc_sliced_3d' : { 'arbnumber'", "}, 'GL_SGIS_texture_edge_clamp' : { 'number' : 35, 'flags' : { 'public' }, 'supporters'", "'extensions/ARB/ARB_vertex_array_bgra.txt', }, 'GL_ARB_vertex_array_object' : { 'arbnumber' : 54, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', },", "'GL_FJ_shader_binary_GCCSO' : { 'esnumber' : 114, 'flags' : { 'public' }, 'url' :", "32, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil4.txt', }, 'GL_OES_stencil8' : {", "{ 'arbnumber' : 8, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "219, 
'flags' : { 'incomplete' }, 'supporters' : { 'MESA' }, 'url' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_get_texture_sub_image.txt', }, 'GL_ARB_gl_spirv' : { 'arbnumber' :", "{ '3DFX' }, 'url' : 'extensions/3DFX/3DFX_tbuffer.txt', }, 'GL_3DFX_texture_compression_FXT1' : { 'number' : 206,", ": 'extensions/IMG/IMG_read_format.txt', }, 'GL_IMG_shader_binary' : { 'esnumber' : 68, 'flags' : { 'public'", ": { 'number' : 84, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_occlusion_query.txt', }, 'GL_NV_pack_subimage' : { 'esnumber'", "43, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'url' : 'extensions/EXT/EXT_stencil_wrap.txt', }, 'GL_EXT_subtexture' : { 'number' : 9, 'flags' : {", "'number' : 452, 'flags' : { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_stereo_tree.txt', }, 'GL_EXT_gpu_program_parameters'", "{ 'number' : 295, 'esnumber' : 17, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 259, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'extensions/SGIX/SGIX_pixel_texture_bits.txt', }, 'GL_SGIX_pixel_texture_lod' : { 'number' : 128, 'flags' : { 'incomplete' },", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_cushion.txt', },", "'GL_QCOM_tiled_rendering' : { 'esnumber' : 70, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 154, 'flags' : { 'incomplete' }, 'supporters' : {", "'esnumber' : 139, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_program_binary.txt', }, 'GL_ANGLE_texture_compression_dxt3'", ": { 'number' : 455, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_fragment_shader.txt', }, 'GL_ATI_map_object_buffer' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_barrier.txt', }, 'GL_NV_texture_border_clamp' : { 'esnumber' : 149,", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers.txt', }, 'GL_EXT_draw_buffers2' : { 'number'", "'supporters' : { 'NVIDIA' }, 
'url' : 'extensions/EXT/EXT_texture_shared_exponent.txt', }, 'GL_EXT_texture_snorm' : { 'number'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cylinder_texgen.txt', }, 'GL_SGIX_datapipe' : { 'number' : 152,", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_object_purgeable.txt', }, 'GL_APPLE_rgb_422' : { 'number' :", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression.txt', }, 'GL_ARB_texture_compression_bptc' : { 'arbnumber' : 77, 'flags'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_map_buffer_alignment.txt', }, 'GL_ARB_map_buffer_range' : { 'arbnumber' : 50,", "}, 'GL_EXT_draw_buffers' : { 'esnumber' : 151, 'flags' : { 'public' }, 'url'", "'ES', 'INGR', 'SGI' }, 'url' : 'extensions/EXT/EXT_packed_pixels.txt', }, 'GL_EXT_paletted_texture' : { 'number' :", "'url' : 'extensions/OES/OES_texture_npot.txt', }, 'GL_OES_texture_stencil8' : { 'esnumber' : 173, 'flags' : {", "368, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "{ 'esnumber' : 121, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_map_buffer_range.txt', },", "'esnumber' : 239, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_buffer_storage.txt', }, 'GL_EXT_clear_texture'", "317, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample.txt', }, 'GL_EXT_framebuffer_multisample_blit_scaled' : {", "'arbnumber' : 150, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_stencil8.txt', }, 'GL_ARB_texture_storage'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_spirv_extensions.txt', }, 'GL_ARB_stencil_texturing' : { 'arbnumber'", "}, 'GL_ARB_robustness_isolation' : { 'arbnumber' : 126, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/ARB/ARB_texture_env_dot3.txt', }, 'GL_ARB_texture_filter_anisotropic' : { 'arbnumber' : 195, 'flags' : {", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESAX/MESAX_texture_stack.txt', }, 'GLX_MESA_agp_offset'", "{ 'number' : 158, 'flags' : { 'public' }, 'supporters' : { 'ATI',", "}, 'GL_NV_packed_float' : 
{ 'esnumber' : 127, 'flags' : { 'public' }, 'url'", "'extensions/NV/WGL_NV_DX_interop2.txt', }, 'WGL_NV_delay_before_swap' : { 'number' : 436, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/AMD/AMD_program_binary_Z400.txt', }, 'GL_AMD_query_buffer_object' : { 'number' : 420, 'flags'", "{ 'esnumber' : 109, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_instanced_arrays.txt', },", "{ 'number' : 1, 'flags' : { 'public' }, 'supporters' : { 'IBM',", ": 'extensions/ARB/ARB_copy_image.txt', }, 'GLX_ARB_create_context' : { 'arbnumber' : 56, 'flags' : { 'public'", "{ 'arbnumber' : 116, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_packing.txt', },", "'esnumber' : 107, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_robustness.txt', }, 'GL_EXT_sRGB'", "'esnumber' : 246, 'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_framebuffer_CMAA.txt', }, 'GL_INTEL_map_texture'", "'extensions/NV/NV_conservative_raster_dilate.txt', }, 'GL_NV_conservative_raster_pre_snap' : { 'number' : 517, 'esnumber' : 297, 'flags' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_scalebias_hint.txt', }, 'GL_SGIX_shadow' : { 'number' :", ": { 'number' : 508, 'esnumber' : 284, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/HP/HP_image_transform.txt',", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_dvc.txt', }, 'GLX_SGIX_fbconfig' : { 'number'", "'url' : 'extensions/IMG/IMG_user_clip_plane.txt', }, 'GL_INGR_color_clamp' : { 'number' : 174, 'flags' : {", "'GL_ARB_invalidate_subdata' : { 'arbnumber' : 132, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/SGIX/SGIX_ycrcba.txt', }, 'GL_SGI_color_matrix' : { 'number' : 13, 'flags' :", "175, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_copy_image.txt', }, 'GL_EXT_copy_texture' : {", "}, 'url' : 'extensions/EXT/EXT_copy_texture.txt', }, 'GLX_EXT_create_context_es2_profile' : { 'number' : 399, 'flags' :", "{ 'number' : 136, 'flags' : { 
'public' }, 'supporters' : { 'INTEL'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_window_pos.txt', }, 'GL_ARM_mali_program_binary' :", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_pinned_memory.txt', },", "188, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'extensions/ANGLE/ANGLE_instanced_arrays.txt', }, 'GL_ANGLE_pack_reverse_row_order' : { 'esnumber' : 110, 'flags' : { 'public' },", "'GL_ARB_internalformat_query' : { 'arbnumber' : 112, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/EXT/EXT_shader_integer_mix.txt', }, 'GL_EXT_shader_io_blocks'", ": { 'number' : 350, 'flags' : { 'public' }, 'supporters' : {", "'comments' : 'Included with arbnumber 56, GLX_ARB_create_context.', }, 'GLX_ARB_create_context_robustness' : { 'arbnumber' :", "}, 'GL_INTEL_parallel_arrays' : { 'number' : 136, 'flags' : { 'public' }, 'supporters'", ": 439, 'esnumber' : 98, 'flags' : { 'public' }, 'supporters' : {", "'AMD' }, 'url' : 'extensions/AMD/AMD_shader_stencil_export.txt', }, 'GL_AMD_shader_stencil_value_export' : { 'number' : 444, 'flags'", "215, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_compute_program5.txt', }, 'GL_NV_conditional_render' :", ": 58, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', }, 'GL_IMG_texture_filter_cubic' :", "'arbnumber' : 21, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "}, 'GL_OES_depth24' : { 'esnumber' : 24, 'flags' : { 'public' }, 'url'", "'SUN' }, 'url' : 'extensions/SUN/SUN_global_alpha.txt', }, 'GL_SUN_mesh_array' : { 'number' : 257, 'flags'", "}, 'GL_ARB_framebuffer_object' : { 'arbnumber' : 45, 'flags' : { 'public' }, 'supporters'", ": 'extensions/ARB/ARB_framebuffer_no_attachments.txt', }, 'GL_ARB_framebuffer_object' : { 'arbnumber' : 45, 'flags' : { 'public'", 
"'extensions/ARB/ARB_texture_env_add.txt', }, 'GL_ARB_texture_env_combine' : { 'arbnumber' : 17, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_texture.txt', },", ": 282, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'number' : 475, 'esnumber' : 196, 'flags' : { 'public' }, 'url'", "'GL_OES_extended_matrix_palette' : { 'esnumber' : 8, 'flags' : { 'public' }, 'url' :", ": { 'number' : 398, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/ARB/ARB_shader_bit_encoding.txt', }, 'GL_ARB_shader_clock' : { 'arbnumber' : 184, 'flags' :", "'url' : 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', }, 'GL_SGIX_texture_lod_bias' : { 'number' : 84, 'flags' : {", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments' : 'Supported on Visual Workstation 320", ": 51, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_BGRA8888.txt', }, 'GL_EXT_texture_format_sRGB_override' :", ": 165, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_texture_sub_image.txt', }, 'GL_ARB_gl_spirv' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias' :", ": { 'public' }, 'url' : 'extensions/AMD/AMD_transform_feedback4.txt', }, 'GL_AMD_vertex_shader_layer' : { 'number' :", "}, 'url' : 'extensions/EXT/EXT_primitive_bounding_box.txt', }, 'GL_EXT_protected_textures' : { 'esnumber' : 256, 'flags' :", "{ 'esnumber' : 96, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_npot_2D_mipmap.txt', },", ": 'extensions/ANGLE/ANGLE_depth_texture.txt', }, 'GL_ANGLE_framebuffer_blit' : { 'esnumber' : 83, 'flags' : { 'public'", ": { 'arbnumber' : 162, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cull_distance.txt',", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multisample.txt', 'alias'", "'number' : 411, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "}, 'url' : 
'extensions/ARB/WGL_ARB_pbuffer.txt', }, 'WGL_ARB_pixel_format' : { 'arbnumber' : 9, 'flags' :", "}, 'url' : 'extensions/OES/OES_vertex_array_object.txt', }, 'GL_OES_vertex_half_float' : { 'esnumber' : 38, 'flags' :", "'extensions/ARB/ARB_derivative_control.txt', }, 'GL_ARB_direct_state_access' : { 'arbnumber' : 164, 'flags' : { 'public' },", "'extensions/ARB/ARB_half_float_pixel.txt', }, 'GL_ARB_half_float_vertex' : { 'arbnumber' : 48, 'flags' : { 'public' },", "{ 'esnumber' : 10, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_framebuffer_object.txt', },", "'extensions/OES/OES_draw_texture.txt', }, 'GL_OES_element_index_uint' : { 'esnumber' : 26, 'flags' : { 'public' },", ": { 'arbnumber' : 82, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_bit_encoding.txt',", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', }, 'GL_SGIX_texture_multi_buffer' : { 'number' : 53, 'flags'", ": 'extensions/NV/NV_conditional_render.txt', }, 'GL_NV_conservative_raster' : { 'number' : 465, 'esnumber' : 228, 'flags'", "{ 'MS' }, 'url' : 'extensions/EXT/EXT_draw_range_elements.txt', }, 'GL_EXT_draw_transform_feedback' : { 'esnumber' : 272,", "'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_material.txt', }, 'GL_EXT_index_texture' : {", "{ 'number' : 281, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5_mem_extended.txt', }, 'GL_NV_gpu_shader5' : { 'number' :", "}, 'WGL_I3D_swap_frame_usage' : { 'number' : 255, 'flags' : { 'public' }, 'supporters'", "'extensions/EXT/EXT_scene_marker.txt', 'alias' : { 'GLX_EXT_scene_marker' }, }, 'GL_EXT_secondary_color' : { 'number' : 145,", "}, 'url' : 'extensions/SGIS/SGIS_multitexture.txt', }, 'GL_SGIS_pixel_texture' : { 'number' : 15, 'flags' :", "}, 'url' : 'extensions/ARB/ARB_map_buffer_range.txt', }, 'GL_ARB_matrix_palette' : { 'arbnumber' : 16, 'flags' :", "'url' : 'extensions/EXT/WGL_EXT_depth_float.txt', }, 
'WGL_EXT_display_color_table' : { 'number' : 167, 'flags' : {", "'extensions/OES/OES_texture_float.txt', 'alias' : { 'GL_OES_texture_half_float' }, }, 'GL_OES_texture_float_linear' : { 'esnumber' : 35,", "'esnumber' : 54, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc.txt', }, 'GL_IMG_texture_compression_pvrtc2'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fog_distance.txt', }, 'GL_NV_fragment_coverage_to_color' : { 'number'", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_layers.txt', }, 'GL_SGIX_fog_offset' : { 'number' : 65, 'flags'", "'url' : 'extensions/OES/OES_query_matrix.txt', }, 'GL_OES_read_format' : { 'number' : 295, 'esnumber' : 17,", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_extensions_string.txt', },", "'esnumber' : 300, 'flags' : { 'public' }, 'supporters' : { 'INTEL' },", "{ 'arbnumber' : 15, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_flush_raster.txt', }, 'GL_SGIX_fog_blend' : { 'flags' : { 'incomplete'", "'url' : 'extensions/OES/OES_standard_derivatives.txt', }, 'GL_OES_stencil1' : { 'esnumber' : 31, 'flags' : {", "'arbnumber' : 61, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_depth_clamp.txt', }, 'GL_ARB_depth_texture'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program4.txt', }, 'GL_NV_video_capture' :", "}, 'WGL_EXT_make_current_read' : { 'number' : 169, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/IMG/IMG_shader_binary.txt', }, 'GL_IMG_texture_compression_pvrtc' : { 'esnumber' :", "'3DFX', '3DL', 'SGI' }, 'url' : 'extensions/EXT/EXT_shared_texture_palette.txt', }, 'GL_EXT_sparse_texture' : { 'esnumber' :", "{ 'number' : 266, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'url' : 'extensions/SGIX/SGIX_flush_raster.txt', }, 'GL_SGIX_fog_blend' : { 'flags' : { 'incomplete' }, 'url'", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_fog_function.txt', }, 
'GL_SGIS_generate_mipmap' : { 'number' : 32, 'flags'", ": { 'arbnumber' : 113, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_map_buffer_alignment.txt',", "}, 'url' : 'extensions/SGIX/SGIX_complex_polar.txt', }, 'GL_SGIX_convolution_accuracy' : { 'number' : 211, 'flags' :", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_blend_minmax_factor.txt', }, 'GL_AMD_compressed_3DC_texture' : { 'esnumber'", "{ 'arbnumber' : 36, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": 'extensions/OES/OES_stencil_wrap.txt', }, 'GL_OES_surfaceless_context' : { 'esnumber' : 116, 'flags' : { 'public'", "'number' : 34, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI'", "'extensions/EXT/EXT_win32_keyed_mutex.txt', }, 'GL_EXT_window_rectangles' : { 'number' : 490, 'esnumber' : 263, 'flags' :", ": { 'public' }, 'url' : 'extensions/NV/NV_texture_array.txt', }, 'GL_NV_texture_barrier' : { 'number' :", ": { 'arbnumber' : 84, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_swizzle.txt',", "'url' : 'extensions/NV/NV_shadow_samplers_cube.txt', }, 'GL_NV_stereo_view_rendering' : { 'number' : 489, 'esnumber' : 296,", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_color_buffer_float.txt', 'alias' : { 'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float'", ": 'extensions/EXT/EXT_compiled_vertex_array.txt', }, 'GL_EXT_compressed_ETC1_RGB8_sub_texture' : { 'esnumber' : 188, 'flags' : { 'public'", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_program_binary_formats.txt', }, 'GLX_MESA_query_renderer' : {", "{ 'number' : 370, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "}, 'GL_OES_texture_3D' : { 'esnumber' : 34, 'flags' : { 'public' }, 'url'", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_image_load_store_lod.txt', }, 'GL_AMD_shader_stencil_export' :", ": { 'number' : 490, 'esnumber' : 263, 'flags' : { 'public' },", "'AMD' }, 'url' : 'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', }, 
'GL_AMD_shader_atomic_counter_ops' : { 'number' : 435, 'flags'", "{ 'arbnumber' : 41, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'public' }, 'url' : 'extensions/ARB/ARB_sample_locations.txt', }, 'GL_ARB_sample_shading' : { 'arbnumber' : 70, 'flags'", ": 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', }, 'GL_NVX_conditional_render' : { 'number' : 425, 'flags' : { 'public'", "'extensions/NV/NV_gpu_program5.txt', }, 'GL_NV_gpu_program5_mem_extended' : { 'number' : 434, 'flags' : { 'public' },", "234, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_path_rendering_shared_edge.txt', }, 'GL_NV_pixel_buffer_object' : {", ": 'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias' : { 'GLX_EXT_framebuffer_sRGB', 'WGL_EXT_framebuffer_sRGB' }, }, 'GL_EXT_geometry_shader' : { 'esnumber'", "'public' }, 'url' : 'extensions/OES/OES_texture_stencil8.txt', }, 'GL_OES_texture_storage_multisample_2d_array' : { 'esnumber' : 174, 'flags'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', }, 'GL_SGIX_texture_lod_bias' : { 'number'", "'WGL_ARB_create_context_no_error' }, }, 'GLX_ARB_create_context_profile' : { 'arbnumber' : 75, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_conditional_render.txt', }, 'GL_NVX_gpu_memory_info'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shader_image_load_store.txt', }, 'GL_EXT_shader_implicit_conversions' : { 'esnumber'", "'url' : 'extensions/SGIS/SGIS_point_line_texgen.txt', }, 'GL_SGIS_shared_multisample' : { 'number' : 143, 'flags' : {", "'public' }, 'url' : 'extensions/ARB/ARB_occlusion_query2.txt', }, 'GL_ARB_parallel_shader_compile' : { 'arbnumber' : 179, 'flags'", "{ 'public' }, 'supporters' : { '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_fog_coord.txt',", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_matrix_accuracy.txt', }, 'GL_SGIX_color_table_index_mode' : { 'flags' :", ": { 'public' }, 'url' : 
'extensions/ARB/ARB_cull_distance.txt', }, 'GL_ARB_debug_output' : { 'arbnumber' :", "{ 'public' }, 'url' : 'extensions/OES/OES_copy_image.txt', }, 'GL_OES_depth24' : { 'esnumber' : 24,", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_dm_buffer.txt', }, 'GL_SGIX_dvc' : {", ": 345, 'flags' : { 'public' }, 'supporters' : { 'GREMEDY' }, 'url'", ": 'extensions/VIV/VIV_shader_binary.txt', }, 'WGL_3DL_stereo_control' : { 'number' : 313, 'flags' : { 'public'", ": 3, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR',", "'GL_SGIX_async_histogram' : { 'number' : 134, 'flags' : { 'incomplete', 'public' }, 'supporters'", "84, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", ": { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/ATI/ATI_text_fragment_shader.txt',", "'url' : 'extensions/APPLE/APPLE_element_array.txt', }, 'GL_APPLE_fence' : { 'number' : 272, 'flags' : {", "27, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'SUN' }, 'url'", ": 'extensions/ARB/WGL_ARB_make_current_read.txt', }, 'WGL_ARB_pbuffer' : { 'arbnumber' : 11, 'flags' : { 'public'", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_shader_integer_functions.txt', }, 'GLX_MESA_swap_control' : {", "}, 'url' : 'extensions/AMD/WGL_AMD_gpu_association.txt', }, 'WGL_ARB_buffer_region' : { 'arbnumber' : 4, 'flags' :", ": { 'number' : 423, 'flags' : { 'public' }, 'supporters' : {", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nurbs_eval.txt', }, 'GL_SGIX_occlusion_instrument' :", "'extensions/EXT/EXT_texture_shared_exponent.txt', }, 'GL_EXT_texture_snorm' : { 'number' : 365, 'flags' : { 'public' },", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_offset.txt', }, 'GL_SGIX_fog_patchy' : { 'flags' : {", "{ 'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_convolution_border_modes.txt', }, 'GL_HP_image_transform'", ": 'extensions/ARB/ARB_sparse_texture_clamp.txt', }, 'GL_ARB_spirv_extensions' : { 'arbnumber' : 194, 
'flags' : { 'public'", "'ATI' }, 'url' : 'extensions/ATI/ATI_fragment_shader.txt', }, 'GL_ATI_map_object_buffer' : { 'number' : 288, 'flags'", "'GL_EXT_draw_buffers' : { 'esnumber' : 151, 'flags' : { 'public' }, 'url' :", ": 'extensions/EXT/EXT_shader_integer_mix.txt', }, 'GL_EXT_shader_io_blocks' : { 'esnumber' : 180, 'flags' : { 'public'", "'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_func.txt', }, 'GL_EXT_index_material' : { 'number' : 94,", "'url' : 'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias' : { 'GL_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_sample_locations' : { 'arbnumber'", "137, 'flags' : { 'public' }, 'supporters' : { 'HP' }, 'url' :", ": { 'IBM', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_info.txt', }, 'GLX_EXT_visual_rating' : {", "}, 'GL_ARB_compressed_texture_pixel_storage' : { 'arbnumber' : 110, 'flags' : { 'public' }, 'url'", "443, 'esnumber' : 164, 'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_performance_query.txt', },", "'number' : 382, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_impact_pixel_texture.txt', }, 'GL_SGIX_instrument_error' : { 'flags' : { 'incomplete'", ": { 'number' : 159, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/KHR/KHR_debug.txt', }, 'GL_KHR_no_error' : { 'arbnumber' : 175, 'esnumber'", ": { 'esnumber' : 146, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_array.txt',", ": { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_clip_band_hint.txt', }, 'GLX_SGIS_color_range' : { 'number' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_transpose_matrix.txt', }, 'GL_ARB_uniform_buffer_object' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_non_square_matrices.txt', }, 'GL_NV_occlusion_query' : { 'number'", "'number' : 478, 'esnumber' : 241, 'flags' : { 'public' }, 'url' :", "'number' : 140, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "}, 'url' : 
'extensions/ARB/ARB_sync.txt', }, 'GL_ARB_tessellation_shader' : { 'arbnumber' : 91, 'flags' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_present_video.txt', 'alias' : { 'GLX_NV_present_video', 'WGL_NV_present_video' }, },", "{ 'public' }, 'url' : 'extensions/OES/OES_matrix_get.txt', }, 'GL_OES_matrix_palette' : { 'esnumber' : 12,", "'GL_SGIX_impact_pixel_texture' : { 'number' : 126, 'flags' : { 'incomplete' }, 'supporters' :", "}, 'url' : 'extensions/EXT/EXT_multi_draw_indirect.txt', }, 'GL_EXT_multiple_textures' : { 'flags' : { 'obsolete' },", "{ 'esnumber' : 299, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_sRGB_override.txt', },", "}, 'url' : 'extensions/EXT/WGL_EXT_pixel_format.txt', }, 'WGL_EXT_swap_control' : { 'number' : 172, 'flags' :", "}, 'GL_ATI_envmap_bumpmap' : { 'number' : 244, 'flags' : { 'public' }, 'supporters'", "'number' : 212, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'extensions/WIN/WIN_specular_fog.txt', }, 'WGL_NV_DX_interop' : { 'number' : 407, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/ARM/ARM_mali_shader_binary.txt', }, 'GL_ARM_rgba8' : { 'esnumber' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_offset.txt', }, 'GL_SGIX_fog_patchy' :", "}, 'GLX_SGIS_color_range' : { 'number' : 115, 'flags' : { 'public' }, 'supporters'", "{ 'arbnumber' : 121, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_buffer_object.txt', },", "{ 'SGI' }, 'url' : 'extensions/SGIS/SGIS_point_line_texgen.txt', }, 'GL_SGIS_shared_multisample' : { 'number' : 143,", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_object.txt', }, 'GL_ARB_framebuffer_sRGB' : { 'arbnumber' : 46,", "{ 'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/S3/S3_s3tc.txt', },", "'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_buffer_age.txt', }, 'GL_EXT_buffer_storage' : {", ": 'extensions/EXT/EXT_instanced_arrays.txt', }, 'GLX_EXT_libglvnd' : { 'number' : 482, 
'flags' : { 'public'", "}, 'url' : 'extensions/AMD/AMD_debug_output.txt', }, 'GL_AMD_depth_clamp_separate' : { 'number' : 401, 'flags' :", "{ 'public' }, 'supporters' : { 'IBM', 'INGR' }, 'url' : 'extensions/EXT/EXT_blend_func_separate.txt', },", "'url' : 'extensions/EXT/GLX_EXT_buffer_age.txt', }, 'GL_EXT_buffer_storage' : { 'esnumber' : 239, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_shared_exponent.txt',", "'number' : 392, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'url' : 'extensions/OES/OES_get_program_binary.txt', }, 'GL_OES_gpu_shader5' : { 'esnumber' : 211, 'flags' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_pass_instrument.txt', }, 'GL_SGIX_depth_texture' : { 'number' : 63, 'flags'", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragments_instrument.txt',", ": { 'number' : 75, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_APPLE_float_pixels' : { 'number' : 368, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/EXT/GLX_EXT_stereo_tree.txt', }, 'GL_EXT_gpu_program_parameters' : { 'number' : 320,", "'comments' : 'Previously shared extension number 15 with SGIS_pixel_texture.', }, 'GL_SGIX_pixel_texture_bits' : {", "'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_rating.txt', }, 'GL_EXT_win32_keyed_mutex' : { 'number' : 506,", ": { 'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float' }, }, 'GL_ARB_compatibility' : { 'arbnumber' : 58, 'flags'", "}, 'GLX_OML_sync_control' : { 'number' : 238, 'flags' : { 'public' }, 'supporters'", "'extensions/EXT/EXT_draw_buffers_indexed.txt', }, 'GL_EXT_draw_elements_base_vertex' : { 'esnumber' : 204, 'flags' : { 'public' },", "104, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_EGL_stream_consumer_external.txt', }, 'GL_NV_alpha_to_coverage_dither_control' : {", "130, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'url' : 
'extensions/EXT/EXT_color_subtable.txt', }, 'GL_EXT_compiled_vertex_array' : { 'number' : 97, 'flags' :", ": { 'number' : 272, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/NV/NV_copy_image.txt', 'alias' : { 'GLX_NV_copy_image', 'WGL_NV_copy_image' }, }, 'GL_NV_coverage_sample' :", "}, 'GL_NV_shader_atomic_float64' : { 'number' : 488, 'flags' : { 'public' }, 'url'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object.txt', }, 'GL_NV_parameter_buffer_object2' : { 'number' : 378,", ": 489, 'esnumber' : 296, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_stereo_view_rendering.txt',", "}, 'GL_SGIX_pixel_texture_bits' : { 'number' : 127, 'flags' : { 'public' }, 'supporters'", "'public' }, 'url' : 'extensions/ARB/ARB_base_instance.txt', }, 'GL_ARB_bindless_texture' : { 'arbnumber' : 152, 'flags'", "'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_program_parameters.txt', }, 'GL_EXT_gpu_shader4' : {", "'esnumber' : 94, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_read_depth_stencil.txt', }, 'GL_NV_register_combiners'", "'SUN' }, 'url' : 'extensions/HP/HP_image_transform.txt', }, 'GL_HP_occlusion_test' : { 'number' : 137, 'flags'", "{ 'number' : 511, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'number' : 400, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 'extensions/EXT/EXT_pixel_transform_color_table.txt', }, 'GL_EXT_point_parameters' : { 'number' : 54, 'flags' : { 'public'", "}, 'GL_NV_fog_distance' : { 'number' : 192, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_direct_state_access.txt', }, 'GL_ARB_draw_buffers' : { 'arbnumber'", "'public' }, 'supporters' : { 'ES', 'HP', 'SGI' }, 'url' : 'extensions/EXT/EXT_copy_texture.txt', },", "}, 'url' : 'extensions/SGIX/SGIX_pixel_tiles.txt', }, 'GL_SGIX_polynomial_ffd' : { 'number' : 59, 'flags' :", "}, 'url' : 'extensions/ARB/ARB_texture_query_lod.txt', }, 'GL_ARB_texture_rectangle' : { 'arbnumber' : 
38, 'flags' :", "'url' : 'extensions/ARB/ARB_vertex_array_object.txt', }, 'GL_ARB_vertex_attrib_64bit' : { 'arbnumber' : 99, 'flags' : {", "129, 'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_non_power_of_two.txt',", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_swizzle.txt', }, 'GL_ARB_texture_view' : { 'arbnumber' : 124,", ": 'Shares extension spec with WGL_ARB_create_context_no_error.', 'alias' : { 'WGL_ARB_create_context_no_error' }, }, 'GLX_ARB_create_context_profile'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range2.txt', }, 'GL_NV_vertex_attrib_integer_64bit' :", "147, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_perturb_normal.txt', }, 'GL_EXT_texture_rg' : {", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_cube_map_array.txt', }, 'GL_ARB_texture_env_add' : { 'arbnumber' : 6,", "}, 'GL_ARB_texture_gather' : { 'arbnumber' : 72, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_provoking_vertex.txt', }, 'GL_EXT_pvrtc_sRGB' :", "'arbnumber' : 6, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "on Visual Workstation 320 / 540 only.', }, 'GL_SGIX_ycrcba' : { 'number' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', }, 'GL_SGIX_texture_lod_bias' : { 'number' : 84, 'flags'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture.txt', }, 'GL_EXT_sparse_texture2' : { 'number' : 463,", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_compression_vtc.txt', }, 'GL_NV_texture_env_combine4' : { 'number' : 195, 'flags'", "'url' : 'extensions/EXT/EXT_clear_texture.txt', }, 'GL_EXT_clip_cull_distance' : { 'esnumber' : 257, 'flags' : {", "'url' : 'extensions/EXT/EXT_vertex_array.txt', }, 'GL_EXT_vertex_array_bgra' : { 'number' : 354, 'flags' : {", "}, 'supporters' : { 'IBM', 'KGC', 'SGI' }, 'url' : 
'extensions/EXT/GLX_EXT_visual_info.txt', }, 'GLX_EXT_visual_rating'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2.txt', }, 'GL_NV_vertex_program2_option' : {", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_pixmap_colormap.txt',", "'number' : 40, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners2.txt', }, 'GL_NV_robustness_video_memory_purge' : { 'number'", "}, 'url' : 'extensions/EXT/EXT_frag_depth.txt', }, 'GL_EXT_fragment_lighting' : { 'number' : 102, 'flags' :", ": 40, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'url' : 'extensions/EXT/EXT_clip_volume_hint.txt', }, 'GL_EXT_cmyka' : { 'number' : 18, 'flags' : {", "289, 'flags' : { 'public' }, 'supporters' : { 'ANGLE' }, 'url' :", "'number' : 496, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control' }, }, 'GL_KHR_debug' : { 'arbnumber' : 119, 'esnumber' : 118,", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_marker.txt', }, 'GL_EXT_depth_bounds_test' :", "}, 'GL_SGIX_async_histogram' : { 'number' : 134, 'flags' : { 'incomplete', 'public' },", "'extensions/EXT/EXT_blend_color.txt', }, 'GL_EXT_blend_equation_separate' : { 'number' : 299, 'flags' : { 'public' },", "'esnumber' : 275, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture2.txt', }, 'GL_EXT_multiview_draw_buffers'", "{ 'number' : 314, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Included with arbnumber 56, GLX_ARB_create_context.', }, 'GLX_ARB_create_context_robustness' :", ": 152, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_bindless_texture.txt', }, 'GL_ARB_blend_func_extended' :", "'url' : 'extensions/EXT/EXT_multiple_textures.txt', }, 'GL_EXT_multisample_compatibility' : { 'esnumber' : 
248, 'flags' : {", "'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias' : { 'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB' }, }, 'GL_ARB_geometry_shader4' : { 'arbnumber' :", "{ 'MESA' }, 'url' : 'extensions/MESAX/MESAX_texture_stack.txt', }, 'GLX_MESA_agp_offset' : { 'number' : 308,", "'public' }, 'url' : 'extensions/ARB/ARB_sampler_objects.txt', }, 'GL_ARB_seamless_cube_map' : { 'arbnumber' : 65, 'flags'", "'extensions/SUN/GLX_SUN_get_transparent_index.txt', }, 'GL_SUN_global_alpha' : { 'number' : 164, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource_tag.txt', }, 'GL_NV_read_buffer'", "'GL_SGI_fft' : { 'number' : 99, 'flags' : { 'incomplete' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'HP', 'INGR', 'SGI' }, 'url' :", "}, 'url' : 'extensions/ARB/ARB_blend_func_extended.txt', }, 'GL_ARB_buffer_storage' : { 'arbnumber' : 144, 'flags' :", "47, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": 'extensions/NV/NV_vdpau_interop.txt', }, 'GL_NV_vertex_array_range' : { 'number' : 190, 'flags' : { 'public'", ": 30, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "}, 'url' : 'extensions/EXT/EXT_float_blend.txt', }, 'GL_EXT_fog_coord' : { 'number' : 149, 'flags' :", "}, 'GL_EXT_texture_integer' : { 'number' : 343, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/OES/OES_stencil1.txt', }, 'GL_OES_stencil4' : { 'esnumber' : 32, 'flags' :", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_table_index_mode.txt', }, 'GLX_SGIX_color_type' : { 'number' : 89, 'flags'", "'url' : 'extensions/EXT/EXT_geometry_shader4.txt', }, 'GLX_EXT_stereo_tree' : { 'number' : 452, 'flags' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_swap_group.txt', }, 'WGL_NV_video_output' : { 'number'", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', }, 'GL_AMD_transform_feedback4' :", "286, 'flags' : { 'public' }, 
'supporters' : { 'NVIDIA' }, 'url' :", ": 'extensions/OES/OES_draw_elements_base_vertex.txt', }, 'GL_OES_draw_texture' : { 'esnumber' : 7, 'flags' : { 'public'", "}, 'url' : 'extensions/ARB/ARB_texture_view.txt', }, 'GL_ARB_timer_query' : { 'arbnumber' : 85, 'flags' :", ": 13, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'public' }, 'url' : 'extensions/OES/OES_vertex_half_float.txt', }, 'GL_OES_vertex_type_10_10_10_2' : { 'esnumber' : 46, 'flags'", "141, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_instanced.txt', }, 'GL_NV_draw_texture' : {", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_line_quality_hint.txt', }, 'GL_SGIX_list_priority' :", "}, 'GL_APPLE_specular_vector' : { 'number' : 159, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_buffer.txt', }, 'GL_ARB_copy_image' : { 'arbnumber'", "{ 'esnumber' : 140, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc2.txt', },", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg2.txt', }, 'GL_SGIX_nonlinear_lighting_pervertex' : { 'flags' : { 'incomplete'", "'number' : 349, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_ANGLE_texture_compression_dxt5' }, }, 'GL_ANGLE_texture_usage' : { 'esnumber' : 112, 'flags' : { 'public'", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_sRGB.txt', }, 'GL_EXT_texture_sRGB_R8' : { 'esnumber' : 221,", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_wait_group.txt', }, 'GL_SGIX_ycrcb' : { 'number'", ": 'extensions/AMD/AMD_performance_monitor.txt', }, 'GL_AMD_pinned_memory' : { 'number' : 411, 'flags' : { 'public'", ": 'extensions/NV/NV_copy_image.txt', 'alias' : { 'GLX_NV_copy_image', 'WGL_NV_copy_image' }, }, 'GL_NV_coverage_sample' : { 'esnumber'", "'GLX_MESA_release_buffers' : { 'number' : 217, 'flags' : { 'public' }, 'supporters' :", "'extensions/NV/NV_transform_feedback2.txt', }, 'GL_NV_uniform_buffer_unified_memory' : { 'number' : 
459, 'flags' : { 'public' },", "'GL_NV_sample_mask_override_coverage' : { 'number' : 473, 'esnumber' : 236, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_buffer_float.txt', },", "'GLX_SGIX_color_typeXXX' : { 'number' : 72, 'flags' : { 'incomplete' }, 'url' :", ": 'extensions/APPLE/APPLE_vertex_program_evaluators.txt', }, 'GL_APPLE_ycbcr_422' : { 'number' : 275, 'flags' : { 'public'", ": { 'number' : 94, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/ARB/ARB_vertex_shader.txt', }, 'GL_ARB_vertex_type_10f_11f_11f_rev' : { 'arbnumber' : 151, 'flags' : { 'public'", ": 259, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_vulkan_image.txt',", ": 'extensions/OES/OES_EGL_image.txt', }, 'GL_OES_EGL_image_external' : { 'esnumber' : 87, 'flags' : { 'public'", ": { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_sync_control.txt', }, 'GL_OVR_multiview' : { 'number' :", "'extensions/SGIX/GLX_SGIX_fbconfig.txt', }, 'GLX_SGIX_fbconfig_float' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig_float.txt',", ": 'extensions/SGIX/GLX_SGIX_pbuffer.txt', }, 'GL_SGIX_pixel_texture' : { 'number' : 499, 'flags' : { 'public'", ": { '3DL' }, 'url' : 'extensions/3DL/WGL_3DL_stereo_control.txt', }, 'WGL_AMD_gpu_association' : { 'number' :", "WGL_ARB_create_context_profile not needed - see arbnumber 74.', }, 'WGL_ARB_create_context_profile' : { 'arbnumber' :", ": 'extensions/NV/WGL_NV_swap_group.txt', }, 'WGL_NV_video_output' : { 'number' : 349, 'flags' : { 'public'", "}, 'GL_ARB_texture_rgb10_a2ui' : { 'arbnumber' : 83, 'flags' : { 'public' }, 'url'", "'arbnumber' : 72, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_gather.txt', }, 'GL_ARB_texture_mirror_clamp_to_edge'", "321, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "'extensions/EXT/EXT_texture_env.txt', }, 'GL_EXT_texture_env_add' 
: { 'number' : 185, 'flags' : { 'public' },", "{ 'esnumber' : 146, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_array.txt', },", "}, 'url' : 'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', }, 'GL_APPLE_texture_max_level' : { 'esnumber' : 80, 'flags' :", "}, 'GL_EXT_draw_elements_base_vertex' : { 'esnumber' : 204, 'flags' : { 'public' }, 'url'", ": 'extensions/SGI/GLX_SGI_swap_control.txt', }, 'GL_SGI_texture_color_table' : { 'number' : 17, 'flags' : { 'public'", "'INTEL' }, 'url' : 'extensions/EXT/EXT_shader_integer_mix.txt', }, 'GL_EXT_shader_io_blocks' : { 'esnumber' : 180, 'flags'", "'flags' : { 'incomplete', 'public' }, 'url' : 'extensions/SGIX/SGIX_fragment_specular_lighting.txt', }, 'GL_SGIX_fragments_instrument' : {", "'GL_EXT_color_subtable' : { 'number' : 74, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/OES/OES_blend_equation_separate.txt', }, 'GL_OES_blend_func_separate' : { 'esnumber' : 2, 'flags' :", "'url' : 'extensions/SGIX/SGIX_framezoom.txt', }, 'GLX_SGIX_hyperpipe' : { 'number' : 307, 'flags' : {", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_texture.txt', }, 'GL_NV_blend_equation_advanced' : { 'number' :", "'supporters' : { 'DEC', 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_vertex_array.txt',", ": 'extensions/SGIX/SGIX_texture_range.txt', }, 'GL_SGIX_texture_scale_bias' : { 'number' : 56, 'flags' : { 'public'", ": { 'number' : 167, 'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_display_color_table.txt',", ": { 'GLX_NV_copy_image', 'WGL_NV_copy_image' }, }, 'GL_NV_coverage_sample' : { 'esnumber' : 72, 'flags'", "'GL_ARB_shader_objects' : { 'arbnumber' : 30, 'flags' : { 'public' }, 'supporters' :", "'AMD' }, 'url' : 'extensions/AMD/AMD_query_buffer_object.txt', }, 'GL_AMD_sample_positions' : { 'number' : 405, 'flags'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_scale.txt', }, 'GL_SGIX_fog_texture' : { 'flags' : {", "}, 'url' : 
'extensions/ARB/ARB_vertex_buffer_object.txt', 'alias' : { 'GLX_ARB_vertex_buffer_object' }, }, 'GL_ARB_vertex_program' : {", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_tessellator.txt', }, 'GL_AMD_vertex_shader_viewport_index' : { 'number' :", ": { 'arbnumber' : 170, 'esnumber' : 190, 'flags' : { 'public' },", ": 54, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc.txt', }, 'GL_IMG_texture_compression_pvrtc2' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_indirect_parameters.txt', }, 'GL_ARB_instanced_arrays' : { 'arbnumber' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object2.txt', },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program_option.txt', }, 'GL_NV_fragment_shader_interlock' :", "'GL_3DFX_tbuffer' : { 'number' : 208, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 31, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_misc_attribute.txt', },", ": 140, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_levels.txt', }, 'GL_ARB_texture_query_lod' :", "'url' : 'extensions/OES/OES_stencil_wrap.txt', }, 'GL_OES_surfaceless_context' : { 'esnumber' : 116, 'flags' : {", "{ 'arbnumber' : 76, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_include.txt', },", ": 'extensions/EXT/EXT_texture_lod_bias.txt', }, 'GL_EXT_texture_mirror_clamp' : { 'number' : 298, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_attrib_64bit.txt', }, 'GL_EXT_vertex_shader'", "'url' : 'extensions/EXT/EXT_sRGB_write_control.txt', }, 'GL_EXT_scene_marker' : { 'number' : 120, 'flags' : {", "{ 'number' : 308, 'flags' : { 'public' }, 'supporters' : { 'MESA'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage_multisample.txt', }, 'GL_ARB_texture_swizzle' : { 'arbnumber' :", "'GL_OES_vertex_array_object' : { 'esnumber' : 71, 'flags' : { 'public' }, 'url' :", "}, 
'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_lock.txt', }, 'WGL_I3D_swap_frame_usage' : {", ": 'extensions/NV/NV_pack_subimage.txt', }, 'GL_NV_packed_depth_stencil' : { 'number' : 226, 'flags' : { 'public'", "}, 'GL_NV_texture_env_combine4' : { 'number' : 195, 'flags' : { 'public' }, 'supporters'", "'Extension shipped but was not fully specified. Similar to ARB_texture_cube_map.', }, 'GL_EXT_texture_cube_map_array' :", "'arbnumber' : 137, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_storage_buffer_object.txt', }, 'GL_ARB_shader_subroutine'", "199, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'public' }, 'url' : 'extensions/OES/OES_EGL_image_external_essl3.txt', }, 'GL_OES_EGL_sync' : { 'esnumber' : 75, 'flags'", "{ 'esnumber' : 268, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_conservative_depth.txt', },", "'arbnumber' : 7, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'extensions/ARB/ARB_sample_shading.txt', }, 'GL_ARB_sampler_objects' : { 'arbnumber' : 81, 'flags' : { 'public' },", "{ 'number' : 513, 'flags' : { 'public' }, 'supporters' : { 'AMD'", ": 'extensions/ARB/ARB_conservative_depth.txt', }, 'GL_ARB_copy_buffer' : { 'arbnumber' : 59, 'flags' : { 'public'", "'url' : 'extensions/NV/NV_transform_feedback.txt', }, 'GL_NV_transform_feedback2' : { 'number' : 358, 'flags' : {", ": { 'number' : 488, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_float64.txt',", "312, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_packed_depth_stencil.txt', }, 'GL_EXT_packed_float' : {", ": 'extensions/NV/NV_copy_buffer.txt', }, 'GL_NV_copy_depth_to_color' : { 'number' : 243, 'flags' : { 'public'", "}, 'GL_SGI_color_table' : { 'number' : 14, 'flags' : { 'public' }, 'supporters'", "439, 'esnumber' : 98, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "'extensions/ARB/ARB_internalformat_query.txt', }, 'GL_ARB_internalformat_query2' : { 'arbnumber' : 131, 'flags' : { 'public' },", 
"'GL_VIV_shader_binary' : { 'esnumber' : 85, 'flags' : { 'public' }, 'url' :", "'GL_EXT_debug_marker' : { 'number' : 440, 'esnumber' : 99, 'flags' : { 'public'", ": 'extensions/AMD/WGL_AMD_gpu_association.txt', }, 'WGL_ARB_buffer_region' : { 'arbnumber' : 4, 'flags' : { 'public'", "205, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", ": { 'number' : 353, 'flags' : { 'public' }, 'supporters' : {", "322, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'number' : 206, 'flags' : { 'public' }, 'supporters' : { '3DFX' },", "'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', }, 'GL_SGIX_flush_raster' : { 'number' : 61, 'flags'", ": 271, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments' : 'Supported on", "'flags' : { 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_shader_binary.txt', }, 'GL_NVX_blend_equation_advanced_multi_draw_buffers' : {", "'extensions/APPLE/APPLE_vertex_program_evaluators.txt', }, 'GL_APPLE_ycbcr_422' : { 'number' : 275, 'flags' : { 'public' },", "'number' : 200, 'flags' : { 'public' }, 'supporters' : { 'IBM' },", "}, 'GL_NV_vertex_array_range' : { 'number' : 190, 'flags' : { 'public' }, 'supporters'", "}, 'GL_NVX_gpu_memory_info' : { 'number' : 438, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'ATI' }, 'url' : 'extensions/EXT/EXT_texture_env_dot3.txt', }, 'GL_EXT_texture_filter_anisotropic' : { 'number'", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/EXT/EXT_texture_env_dot3.txt',", "}, 'url' : 'extensions/ARB/ARB_texture_gather.txt', }, 'GL_ARB_texture_mirror_clamp_to_edge' : { 'arbnumber' : 149, 'flags' :", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias' :", "{ 'number' : 151, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "{ 'number' : 507, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "'url' : 
'extensions/ARB/ARB_depth_clamp.txt', }, 'GL_ARB_depth_texture' : { 'arbnumber' : 22, 'flags' : {", "'url' : 'extensions/EXT/EXT_map_buffer_range.txt', }, 'GL_EXT_misc_attribute' : { 'number' : 31, 'flags' : {", ": { 'number' : 460, 'esnumber' : 252, 'flags' : { 'public' },", ": { 'number' : 58, 'flags' : { 'public' }, 'supporters' : {", "'extensions/NV/NV_texture_shader.txt', }, 'GL_NV_texture_shader2' : { 'number' : 231, 'flags' : { 'public' },", "'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_texture_compression_FXT1.txt', }, 'GL_AMD_blend_minmax_factor' : { 'number'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_buffer_storage.txt', }, 'GL_ARB_cl_event' : { 'arbnumber'", "{ 'arbnumber' : 140, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_levels.txt', },", "318, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners2.txt', }, 'GL_NV_robustness_video_memory_purge' : { 'number' : 484, 'flags'", ": { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_packed_float.txt', }, 'GL_APPLE_texture_range' : { 'number' :", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_texture4.txt', },", "'extensions/ARB/ARB_fragment_program_shadow.txt', }, 'GL_ARB_fragment_shader' : { 'arbnumber' : 32, 'flags' : { 'public' },", "98, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects.txt', 'alias' : { 'GL_EXT_semaphore' },", "'url' : 'extensions/EXT/EXT_index_func.txt', }, 'GL_EXT_index_material' : { 'number' : 94, 'flags' : {", "259, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture2.txt', }, 'GL_EXT_static_vertex_array' : {", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragments_instrument.txt', },", "'SGI' }, 'url' : 'extensions/EXT/EXT_index_array_formats.txt', }, 'GL_EXT_index_func' : { 'number' : 95, 'flags'", "'supporters' 
: { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_provoking_vertex.txt', }, 'GL_EXT_pvrtc_sRGB' : {", "{ 'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_minmax.txt', }, 'GL_EXT_blend_subtract'", "'arbnumber' : 55, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "}, 'url' : 'extensions/AMD/AMD_blend_minmax_factor.txt', }, 'GL_AMD_compressed_3DC_texture' : { 'esnumber' : 39, 'flags' :", "}, 'GL_APPLE_copy_texture_levels' : { 'esnumber' : 123, 'flags' : { 'public' }, 'url'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_instanced_arrays.txt', }, 'GL_ARB_internalformat_query' : { 'arbnumber' : 112,", ": { 'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_multisample.txt', },", "{ 'KHR' }, 'url' : 'extensions/OML/GLX_OML_swap_method.txt', }, 'GLX_OML_sync_control' : { 'number' : 238,", "{ 'arbnumber' : 62, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_elements_base_vertex.txt', },", "'GL_ARB_texture_compression' : { 'arbnumber' : 12, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/KHR/KHR_robustness.txt', }, 'GL_KHR_texture_compression_astc_hdr' : { 'arbnumber' : 118, 'esnumber' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLU_SGI_filter4_parameters.txt', }, 'GLX_SGI_make_current_read' : { 'number'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_shader.txt', }, 'GL_ARB_vertex_type_10f_11f_11f_rev' : {", "111, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conservative_depth.txt', }, 'GL_ARB_copy_buffer' : {", "{ 'public' }, 'url' : 'extensions/NV/NV_command_list.txt', }, 'GL_NV_compute_program5' : { 'number' : 421,", ": { 'number' : 243, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_depth_to_color.txt',", "285, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'public' }, 'url' : 'extensions/AMD/AMD_compressed_ATC_texture.txt', }, 'GL_AMD_conservative_depth' : { 'number' : 385, 'flags'", "{ 'AMD' }, 'url' : 
'extensions/AMD/AMD_texture_gather_bias_lod.txt', }, 'GL_AMD_texture_texture4' : { 'number' : 362,", "{ 'public' }, 'url' : 'extensions/OES/OES_depth_texture_cube_map.txt', }, 'GL_OES_draw_buffers_indexed' : { 'esnumber' : 209,", ": { 'number' : 183, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias' : { 'WGL_ARB_robustness_share_group_isolation' }, }, 'WGL_ATI_pixel_format_float' : { 'number'", "'public' }, 'url' : 'extensions/IMG/IMG_framebuffer_downsample.txt', }, 'GL_IMG_multisampled_render_to_texture' : { 'esnumber' : 74, 'flags'", "'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_subtexture.txt', }, 'GL_EXT_swap_control' : { 'number' : 375,", ": 195, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_packed_float.txt', }, 'GL_APPLE_texture_range' :", "202, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'GL_APPLE_texture_format_BGRA8888' : { 'esnumber' : 79, 'flags' : { 'public' }, 'url'", ": 'extensions/SGIX/SGIX_ycrcb.txt', }, 'GL_SGIX_ycrcb_subsample' : { 'number' : 204, 'flags' : { 'incomplete'", ": 89, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pixel_format.txt',", "'extensions/APPLE/APPLE_vertex_array_range.txt', }, 'GL_APPLE_vertex_program_evaluators' : { 'number' : 369, 'flags' : { 'public' },", "'esnumber' : 230, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_shader_interlock.txt', }, 'GL_NV_framebuffer_blit'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_BGRA8888.txt', }, 'GL_EXT_texture_format_sRGB_override' : { 'esnumber'", "}, 'url' : 'extensions/OES/OES_fbo_render_mipmap.txt', }, 'GL_OES_fixed_point' : { 'number' : 292, 'esnumber' :", "89, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_alpha_test.txt', }, 'GL_QCOM_binning_control' : {", ": { 'number' : 482, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/GLX_EXT_libglvnd.txt',", "'private' }, 'url' : 'drafts/MTK/MTK_program_binary.txt', }, 'GL_MTK_shader_binary' : { 'esnumber' : 244, 'flags'", ": 175, 'esnumber' : 243, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_no_error.txt',", ": { 'number' : 358, 'flags' : { 'public' }, 'supporters' : {", ": 121, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_map_buffer_range.txt', }, 'GL_EXT_misc_attribute' :", ": 'extensions/OES/OES_stencil8.txt', }, 'GL_OES_stencil_wrap' : { 'esnumber' : 19, 'flags' : { 'public'", "}, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_fixed_point.txt', }, 'GL_OES_fragment_precision_high' : {", ": 'extensions/SGIX/SGIX_clipmap.txt', }, 'GL_SGIX_color_matrix_accuracy' : { 'flags' : { 'incomplete' }, 'url' :", "330, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'url' : 'extensions/ARB/WGL_ARB_extensions_string.txt', }, 'WGL_ARB_make_current_read' : { 'arbnumber' : 10, 'flags' : {", "'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_usage.txt', }, 'GL_WIN_phong_shading' : { 'number'", ": 202, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array.txt', }, 'GL_NV_viewport_array2' :", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_map_object_buffer.txt', }, 'GL_ATI_meminfo' : { 'number'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil4.txt', }, 'GL_OES_stencil8' : { 'esnumber'", ": 428, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", ": 137, 'flags' : { 'public' }, 'supporters' : { 'HP' }, 'url'", "}, 'GL_IBM_rasterpos_clip' : { 'number' : 110, 'flags' : { 'public' }, 'supporters'", ": { 'esnumber' : 54, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc.txt',", "75, 'flags' : { 'public' }, 'url' : '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', }, 'GL_OES_blend_equation_separate' : {", "}, 'GL_ARB_provoking_vertex' : { 'arbnumber' : 64, 'flags' : { 'public' }, 'url'", "'GL_HP_occlusion_test' : { 'number' : 137, 
'flags' : { 'public' }, 'supporters' :", "'esnumber' : 31, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil1.txt', }, 'GL_OES_stencil4'", "}, 'GL_EXT_texture_rg' : { 'esnumber' : 103, 'flags' : { 'public' }, 'url'", ": 56, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI' },", "33, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'number' : 203, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "{ 'public' }, 'url' : 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', }, 'GL_ANGLE_depth_texture' : { 'esnumber' : 138,", "'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_subtract.txt', }, 'GLX_EXT_buffer_age' :", "'url' : 'extensions/NV/NV_texture_npot_2D_mipmap.txt', }, 'GL_NV_texture_rectangle' : { 'number' : 229, 'flags' : {", "}, 'url' : 'extensions/AMD/AMD_vertex_shader_viewport_index.txt', }, 'GL_ANDROID_extension_pack_es31a' : { 'esnumber' : 187, 'flags' :", "}, 'GL_OES_texture_cube_map_array' : { 'esnumber' : 217, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/EXT/WGL_EXT_multisample.txt', 'alias' : { 'GL_EXT_multisample' }, }, 'WGL_EXT_pbuffer' : { 'number'", ": { 'number' : 463, 'esnumber' : 259, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage2.txt', }, 'GL_EXT_shader_texture_lod' : { 'esnumber'", "'GL_EXT_clip_cull_distance' : { 'esnumber' : 257, 'flags' : { 'public' }, 'url' :", "'extensions/NV/NV_vertex_program3.txt', }, 'GL_NV_vertex_program4' : { 'number' : 325, 'flags' : { 'public' },", "'GL_ATI_text_fragment_shader' : { 'number' : 269, 'flags' : { 'public' }, 'supporters' :", ": { 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_histogram.txt', }, 'GLX_EXT_import_context' :", "'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' :", "'number' : 235, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_patchy.txt', }, 'GL_SGIX_fog_scale' : { 
'number'", "}, 'GL_AMD_transform_feedback3_lines_triangles' : { 'number' : 397, 'flags' : { 'public' }, 'supporters'", ": { 'arbnumber' : 119, 'esnumber' : 118, 'flags' : { 'public' },", ": 383, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", ": { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', }, 'GL_MESA_shader_integer_functions' : { 'number' :", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', }, 'GL_MESA_pack_invert' : {", "}, 'url' : 'extensions/ARB/ARB_transform_feedback2.txt', }, 'GL_ARB_transform_feedback3' : { 'arbnumber' : 94, 'flags' :", "'esnumber' : 221, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_R8.txt', }, 'GL_EXT_texture_sRGB_RG8'", "}, 'url' : 'extensions/NV/GLX_NV_copy_buffer.txt', }, 'GL_NV_copy_buffer' : { 'esnumber' : 158, 'flags' :", "'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_coverage_sample.txt', }, 'GL_NV_deep_texture3D' : { 'number' : 424, 'flags'", ": 319, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_shader4.txt', }, 'GL_NV_geometry_shader_passthrough'", "{ 'esnumber' : 202, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array.txt', },", "}, 'GLX_SGIX_fbconfig_float' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', },", "'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/WIN/WIN_phong_shading.txt',", "'public' }, 'supporters' : { 'ANGLE' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', }, 'GL_EXT_texture_cube_map' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2_option.txt', }, 'GL_NV_vertex_program3'", ": 'extensions/NV/WGL_NV_video_output.txt', }, 'WGL_OML_sync_control' : { 'number' : 242, 'flags' : { 'public'", ": { 'number' : 500, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_alpha_to_coverage_dither_control.txt',", ": { 
'number' : 169, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_OES_compressed_ETC1_RGB8_texture' : { 'esnumber' : 5, 'flags' : { 'public' }, 'url'", "'GLX_SGIX_swap_group' : { 'number' : 91, 'flags' : { 'public' }, 'supporters' :", ": 277, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "'extensions/OES/OES_stencil_wrap.txt', }, 'GL_OES_surfaceless_context' : { 'esnumber' : 116, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESAX/MESAX_texture_stack.txt', }, 'GLX_MESA_agp_offset' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_shadow_samplers.txt', }, 'GL_EXT_shared_texture_palette' : { 'number' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_reference_plane.txt', }, 'GL_SGIX_resample' :", ": 'extensions/EXT/EXT_texture_sRGB_RG8.txt', }, 'GL_EXT_texture_sRGB_decode' : { 'number' : 402, 'esnumber' : 152, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_barrier.txt', }, 'GL_ARB_texture_border_clamp' : { 'arbnumber'", "'GL_EXT_fog_coord' : { 'number' : 149, 'flags' : { 'public' }, 'supporters' :", "'number' : 375, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'arbnumber' : 40, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "194, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": 340, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'ARB' }, 'url' : 'extensions/ARB/ARB_matrix_palette.txt', }, 'GL_ARB_multi_bind' : { 'arbnumber' : 147, 'flags'", ": 'extensions/EXT/EXT_index_material.txt', }, 'GL_EXT_index_texture' : { 'number' : 93, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_mixed_samples.txt', }, 'GL_NV_framebuffer_multisample' : { 'esnumber'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource.txt', }, 'GL_NV_query_resource_tag' : { 'number' : 512, 'flags'", ": 173, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_stencil8.txt', }, 
'GL_OES_texture_storage_multisample_2d_array' :", "}, 'GL_SGIS_fog_function' : { 'number' : 64, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 263, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_compute_program5.txt', }, 'GL_NV_conditional_render' : { 'number'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_swizzle.txt', }, 'GL_EXT_texture_type_2_10_10_10_REV' : { 'esnumber' : 42, 'flags'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_texture.txt', }, 'GL_NV_draw_vulkan_image' : { 'number' :", "}, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect.txt', }, 'GL_NV_bindless_multi_draw_indirect_count' : { 'number' : 456, 'flags' :", "'GL_ARB_copy_image' : { 'arbnumber' : 123, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_load_store.txt', }, 'GL_ARB_shader_image_size' : { 'arbnumber'", "'SGI' }, 'url' : 'extensions/EXT/EXT_vertex_array.txt', }, 'GL_EXT_vertex_array_bgra' : { 'number' : 354, 'flags'", ": { 'number' : 297, 'flags' : { 'public' }, 'supporters' : {", ": { 'esnumber' : 130, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_latc.txt',", "'arbnumber' : 46, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'extensions/EXT/WGL_EXT_depth_float.txt', }, 'WGL_EXT_display_color_table' : { 'number' : 167, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_shader_group_vote.txt', }, 'GL_EXT_shader_image_load_formatted' : { 'number' : 449,", "}, 'GLX_OML_swap_method' : { 'number' : 237, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/EXT/WGL_EXT_display_color_table.txt', }, 'WGL_EXT_extensions_string' : { 'number' : 168,", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_barrier.txt', }, 'GL_NV_texture_border_clamp' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_video_output.txt', }, 'WGL_OML_sync_control' : { 'number'", 
"'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias' : { 'GL_SGIX_color_type' }, }, 'GLX_SGIX_color_typeXXX' : { 'number' : 72,", "}, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_blend_logic_op.txt',", "}, 'url' : 'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias' : { 'GL_EXT_shader_framebuffer_fetch_non_coherent' }, }, 'GL_EXT_shader_group_vote' : {", "'number' : 239, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", "'GL_SGIX_fog_layers' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_layers.txt', }, 'GL_SGIX_fog_offset'", "'GL_SGIX_calligraphic_fragment' : { 'number' : 82, 'flags' : { 'incomplete' }, 'supporters' :", "514, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_depth_to_color.txt', }, 'GL_NV_copy_image' : { 'number'", ": 'extensions/APPLE/APPLE_texture_packed_float.txt', }, 'GL_APPLE_texture_range' : { 'number' : 367, 'flags' : { 'public'", ": 197, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", ": { 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/WIN/WIN_phong_shading.txt', },", "'number' : 39, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM',", "{ 'public' }, 'url' : 'extensions/OES/OES_matrix_palette.txt', }, 'GL_OES_packed_depth_stencil' : { 'esnumber' : 44,", "{ 'number' : 415, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'url' : 'extensions/EXT/EXT_blend_subtract.txt', }, 'GLX_EXT_buffer_age' : { 'number' : 427, 'flags' : {", ": { 'esnumber' : 223, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_RG8.txt',", "'number' : 30, 'flags' : { 'public' }, 'supporters' : { 'DEC', 'HP',", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_discard_framebuffer.txt', }, 'GL_EXT_disjoint_timer_query' : { 'esnumber'", "432, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'extensions/ARB/ARB_shader_draw_parameters.txt', }, 'GL_ARB_shader_group_vote' : { 'arbnumber' : 
157, 'flags' : { 'public' },", ": { 'number' : 363, 'flags' : { 'public' }, 'supporters' : {", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program.txt', }, 'GL_ARB_fragment_program_shadow' : { 'arbnumber' : 36,", "'GL_SGIS_texture4D' : { 'number' : 16, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 111, 'flags' : { 'public' }, 'supporters' : { 'HP'", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', }, 'GL_AMD_transform_feedback4' : {", "'ATI' }, 'url' : 'extensions/ATI/ATI_draw_buffers.txt', }, 'GL_ATI_element_array' : { 'number' : 256, 'flags'", ": 20, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'INGR', 'KGC',", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_stencil_export.txt', }, 'GL_AMD_shader_stencil_value_export' : {", "}, 'url' : 'extensions/EXT/EXT_polygon_offset_clamp.txt', }, 'GL_EXT_post_depth_coverage' : { 'number' : 461, 'esnumber' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_list_priority.txt', },", "'number' : 481, 'esnumber' : 246, 'flags' : { 'public' }, 'url' :", "'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_genlock.txt', }, 'WGL_I3D_image_buffer' : { 'number'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_fft.txt', }, 'GLU_SGI_filter4_parameters' : { 'number'", "'GL_EXT_vertex_array' : { 'number' : 30, 'flags' : { 'public' }, 'supporters' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_reflection.txt', }, 'GL_NV_texture_array' : { 'esnumber' : 133,", "}, 'supporters' : { 'IdSoftware', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_swizzle.txt', }, 'GL_EXT_texture_type_2_10_10_10_REV' :", ": 'extensions/SGIX/SGIX_bali_timer_instruments.txt', }, 'GL_SGIX_blend_alpha_minmax' : { 'number' : 119, 'flags' : { 'public'", "}, 'GL_ARB_fragment_program_shadow' : { 'arbnumber' : 36, 'flags' : { 'public' }, 'supporters'", "'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture.txt', }, 'GL_EXT_sparse_texture2' : { 
'number' : 463, 'esnumber'", "484, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_robustness_video_memory_purge.txt', }, 'GL_NV_sRGB_formats' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program4.txt', }, 'GL_NV_fragment_program_option'", "'GL_EXT_texenv_op' : { 'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_texenv_op.txt', 'comments' :", "'public' }, 'supporters' : { 'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_color_table.txt', 'comments'", "{ 'arbnumber' : 100, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_viewport_array.txt', },", "vendor extensions, but shipped as ARB_transform_feedback2.', }, 'GL_EXT_unpack_subimage' : { 'esnumber' : 90,", "'number' : 194, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragment_lighting_space.txt', },", "'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_lock.txt', }, 'WGL_I3D_swap_frame_usage' : { 'number'", "{ 'GL_EXT_multisample' }, }, 'WGL_EXT_pbuffer' : { 'number' : 171, 'flags' : {", "'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc_update.txt', }, 'GL_NV_texture_compression_vtc' : { 'number' : 228, 'flags'", "}, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_tbuffer.txt', }, 'GL_3DFX_texture_compression_FXT1' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2_option.txt',", "{ 'public' }, 'url' : 'extensions/ARB/ARB_fragment_layer_viewport.txt', }, 'GL_ARB_fragment_program' : { 'arbnumber' : 27,", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_geometry_shader4.txt', }, 'GLX_EXT_stereo_tree' : { 'number' : 452,", "283, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_swap_control.txt', }, 'GLX_EXT_swap_control_tear' : { 'number' : 414,", ": { 'number' : 452, 'flags' : { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_stereo_tree.txt',", 
"{ 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/ATI/ATI_text_fragment_shader.txt', },", "}, 'GL_EXT_depth_bounds_test' : { 'number' : 297, 'flags' : { 'public' }, 'supporters'", "'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', }, 'GL_IGLOO_viewport_offsetXXX' : { 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_draw_instanced.txt', }, 'GL_ARB_enhanced_layouts' : { 'arbnumber' : 146, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_texture_lighting.txt',", "{ 'number' : 486, 'esnumber' : 295, 'flags' : { 'public' }, 'url'", "'GL_SGIX_pixel_texture_bits' : { 'number' : 127, 'flags' : { 'public' }, 'supporters' :", ": 485, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', }, 'GL_AMD_shader_image_load_store_lod' :", "'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', }, 'GL_ANGLE_instanced_arrays' : { 'esnumber'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_non_power_of_two.txt', }, 'GL_ARB_texture_query_levels' :", ": 'extensions/OML/OML_resample.txt', }, 'GL_OML_subsample' : { 'number' : 240, 'flags' : { 'public'", "'url' : 'extensions/EXT/EXT_vertex_attrib_64bit.txt', }, 'GL_EXT_vertex_shader' : { 'number' : 248, 'flags' : {", "}, 'GL_OES_texture_mirrored_repeat' : { 'esnumber' : 22, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_misc_attribute.txt', }, 'GL_EXT_multi_draw_arrays' : { 'number'", ": 447, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_group.txt', }, 'GL_NV_shader_thread_shuffle' :", "294, 'esnumber' : 6, 'flags' : { 'public' }, 'supporters' : { 'KHR'", "{ 'number' : 223, 'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_static_data.txt', },", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_program_binary_formats.txt', }, 'GLX_MESA_query_renderer'", ": 262, 'flags' 
: { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', }, 'GL_NV_conservative_raster_underestimation' :", ": 117, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias' : {", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow.txt', }, 'GL_ARB_shadow_ambient' : { 'arbnumber'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_buffers.txt',", "}, 'GL_QCOM_driver_control' : { 'esnumber' : 55, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_blend_equation_separate.txt', },", "}, 'WGL_ARB_buffer_region' : { 'arbnumber' : 4, 'flags' : { 'public' }, 'supporters'", "295, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_clip_space_w_scaling.txt', }, 'GL_NV_command_list' : {", "}, 'GL_ARB_texture_compression' : { 'arbnumber' : 12, 'flags' : { 'public' }, 'supporters'", "'extensions/AMD/AMD_conservative_depth.txt', }, 'GL_AMD_debug_output' : { 'number' : 395, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_compatibility.txt', }, 'GL_ARB_arrays_of_arrays' : { 'arbnumber'", "}, 'url' : 'extensions/ARB/ARB_texture_stencil8.txt', }, 'GL_ARB_texture_storage' : { 'arbnumber' : 117, 'flags' :", ": { 'number' : 178, 'flags' : { 'public' }, 'supporters' : {", "281, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_offset.txt', }, 'GL_SGIX_fog_patchy' : { 'flags' :", ": 157, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_group_vote.txt', }, 'GL_ARB_shader_image_load_store' :", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_ycbcr_texture.txt', }, 'GL_MTK_program_binary' :", "{ 'public' }, 'url' : 'extensions/NV/NV_fill_rectangle.txt', }, 'GL_NV_float_buffer' : { 'number' : 281,", ": { 'number' : 28, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 
'extensions/OES/OES_blend_subtract.txt', }, 'GL_OES_byte_coordinates' : { 'number' : 291, 'esnumber'", "{ 'esnumber' : 40, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_ATC_texture.txt', },", "'GL_ARB_conservative_depth' : { 'arbnumber' : 111, 'flags' : { 'public' }, 'url' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback2.txt', }, 'GL_NV_uniform_buffer_unified_memory' : { 'number' : 459, 'flags'", ": { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_dxt1.txt',", "'number' : 55, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multisample.txt', 'alias' : {", "'public' }, 'url' : 'extensions/ARB/ARB_tessellation_shader.txt', }, 'GL_ARB_texture_barrier' : { 'arbnumber' : 167, 'flags'", ": { 'number' : 119, 'flags' : { 'public' }, 'supporters' : {", ": { 'esnumber' : 178, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_gpu_shader5.txt',", "'extensions/ARB/ARB_matrix_palette.txt', }, 'GL_ARB_multi_bind' : { 'arbnumber' : 147, 'flags' : { 'public' },", "}, 'url' : 'extensions/EXT/EXT_texture_sRGB_RG8.txt', }, 'GL_EXT_texture_sRGB_decode' : { 'number' : 402, 'esnumber' :", ": 115, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_required_internalformat.txt', }, 'GL_OES_rgb8_rgba8' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias' : { 'GL_EXT_shader_framebuffer_fetch_non_coherent' }, }, 'GL_EXT_shader_group_vote'", "'GL_EXT_texture_compression_s3tc_srgb' : { 'esnumber' : 289, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_setXXX.txt', }, 'GL_EXT_vertex_attrib_64bit' : { 'number'", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_stencil_operation_extended.txt', },", "}, 'GL_EXT_framebuffer_sRGB' : { 'number' : 337, 'flags' : { 'public' }, 'supporters'", ": 416, 'flags' : { 'public' }, 'supporters' : 
{ 'AMD' }, 'url'", ": 60, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_lod.txt', }, 'GL_ARB_shader_viewport_layer_array' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_delay_before_swap.txt', }, 'WGL_NV_gpu_affinity' : { 'number'", "}, 'url' : 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', }, 'GL_NVX_conditional_render' : { 'number' : 425, 'flags' :", "}, 'GL_EXT_cull_vertex' : { 'number' : 98, 'flags' : { 'public' }, 'supporters'", "'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_compiled_vertex_array.txt', }, 'GL_EXT_compressed_ETC1_RGB8_sub_texture'", "'SGI' }, 'url' : 'extensions/EXT/EXT_compiled_vertex_array.txt', }, 'GL_EXT_compressed_ETC1_RGB8_sub_texture' : { 'esnumber' : 188, 'flags'", "'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_digital_video_control.txt', }, 'WGL_I3D_gamma' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_discard_framebuffer.txt', }, 'GL_EXT_disjoint_timer_query' : { 'esnumber' : 150,", "'GL_SGIX_clipmap' : { 'number' : 33, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_ATI_draw_buffers' : { 'number' : 277, 'flags' : { 'public' }, 'supporters'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_shared_exponent.txt', }, 'GL_EXT_texture_snorm' : {", "'extensions/ARB/ARB_clip_control.txt', }, 'GL_ARB_color_buffer_float' : { 'arbnumber' : 39, 'flags' : { 'public' },", "'MESA' }, 'url' : 'extensions/MESAX/MESAX_texture_stack.txt', }, 'GLX_MESA_agp_offset' : { 'number' : 308, 'flags'", ": { 'esnumber' : 14, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_size_array.txt',", "'url' : 'extensions/SGIX/SGIX_blend_cadd.txt', }, 'GL_SGIX_blend_cmultiply' : { 'flags' : { 'incomplete' }, 'url'", "'url' : 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', }, 'GL_NV_bindless_texture' : { 'number' : 418, 'esnumber' : 197,", ": 68, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_array_bgra.txt', 
}, 'GL_ARB_vertex_array_object' :", "'url' : 'extensions/ARB/ARB_gpu_shader5.txt', }, 'GL_ARB_gpu_shader_fp64' : { 'arbnumber' : 89, 'flags' : {", "18, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", "}, 'GL_SGIX_fog_blend' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_blend.txt', },", "{ 'number' : 251, 'flags' : { 'public' }, 'supporters' : { 'I3D'", "'GL_ARB_sparse_texture2' : { 'arbnumber' : 186, 'flags' : { 'public' }, 'url' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_light_max_exponent.txt', }, 'GL_NV_multisample_coverage' :", "53, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_read_format.txt', }, 'GL_IMG_shader_binary' : {", ": { 'arbnumber' : 145, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_texture.txt',", "}, 'url' : 'extensions/ARB/ARB_matrix_palette.txt', }, 'GL_ARB_multi_bind' : { 'arbnumber' : 147, 'flags' :", "}, }, 'GL_EXT_memory_object_win32' : { 'number' : 505, 'esnumber' : 282, 'flags' :", ": 241, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", ": 'extensions/OES/OES_geometry_shader.txt', }, 'GL_OES_get_program_binary' : { 'esnumber' : 47, 'flags' : { 'public'", "'url' : 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', }, 'GL_ARB_separate_shader_objects' : { 'arbnumber' : 97, 'flags' : {", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_fixed_point.txt', },", "'extensions/NV/NV_shader_thread_group.txt', }, 'GL_NV_shader_thread_shuffle' : { 'number' : 448, 'flags' : { 'public' },", "'extensions/PGI/PGI_vertex_hints.txt', }, 'GL_QCOM_alpha_test' : { 'esnumber' : 89, 'flags' : { 'public' },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_flush_raster.txt', }, 'GL_SGIX_fog_blend' : {", "'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_fog_coord.txt', }, 'GL_EXT_frag_depth' : { 'esnumber' : 86,", ": { 'HP', 'SUN' }, 'url' : 'extensions/HP/HP_image_transform.txt', }, 'GL_HP_occlusion_test' : { 'number'", 
"'GL_DMP_shader_binary' : { 'esnumber' : 88, 'flags' : { 'public' }, 'url' :", ": 426, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "'url' : 'extensions/AMD/AMD_program_binary_Z400.txt', }, 'GL_AMD_query_buffer_object' : { 'number' : 420, 'flags' : {", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_dvc.txt', }, 'GLX_SGIX_fbconfig' : { 'number' :", "'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow_ambient.txt', }, 'GL_SGIX_slim' : {", "'url' : 'extensions/ARB/ARB_texture_env_crossbar.txt', }, 'GL_ARB_texture_env_dot3' : { 'arbnumber' : 19, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_explicit_multisample.txt', }, 'GL_NV_fbo_color_attachments' :", "454, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_framebuffer_sample_positions.txt', }, 'GL_AMD_gcn_shader' : {", "{ 'arbnumber' : 60, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_lod.txt', },", "'number' : 193, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "26, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_element_index_uint.txt', }, 'GL_OES_extended_matrix_palette' : {", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_add.txt', }, 'GL_ARB_texture_env_combine' : { 'arbnumber'", "'GL_OES_matrix_get' : { 'esnumber' : 11, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource_tag.txt',", "{ 'arbnumber' : 2, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "}, 'url' : 'extensions/OES/OES_shader_multisample_interpolation.txt', }, 'GL_OES_single_precision' : { 'number' : 293, 'esnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', }, 'GL_ARB_viewport_array' : { 'arbnumber'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_program.txt', }, 'GL_ARB_vertex_shader' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 
'extensions/SGIX/SGIX_pixel_texture.txt', 'comments'", "'public' }, 'url' : 'extensions/SGIX/SGIX_fog_texture.txt', }, 'GL_SGIX_fragment_lighting_space' : { 'number' : 118, 'flags'", ": 394, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'GLX_ARB_multisample', 'WGL_ARB_multisample' }, }, 'GL_ARB_multitexture' : { 'arbnumber' : 1, 'flags'", "{ 'number' : 276, 'flags' : { 'public' }, 'supporters' : { 'ATI',", "'GL_SGIX_quad_mesh' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_quad_mesh.txt', }, 'GL_SGIX_reference_plane'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_stereo_view_rendering.txt', }, 'GLX_NV_swap_group' : { 'number'", "'number' : 2, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR',", "'GL_ARB_shadow_ambient' : { 'arbnumber' : 24, 'flags' : { 'public' }, 'supporters' :", ": 246, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_add.txt', }, 'GL_EXT_texture_env_combine' : {", "'public' }, 'url' : 'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', }, 'GL_APPLE_texture_max_level' : { 'esnumber' : 80, 'flags'", "'url' : 'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', }, 'GL_QCOM_tiled_rendering' : { 'esnumber' : 70, 'flags' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_integer.txt', }, 'GL_EXT_texture_lod_bias'", "'GL_SGIX_resample' : { 'number' : 212, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 185, 'flags' : { 'public' }, 'supporters' : { 'ATI',", "{ 'public' }, 'url' : 'extensions/OES/OES_blend_func_separate.txt', }, 'GL_OES_blend_subtract' : { 'esnumber' : 3,", "}, 'GL_OES_stencil8' : { 'esnumber' : 33, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_cull_vertex.txt', }, 'GL_IBM_multimode_draw_arrays'", "81, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_shader_binary.txt', }, 'GL_ARM_rgba8' : {", ": 
39, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR',", ": 'extensions/NV/NV_fbo_color_attachments.txt', }, 'GL_NV_fence' : { 'number' : 222, 'esnumber' : 52, 'flags'", ": 372, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", "'extensions/OES/OES_tessellation_shader.txt', }, 'GL_OES_texture_3D' : { 'esnumber' : 34, 'flags' : { 'public' },", ": 'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', }, 'GL_REND_screen_coordinates' : { 'number' : 155, 'flags' : { 'public'", ": { 'ES', 'HP', 'IBM', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_texture3D.txt', }, 'GL_EXT_texture_array'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rg.txt', }, 'GL_ARB_texture_rgb10_a2ui' : { 'arbnumber' : 83,", "}, 'url' : 'extensions/ATI/ATI_vertex_attrib_array_object.txt', }, 'GL_ATI_vertex_streams' : { 'number' : 249, 'flags' :", "'url' : 'extensions/ATI/ATI_element_array.txt', }, 'GL_ATI_envmap_bumpmap' : { 'number' : 244, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_sRGB.txt',", "}, 'url' : 'extensions/SGIX/SGIX_image_compression.txt', }, 'GL_SGIX_impact_pixel_texture' : { 'number' : 126, 'flags' :", "'url' : 'extensions/ARB/ARB_indirect_parameters.txt', }, 'GL_ARB_instanced_arrays' : { 'arbnumber' : 49, 'flags' : {", ": 'extensions/EXT/EXT_texture_sRGB_R8.txt', }, 'GL_EXT_texture_sRGB_RG8' : { 'esnumber' : 223, 'flags' : { 'public'", ": { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_two_side.txt', }, 'GL_EXT_stencil_wrap' : { 'number'", ": { 'public' }, 'url' : 'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', }, 'GL_REND_screen_coordinates' : { 'number' :", "{ 'arbnumber' : 123, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_image.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_bindable_uniform.txt', }, 'GL_EXT_blend_color'", ": { 'SUN' }, 'url' : 'extensions/SUN/SUN_convolution_border_modes.txt', }, 
'GLX_SUN_get_transparent_index' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_indirect_parameters.txt', }, 'GL_ARB_instanced_arrays' : { 'arbnumber'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_lod.txt', }, 'GL_ARB_shader_viewport_layer_array' : { 'arbnumber' : 185,", "}, 'GL_ARB_shader_image_load_store' : { 'arbnumber' : 115, 'flags' : { 'public' }, 'url'", "{ 'number' : 514, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'esnumber' : 197, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_ANGLE_depth_texture' : { 'esnumber' : 138, 'flags' : { 'public' }, 'url' :", ": 'extensions/OML/GLX_OML_swap_method.txt', }, 'GLX_OML_sync_control' : { 'number' : 238, 'flags' : { 'public'", "}, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', }, 'GL_SGIX_flush_raster' : { 'number' : 61, 'flags' :", "'extensions/EXT/EXT_vertex_weighting.txt', }, 'GLX_EXT_visual_info' : { 'number' : 28, 'flags' : { 'public' },", ": 102, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shadow_samplers.txt', }, 'GL_EXT_shared_texture_palette' :", "}, 'url' : 'extensions/NV/NV_robustness_video_memory_purge.txt', }, 'GL_NV_sRGB_formats' : { 'esnumber' : 148, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_coverage.txt',", "{ 'number' : 427, 'flags' : { 'public' }, 'supporters' : { 'INTEL',", ": { 'arbnumber' : 81, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sampler_objects.txt',", ": 143, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop2.txt', }, 'WGL_NV_delay_before_swap'", "'GL_NV_shader_buffer_load' : { 'number' : 379, 'flags' : { 'public' }, 'supporters' :", "}, 'GLX_SGIX_dm_buffer' : { 'number' : 86, 'flags' : { 'public' }, 'supporters'", ": { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', }, 'GL_EXT_framebuffer_object' : { 
'number'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp.txt', }, 'GL_EXT_texture_mirror_clamp_to_edge'", ": { 'GL_EXT_shader_framebuffer_fetch_non_coherent' }, }, 'GL_EXT_shader_group_vote' : { 'esnumber' : 254, 'flags' :", "'number' : 512, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program_shadow.txt', }, 'GL_ARB_fragment_shader' : { 'arbnumber' : 32, 'flags'", "105, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB.txt', }, 'GL_EXT_sRGB_write_control' : {", "'number' : 165, 'flags' : { 'public' }, 'supporters' : { 'SUN' },", "'extensions/EXT/EXT_draw_buffers.txt', }, 'GL_EXT_draw_buffers2' : { 'number' : 340, 'flags' : { 'public' },", "'extensions/EXT/EXT_pvrtc_sRGB.txt', }, 'GL_EXT_raster_multisample' : { 'number' : 462, 'esnumber' : 226, 'flags' :", "'GL_NV_vertex_program' : { 'number' : 233, 'flags' : { 'public' }, 'supporters' :", "213, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_io_blocks.txt', }, 'GL_OES_shader_multisample_interpolation' : {", ": 256, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "'url' : 'extensions/NV/NV_shader_buffer_load.txt', }, 'GL_NV_shader_buffer_store' : { 'number' : 390, 'flags' : {", "{ 'public' }, 'url' : 'extensions/IMG/IMG_read_format.txt', }, 'GL_IMG_shader_binary' : { 'esnumber' : 68,", "'GLX_SGIS_blended_overlay' : { 'number' : 142, 'flags' : { 'public' }, 'supporters' :", "'arbnumber' : 20, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'ES', 'HP', 'SGI' }, 'url' : 'extensions/EXT/EXT_copy_texture.txt', }, 'GLX_EXT_create_context_es2_profile' : {", "235, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sample_locations.txt', }, 'GL_NV_sample_mask_override_coverage' : {", ": 'extensions/ARB/ARB_texture_filter_anisotropic.txt', }, 'GL_ARB_texture_filter_minmax' : { 'arbnumber' : 188, 'flags' : { 'public'", "'extensions/IBM/IBM_multimode_draw_arrays.txt', }, 
'GL_IBM_rasterpos_clip' : { 'number' : 110, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_fragment_lighting.txt', }, 'GL_EXT_framebuffer_blit' :", "75, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "278, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", ": { 'public' }, 'url' : 'extensions/OES/OES_stencil4.txt', }, 'GL_OES_stencil8' : { 'esnumber' :", ": { 'arbnumber' : 79, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_attrib_location.txt',", "'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program_option.txt', }, 'GL_NV_fragment_shader_interlock' : { 'number' : 468, 'esnumber'", "263, 'flags' : { 'public' }, 'supporters' : { 'GOOGLE', 'NVIDIA', 'VMware' },", ": { 'number' : 217, 'flags' : { 'public' }, 'supporters' : {", "'number' : 322, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'url' : 'extensions/OVR/OVR_multiview2.txt', }, 'GL_OVR_multiview_multisampled_render_to_texture' : { 'esnumber' : 250, 'flags' :", "'url' : 'extensions/OES/OES_vertex_array_object.txt', }, 'GL_OES_vertex_half_float' : { 'esnumber' : 38, 'flags' : {", ": 'extensions/OES/OES_texture_stencil8.txt', }, 'GL_OES_texture_storage_multisample_2d_array' : { 'esnumber' : 174, 'flags' : { 'public'", "}, 'GL_ARB_sample_shading' : { 'arbnumber' : 70, 'flags' : { 'public' }, 'url'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', }, 'GL_NVX_conditional_render' :", ": { 'esnumber' : 213, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_io_blocks.txt',", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_depth_texture.txt', },", "'public' }, 'url' : 'extensions/ARB/ARB_uniform_buffer_object.txt', }, 'GL_ARB_vertex_array_bgra' : { 'arbnumber' : 68, 'flags'", "}, 'url' : 'extensions/EXT/EXT_map_buffer_range.txt', }, 'GL_EXT_misc_attribute' : { 'number' : 31, 'flags' :", "}, 'supporters' : { 
'NVIDIA' }, 'url' : 'extensions/NV/NV_video_capture.txt', 'alias' : { 'GLX_NV_video_capture',", ": { 'number' : 373, 'esnumber' : 76, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', }, 'GL_ATI_draw_buffers' : { 'number' : 277, 'flags' :", "}, 'url' : 'extensions/ARB/ARB_shader_clock.txt', }, 'GL_ARB_shader_draw_parameters' : { 'arbnumber' : 156, 'flags' :", ": 'extensions/NV/NV_tessellation_program5.txt', }, 'GL_NV_texgen_emboss' : { 'number' : 193, 'flags' : { 'public'", ": 'extensions/ATI/ATI_text_fragment_shader.txt', }, 'GL_ATI_texture_env_combine3' : { 'number' : 279, 'flags' : { 'public'", "'url' : 'extensions/OML/GLX_OML_sync_control.txt', }, 'GL_OVR_multiview' : { 'number' : 478, 'esnumber' : 241,", "{ 'AMD' }, 'url' : 'extensions/AMD/WGL_AMD_gpu_association.txt', }, 'WGL_ARB_buffer_region' : { 'arbnumber' : 4,", "{ 'esnumber' : 113, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_translated_shader_source.txt', },", "{ 'SUN' }, 'url' : 'extensions/SUN/SUN_convolution_border_modes.txt', }, 'GLX_SUN_get_transparent_index' : { 'number' : 183,", "'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize_float.txt', }, 'GLX_SGIX_video_source'", "55, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_driver_control.txt', }, 'GL_QCOM_extended_get' : {", "{ 'number' : 466, 'esnumber' : 232, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/NV/NV_light_max_exponent.txt', }, 'GL_NV_multisample_coverage' : { 'number' : 393, 'flags' : {", ": 86, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_filter_hint.txt',", "}, 'supporters' : { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_filter4.txt', }, 'GL_SGIS_texture_lod' :", "'GL_INTEL_fragment_shader_ordering' : { 'number' : 441, 'flags' : { 'public' }, 'supporters' :", "'GL_SGIX_instruments' : { 'number' : 
55, 'flags' : { 'public' }, 'supporters' :", "'extensions/IBM/IBM_static_data.txt', }, 'GL_IBM_texture_mirrored_repeat' : { 'number' : 224, 'flags' : { 'public' },", "'extensions/IMG/IMG_multisampled_render_to_texture.txt', }, 'GL_IMG_program_binary' : { 'esnumber' : 67, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_flush_buffer_range.txt', }, 'GL_APPLE_framebuffer_multisample'", "'arbnumber' : 128, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_uniform_location.txt', }, 'GL_ARB_fragment_coord_conventions'", ": { 'number' : 388, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program3.txt', }, 'GL_NV_vertex_program4' : {", "'esnumber' : 12, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_palette.txt', }, 'GL_OES_packed_depth_stencil'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_separate_shader_objects.txt', }, 'GL_ARB_shader_atomic_counter_ops' : { 'arbnumber' :", "'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample.txt', }, 'GL_EXT_framebuffer_multisample_blit_scaled' : { 'number' : 409, 'flags'", "{ 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', }, 'GL_APPLE_texture_format_BGRA8888' : { 'esnumber' : 79,", "'GL_ARM_mali_shader_binary' : { 'esnumber' : 81, 'flags' : { 'public' }, 'url' :", ": { 'esnumber' : 90, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_unpack_subimage.txt',", "'WGL_NV_delay_before_swap' : { 'number' : 436, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_ARB_query_buffer_object' : { 'arbnumber' : 148, 'flags' : { 'public' }, 'url'", ": 'extensions/EXT/EXT_packed_pixels.txt', }, 'GL_EXT_paletted_texture' : { 'number' : 78, 'flags' : { 'public'", "297, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap.txt', }, 'GL_NV_conservative_raster_pre_snap_triangles' : {", "}, 'GL_EXT_texture_buffer_object' : { 'number' : 330, 'flags' : { 
'public' }, 'supporters'", "'url' : 'extensions/ARB/ARB_color_buffer_float.txt', 'alias' : { 'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float' }, }, 'GL_ARB_compatibility' : {", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_overflow_query.txt', }, 'GL_ARB_transpose_matrix' : { 'arbnumber'", "}, 'url' : 'extensions/EXT/EXT_light_texture.txt', }, 'GL_EXT_map_buffer_range' : { 'esnumber' : 121, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_shader_texture_lod.txt', }, 'GL_EXT_shadow_funcs' : { 'number' : 267, 'flags' :", "}, 'GL_EXT_base_instance' : { 'esnumber' : 203, 'flags' : { 'public' }, 'url'", "123, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_copy_texture_levels.txt', }, 'GL_APPLE_element_array' : {", ": { 'MESA' }, 'url' : 'extensions/MESA/MESA_program_binary_formats.txt', }, 'GLX_MESA_query_renderer' : { 'number' :", "}, 'GLX_NV_delay_before_swap' : { 'number' : 445, 'flags' : { 'public' }, 'url'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_tag_sample_buffer.txt', }, 'GL_SGIX_texture_add_env' : { 'number' : 69,", "'url' : 'extensions/APPLE/APPLE_sync.txt', }, 'GL_APPLE_texture_2D_limited_npot' : { 'esnumber' : 59, 'flags' : {", "'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_query_matrix.txt', }, 'GL_OES_read_format' : { 'number'", ": { 'esnumber' : 22, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_mirrored_repeat.txt',", ": 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Alias to GLX_ARB_create_context_profile not needed - see arbnumber 75.',", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_float.txt', }, 'GL_ATI_texture_mirror_once'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers_indexed.txt', }, 'GL_EXT_draw_elements_base_vertex' : { 'esnumber' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_buffer_object.txt', }, 'GL_EXT_texture_compression_astc_decode_mode' : {", "300, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 
'url' :", "'url' : 'extensions/NV/NV_packed_float.txt', }, 'GL_NV_parameter_buffer_object' : { 'number' : 339, 'flags' : {", "{ 'public' }, 'url' : 'extensions/NV/NV_3dvision_settings.txt', }, 'GL_NV_EGL_stream_consumer_external' : { 'esnumber' : 104,", "}, 'GL_NV_gpu_shader5' : { 'number' : 389, 'esnumber' : 260, 'flags' : {", "'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_fixed_point.txt', }, 'GL_OES_fragment_precision_high' :", "'extensions/SGIX/SGIX_subdiv_patch.txt', }, 'GL_SGIX_subsample' : { 'number' : 202, 'flags' : { 'incomplete' },", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_interlace.txt', }, 'GL_SGIX_ir_instrument1' : { 'number' : 81,", ": { 'number' : 444, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_shader_stencil_value_export.txt',", "'extensions/SGIX/SGIX_shadow.txt', }, 'GL_SGIX_shadow_ambient' : { 'number' : 90, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/IMG/IMG_program_binary.txt', }, 'GL_IMG_read_format' : { 'esnumber' : 53, 'flags'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource.txt', }, 'GL_NV_query_resource_tag' : { 'number'", "}, 'GLU_SGIX_icc_compress' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLU_SGIX_icc_compress.txt', },", "{ 'number' : 328, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'supporters' : { 'INTEL' }, 'url' : 'extensions/EXT/EXT_shader_integer_mix.txt', }, 'GL_EXT_shader_io_blocks' : { 'esnumber'", "}, 'url' : 'extensions/OES/OES_stencil_wrap.txt', }, 'GL_OES_surfaceless_context' : { 'esnumber' : 116, 'flags' :", "{ 'GLX_EXT_scene_marker' }, }, 'GL_EXT_secondary_color' : { 'number' : 145, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_texture_filter_minmax.txt', }, 'GL_ARB_texture_float' : { 'arbnumber' : 41, 'flags' :", "}, 'supporters' : { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_detail_texture.txt', }, 'GL_SGIS_fog_function' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_alpha_minmax.txt', }, 'GL_SGIX_blend_cadd' : { 
'number' :", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_aux_depth_stencil.txt', }, 'GL_APPLE_client_storage'", ": 'extensions/EXT/EXT_draw_elements_base_vertex.txt', }, 'GL_EXT_draw_instanced' : { 'number' : 327, 'esnumber' : 157, 'flags'", ": 'extensions/ATI/ATI_envmap_bumpmap.txt', }, 'GL_ATI_fragment_shader' : { 'number' : 245, 'flags' : { 'public'", ": { 'esnumber' : 158, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_buffer.txt',", ": { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_line_texgen.txt', }, 'GL_SGIS_multisample' : { 'number' :", "'GL_NV_vertex_program2_option' : { 'number' : 305, 'flags' : { 'public' }, 'supporters' :", "{ 'arbnumber' : 5, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": { 'public' }, 'url' : 'extensions/OES/OES_vertex_array_object.txt', }, 'GL_OES_vertex_half_float' : { 'esnumber' :", ": { 'number' : 85, 'flags' : { 'public' }, 'supporters' : {", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_framebuffer_multisample_coverage.txt', }, 'GL_NV_generate_mipmap_sRGB' : { 'esnumber' :", "'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_gamma.txt', }, 'WGL_I3D_genlock' : { 'number'", "}, 'GLX_NV_video_out' : { 'number' : 348, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/OES/OES_matrix_palette.txt', }, 'GL_OES_packed_depth_stencil' : { 'esnumber' : 44, 'flags' :", "'number' : 160, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_range.txt',", "{ 'number' : 354, 'flags' : { 'public' }, 'supporters' : { 'Blizzard',", "to WGL_ARB_create_context_profile not needed - see arbnumber 74.', }, 'WGL_ARB_create_context_profile' : { 'arbnumber'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_64bit.txt', }, 'GL_ARB_vertex_attrib_binding' : { 'arbnumber' :", "'WGL_EXT_create_context_es_profile' }, }, 'WGL_EXT_depth_float' : { 'number' : 177, 'flags' : { 'public'", 
"'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_polynomial_ffd.txt', }, 'GL_SGIX_quad_mesh' : { 'flags'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_phase.txt', }, 'GL_SGIX_texture_range' : { 'number'", "'extensions/EXT/WGL_EXT_make_current_read.txt', }, 'WGL_EXT_multisample' : { 'number' : 209, 'flags' : { 'public' },", "{ 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_genlock.txt', }, 'WGL_I3D_image_buffer' : { 'number' : 253,", "'url' : 'extensions/ATI/ATI_envmap_bumpmap.txt', }, 'GL_ATI_fragment_shader' : { 'number' : 245, 'flags' : {", "'url' : 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', }, 'GL_EXT_texture_integer' : { 'number' : 343, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_array.txt', }, 'GL_NV_shadow_samplers_cube' : { 'esnumber'", "'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_marker.txt', }, 'GL_EXT_depth_bounds_test' : { 'number'", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_map_object_buffer.txt', }, 'GL_ATI_meminfo'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_object.txt', }, 'GL_ARB_framebuffer_sRGB' : { 'arbnumber'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_parameters.txt', }, 'GL_ARB_point_sprite' : {", "138, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SUN' }, 'url'", ": 243, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_no_error.txt', }, 'GL_KHR_parallel_shader_compile' :", ": 'extensions/NV/WGL_NV_DX_interop2.txt', }, 'WGL_NV_delay_before_swap' : { 'number' : 436, 'flags' : { 'public'", ": 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', }, 'GL_ANGLE_program_binary' : { 'esnumber' : 139, 'flags' : { 'public'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_spotlight_cutoff.txt', }, 'GL_SGIX_sprite' : {", "}, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_interlace.txt', }, 'GL_OML_resample' : {", ": 'extensions/ARB/ARB_compatibility.txt', }, 
'GL_ARB_compressed_texture_pixel_storage' : { 'arbnumber' : 110, 'flags' : { 'public'", "'GL_ARB_internalformat_query2' : { 'arbnumber' : 131, 'flags' : { 'public' }, 'url' :", "'number' : 356, 'flags' : { 'public' }, 'supporters' : { 'IdSoftware', 'NVIDIA'", "'GL_ARB_draw_buffers' : { 'arbnumber' : 37, 'flags' : { 'public' }, 'supporters' :", "'esnumber' : 3, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_subtract.txt', }, 'GL_OES_byte_coordinates'", ": 'extensions/ARB/ARB_shader_clock.txt', }, 'GL_ARB_shader_draw_parameters' : { 'arbnumber' : 156, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_storage.txt', }, 'GL_EXT_texture_swizzle' : { 'number'", ": { 'IBM' }, 'url' : 'extensions/IBM/IBM_rasterpos_clip.txt', }, 'GL_IBM_static_data' : { 'number' :", "'extensions/ARB/ARB_texture_buffer_object.txt', }, 'GL_ARB_texture_buffer_object_rgb32' : { 'arbnumber' : 92, 'flags' : { 'public' },", "}, 'url' : 'extensions/NV/NV_uniform_buffer_unified_memory.txt', }, 'GL_NV_vdpau_interop' : { 'number' : 396, 'flags' :", ": 'extensions/ARB/WGL_ARB_render_texture.txt', }, 'WGL_ARB_robustness_application_isolation' : { 'arbnumber' : 143, 'flags' : { 'public'", "176, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_2_compatibility.txt', }, 'GL_ARB_ES3_compatibility' : {", "{ 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform.txt', }, 'GL_EXT_pixel_transform_color_table' : { 'number' :", ": 'extensions/NV/NV_depth_clamp.txt', }, 'GL_NV_depth_nonlinear' : { 'esnumber' : 73, 'flags' : { 'public'", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_timer_instruments.txt', }, 'GL_SGIX_blend_alpha_minmax' : { 'number' :", ": { 'number' : 165, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/EXT/EXT_gpu_program_parameters.txt', }, 'GL_EXT_gpu_shader4' : { 'number' : 326, 'flags' :", "'number' : 384, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", ": 74, 'flags' : { 'public' }, 'url' : 
'extensions/IMG/IMG_multisampled_render_to_texture.txt', }, 'GL_IMG_program_binary' :", "'extensions/ARB/WGL_ARB_create_context_robustness.txt', }, 'WGL_ARB_extensions_string' : { 'arbnumber' : 8, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/EXT/EXT_disjoint_timer_query.txt', }, 'GL_EXT_draw_buffers' : { 'esnumber' : 151, 'flags'", ": { 'esnumber' : 298, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt',", ": 106, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_stencil_export.txt', }, 'GL_ARB_shader_storage_buffer_object' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_shader.txt', }, 'GL_ARB_fragment_shader_interlock' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_stencil_export.txt', }, 'GL_ARB_shader_storage_buffer_object' : { 'arbnumber' :", "'url' : 'extensions/EXT/EXT_texture_env_combine.txt', }, 'GL_EXT_texture_env_dot3' : { 'number' : 220, 'flags' : {", "}, 'GL_INTEL_framebuffer_CMAA' : { 'number' : 481, 'esnumber' : 246, 'flags' : {", ": 'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', }, 'GL_QCOM_tiled_rendering' : { 'esnumber' : 70, 'flags' : { 'public'", ": 'extensions/SUN/SUN_mesh_array.txt', }, 'GL_SUN_slice_accum' : { 'number' : 258, 'flags' : { 'public'", ": { 'number' : 244, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_half_float.txt', }, 'GL_EXT_color_subtable' : { 'number' : 74, 'flags'", ": 'extensions/NV/GLX_NV_delay_before_swap.txt', }, 'GL_NV_depth_buffer_float' : { 'number' : 334, 'flags' : { 'public'", "{ 'KHR' }, 'url' : 'extensions/OES/OES_byte_coordinates.txt', }, 'GL_OES_compressed_ETC1_RGB8_texture' : { 'esnumber' : 5,", "'GL_DMP_program_binary' : { 'esnumber' : 192, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias' : { 'GL_KHR_texture_compression_astc_ldr' },", ": { 'public' }, 'supporters' : 
{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_multisample.txt', },", ": 'extensions/SGIX/SGIX_shadow.txt', }, 'GL_SGIX_shadow_ambient' : { 'number' : 90, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_lod.txt', }, 'GL_ARB_texture_rectangle' : { 'arbnumber'", "}, 'url' : 'extensions/ARB/ARB_query_buffer_object.txt', }, 'GL_ARB_robust_buffer_access_behavior' : { 'arbnumber' : 135, 'flags' :", "{ 'number' : 397, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "'url' : 'extensions/EXT/EXT_texture_shared_exponent.txt', }, 'GL_EXT_texture_snorm' : { 'number' : 365, 'flags' : {", ": 138, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_depth_texture.txt', }, 'GL_ANGLE_framebuffer_blit' :", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_shared_exponent.txt', }, 'GL_EXT_texture_snorm' : { 'number' : 365, 'flags'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_delay_before_swap.txt', }, 'WGL_NV_gpu_affinity'", ": 249, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', }, 'GL_MESAX_texture_stack' :", "34, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_3D.txt', }, 'GL_OES_texture_border_clamp' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_rgtc.txt', },", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/WGL_ATI_pixel_format_float.txt', }, 'WGL_EXT_colorspace'", "'KHR' }, 'url' : 'extensions/OML/OML_interlace.txt', }, 'GL_OML_resample' : { 'number' : 241, 'flags'", ": 301, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_storage.txt', }, 'GL_EXT_memory_object' :", ": 'extensions/I3D/WGL_I3D_gamma.txt', }, 'WGL_I3D_genlock' : { 'number' : 252, 'flags' : { 'public'", "'url' : 'extensions/SGIX/SGIX_line_quality_hint.txt', }, 'GL_SGIX_list_priority' : { 'number' : 80, 'flags' : {", "}, 'GL_SGIX_datapipe' : { 'number' : 152, 'flags' : { 'incomplete' }, 'url'", "'SGI' 
}, 'url' : 'extensions/EXT/EXT_index_func.txt', }, 'GL_EXT_index_material' : { 'number' : 94, 'flags'", ": { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_func.txt',", ": { 'arbnumber' : 64, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_provoking_vertex.txt',", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_packing.txt', }, 'GL_ARB_shadow' : { 'arbnumber' : 23,", "{ 'number' : 270, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_depth_clamp_separate.txt', }, 'GL_AMD_draw_buffers_blend' : {", "'number' : 306, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'arbnumber' : 50, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'flags' : { 'public' }, 'supporters' : { 'QCOM' }, 'url' : 'extensions/QCOM/QCOM_tiled_rendering.txt',", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_equation_advanced.txt', 'alias'", "}, 'GL_ARB_spirv_extensions' : { 'arbnumber' : 194, 'flags' : { 'public' }, 'supporters'", "}, 'GL_ARB_multi_bind' : { 'arbnumber' : 147, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/OVR/OVR_multiview2.txt', }, 'GL_OVR_multiview_multisampled_render_to_texture' : { 'esnumber' : 250, 'flags'", "'arbnumber' : 18, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', }, 'GL_ARB_vertex_type_2_10_10_10_rev' : { 'arbnumber'", ": 'extensions/AMD/AMD_shader_image_load_store_lod.txt', }, 'GL_AMD_shader_stencil_export' : { 'number' : 382, 'flags' : { 'public'", ": 413, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "'public' }, 'url' : 'extensions/EXT/EXT_multisample_compatibility.txt', }, 'GL_EXT_multisampled_render_to_texture' : { 'esnumber' : 106, 'flags'", "'extensions/EXT/EXT_multisampled_render_to_texture2.txt', }, 'GL_EXT_multiview_draw_buffers' : { 'esnumber' : 125, 'flags' : { 'public' },", 
"'number' : 429, 'flags' : { 'public' }, 'supporters' : { 'INTEL' },", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_release_buffers.txt', }, 'GL_MESA_resize_buffers'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program4.txt', },", "{ 'public' }, 'url' : 'extensions/OES/OES_depth_texture.txt', }, 'GL_OES_depth_texture_cube_map' : { 'esnumber' : 136,", "'3DFX' }, 'url' : 'extensions/3DFX/3DFX_multisample.txt', }, 'GL_3DFX_tbuffer' : { 'number' : 208, 'flags'", "'esnumber' : 8, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_extended_matrix_palette.txt', }, 'GL_OES_fbo_render_mipmap'", "'GL_APPLE_clip_distance' : { 'esnumber' : 193, 'flags' : { 'public' }, 'url' :", "{ 'number' : 129, 'flags' : { 'public' }, 'supporters' : { 'MS'", "'extensions/SGIX/SGIX_image_compression.txt', }, 'GL_SGIX_impact_pixel_texture' : { 'number' : 126, 'flags' : { 'incomplete' },", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_array_object.txt', }, 'GL_ATI_vertex_attrib_array_object' :", ": 257, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_linked_gpu_multicast.txt', }, 'GL_NV_3dvision_settings' : { 'esnumber'", ": 'extensions/EXT/EXT_index_texture.txt', }, 'GL_EXT_instanced_arrays' : { 'esnumber' : 156, 'flags' : { 'public'", ": 172, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI' },", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_color_matrix.txt', },", "}, 'url' : 'extensions/EXT/EXT_YUV_target.txt', }, 'GL_EXT_abgr' : { 'number' : 1, 'flags' :", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_slim.txt', }, 'GL_SGIX_spotlight_cutoff' : { 'number' : 131, 'flags'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_filter_hint.txt', }, 'GL_NV_non_square_matrices' : { 'esnumber' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program4.txt', }, 'GL_NV_gpu_program5' : { 
'number'", "}, 'GL_EXT_multisampled_render_to_texture2' : { 'esnumber' : 275, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_byte_coordinates.txt', }, 'GL_OES_compressed_ETC1_RGB8_texture' : {", "490, 'esnumber' : 263, 'flags' : { 'public' }, 'supporters' : { 'GOOGLE',", "{ 'number' : 291, 'esnumber' : 4, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 426, 'flags' : { 'public' }, 'supporters' : {", "'extensions/ARB/ARB_post_depth_coverage.txt', }, 'GL_ARB_program_interface_query' : { 'arbnumber' : 134, 'flags' : { 'public' },", "'extensions/NV/NV_fence.txt', }, 'GL_NV_fill_rectangle' : { 'number' : 466, 'esnumber' : 232, 'flags' :", "'extensions/SGIX/SGIX_flush_raster.txt', }, 'GL_SGIX_fog_blend' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_blend.txt',", "}, 'url' : 'extensions/EXT/EXT_texture_compression_dxt1.txt', }, 'GL_EXT_texture_compression_latc' : { 'number' : 331, 'flags' :", ": 147, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_cube.txt', }, 'GL_NV_stereo_view_rendering' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_list_priority.txt', }, 'GL_SGIX_mpeg1' :", "'extensions/EXT/EXT_geometry_shader.txt', 'alias' : { 'GL_EXT_geometry_point_size' }, }, 'GL_EXT_geometry_shader4' : { 'number' : 324,", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_mirrored_repeat.txt', }, 'GL_ARB_texture_multisample' : { 'arbnumber' :", "'public' }, 'url' : 'extensions/ARB/ARB_enhanced_layouts.txt', }, 'GL_ARB_explicit_attrib_location' : { 'arbnumber' : 79, 'flags'", "'esnumber' : 13, 'flags' : { 'incomplete', 'private' }, 'comments' : 'Draft spec", "{ 'public' }, 'url' : 'extensions/OES/OES_shader_image_atomic.txt', }, 'GL_OES_shader_io_blocks' : { 'esnumber' : 213,", ": { 'number' : 16, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/PGI/PGI_vertex_hints.txt', }, 'GL_QCOM_alpha_test' : { 'esnumber' : 89, 'flags' : {", ": { 'MESA' }, 'url' : 
'extensions/MESA/GLX_MESA_release_buffers.txt', }, 'GL_MESA_resize_buffers' : { 'number' :", "'extensions/OES/OES_blend_func_separate.txt', }, 'GL_OES_blend_subtract' : { 'esnumber' : 3, 'flags' : { 'public' },", ": { 'arbnumber' : 60, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_lod.txt',", "}, 'GL_IMG_texture_env_enhanced_fixed_function' : { 'esnumber' : 58, 'flags' : { 'public' }, 'url'", "258, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' :", "{ 'public' }, 'supporters' : { 'HP', 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_rating.txt',", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture4D.txt', },", "{ 'esnumber' : 278, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_array.txt', },", "}, 'url' : 'extensions/NV/NV_sample_mask_override_coverage.txt', }, 'GL_NV_shader_atomic_counters' : { 'number' : 423, 'flags' :", "{ 'public' }, 'url' : 'extensions/KHR/KHR_parallel_shader_compile.txt', }, 'GL_KHR_robust_buffer_access_behavior' : { 'arbnumber' : 169,", "'GL_NV_fog_distance' : { 'number' : 192, 'flags' : { 'public' }, 'supporters' :", ": 170, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_variables.txt', }, 'GL_OES_shader_image_atomic' :", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_multi_draw_indirect.txt', }, 'GL_AMD_name_gen_delete'", "'public' }, 'url' : 'extensions/OES/OES_get_program_binary.txt', }, 'GL_OES_gpu_shader5' : { 'esnumber' : 211, 'flags'", "180, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_uniform_location.txt', }, 'GL_ARB_fragment_coord_conventions' : { 'arbnumber'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fbo_color_attachments.txt', }, 'GL_NV_fence' : { 'number'", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_client_storage.txt', }, 'GL_APPLE_clip_distance' : { 'esnumber' : 193, 'flags'", "'extensions/NV/NV_image_formats.txt', }, 
'GL_NV_instanced_arrays' : { 'esnumber' : 145, 'flags' : { 'public' },", "'url' : 'extensions/OVR/OVR_multiview.txt', }, 'GL_OVR_multiview2' : { 'number' : 479, 'esnumber' : 242,", ": 'extensions/EXT/EXT_cmyka.txt', }, 'GL_EXT_color_buffer_float' : { 'esnumber' : 137, 'flags' : { 'public'", "'number' : 44, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM',", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_sharpen_texture.txt', }, 'GL_SGIS_texture4D' : { 'number'", "- see arbnumber 75.', }, 'GLX_ARB_create_context_no_error' : { 'arbnumber' : 191, 'flags' :", "}, 'GL_NV_framebuffer_mixed_samples' : { 'number' : 469, 'esnumber' : 231, 'flags' : {", "'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_subtexture.txt', }, 'GL_EXT_swap_control' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect.txt', }, 'GL_NV_bindless_multi_draw_indirect_count'", "{ 'ATI' }, 'url' : 'extensions/ATI/ATI_draw_buffers.txt', }, 'GL_ATI_element_array' : { 'number' : 256,", ": { 'esnumber' : 138, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_depth_texture.txt',", "{ 'number' : 137, 'flags' : { 'public' }, 'supporters' : { 'HP'", "}, 'url' : 'extensions/NV/NV_fragment_shader_interlock.txt', }, 'GL_NV_framebuffer_blit' : { 'esnumber' : 142, 'flags' :", "}, 'url' : 'extensions/MESA/MESA_resize_buffers.txt', }, 'GLX_MESA_set_3dfx_mode' : { 'number' : 218, 'flags' :", ": { 'esnumber' : 2, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_func_separate.txt',", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_occlusion_query.txt', }, 'GL_NV_pack_subimage' : {", "}, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_multimode_draw_arrays.txt', }, 'GL_IBM_rasterpos_clip' : {", "{ 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_image_buffer.txt', }, 'WGL_I3D_swap_frame_lock'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/NV/NV_geometry_shader4.txt',", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_fragment_lighting.txt', }, 'GL_EXT_framebuffer_blit' : {", "'GL_SGIX_shadow' : { 'number' : 34, 'flags' : { 'public' }, 'supporters' :", "93, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_read_buffer.txt', }, 'GL_NV_read_depth_stencil' : {", "}, 'url' : 'extensions/ARB/ARB_pixel_buffer_object.txt', }, 'GL_ARB_point_parameters' : { 'arbnumber' : 14, 'flags' :", "'number' : 296, 'esnumber' : 16, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 211, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_framebuffer_multisample_coverage.txt', }, 'GL_NV_generate_mipmap_sRGB'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_float_buffer.txt', 'alias'", "'number' : 153, 'flags' : { 'obsolete' }, 'url' : 'extensions/SGI/GLX_SGI_transparent_pixel.txt', }, 'GLX_SGI_video_sync'", "'extensions/EXT/WGL_EXT_swap_control_tear.txt', }, 'GL_EXT_clip_control' : { 'esnumber' : 290, 'flags' : { 'public' },", "'number' : 243, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_depth_to_color.txt', }, 'GL_NV_copy_image'", ": 85, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'extensions/NV/NV_generate_mipmap_sRGB.txt', }, 'GL_NV_geometry_program4' : { 'number' : 323, 'flags' : { 'public' },", "}, 'GL_AMD_pinned_memory' : { 'number' : 411, 'flags' : { 'public' }, 'supporters'", ": 'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', }, 'GL_AMD_transform_feedback4' : { 'number' : 450, 'flags' : { 'public'", ": 'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', }, 'GL_NV_conservative_raster_underestimation' : { 'number' : 518, 'flags' : { 'public'", "'url' : 'extensions/SGIX/SGIX_tag_sample_buffer.txt', }, 'GL_SGIX_texture_add_env' : { 'number' : 69, 'flags' : {", "{ 'arbnumber' : 80, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_occlusion_query2.txt', 
},", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_rgb_422.txt', },", ": { 'arbnumber' : 44, 'flags' : { 'public' }, 'supporters' : {", "'GL_ARB_vertex_attrib_binding' : { 'arbnumber' : 125, 'flags' : { 'public' }, 'url' :", ": 'extensions/NV/NV_fragment_program.txt', }, 'GL_NV_fragment_program2' : { 'number' : 304, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_compatibility.txt', }, 'GL_ARB_compressed_texture_pixel_storage' : { 'arbnumber' : 110, 'flags'", "{ 'number' : 325, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'extensions/ARB/ARB_get_program_binary.txt', }, 'GL_ARB_get_texture_sub_image' : { 'arbnumber' : 165, 'flags' : { 'public' },", "'url' : 'extensions/OES/OES_texture_stencil8.txt', }, 'GL_OES_texture_storage_multisample_2d_array' : { 'esnumber' : 174, 'flags' : {", "'esnumber' : 277, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', }, 'GL_REND_screen_coordinates'", ": 239, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_buffer_storage.txt', }, 'GL_EXT_clear_texture' :", ": { 'public' }, 'url' : 'extensions/NV/NV_fragment_shader_interlock.txt', }, 'GL_NV_framebuffer_blit' : { 'esnumber' :", "'supporters' : { 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture_object.txt', }, 'GL_EXT_texture_perturb_normal'", "'url' : 'extensions/OES/OES_texture_compression_astc.txt', }, 'GL_OES_texture_cube_map' : { 'esnumber' : 20, 'flags' : {", "'GL_EXT_texture_sRGB_RG8' : { 'esnumber' : 223, 'flags' : { 'public' }, 'url' :", "{ 'arbnumber' : 43, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_multi_draw_indirect.txt', }, 'GL_ARB_multisample' : { 'arbnumber' :", "'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_usage.txt', }, 'GL_ANGLE_translated_shader_source' : { 'esnumber' : 113, 'flags'", "184, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_clock.txt', }, 
'GL_ARB_shader_draw_parameters' : {", "}, 'url' : 'extensions/QCOM/QCOM_driver_control.txt', }, 'GL_QCOM_extended_get' : { 'esnumber' : 62, 'flags' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias' :", "457, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'number' : 21, 'flags' : { 'public' }, 'supporters' : { 'KGC',", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_lod.txt', }, 'GL_SGIS_texture_select' : { 'number' : 51, 'flags'", ": { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_provoking_vertex.txt', }, 'GL_EXT_pvrtc_sRGB' : { 'esnumber'", "'GL_APPLE_vertex_program_evaluators' : { 'number' : 369, 'flags' : { 'public' }, 'supporters' :", "'GL_EXT_cmyka' : { 'number' : 18, 'flags' : { 'public' }, 'supporters' :", "'extensions/OES/OES_fixed_point.txt', }, 'GL_OES_fragment_precision_high' : { 'esnumber' : 28, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_packed_float.txt', }, 'GL_APPLE_texture_range' : { 'number'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rg.txt', }, 'GL_ARB_texture_rgb10_a2ui' : { 'arbnumber'", "'extensions/ARB/ARB_shader_viewport_layer_array.txt', }, 'GL_ARB_shading_language_100' : { 'arbnumber' : 33, 'flags' : { 'public' },", "}, 'url' : 'extensions/EXT/EXT_shader_group_vote.txt', }, 'GL_EXT_shader_image_load_formatted' : { 'number' : 449, 'flags' :", ": { 'number' : 209, 'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_multisample.txt',", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_texture4.txt', }, 'GL_AMD_transform_feedback3_lines_triangles' : {", "}, 'GL_OES_stencil1' : { 'esnumber' : 31, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/ARB/ARB_draw_elements_base_vertex.txt', }, 'GL_ARB_draw_indirect' : { 'arbnumber' : 87, 'flags' : {", "}, 'GL_EXT_texture_sRGB_RG8' : { 'esnumber' : 223, 'flags' : { 'public' }, 'url'", "'public' }, 'supporters' : 
{ 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_delay_before_swap.txt', }, 'WGL_NV_gpu_affinity' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_point_parameters.txt', }, 'GL_EXT_polygon_offset' : {", ": 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', }, 'GL_ANGLE_instanced_arrays' : { 'esnumber' : 109, 'flags' : { 'public'", "'extensions/I3D/WGL_I3D_genlock.txt', }, 'WGL_I3D_image_buffer' : { 'number' : 253, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', }, 'GL_OES_compressed_paletted_texture' : { 'number'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_multisample.txt', 'alias' : { 'GLX_ARB_multisample', 'WGL_ARB_multisample' }, },", ": 332, 'esnumber' : 286, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/NVX/NVX_gpu_memory_info.txt', }, 'GL_NVX_linked_gpu_multicast' : { 'number' : 493, 'flags' :", "}, 'GL_ATI_pn_triangles' : { 'number' : 246, 'flags' : { 'public' }, 'supporters'", "'GL_KHR_context_flush_control' : { 'arbnumber' : 168, 'esnumber' : 191, 'flags' : { 'public'", "}, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_setXXX.txt', }, 'GL_EXT_vertex_attrib_64bit' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program_shadow.txt',", "'number' : 241, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", "'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_depth_texture.txt', }, 'GL_ANGLE_framebuffer_blit' : { 'esnumber'", "91, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_tessellation_shader.txt', }, 'GL_ARB_texture_barrier' : {", "280, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", "'extensions/MESAX/MESAX_texture_stack.txt', }, 'GLX_MESA_agp_offset' : { 'number' : 308, 'flags' : { 'public' },", ": 'extensions/OES/OES_shader_io_blocks.txt', }, 'GL_OES_shader_multisample_interpolation' : { 'esnumber' : 172, 'flags' : { 'public'", "{ 'number' : 201, 'flags' : { 'public' }, 
'supporters' : { 'IBM'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage.txt', }, 'GL_ARB_texture_storage_multisample' : { 'arbnumber'", "140, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_levels.txt', }, 'GL_ARB_texture_query_lod' : {", "'esnumber' : 222, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_YUV_target.txt', }, 'GL_EXT_abgr'", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_line_quality_hint.txt', }, 'GL_SGIX_list_priority' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader5.txt', }, 'GL_ARB_gpu_shader_fp64' : { 'arbnumber'", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_filter_anisotropic.txt', }, 'GL_EXT_texture_filter_minmax' : { 'number' : 464,", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_image_load_store_lod.txt', }, 'GL_AMD_shader_stencil_export' : {", ": 'extensions/EXT/EXT_texture_swizzle.txt', }, 'GL_EXT_texture_type_2_10_10_10_REV' : { 'esnumber' : 42, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/NV/NV_draw_buffers.txt', }, 'GL_NV_draw_instanced' : { 'esnumber' : 141, 'flags'", "GLX_ARB_create_context_profile not needed - see arbnumber 75.', }, 'GLX_ARB_create_context_no_error' : { 'arbnumber' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_instanced.txt', }, 'GL_EXT_draw_range_elements' :", "'GL_SGIS_line_texgen' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_line_texgen.txt', }, 'GL_SGIS_multisample'", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_fragment_shader.txt', },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_expand_normal.txt', }, 'GL_NV_texture_multisample' : { 'number' : 403,", "'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_convolution_border_modes.txt',", ": { 'public' }, 'url' : 'extensions/NV/NV_stereo_view_rendering.txt', }, 'GLX_NV_swap_group' : { 'number' :", "{ 'public' }, 'url' : 
'extensions/EXT/EXT_base_instance.txt', }, 'GL_EXT_bgra' : { 'number' : 129,", "{ 'number' : 448, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_shuffle.txt', },", "'GL_EXT_bgra' : { 'number' : 129, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', }, 'GL_EXT_EGL_image_storage' : { 'number' : 522, 'esnumber'", "'public' }, 'supporters' : { '3DL' }, 'url' : 'extensions/3DL/WGL_3DL_stereo_control.txt', }, 'WGL_AMD_gpu_association' :", "the OpenGL extension with the same name string.', }, 'GL_EXT_separate_specular_color' : { 'number'", ": 169, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI' },", ": 254, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_group_vote.txt', }, 'GL_EXT_shader_image_load_formatted' :", "}, 'GL_AMD_draw_buffers_blend' : { 'number' : 366, 'flags' : { 'public' }, 'supporters'", "293, 'esnumber' : 18, 'flags' : { 'public' }, 'supporters' : { 'KHR'", "266, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_packed_depth_stencil.txt', }, 'GL_EXT_packed_float' : { 'number'", "'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_parallel_shader_compile.txt', }, 'GL_KHR_robust_buffer_access_behavior' : { 'arbnumber'", "'GL_EXT_coordinate_frame' : { 'number' : 156, 'flags' : { 'public' }, 'url' :", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_clip_band_hint.txt', }, 'GLX_SGIS_color_range' : { 'number'", "{ 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_two_side.txt', }, 'GL_EXT_stencil_wrap' : { 'number' :", "'public' }, 'url' : 'extensions/EXT/EXT_multi_draw_indirect.txt', }, 'GL_EXT_multiple_textures' : { 'flags' : { 'obsolete'", "{ 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_dilate.txt', }, 'GL_NV_conservative_raster_pre_snap' : { 'number' : 517,", "'GL_EXT_shader_group_vote' : { 'esnumber' : 254, 'flags' : { 'public' }, 'url' :", "'supporters' : { 
'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_int64.txt', }, 'GL_NV_shader_buffer_load' : { 'number'", "'url' : 'extensions/ARB/ARB_spirv_extensions.txt', }, 'GL_ARB_stencil_texturing' : { 'arbnumber' : 138, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_enhanced_layouts.txt', }, 'GL_ARB_explicit_attrib_location' : { 'arbnumber' : 79, 'flags' :", ": { 'arbnumber' : 38, 'flags' : { 'public' }, 'supporters' : {", "'number' : 482, 'flags' : { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_libglvnd.txt', }, 'GL_EXT_light_texture'", ": { 'esnumber' : 180, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_io_blocks.txt',", "'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range2.txt', }, 'GL_NV_vertex_attrib_integer_64bit' : { 'number' : 392, 'flags'", "'extensions/ARB/WGL_ARB_extensions_string.txt', }, 'WGL_ARB_make_current_read' : { 'arbnumber' : 10, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/ARB/ARB_sampler_objects.txt', }, 'GL_ARB_seamless_cube_map' : { 'arbnumber' :", ": 'extensions/SGIX/SGIX_flush_raster.txt', }, 'GL_SGIX_fog_blend' : { 'flags' : { 'incomplete' }, 'url' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_compression_vtc.txt', }, 'GL_NV_texture_env_combine4'", ": { 'number' : 173, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/ARB/ARB_ES2_compatibility.txt', }, 'GL_ARB_ES3_1_compatibility' : { 'arbnumber' : 159, 'flags' : {", "100, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_array_object.txt',", ": 98, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' },", ": { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_swap_control.txt', }, 'GL_MESA_tile_raster_order' : { 'number' :", "186, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_primitive_bounding_box.txt', }, 'GL_EXT_protected_textures' : {", "'GL_ARB_clear_buffer_object' : { 'arbnumber' : 121, 'flags' : { 'public' 
}, 'url' :", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_compression_astc.txt', }, 'GL_OES_texture_cube_map' : { 'esnumber' : 20,", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_vulkan_image.txt', }, 'GL_NV_evaluators' : { 'number' :", "14, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI', 'SUN' },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_packed_depth_stencil.txt', }, 'GL_EXT_packed_float' : { 'number' : 328,", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/INTEL/GLX_INTEL_swap_event.txt', }, 'GL_INTEL_texture_scissor' :", "}, 'GL_SGIX_blend_cmultiply' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_blend_cmultiply.txt', },", ": 443, 'esnumber' : 164, 'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_performance_query.txt',", ": { 'public' }, 'url' : 'extensions/ARB/ARB_ES2_compatibility.txt', }, 'GL_ARB_ES3_1_compatibility' : { 'arbnumber' :", ": 116, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_packing.txt', }, 'GL_ARB_shadow' :", "}, 'GL_ARB_vertex_attrib_64bit' : { 'arbnumber' : 99, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/EXT/EXT_occlusion_query_boolean.txt', }, 'GL_EXT_packed_depth_stencil' : { 'number' : 312, 'flags' :", "'extensions/SGIS/SGIS_shared_multisample.txt', 'alias' : { 'GLX_SGIS_shared_multisample' }, }, 'GL_SGIS_sharpen_texture' : { 'number' : 22,", "'GLX_INTEL_swap_event' : { 'number' : 384, 'flags' : { 'public' }, 'supporters' :", ": { 'esnumber' : 250, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt',", ": 97, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' },", "{ 'number' : 522, 'esnumber' : 301, 'flags' : { 'public' }, 'url'", "'number' : 460, 'esnumber' : 252, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_multi_bind.txt', }, 'GL_ARB_multi_draw_indirect' : { 'arbnumber' : 133,", "{ 'number' : 472, 'esnumber' : 235, 'flags' : { 'public' }, 'url'", "296, 'esnumber' : 
16, 'flags' : { 'public' }, 'supporters' : { 'KHR'", ": 'extensions/SGIX/SGIX_list_priority.txt', }, 'GL_SGIX_mpeg1' : { 'flags' : { 'incomplete' }, 'url' :", "'public' }, 'url' : 'extensions/ANGLE/ANGLE_translated_shader_source.txt', }, 'GL_APPLE_aux_depth_stencil' : { 'number' : 370, 'flags'", "'extensions/EXT/EXT_shader_pixel_local_storage.txt', }, 'GL_EXT_shader_pixel_local_storage2' : { 'esnumber' : 253, 'flags' : { 'public' },", "'ARB' }, 'url' : 'extensions/ARB/ARB_color_buffer_float.txt', 'alias' : { 'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float' }, }, 'GL_ARB_compatibility'", "'extensions/NV/NV_shader_atomic_counters.txt', }, 'GL_NV_shader_atomic_float' : { 'number' : 419, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_make_current_read.txt',", ": { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_lock.txt', },", "'GL_ANGLE_translated_shader_source' : { 'esnumber' : 113, 'flags' : { 'public' }, 'url' :", ": 'extensions/APPLE/APPLE_ycbcr_422.txt', }, 'GL_ARB_ES2_compatibility' : { 'arbnumber' : 95, 'flags' : { 'public'", "'GL_ARB_occlusion_query' : { 'arbnumber' : 29, 'flags' : { 'public' }, 'supporters' :", "'GL_IBM_rasterpos_clip' : { 'number' : 110, 'flags' : { 'public' }, 'supporters' :", "187, 'flags' : { 'public' }, 'url' : 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', }, 'GL_ANGLE_depth_texture' : {", "'GL_EXT_raster_multisample' : { 'number' : 462, 'esnumber' : 226, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc.txt', }, 'GL_NV_texture_compression_s3tc_update' : { 'esnumber' : 95,", "'REND' }, 'url' : 'extensions/REND/REND_screen_coordinates.txt', }, 'GL_S3_s3tc' : { 'number' : 276, 'flags'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_select.txt', }, 'GL_SGIX_async' : {", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias' : { 
'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' }, },", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_phase.txt', }, 'GL_SGIX_texture_range' : { 'number' :", "'url' : 'extensions/EXT/EXT_secondary_color.txt', }, 'GL_EXT_separate_shader_objects' : { 'number' : 377, 'esnumber' : 101,", "}, 'url' : 'extensions/NV/GLX_NV_delay_before_swap.txt', }, 'GL_NV_depth_buffer_float' : { 'number' : 334, 'flags' :", "{ 'number' : 159, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "{ 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture4D.txt', }, 'GL_SGIS_texture_border_clamp' : { 'number' : 36,", ": 'extensions/SGIS/SGIS_line_texgen.txt', }, 'GL_SGIS_multisample' : { 'number' : 25, 'flags' : { 'public'", "440, 'esnumber' : 99, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_env_combine4.txt', },", "}, 'supporters' : { '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_fog_coord.txt', }, 'GL_EXT_frag_depth'", ": 'extensions/ARB/ARB_fragment_program_shadow.txt', }, 'GL_ARB_fragment_shader' : { 'arbnumber' : 32, 'flags' : { 'public'", "'GLX_SGIS_multisample' }, }, 'GL_SGIS_multitexture' : { 'number' : 116, 'flags' : { 'obsolete'", ": 395, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'supporters' : { 'IBM', 'SUN' }, 'url' : 'extensions/EXT/EXT_rescale_normal.txt', }, 'GL_EXT_robustness' :", ": 23, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "}, 'url' : 'extensions/KHR/KHR_parallel_shader_compile.txt', }, 'GL_KHR_robust_buffer_access_behavior' : { 'arbnumber' : 169, 'esnumber' :", "{ 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', }, 'GL_MESAX_texture_stack' : { 'number' : 318,", ": { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias' : { 'GL_SGIX_color_type' }, },", "'GL_EXT_multi_draw_arrays' : { 'number' : 148, 'esnumber' : 69, 'flags' : { 'public'", "29, 'flags' : { 'public' }, 'url' : 
'extensions/OES/OES_mapbuffer.txt', }, 'GL_OES_matrix_get' : {", "}, 'url' : 'extensions/I3D/WGL_I3D_digital_video_control.txt', }, 'WGL_I3D_gamma' : { 'number' : 251, 'flags' :", "}, 'GL_OES_rgb8_rgba8' : { 'esnumber' : 30, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/AMD/AMD_performance_monitor.txt', }, 'GL_AMD_pinned_memory' : { 'number' : 411, 'flags' : {", ": 194, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'GL_EXT_misc_attribute' : { 'number' : 31, 'flags' : { 'public' }, 'url' :", ": { 'number' : 343, 'flags' : { 'public' }, 'supporters' : {", "'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_texture_color_table.txt', }, 'GLX_SGI_transparent_pixel' : { 'number' :", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', }, 'GL_SGIX_flush_raster' : { 'number'", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_aux_depth_stencil.txt', }, 'GL_APPLE_client_storage' : { 'number'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program_shadow.txt', },", "'url' : 'extensions/OES/OES_copy_image.txt', }, 'GL_OES_depth24' : { 'esnumber' : 24, 'flags' : {", "{ 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_make_current_read.txt', }, 'WGL_ARB_pbuffer' : { 'arbnumber' : 11,", "83, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' :", ": 96, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_program_binary.txt', }, 'GL_ARB_get_texture_sub_image' :", ": 185, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_view.txt', }, 'GL_EXT_timer_query' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_video_out.txt', }, 'GL_NV_viewport_array' : { 'esnumber'", ": { 'number' : 310, 'flags' : { 'public' }, 'supporters' : {", "'number' : 487, 'esnumber' : 262, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_multicast.txt', }, 
'GL_NV_gpu_program4'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rg.txt', },", ": 248, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisample_compatibility.txt', }, 'GL_EXT_multisampled_render_to_texture' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_pixel_texture.txt', },", ": 72, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_gather.txt', }, 'GL_ARB_texture_mirror_clamp_to_edge' :", ": { 'arbnumber' : 115, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_load_store.txt',", "'esnumber' : 55, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_driver_control.txt', }, 'GL_QCOM_extended_get'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', }, 'GL_ARB_viewport_array' : { 'arbnumber' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_occlusion_query.txt', }, 'GL_NV_pack_subimage' : { 'esnumber' : 132, 'flags'", "'url' : 'extensions/EXT/EXT_bgra.txt', }, 'GL_EXT_bindable_uniform' : { 'number' : 342, 'flags' : {", "'extensions/QCOM/QCOM_writeonly_rendering.txt', }, 'GL_QCOM_framebuffer_foveated' : { 'esnumber' : 273, 'flags' : { 'public' },", "'extensions/ANGLE/ANGLE_translated_shader_source.txt', }, 'GL_APPLE_aux_depth_stencil' : { 'number' : 370, 'flags' : { 'public' },", "}, 'url' : 'extensions/NV/NV_clip_space_w_scaling.txt', }, 'GL_NV_command_list' : { 'number' : 477, 'flags' :", "'arbnumber' : 144, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_buffer_storage.txt', }, 'GL_ARB_cl_event'", "{ 'arbnumber' : 183, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_ballot.txt', },", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_swap_control.txt', }, 'GLX_EXT_swap_control_tear' : { 'number' :", ": 'extensions/ATI/ATI_pn_triangles.txt', }, 'GL_ATI_separate_stencil' : { 'number' : 289, 'flags' : { 'public'", "}, 'url' : 'extensions/NV/NV_fence.txt', }, 'GL_NV_fill_rectangle' : { 'number' : 466, 'esnumber' :", "'flags' : { 'public' }, 
'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_query_matrix.txt',", "{ 'ARB' }, 'url' : 'extensions/ARB/GLX_ARB_get_proc_address.txt', }, 'GL_ARB_get_program_binary' : { 'arbnumber' : 96,", ": 'extensions/NV/NV_framebuffer_multisample.txt', }, 'GL_NV_framebuffer_multisample_coverage' : { 'number' : 336, 'flags' : { 'public'", ": 96, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' },", ": 127, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_packed_float.txt', }, 'GL_NV_parameter_buffer_object' :", "'extensions/OES/OES_element_index_uint.txt', }, 'GL_OES_extended_matrix_palette' : { 'esnumber' : 8, 'flags' : { 'public' },", "55, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'public' }, 'supporters' : { 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_border_clamp.txt',", "'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc.txt', }, 'GL_NV_texture_compression_s3tc_update' : { 'esnumber' : 95, 'flags'", "'url' : 'extensions/INTEL/INTEL_blackhole_render.txt', }, 'GL_INTEL_parallel_arrays' : { 'number' : 136, 'flags' : {", "148, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_query_buffer_object.txt', }, 'GL_ARB_robust_buffer_access_behavior' : {", "'arbnumber' : 141, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage_multisample.txt', }, 'GL_ARB_texture_swizzle'", "{ 'arbnumber' : 7, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'GL_NV_evaluators' : { 'number' : 225, 'flags' : { 'public' }, 'supporters' :", "'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize_float.txt', }, 'GLX_SGIX_video_source' : { 'number' : 43, 'flags'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_texture.txt', }, 'GL_NV_blend_equation_advanced' : { 'number'", "'url' : 'extensions/NV/NV_texture_multisample.txt', }, 'GL_NV_texture_npot_2D_mipmap' : { 'esnumber' : 96, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'ES', 'HP', 'SGI', 'SUN' },", "}, 'WGL_NV_render_depth_texture' : { 
'number' : 263, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/EXT/EXT_framebuffer_multisample.txt', }, 'GL_EXT_framebuffer_multisample_blit_scaled' : { 'number' : 409, 'flags' : {", ": { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_stereo_tree.txt', }, 'GL_EXT_gpu_program_parameters' : { 'number' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rg.txt', }, 'GL_ARB_texture_rgb10_a2ui' :", ": 9, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", ": { 'public' }, 'url' : 'extensions/OES/OES_fragment_precision_high.txt', }, 'GL_OES_framebuffer_object' : { 'esnumber' :", "473, 'esnumber' : 236, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sample_mask_override_coverage.txt', },", "}, 'url' : 'extensions/ARB/ARB_shader_storage_buffer_object.txt', }, 'GL_ARB_shader_subroutine' : { 'arbnumber' : 90, 'flags' :", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_blend.txt', }, 'GL_SGIX_fog_factor_to_alpha' :", "'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_rgba8.txt', }, 'GL_ARM_shader_framebuffer_fetch' : { 'esnumber'", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_ycbcr_422.txt', }, 'GL_ARB_ES2_compatibility' : { 'arbnumber' :", "37, 'esnumber' : 65, 'flags' : { 'public' }, 'supporters' : { 'HP',", ": { 'GL_EXT_tessellation_point_size' }, }, 'GL_EXT_texenv_op' : { 'flags' : { 'obsolete' },", "{ 'number' : 147, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_perturb_normal.txt', },", "'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_byte_coordinates.txt', }, 'GL_OES_compressed_ETC1_RGB8_texture' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_coord_conventions.txt', }, 'GL_ARB_fragment_layer_viewport' : { 'arbnumber' :", "{ 'number' : 338, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_attrib_integer_64bit.txt', }, 'GL_NV_vertex_buffer_unified_memory'", "'public' }, 'url' : 
'extensions/OES/OES_copy_image.txt', }, 'GL_OES_depth24' : { 'esnumber' : 24, 'flags'", ": 94, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' },", "247, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_blend_func_extended.txt', }, 'GL_EXT_blend_func_separate' : {", "{ 'public' }, 'url' : 'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias' : { 'GL_KHR_blend_equation_advanced_coherent' }, }, 'GL_KHR_context_flush_control'", ": { 'number' : 348, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_ARM_shader_framebuffer_fetch' : { 'esnumber' : 165, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/NV/NV_viewport_array.txt', }, 'GL_NV_viewport_array2' : { 'number' : 476, 'esnumber'", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_buffer.txt', }, 'GL_OES_texture_compression_astc' : { 'esnumber' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_barrier.txt', },", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_multi_buffer.txt', }, 'GL_SGIX_texture_phase' : { 'flags' :", "}, 'url' : 'extensions/NV/NV_polygon_mode.txt', }, 'GL_NV_present_video' : { 'number' : 347, 'flags' :", ": { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_filter4.txt', }, 'GL_SGIS_texture_lod' : { 'number'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_gpu_memory_info.txt', }, 'GL_NVX_linked_gpu_multicast' : { 'number'", "}, 'GL_APPLE_sync' : { 'esnumber' : 124, 'flags' : { 'public' }, 'url'", "'GL_ARB_transpose_matrix' : { 'arbnumber' : 3, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_misc_hints.txt',", "}, 'url' : 'extensions/ARB/ARB_compatibility.txt', }, 'GL_ARB_compressed_texture_pixel_storage' : { 'arbnumber' : 110, 'flags' :", "{ 'number' : 353, 'flags' : { 'public' }, 'supporters' : { 'Blizzard',", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_half_float.txt', }, 'GL_NV_image_formats' : { 'esnumber' :", ": { 'SGI' }, 'url' : 
'extensions/SGIX/SGIX_impact_pixel_texture.txt', }, 'GL_SGIX_instrument_error' : { 'flags' :", "}, 'GL_OES_texture_stencil8' : { 'esnumber' : 173, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/AMD/AMD_shader_stencil_export.txt', }, 'GL_AMD_shader_stencil_value_export' : { 'number' : 444, 'flags' :", "'esnumber' : 72, 'flags' : { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_coverage_sample.txt', }, 'GL_NV_deep_texture3D'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_clear_tag.txt', },", "'url' : 'extensions/ARB/ARB_texture_buffer_object.txt', }, 'GL_ARB_texture_buffer_object_rgb32' : { 'arbnumber' : 92, 'flags' : {", "'GL_EXT_polygon_offset' : { 'number' : 3, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/INTEL/INTEL_texture_scissor.txt', }, 'GL_KHR_blend_equation_advanced' : { 'arbnumber' : 174, 'esnumber' :", "96, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_program_binary.txt', }, 'GL_ARB_get_texture_sub_image' : {", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_blend_minmax_factor.txt', },", "'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_read_format.txt', }, 'GL_OES_required_internalformat' :", "}, 'GL_NV_generate_mipmap_sRGB' : { 'esnumber' : 144, 'flags' : { 'public' }, 'url'", ": 66, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sync.txt', }, 'GL_ARB_tessellation_shader' :", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_blend.txt', }, 'GL_ARB_vertex_buffer_object' : { 'arbnumber' : 28,", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_swap_control_tear.txt', }, 'GL_EXT_tessellation_shader' : { 'esnumber' : 181,", ": 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', }, 'GL_AMD_gpu_shader_int16' : { 'number' : 507, 'flags' : { 'public'", "{ 'arbnumber' : 178, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_int64.txt', },", "{ 'public' }, 'url' : 'extensions/OES/OES_draw_buffers_indexed.txt', }, 
'GL_OES_draw_elements_base_vertex' : { 'esnumber' : 219,", "'url' : 'extensions/NV/NV_occlusion_query.txt', }, 'GL_NV_pack_subimage' : { 'esnumber' : 132, 'flags' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_attrib_integer_64bit.txt', },", "}, 'GL_SGIX_convolution_accuracy' : { 'number' : 211, 'flags' : { 'public' }, 'supporters'", "'public' }, 'url' : 'extensions/EXT/EXT_external_buffer.txt', }, 'GL_EXT_EGL_image_array' : { 'esnumber' : 278, 'flags'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', }, 'GL_NV_bindless_texture' : { 'number' : 418,", ": 'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias' : { 'GL_SUN_multi_draw_arrays' }, }, 'GL_EXT_multi_draw_indirect' : { 'esnumber' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_blended_overlay.txt', }, 'GL_SGIS_clip_band_hint' : { 'flags'", "'esnumber' : 24, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth24.txt', }, 'GL_OES_depth32'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_env.txt', }, 'GL_EXT_texture_env_add' : { 'number' :", "'extensions/OES/OES_depth_texture_cube_map.txt', }, 'GL_OES_draw_buffers_indexed' : { 'esnumber' : 209, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_shader4.txt', }, 'GL_NV_geometry_shader_passthrough' : { 'number' : 470, 'esnumber'", ": 'extensions/NV/NV_gpu_shader5.txt', }, 'GL_NV_half_float' : { 'number' : 283, 'flags' : { 'public'", "}, 'GL_NV_read_depth_stencil' : { 'esnumber' : 94, 'flags' : { 'public' }, 'url'", "}, 'GL_OES_shader_multisample_interpolation' : { 'esnumber' : 172, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'url' : 'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', }, 'GL_IMG_texture_filter_cubic' : { 'esnumber' :", "'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_import_context.txt', }, 'GL_EXT_index_array_formats' : { 'number' : 96, 'flags'", "112, 'flags' : { 'public' }, 'url' : 
'extensions/ANGLE/ANGLE_texture_usage.txt', }, 'GL_ANGLE_translated_shader_source' : {", "}, 'GL_ARB_fragment_coord_conventions' : { 'arbnumber' : 63, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_invalidate_subdata.txt', }, 'GL_ARB_map_buffer_alignment' : { 'arbnumber'", "'GL_INTEL_texture_scissor' : { 'number' : 135, 'flags' : { 'public' }, 'supporters' :", ": 217, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_clip_control.txt', }, 'GL_ARB_color_buffer_float' : { 'arbnumber' :", ": { 'ATI' }, 'url' : 'extensions/EXT/EXT_texture_env_dot3.txt', }, 'GL_EXT_texture_filter_anisotropic' : { 'number' :", "{ 'number' : 468, 'esnumber' : 230, 'flags' : { 'public' }, 'url'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop2.txt', }, 'WGL_NV_delay_before_swap' : { 'number' :", "119, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'extensions/ARB/ARB_framebuffer_no_attachments.txt', }, 'GL_ARB_framebuffer_object' : { 'arbnumber' : 45, 'flags' : { 'public' },", "{ 'arbnumber' : 148, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_query_buffer_object.txt', },", "'GLX_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_robustness_isolation' : { 'arbnumber' : 126, 'flags' : { 'public'", "279, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", "'public' }, 'url' : 'extensions/EXT/EXT_win32_keyed_mutex.txt', }, 'GL_EXT_window_rectangles' : { 'number' : 490, 'esnumber'", "}, 'url' : 'extensions/EXT/EXT_x11_sync_object.txt', }, 'GL_FJ_shader_binary_GCCSO' : { 'esnumber' : 114, 'flags' :", "95, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES2_compatibility.txt', }, 'GL_ARB_ES3_1_compatibility' : {", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_layers.txt', }, 'GL_SGIX_fog_offset' : { 'number' :", "'GLX_SGIX_video_resize_float' : { 'number' : 184, 'flags' : { 'incomplete', 'public' }, 'supporters'", "{ 'incomplete' }, 'supporters' : { 
'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragments_instrument.txt', }, 'GL_SGIX_framezoom'", ": { 'esnumber' : 123, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_copy_texture_levels.txt',", "'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_bgra.txt', }, 'GL_EXT_bindable_uniform' :", "{ 'number' : 379, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_combine.txt', }, 'GL_ARB_texture_env_crossbar' : { 'arbnumber' : 18,", "{ 'AMD' }, 'url' : 'extensions/AMD/GLX_AMD_gpu_association.txt', }, 'GL_AMD_gpu_shader_half_float' : { 'number' : 496,", "'number' : 173, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'INGR'", ": 6, 'flags' : { 'public' }, 'supporters' : { 'ES', 'HP', 'IBM',", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias' : { 'GL_SGIX_color_type' },", "}, 'GL_NV_depth_clamp' : { 'number' : 260, 'flags' : { 'public' }, 'supporters'", "'extensions/EXT/EXT_static_vertex_array.txt', }, 'GL_EXT_stencil_clear_tag' : { 'number' : 314, 'flags' : { 'public' },", "{ 'number' : 220, 'flags' : { 'public' }, 'supporters' : { 'ATI'", ": 207, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_norm16.txt', }, 'GL_EXT_texture_object' :", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_weighting.txt', }, 'GLX_EXT_visual_info' : { 'number' : 28,", "'url' : 'extensions/EXT/EXT_cull_vertex.txt', }, 'GL_EXT_debug_label' : { 'number' : 439, 'esnumber' : 98,", "{ 'number' : 463, 'esnumber' : 259, 'flags' : { 'public' }, 'url'", "118, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "'GL_EXT_texture3D' : { 'number' : 6, 'flags' : { 'public' }, 'supporters' :", "but was not fully specified. 
Similar to ARB_texture_cube_map.', }, 'GL_EXT_texture_cube_map_array' : { 'esnumber'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', }, 'GLU_EXT_object_space_tess' :", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_mirror_once.txt', }, 'GL_ATI_vertex_array_object' : { 'number'", "}, 'url' : 'extensions/EXT/EXT_framebuffer_multisample.txt', }, 'GL_EXT_framebuffer_multisample_blit_scaled' : { 'number' : 409, 'flags' :", ": { 'arbnumber' : 70, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_shading.txt',", ": { 'number' : 412, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', }, 'GL_EXT_EGL_image_storage' : { 'number' : 522, 'esnumber' : 301,", "}, }, 'GL_EXT_multi_draw_indirect' : { 'esnumber' : 205, 'flags' : { 'public' },", "}, 'url' : 'extensions/EXT/EXT_texture_cube_map.txt', 'comments' : 'Extension shipped but was not fully specified.", "'flags' : { 'public' }, 'supporters' : { '3DL', 'ATI', 'INTEL', 'NVIDIA' },", "}, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', }, 'GL_NV_bindless_texture' : { 'number' : 418, 'esnumber' :", "'esnumber' : 228, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster.txt', }, 'GL_NV_conservative_raster_dilate'", "189, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robust_buffer_access_behavior.txt', }, 'GL_KHR_robustness' : {", ": 'extensions/OES/OES_EGL_image_external.txt', }, 'GL_OES_EGL_image_external_essl3' : { 'esnumber' : 220, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_query_buffer_object.txt',", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments'", ": 140, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc2.txt', }, 'GL_IMG_texture_env_enhanced_fixed_function' :", "'url' : 
'extensions/SGIX/SGIX_vertex_array_object.txt', }, 'GL_SGIX_vertex_preclip' : { 'number' : 210, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_setXXX.txt',", "'extensions/SGIX/SGIX_depth_texture.txt', }, 'GLX_SGIX_dm_buffer' : { 'number' : 86, 'flags' : { 'public' },", "{ 'number' : 171, 'flags' : { 'public' }, 'supporters' : { 'INGR',", "}, 'url' : 'extensions/NV/WGL_NV_delay_before_swap.txt', }, 'WGL_NV_gpu_affinity' : { 'number' : 355, 'flags' :", "}, 'url' : 'extensions/APPLE/APPLE_copy_texture_levels.txt', }, 'GL_APPLE_element_array' : { 'number' : 271, 'flags' :", "'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', }, 'GL_ARB_vertex_type_2_10_10_10_rev' : { 'arbnumber' : 86, 'flags' : { 'public' },", ": { 'arbnumber' : 135, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robust_buffer_access_behavior.txt',", ": 'extensions/ARB/ARB_multi_draw_indirect.txt', }, 'GL_ARB_multisample' : { 'arbnumber' : 5, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_indirect_parameters.txt', }, 'GL_ARB_instanced_arrays' : { 'arbnumber' : 49, 'flags'", "'GL_AMD_depth_clamp_separate' : { 'number' : 401, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/INTEL/GLX_INTEL_swap_event.txt',", "{ 'number' : 477, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_command_list.txt', },", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_window_pos.txt', }, 'GL_ARM_mali_program_binary' : { 'esnumber' :", "{ 'esnumber' : 14, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_size_array.txt', },", "'extensions/AMD/AMD_draw_buffers_blend.txt', }, 'GL_AMD_framebuffer_sample_positions' : { 'number' : 454, 'flags' : { 'public' },", "'comments' : 'Alias to WGL_ARB_create_context_profile not needed - see arbnumber 74.', }, 'WGL_ARB_create_context_profile'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_barrier.txt', }, 
'GLX_SGIX_swap_group' : { 'number'", "}, 'GL_INGR_color_clamp' : { 'number' : 174, 'flags' : { 'public' }, 'supporters'", ": 21, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_env_crossbar.txt', }, 'GL_OES_texture_float' :", "'GL_IMG_multisampled_render_to_texture' : { 'esnumber' : 74, 'flags' : { 'public' }, 'url' :", "'extensions/NV/GLX_NV_delay_before_swap.txt', }, 'GL_NV_depth_buffer_float' : { 'number' : 334, 'flags' : { 'public' },", "'number' : 387, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'GL_EXT_YUV_target' : { 'esnumber' : 222, 'flags' : { 'public' }, 'url'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_deep_texture3D.txt', }, 'GLX_NV_delay_before_swap' :", ": 335, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'arbnumber' : 127, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_compatibility.txt', }, 'GL_ARB_arrays_of_arrays'", "466, 'esnumber' : 232, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fill_rectangle.txt', },", "'GL_OES_texture_cube_map_array' : { 'esnumber' : 217, 'flags' : { 'public' }, 'url' :", "{ 'esnumber' : 136, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture_cube_map.txt', },", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_array_object.txt', }, 'GL_ARB_vertex_attrib_64bit' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_lod_bias.txt', }, 'GL_SGIX_texture_mipmap_anisotropic' : { 'flags'", "'arbnumber' : 157, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_group_vote.txt', }, 'GL_ARB_shader_image_load_store'", "}, 'GL_ARB_texture_compression_bptc' : { 'arbnumber' : 77, 'flags' : { 'public' }, 'url'", "'esnumber' : 71, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_array_object.txt', }, 'GL_OES_vertex_half_float'", "'supporters' : { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_vertex_array_bgra.txt', }, 'GL_EXT_vertex_array_set'", "}, 'url' : 
'extensions/NV/NV_conservative_raster_dilate.txt', }, 'GL_NV_conservative_raster_pre_snap' : { 'number' : 517, 'esnumber' :", "'INTEL' }, 'url' : 'extensions/INTEL/INTEL_map_texture.txt', }, 'GL_INTEL_blackhole_render' : { 'number' : 521, 'esnumber'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_select.txt', }, 'GL_SGIX_async'", "'comments' : 'Alias to GLX_ARB_create_context_profile not needed - see arbnumber 75.', }, 'GLX_ARB_create_context_no_error'", "}, 'GL_ARB_texture_float' : { 'arbnumber' : 41, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sample_positions.txt',", "{ 'public' }, 'url' : 'extensions/ARB/ARB_arrays_of_arrays.txt', }, 'GL_ARB_base_instance' : { 'arbnumber' : 107,", "'arbnumber' : 191, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_create_context_no_error.txt', 'comments' :", "'url' : 'extensions/EXT/EXT_vertex_shader.txt', }, 'GL_EXT_vertex_weighting' : { 'number' : 188, 'flags' : {", "'url' : 'extensions/SGI/SGI_color_table.txt', 'comments' : 'Partial HP support.', }, 'GL_SGI_complex' : { 'number'", ": 193, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'arbnumber' : 40, 'flags' : { 'public' }, 'supporters' : {", "20, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'INGR', 'KGC', 'SGI'", "{ 'arbnumber' : 26, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": { 'esnumber' : 89, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_alpha_test.txt',", "{ 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_import_context.txt', }, 'GL_EXT_index_array_formats' : { 'number' :", "261, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', }, 'GL_SGIX_flush_raster' : {", ": { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl' : 'extensions/EXT/EXT_separate_shader_objects.gles.txt', 
'comments' :", "'arbnumber' : 108, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_420pack.txt', }, 'GL_ARB_shading_language_include'", "'number' : 164, 'flags' : { 'public' }, 'supporters' : { 'SUN' },", "'url' : 'extensions/NV/NV_fragment_program4.txt', }, 'GL_NV_fragment_program_option' : { 'number' : 303, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_display_color_table.txt', }, 'WGL_EXT_extensions_string' : { 'number'", "'url' : 'extensions/OML/GLX_OML_swap_method.txt', }, 'GLX_OML_sync_control' : { 'number' : 238, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_occlusion_query.txt', }, 'GL_NV_pack_subimage' :", "'url' : 'extensions/OES/OES_shader_io_blocks.txt', }, 'GL_OES_shader_multisample_interpolation' : { 'esnumber' : 172, 'flags' : {", "}, 'url' : 'extensions/SGIX/GLX_SGIX_wait_group.txt', }, 'GL_SGIX_ycrcb' : { 'number' : 101, 'flags' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_depth_texture.txt', }, 'WGL_NV_render_texture_rectangle' :", ": 'extensions/NV/NV_geometry_program4.txt', }, 'GL_NV_geometry_shader4' : { 'number' : 338, 'flags' : { 'public'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_array.txt', }, 'GL_EXT_texture_border_clamp' : { 'esnumber' :", "{ 'number' : 518, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_underestimation.txt', },", "'extensions/SGIX/SGIX_packed_6bytes.txt', }, 'GLX_SGIX_pbuffer' : { 'number' : 50, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program.txt', }, 'GL_NV_vertex_program1_1'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_range.txt', }, 'GL_ARB_texture_compression' : { 'arbnumber'", "}, 'GL_NV_command_list' : { 'number' : 477, 'flags' : { 'public' }, 'url'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_clear_tag.txt', }, 'GL_EXT_stencil_two_side' : { 'number'", 
"}, 'GL_SGIX_clipmap' : { 'number' : 33, 'flags' : { 'public' }, 'supporters'", "'arbnumber' : 194, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_RG8.txt', }, 'GL_EXT_texture_sRGB_decode' : { 'number' : 402,", ": { 'number' : 493, 'flags' : { 'public' }, 'supporters' : {", "'SGI' }, 'url' : 'extensions/EXT/EXT_light_texture.txt', }, 'GL_EXT_map_buffer_range' : { 'esnumber' : 121, 'flags'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias' : { 'GL_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_sample_locations'", "}, 'GL_EXT_texture_compression_dxt1' : { 'number' : 309, 'esnumber' : 49, 'flags' : {", ": { 'GL_NV_blend_equation_advanced_coherent' }, }, 'GL_NV_blend_minmax_factor' : { 'number' : 510, 'esnumber' :", ": 398, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "'GL_NV_geometry_shader_passthrough' : { 'number' : 470, 'esnumber' : 233, 'flags' : { 'public'", "'esnumber' : 104, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_EGL_stream_consumer_external.txt', }, 'GL_NV_alpha_to_coverage_dither_control'", "}, 'url' : 'extensions/SGIX/SGIX_ycrcb.txt', }, 'GL_SGIX_ycrcb_subsample' : { 'number' : 204, 'flags' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_viewport_array.txt', }, 'GL_ARB_window_pos' : { 'arbnumber' :", "'number' : 355, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "162, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cull_distance.txt', }, 'GL_ARB_debug_output' : {", ": 'extensions/EXT/EXT_framebuffer_object.txt', }, 'GL_EXT_framebuffer_sRGB' : { 'number' : 337, 'flags' : { 'public'", ": 128, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc.txt', }, 'GL_NV_texture_compression_s3tc_update' :", ": 'extensions/EXT/GLX_EXT_buffer_age.txt', }, 'GL_EXT_buffer_storage' : { 'esnumber' : 239, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get2.txt', }, 
'GL_QCOM_performance_monitor_global_mode' : { 'esnumber' : 56, 'flags'", "'number' : 414, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 278, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "'public' }, 'url' : 'extensions/ARB/ARB_fragment_shader_interlock.txt', }, 'GL_ARB_framebuffer_no_attachments' : { 'arbnumber' : 130, 'flags'", "{ 'arbnumber' : 13, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": 'extensions/INTEL/INTEL_framebuffer_CMAA.txt', }, 'GL_INTEL_map_texture' : { 'number' : 429, 'flags' : { 'public'", "'GL_ARB_arrays_of_arrays' : { 'arbnumber' : 120, 'flags' : { 'public' }, 'url' :", "'number' : 144, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_separate_specular_color.txt', }, 'GL_EXT_shader_framebuffer_fetch'", "}, 'GL_OES_depth32' : { 'esnumber' : 25, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/ARB/ARB_color_buffer_float.txt', 'alias' : { 'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float' }, }, 'GL_ARB_compatibility' :", "}, 'url' : 'extensions/NV/NV_draw_vulkan_image.txt', }, 'GL_NV_evaluators' : { 'number' : 225, 'flags' :", "}, 'GL_OES_point_size_array' : { 'esnumber' : 14, 'flags' : { 'public' }, 'url'", ": 125, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "'GL_ARM_mali_program_binary' : { 'esnumber' : 120, 'flags' : { 'public' }, 'url' :", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_sample_positions.txt', }, 'GL_AMD_seamless_cubemap_per_texture' : { 'number' : 383,", "'extensions/NV/NV_stereo_view_rendering.txt', }, 'GLX_NV_swap_group' : { 'number' : 350, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/EXT/EXT_texture_env.txt', }, 'GL_EXT_texture_env_add' : { 'number' : 185, 'flags'", ": { 'number' : 87, 'flags' : { 'incomplete' }, 'supporters' : {", "'SUN' }, 'url' : 'extensions/EXT/EXT_blend_subtract.txt', }, 'GLX_EXT_buffer_age' : { 'number' : 427, 'flags'", "'number' : 514, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "}, 'url' : 
'extensions/NV/NV_gpu_shader5.txt', }, 'GL_NV_half_float' : { 'number' : 283, 'flags' :", "'extensions/HP/HP_convolution_border_modes.txt', }, 'GL_HP_image_transform' : { 'number' : 66, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_video_capture.txt', 'alias'", "}, 'url' : 'extensions/OES/OES_texture_buffer.txt', }, 'GL_OES_texture_compression_astc' : { 'esnumber' : 162, 'flags' :", "'WGL_I3D_gamma' : { 'number' : 251, 'flags' : { 'public' }, 'supporters' :", ": 224, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_float_blend.txt', }, 'GL_EXT_fog_coord' :", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_vertex.txt', }, 'GL_ARB_indirect_parameters' : { 'arbnumber' : 154,", "{ 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Alias to WGL_ARB_create_context_profile not needed", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multisample.txt', 'alias' : {", "'GL_ARB_fragment_shader' : { 'arbnumber' : 32, 'flags' : { 'public' }, 'supporters' :", "304, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'arbnumber' : 148, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_query_buffer_object.txt', }, 'GL_ARB_robust_buffer_access_behavior'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_cylinder_texgen.txt', }, 'GL_SGIX_datapipe' : { 'number' : 152, 'flags'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_debug_output.txt', }, 'GL_AMD_depth_clamp_separate'", "'public' }, 'url' : 'extensions/OES/OES_stencil4.txt', }, 'GL_OES_stencil8' : { 'esnumber' : 33, 'flags'", "{ 'number' : 219, 'flags' : { 'incomplete' }, 'supporters' : { 'MESA'", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_viewport_index.txt', }, 'GL_ANDROID_extension_pack_es31a' : { 'esnumber' : 187,", ": 'extensions/IMG/IMG_multisampled_render_to_texture.txt', }, 'GL_IMG_program_binary' : { 'esnumber' : 67, 'flags' : { 'public'", "}, 'GL_MESA_pack_invert' : { 'number' 
: 300, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_compiled_vertex_array.txt', },", "391, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'GL_KHR_parallel_shader_compile' : { 'arbnumber' : 192, 'esnumber' : 288, 'flags' : {", ": 99, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_explicit_attrib_location.txt', }, 'GL_NV_explicit_multisample' : { 'number'", "186, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture2.txt', }, 'GL_ARB_sparse_texture_clamp' : {", "'number' : 145, 'flags' : { 'public' }, 'supporters' : { '3DFX', 'NVIDIA',", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_point_sprite.txt', }, 'GL_NV_polygon_mode' : { 'esnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_create_context_no_error.txt', 'comments' : 'Shares extension spec", ": { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture2.txt', }, 'GL_EXT_multiview_draw_buffers' : { 'esnumber' :", "429, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' :", "{ 'esnumber' : 180, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_io_blocks.txt', },", ": { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', }, 'GL_SGIX_complex_polar' : { 'flags' :", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_conservative_depth.txt', }, 'GL_AMD_debug_output'", "'public' }, 'url' : 'extensions/NV/NV_framebuffer_mixed_samples.txt', }, 'GL_NV_framebuffer_multisample' : { 'esnumber' : 143, 'flags'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_emboss.txt', }, 'GL_NV_texgen_reflection'", "'SUN' }, 'url' : 'extensions/EXT/EXT_convolution.txt', }, 'GL_EXT_coordinate_frame' : { 'number' : 156, 'flags'", "'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture_object.txt', }, 'GL_EXT_texture_perturb_normal' : { 'number' :", ": { 'number' : 
365, 'flags' : { 'public' }, 'supporters' : {", "{ 'GL_KHR_blend_equation_advanced_coherent' }, }, 'GL_KHR_context_flush_control' : { 'arbnumber' : 168, 'esnumber' : 191,", "'REND' }, 'url' : 'extensions/EXT/EXT_fog_coord.txt', }, 'GL_EXT_frag_depth' : { 'esnumber' : 86, 'flags'", "'esnumber' : 143, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_multisample.txt', }, 'GL_NV_framebuffer_multisample_coverage'", "}, 'GL_SGIX_ir_instrument1' : { 'number' : 81, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/OES/OES_packed_depth_stencil.txt', }, 'GL_OES_paletted_texture' : { 'esnumber' : 13, 'flags' :", "{ 'number' : 367, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_blend_minmax_factor.txt', }, 'GL_AMD_compressed_3DC_texture' : { 'esnumber' :", "'GL_ANGLE_texture_usage' : { 'esnumber' : 112, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_view.txt', }, 'GL_OES_vertex_array_object' : { 'esnumber' : 71,", ": 'extensions/NV/NV_vertex_program3.txt', }, 'GL_NV_vertex_program4' : { 'number' : 325, 'flags' : { 'public'", "'WGL_ARB_make_current_read' : { 'arbnumber' : 10, 'flags' : { 'public' }, 'supporters' :", "'GL_EXT_vertex_array_bgra' : { 'number' : 354, 'flags' : { 'public' }, 'supporters' :", "'WGL_I3D_image_buffer' : { 'number' : 253, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_ANGLE_program_binary' : { 'esnumber' : 139, 'flags' : { 'public' }, 'url'", ": { 'arbnumber' : 155, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt',", "'GL_ATI_texture_mirror_once' : { 'number' : 221, 'flags' : { 'public' }, 'supporters' :", "'extensions/EXT/EXT_texture_lod_bias.txt', }, 'GL_EXT_texture_mirror_clamp' : { 'number' : 298, 'flags' : { 'public' },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control_tear.txt', }, 'GL_EXT_clip_control' : {", "}, 'GL_ARB_shader_objects' : { 'arbnumber' : 30, 'flags' : { 
'public' }, 'supporters'", ": 'extensions/ARB/WGL_ARB_create_context_robustness.txt', }, 'WGL_ARB_extensions_string' : { 'arbnumber' : 8, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_binning_control.txt', }, 'GL_QCOM_driver_control' : { 'esnumber'", ": { 'number' : 208, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/NV/NV_read_buffer.txt', }, 'GL_NV_read_depth_stencil' : { 'esnumber' : 94, 'flags' : { 'public'", "{ 'esnumber' : 59, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', },", "11, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_matrix_get.txt', }, 'GL_OES_matrix_palette' : {", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_object_purgeable.txt', }, 'GL_APPLE_rgb_422'", "}, 'url' : 'extensions/SGIX/SGIX_fog_texture.txt', }, 'GL_SGIX_fragment_lighting_space' : { 'number' : 118, 'flags' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcba.txt', }, 'GL_SGI_color_matrix' : { 'number' : 13, 'flags'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcba.txt', }, 'GL_SGI_color_matrix' : { 'number' :", ": 'extensions/EXT/EXT_copy_texture.txt', }, 'GLX_EXT_create_context_es2_profile' : { 'number' : 399, 'flags' : { 'public'", "}, 'url' : 'extensions/NV/NV_shader_buffer_load.txt', }, 'GL_NV_shader_buffer_store' : { 'number' : 390, 'flags' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_env_combine4.txt', }, 'GL_NV_texture_expand_normal' : { 'number' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize_float.txt', }, 'GLX_SGIX_video_source' : { 'number' :", "{ 'MESA' }, 'url' : 'extensions/SGIX/SGIX_igloo_interface.txt', }, 'GL_SGIX_image_compression' : { 'flags' : {", "'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_pixel.txt', }, 'GL_SGIX_bali_g_instruments'", "'ARB' }, 'url' : 'extensions/ARB/ARB_shading_language_100.txt', }, 'GL_ARB_shading_language_420pack' : { 'arbnumber' : 108, 'flags'", "}, 
'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression_rgtc.txt', }, 'GL_ARB_texture_cube_map' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object.txt', },", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async.txt', }, 'GL_SGIX_async_histogram' :", "'GL_SGIX_icc_texture' : { 'number' : 154, 'flags' : { 'incomplete' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_slice_accum.txt', },", "}, 'url' : 'extensions/APPLE/APPLE_ycbcr_422.txt', }, 'GL_ARB_ES2_compatibility' : { 'arbnumber' : 95, 'flags' :", ": 'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', }, 'GL_ATI_draw_buffers' : { 'number' : 277, 'flags' : { 'public'", "'esnumber' : 35, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_float_linear.txt', 'alias' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback2.txt', }, 'GL_NV_uniform_buffer_unified_memory' : { 'number'", ": 43, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture.txt', }, 'GL_OES_depth_texture_cube_map' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_buffer_float.txt',", "'arbnumber' : 139, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_range.txt', }, 'GL_ARB_texture_compression'", "11, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'KGC', 'SGI', 'SUN'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_clip_space_w_scaling.txt', }, 'GL_NV_command_list' : { 'number'", "'public' }, 'url' : 'extensions/OES/OES_shader_multisample_interpolation.txt', }, 'GL_OES_single_precision' : { 'number' : 293, 'esnumber'", ": 217, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map_array.txt', }, 'GL_OES_texture_env_crossbar' :", "'extensions/SGIX/SGIX_quad_mesh.txt', }, 'GL_SGIX_reference_plane' : { 'number' : 60, 'flags' : { 'public' },", "{ 'number' : 187, 'esnumber' : 41, 'flags' : 
{ 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_stencil8.txt', }, 'GL_OES_texture_storage_multisample_2d_array' : { 'esnumber'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_add.txt', },", ": 32, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI' },", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_draw_buffers.txt', },", "}, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_set.txt', }, 'GL_EXT_vertex_array_setXXX' : {", "{ 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_agp_offset.txt', }, 'GLX_MESA_copy_sub_buffer' : { 'number' : 215,", "'arbnumber' : 59, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_buffer.txt', }, 'GL_ARB_copy_image'", "'public' }, 'url' : 'extensions/OES/OES_mapbuffer.txt', }, 'GL_OES_matrix_get' : { 'esnumber' : 11, 'flags'", ": { 'esnumber' : 58, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt',", "'url' : 'extensions/ATI/ATI_text_fragment_shader.txt', }, 'GL_ATI_texture_env_combine3' : { 'number' : 279, 'flags' : {", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int16.txt', }, 'GL_AMD_gpu_shader_int64' : { 'number' : 451,", "}, 'url' : 'extensions/ARB/ARB_fragment_layer_viewport.txt', }, 'GL_ARB_fragment_program' : { 'arbnumber' : 27, 'flags' :", "{ 'number' : 382, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "}, 'GL_EXT_read_format_bgra' : { 'esnumber' : 66, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/NV/NV_texture_array.txt', }, 'GL_NV_texture_barrier' : { 'number' : 381, 'esnumber' : 271,", ": 'extensions/EXT/EXT_cull_vertex.txt', }, 'GL_EXT_debug_label' : { 'number' : 439, 'esnumber' : 98, 'flags'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counter_ops.txt', }, 'GL_ARB_shader_atomic_counters' : { 'arbnumber' :", "153, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compute_variable_group_size.txt', }, 
'GL_ARB_conditional_render_inverted' : {", ": 'extensions/OES/OES_texture_mirrored_repeat.txt', }, 'GL_OES_texture_npot' : { 'esnumber' : 37, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image.txt', }, 'GL_OES_EGL_image_external' : { 'esnumber'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_sharpen_texture.txt', }, 'GL_SGIS_texture4D'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object.txt', }, 'GL_ARB_texture_buffer_object_rgb32'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_stencil_export.txt', }, 'GL_AMD_shader_stencil_value_export' : { 'number' :", "'extensions/EXT/EXT_draw_elements_base_vertex.txt', }, 'GL_EXT_draw_instanced' : { 'number' : 327, 'esnumber' : 157, 'flags' :", "'supporters' : { '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_secondary_color.txt', }, 'GL_EXT_separate_shader_objects' :", ": { 'number' : 339, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/3DFX/3DFX_tbuffer.txt', }, 'GL_3DFX_texture_compression_FXT1' : { 'number' : 206, 'flags' :", "{ 'esnumber' : 251, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_filter_cubic.txt', },", "{ 'esnumber' : 33, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil8.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_env_combine4.txt', }, 'GL_NV_texture_expand_normal'", "{ 'number' : 15, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query2.txt', }, 'GL_ARB_invalidate_subdata' : { 'arbnumber'", ": { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_program_binary.txt', }, 'GL_ANGLE_texture_compression_dxt3' : { 'esnumber' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect.txt', }, 'GL_NV_bindless_multi_draw_indirect_count' : { 'number' : 456,", ": { 'esnumber' : 7, 'flags' : { 'public' }, 'url' : 
'extensions/OES/OES_draw_texture.txt',", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_shader5.txt', }, 'GL_NV_half_float' :", "'url' : 'extensions/OES/OES_sample_shading.txt', }, 'GL_OES_sample_variables' : { 'esnumber' : 170, 'flags' : {", ": 36, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "}, 'GL_OES_draw_elements_base_vertex' : { 'esnumber' : 219, 'flags' : { 'public' }, 'url'", "}, 'GL_ATI_meminfo' : { 'number' : 359, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/EXT/EXT_fragment_lighting.txt', }, 'GL_EXT_framebuffer_blit' : { 'number' : 316, 'flags' :", "'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback3.txt', }, 'GL_ARB_transform_feedback_instanced' : { 'arbnumber' : 109, 'flags'", "{ 'number' : 301, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'public' }, 'supporters' : { 'DEC', 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url'", ": 78, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_blend_func_extended.txt', }, 'GL_ARB_buffer_storage' :", "95, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc_update.txt', }, 'GL_NV_texture_compression_vtc' : {", "160, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clip_control.txt', }, 'GL_ARB_color_buffer_float' : {", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/INTEL/GLX_INTEL_swap_event.txt', },", "308, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "}, 'GLX_SUN_get_transparent_index' : { 'number' : 183, 'flags' : { 'public' }, 'supporters'", "373, 'esnumber' : 76, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "'extensions/ARB/ARB_draw_buffers.txt', }, 'GL_ARB_draw_buffers_blend' : { 'arbnumber' : 69, 'flags' : { 'public' },", ": 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', }, 'GL_EXT_framebuffer_object' : { 'number' : 310, 'flags' : { 'public'", "}, 'url' : 'extensions/EXT/EXT_sparse_texture.txt', }, 'GL_EXT_sparse_texture2' : { 'number' : 463, 'esnumber' :", "}, }, 
'GL_KHR_debug' : { 'arbnumber' : 119, 'esnumber' : 118, 'flags' :", "{ 'esnumber' : 273, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_framebuffer_foveated.txt', },", "}, 'GL_QCOM_framebuffer_foveated' : { 'esnumber' : 273, 'flags' : { 'public' }, 'url'", "'arbnumber' : 134, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_program_interface_query.txt', }, 'GL_ARB_provoking_vertex'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_stencil8.txt', }, 'GL_ARB_texture_storage' : { 'arbnumber' :", "'extensions/EXT/EXT_texture_storage.txt', }, 'GL_EXT_texture_swizzle' : { 'number' : 356, 'flags' : { 'public' },", "'esnumber' : 260, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'url' : 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', }, 'GL_AMD_shader_image_load_store_lod' : { 'number' : 513, 'flags' : {", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow_ambient.txt', }, 'GL_ARB_sparse_buffer' : { 'arbnumber' : 172,", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_video_output.txt', }, 'WGL_OML_sync_control' :", "231, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'obsolete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multitexture.txt', }, 'GL_SGIS_pixel_texture'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multisample.txt',", "'extensions/NV/NV_draw_vulkan_image.txt', }, 'GL_NV_evaluators' : { 'number' : 225, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/OES/OES_required_internalformat.txt', }, 'GL_OES_rgb8_rgba8' : { 'esnumber' : 30,", "232, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'supporters' : { '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_secondary_color.txt', }, 'GL_EXT_separate_shader_objects'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_barrier.txt', }, 'GL_NV_texture_border_clamp' : {", ": 'extensions/ARB/ARB_bindless_texture.txt', }, 
'GL_ARB_blend_func_extended' : { 'arbnumber' : 78, 'flags' : { 'public'", "'extensions/ARB/ARB_buffer_storage.txt', }, 'GL_ARB_cl_event' : { 'arbnumber' : 103, 'flags' : { 'public' },", "'url' : 'extensions/SGIX/SGIX_datapipe.txt', }, 'GL_SGIX_decimation' : { 'number' : 125, 'flags' : {", "'number' : 353, 'flags' : { 'public' }, 'supporters' : { 'Blizzard', 'NVIDIA',", "{ 'number' : 320, 'flags' : { 'public' }, 'supporters' : { 'APPLE',", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_array.txt', }, 'GL_EXT_texture_border_clamp' : {", "'TransGaming' }, 'url' : 'extensions/AMD/AMD_draw_buffers_blend.txt', }, 'GL_AMD_framebuffer_sample_positions' : { 'number' : 454, 'flags'", "{ 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_array.txt', }, 'GL_NV_shadow_samplers_cube' : { 'esnumber' : 147,", ": { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_make_current_read.txt', }, 'WGL_ARB_pbuffer' : { 'arbnumber' :", "'extensions/NV/NV_multisample_filter_hint.txt', }, 'GL_NV_non_square_matrices' : { 'esnumber' : 160, 'flags' : { 'public' },", ": 80, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_max_level.txt', }, 'GL_APPLE_texture_packed_float' :", "{ 'esnumber' : 145, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_instanced_arrays.txt', },", "'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_transform_feedback.txt', }, 'GL_EXT_transform_feedback2' : { 'flags' : {", "'GL_SGIX_blend_cadd' : { 'number' : 150, 'flags' : { 'incomplete' }, 'supporters' :", ": 'extensions/NV/NV_command_list.txt', }, 'GL_NV_compute_program5' : { 'number' : 421, 'flags' : { 'public'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_framebuffer_multisample_coverage.txt', }, 'GL_NV_generate_mipmap_sRGB' :", "}, 'url' : 'extensions/ARB/ARB_vertex_blend.txt', }, 'GL_ARB_vertex_buffer_object' : { 'arbnumber' : 28, 'flags' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_conditional_render.txt', }, 
'GL_NVX_gpu_memory_info' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_copy_image.txt', }, 'GL_OES_depth24' : { 'esnumber'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_conditional_render_inverted.txt', }, 'GL_ARB_conservative_depth' : { 'arbnumber' : 111,", "}, 'url' : 'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments' : 'Supported on Visual Workstation 320 / 540", "}, 'GL_EXT_unpack_subimage' : { 'esnumber' : 90, 'flags' : { 'public' }, 'url'", "{ 'esnumber' : 2, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_func_separate.txt', },", "'url' : 'extensions/ARB/ARB_separate_shader_objects.txt', }, 'GL_ARB_shader_atomic_counter_ops' : { 'arbnumber' : 182, 'flags' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_filter_hint.txt', }, 'GL_NV_non_square_matrices' : { 'esnumber' : 160,", "{ 'public' }, 'url' : 'extensions/AMD/AMD_framebuffer_sample_positions.txt', }, 'GL_AMD_gcn_shader' : { 'number' : 453,", "}, 'url' : 'extensions/ARB/ARB_shader_image_size.txt', }, 'GL_ARB_shader_objects' : { 'arbnumber' : 30, 'flags' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_offset.txt', },", "'url' : 'extensions/SGIX/SGIX_icc_texture.txt', }, 'GL_SGIX_igloo_interface' : { 'number' : 219, 'flags' : {", "{ 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_compiled_vertex_array.txt', }, 'GL_EXT_compressed_ETC1_RGB8_sub_texture' : { 'esnumber' :", "'number' : 440, 'esnumber' : 99, 'flags' : { 'public' }, 'supporters' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_instanced_arrays.txt', }, 'GL_ARB_internalformat_query' : { 'arbnumber' :", "'extensions/SGI/GLX_SGI_cushion.txt', }, 'GL_SGI_fft' : { 'number' : 99, 'flags' : { 'incomplete' },", ": { 'number' : 338, 'flags' : { 'public' }, 'supporters' : {", ": { 'esnumber' : 92, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fbo_color_attachments.txt',", ": 'extensions/SGIX/GLX_SGIX_swap_barrier.txt', }, 'GLX_SGIX_swap_group' : { 'number' : 91, 'flags' : { 'public'", 
"'arbnumber' : 78, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_blend_func_extended.txt', }, 'GL_ARB_buffer_storage'", ": { 'number' : 318, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'url' : 'extensions/EXT/EXT_clear_texture.txt', }, 'GL_EXT_clip_cull_distance' : { 'esnumber' :", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_float.txt', }, 'GL_ATI_texture_mirror_once' : { 'number'", "'extensions/EXT/EXT_index_array_formats.txt', }, 'GL_EXT_index_func' : { 'number' : 95, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/GLX_ARB_create_context_robustness.txt', }, 'GL_ARB_cull_distance' : { 'arbnumber' : 162, 'flags' :", ": { 'public' }, 'url' : 'extensions/NV/NV_packed_float.txt', }, 'GL_NV_parameter_buffer_object' : { 'number' :", ": 261, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_fp16_vector.txt', }, 'GL_NV_shader_atomic_int64' :", "}, 'url' : 'extensions/NV/NV_stereo_view_rendering.txt', }, 'GLX_NV_swap_group' : { 'number' : 350, 'flags' :", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLU_SGIX_icc_compress.txt', }, 'GL_SGIX_icc_texture' : { 'number'", "}, 'url' : 'extensions/SGIS/SGIS_shared_multisample.txt', 'alias' : { 'GLX_SGIS_shared_multisample' }, }, 'GL_SGIS_sharpen_texture' : {", ": { 'incomplete' }, 'url' : 'extensions/SGIX/GLU_SGIX_icc_compress.txt', }, 'GL_SGIX_icc_texture' : { 'number' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program.txt', }, 'GL_NV_vertex_program1_1' : { 'number'", "{ 'ATI' }, 'url' : 'extensions/EXT/EXT_texture_env_dot3.txt', }, 'GL_EXT_texture_filter_anisotropic' : { 'number' : 187,", "{ 'esnumber' : 119, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_binning_control.txt', },", ": 265, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url'", "{ 'arbnumber' : 44, 'flags' : { 'public' }, 'supporters' : { 
'ARB'", "'esnumber' : 183, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_buffer.txt', }, 'GL_EXT_texture_buffer_object'", "338, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, }, 'GLX_NV_video_out' : { 'number' : 348, 'flags' : { 'public' },", "'GL_OES_texture_3D' : { 'esnumber' : 34, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_row_bytes.txt',", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_buffer_float.txt', }, 'GL_ARB_depth_clamp' : { 'arbnumber' : 61,", "}, 'url' : 'extensions/ATI/ATI_fragment_shader.txt', }, 'GL_ATI_map_object_buffer' : { 'number' : 288, 'flags' :", ": { 'number' : 67, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_SGIX_mpeg1' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg1.txt', },", "}, 'url' : 'extensions/EXT/EXT_subtexture.txt', }, 'GL_EXT_swap_control' : { 'number' : 375, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_texture_view.txt', }, 'GL_EXT_timer_query' : { 'number' : 319, 'flags' :", "'public' }, 'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_misc_hints.txt', }, 'GL_PGI_vertex_hints' :", ": 'extensions/SGIX/SGIX_depth_pass_instrument.txt', }, 'GL_SGIX_depth_texture' : { 'number' : 63, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/GLX_SUN_get_transparent_index.txt',", ": 328, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_digital_video_control.txt', }, 'WGL_I3D_gamma'", "{ 'number' : 481, 'esnumber' : 246, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/SGIX/SGIX_occlusion_instrument.txt', }, 'GL_SGIX_packed_6bytes' : { 'number' : 162, 'flags' :", "206, 'flags' : { 'public' }, 'supporters' : { '3DFX' }, 'url' :", "'number' : 178, 'flags' : { 'public' }, 'supporters' : { 'INGR' },", "'url' : 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', }, 
'GL_EXT_texture_cube_map' : { 'flags' : { 'incomplete' }, 'url'", "'GL_EXT_texture_sRGB_R8' : { 'esnumber' : 221, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'supporters' : { '3DL' }, 'url' : 'extensions/3DL/WGL_3DL_stereo_control.txt', },", "}, 'url' : 'extensions/EXT/EXT_shadow_funcs.txt', }, 'GL_EXT_shadow_samplers' : { 'esnumber' : 102, 'flags' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_swap_control.txt', },", "'url' : 'extensions/ARB/ARB_half_float_vertex.txt', }, 'GL_ARB_indirect_parameters' : { 'arbnumber' : 154, 'flags' : {", "'url' : 'extensions/OML/OML_interlace.txt', }, 'GL_OML_resample' : { 'number' : 241, 'flags' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program1_1.txt', },", "extension which is referred to by some other vendor extensions, but shipped as", "17, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_buffers.txt', }, 'GL_ARB_draw_buffers_blend' : { 'arbnumber'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_object.txt', }, 'GL_ARB_framebuffer_sRGB' : { 'arbnumber' :", "75.', }, 'GLX_ARB_create_context_no_error' : { 'arbnumber' : 191, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_create_context_no_error.txt', 'comments' : 'Shares extension spec with WGL_ARB_create_context_no_error.',", "'url' : 'extensions/EXT/EXT_texture_format_sRGB_override.txt', }, 'GLX_EXT_texture_from_pixmap' : { 'number' : 344, 'flags' : {", "{ 'esnumber' : 149, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_border_clamp.txt', },", "'extensions/IMG/IMG_read_format.txt', }, 'GL_IMG_shader_binary' : { 'esnumber' : 68, 'flags' : { 'public' },", "{ 'number' : 396, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 112, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query.txt', }, 'GL_ARB_internalformat_query2' :", ": 
'extensions/OES/OES_tessellation_shader.txt', }, 'GL_OES_texture_3D' : { 'esnumber' : 34, 'flags' : { 'public'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_calligraphic_fragment.txt', }, 'GL_SGIX_clipmap' : { 'number'", "46, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_type_10_10_10_2.txt', }, 'GL_OES_viewport_array' : {", "}, 'GL_EXT_shader_io_blocks' : { 'esnumber' : 180, 'flags' : { 'public' }, 'url'", ": 'extensions/SGIX/SGIX_datapipe.txt', }, 'GL_SGIX_decimation' : { 'number' : 125, 'flags' : { 'incomplete'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_provoking_vertex.txt', }, 'GL_ARB_query_buffer_object' : { 'arbnumber' :", "'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_array.txt', }, 'GL_NV_shadow_samplers_cube' : { 'esnumber' : 147, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_required_internalformat.txt', }, 'GL_OES_rgb8_rgba8' : { 'esnumber'", "}, 'url' : 'extensions/SGIX/SGIX_blend_alpha_minmax.txt', }, 'GL_SGIX_blend_cadd' : { 'number' : 150, 'flags' :", "'GL_SGIX_convolution_accuracy' : { 'number' : 211, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_depth_bounds_test.txt', }, 'GL_EXT_direct_state_access'", "'esnumber' : 103, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_rg.txt', }, 'GL_EXT_texture_sRGB'", "}, 'url' : 'extensions/APPLE/APPLE_rgb_422.txt', }, 'GL_APPLE_row_bytes' : { 'number' : 372, 'flags' :", ": 'extensions/EXT/EXT_discard_framebuffer.txt', }, 'GL_EXT_disjoint_timer_query' : { 'esnumber' : 150, 'flags' : { 'public'", "'extensions/NV/NV_texture_compression_latc.txt', }, 'GL_NV_texture_compression_s3tc' : { 'esnumber' : 128, 'flags' : { 'public' },", ": 'extensions/ATI/ATI_vertex_attrib_array_object.txt', }, 'GL_ATI_vertex_streams' : { 'number' : 249, 'flags' : { 'public'", "150, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_disjoint_timer_query.txt', }, 'GL_EXT_draw_buffers' : {", "'esnumber' : 
190, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robustness.txt', }, 'GL_KHR_texture_compression_astc_hdr'", "'GL_ARB_shader_ballot' : { 'arbnumber' : 183, 'flags' : { 'public' }, 'url' :", "}, 'GL_NV_draw_vulkan_image' : { 'number' : 501, 'esnumber' : 274, 'flags' : {", "'arbnumber' : 100, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_viewport_array.txt', }, 'GL_ARB_window_pos'", "}, 'GL_EXT_sparse_texture2' : { 'number' : 463, 'esnumber' : 259, 'flags' : {", "'number' : 339, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners2.txt', },", "}, 'GL_NV_fragment_coverage_to_color' : { 'number' : 467, 'esnumber' : 229, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_shader4.txt', }, 'GL_EXT_gpu_shader5' :", "'extensions/APPLE/APPLE_element_array.txt', }, 'GL_APPLE_fence' : { 'number' : 272, 'flags' : { 'public' },", ": { 'number' : 336, 'flags' : { 'public' }, 'supporters' : {", "'arbnumber' : 180, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_post_depth_coverage.txt', }, 'GL_ARB_program_interface_query'", "165, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' :", "{ 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_make_current_read.txt', },", "'number' : 456, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'esnumber' : 192, 'flags' : { 'public' }, 'url' : 'extensions/DMP/DMP_program_binary.txt', }, 'GL_DMP_shader_binary'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_x11_sync_object.txt', }, 'GL_FJ_shader_binary_GCCSO' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5.txt', }, 'GL_NV_gpu_program5_mem_extended' : { 'number' : 434, 'flags'", ": 'extensions/EXT/EXT_separate_specular_color.txt', }, 'GL_EXT_shader_framebuffer_fetch' : { 'number' : 520, 'esnumber' : 122, 'flags'", ": { 'arbnumber' : 104, 'flags' : { 'public' }, 'url' : 
'extensions/ARB/ARB_debug_output.txt',", ": { 'number' : 292, 'esnumber' : 9, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_read_format.txt', },", ": 284, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'url' : 'extensions/WIN/WIN_scene_markerXXX.txt', }, 'GL_WIN_specular_fog' : { 'number' : 114, 'flags' :", "'url' : 'extensions/NV/NV_texture_shader.txt', }, 'GL_NV_texture_shader2' : { 'number' : 231, 'flags' : {", "}, }, 'GL_NV_primitive_restart' : { 'number' : 285, 'flags' : { 'public' },", "'number' : 10, 'flags' : { 'public' }, 'supporters' : { 'ES', 'HP',", "'extensions/NV/NV_sample_locations.txt', }, 'GL_NV_sample_mask_override_coverage' : { 'number' : 473, 'esnumber' : 236, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_border_clamp.txt',", ": 171, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI' },", ": { 'esnumber' : 215, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_border_clamp.txt',", "134, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_pixel_buffer_object.txt', }, 'GL_NV_pixel_data_range' : {", "{ 'arbnumber' : 115, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_load_store.txt', },", "}, 'GL_QCOM_shader_framebuffer_fetch_noncoherent' : { 'esnumber' : 277, 'flags' : { 'public' }, 'url'", ": { 'arbnumber' : 80, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_occlusion_query2.txt',", "'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_scale_bias.txt', }, 'GL_SGIX_texture_supersample' : {", ": { 'arbnumber' : 49, 'flags' : { 'public' }, 'supporters' : {", "'GL_ARB_cl_event' : { 'arbnumber' : 103, 'flags' : { 'public' }, 'url' :", "'extensions/INTEL/INTEL_blackhole_render.txt', }, 'GL_INTEL_parallel_arrays' : { 'number' : 136, 'flags' : { 'public' },", "shared extension number 15 with SGIS_pixel_texture.', }, 'GL_SGIX_pixel_texture_bits' : { 'number' : 127,", 
"'WGL_EXT_extensions_string' : { 'number' : 168, 'flags' : { 'public' }, 'supporters' :", "}, 'GLX_ARB_create_context_robustness' : { 'arbnumber' : 101, 'flags' : { 'public' }, 'url'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shader_objects.txt', }, 'GL_ARB_shader_precision' : { 'arbnumber'", "{ 'public' }, 'url' : 'extensions/AMD/AMD_gcn_shader.txt', }, 'GLX_AMD_gpu_association' : { 'number' : 398,", ": { 'number' : 263, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_NV_fragment_program4' : { 'number' : 335, 'flags' : { 'public' }, 'supporters'", "'arbnumber' : 135, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robust_buffer_access_behavior.txt', }, 'GL_ARB_robustness'", "'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_direct_state_access.txt', }, 'GL_EXT_discard_framebuffer' : { 'esnumber'", "'esnumber' : 116, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_surfaceless_context.txt', }, 'GL_OES_tessellation_shader'", ": 'extensions/SGIX/SGIX_fog_texture.txt', }, 'GL_SGIX_fragment_lighting_space' : { 'number' : 118, 'flags' : { 'incomplete'", "'obsolete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_tiles.txt', }, 'GL_SGIX_polynomial_ffd' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_integer.txt', }, 'GL_EXT_texture_lod_bias' : {", ": { 'number' : 479, 'esnumber' : 242, 'flags' : { 'public' },", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_float_pixels.txt', }, 'GL_APPLE_flush_buffer_range' : { 'number' : 321, 'flags'", ": 'extensions/NV/WGL_NV_render_depth_texture.txt', }, 'WGL_NV_render_texture_rectangle' : { 'number' : 264, 'flags' : { 'public'", ": { 'number' : 509, 'flags' : { 'public' }, 'supporters' : {", "'GL_OES_stencil_wrap' : { 'esnumber' : 19, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_image_formats.txt', }, 'GL_NV_instanced_arrays' : { 'esnumber'", "'url' : 'extensions/EXT/EXT_sparse_texture2.txt', }, 
'GL_EXT_static_vertex_array' : { 'flags' : { 'public' }, 'supporters'", "'number' : 204, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_pass_instrument.txt', }, 'GL_SGIX_depth_texture' :", "'GL_ARB_shader_texture_lod' : { 'arbnumber' : 60, 'flags' : { 'public' }, 'url' :", "'public' }, 'url' : 'extensions/ARB/ARB_texture_multisample.txt', }, 'GL_ARB_texture_non_power_of_two' : { 'arbnumber' : 34, 'flags'", "'esnumber' : 115, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_required_internalformat.txt', }, 'GL_OES_rgb8_rgba8'", "}, 'GLX_SGI_swap_control' : { 'number' : 40, 'flags' : { 'public' }, 'supporters'", "'GL_ARB_vertex_program' : { 'arbnumber' : 26, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_blended_overlay.txt', }, 'GL_SGIS_clip_band_hint' :", "'extensions/ARB/ARB_internalformat_query2.txt', }, 'GL_ARB_invalidate_subdata' : { 'arbnumber' : 132, 'flags' : { 'public' },", ": 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', }, 'GL_SGIX_texture_lod_bias' : { 'number' : 84, 'flags' : { 'public'", "'url' : 'extensions/SGIX/SGIX_interlace.txt', }, 'GL_SGIX_ir_instrument1' : { 'number' : 81, 'flags' : {", "'extensions/NV/NV_clip_space_w_scaling.txt', }, 'GL_NV_command_list' : { 'number' : 477, 'flags' : { 'public' },", "509, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 183, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_ballot.txt', }, 'GL_ARB_shader_bit_encoding' :", ": 422, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": 187, 'esnumber' : 41, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/EXT/EXT_x11_sync_object.txt', }, 'GL_FJ_shader_binary_GCCSO' : { 'esnumber' : 114, 'flags' : { 'public'", "}, 'GL_EXT_disjoint_timer_query' : { 'esnumber' : 150, 'flags' : { 'public' }, 'url'", "'GL_EXT_shader_io_blocks' : { 'esnumber' : 180, 'flags' : { 
'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_geometry_shader4.txt',", "'extensions/EXT/WGL_EXT_swap_control.txt', }, 'WGL_EXT_swap_control_tear' : { 'number' : 415, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_deep_texture3D.txt',", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle_compressed.txt', }, 'GL_NV_texture_shader'", "39, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC',", "'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_multicast.txt', }, 'GL_NV_gpu_program4' : { 'number' : 322, 'flags'", "{ 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_compressed_paletted_texture.txt', }, 'GL_OES_copy_image'", ": 'extensions/GREMEDY/GREMEDY_frame_terminator.txt', }, 'GL_GREMEDY_string_marker' : { 'number' : 311, 'flags' : { 'public'", "'url' : 'extensions/EXT/EXT_protected_textures.txt', }, 'GL_EXT_provoking_vertex' : { 'number' : 364, 'flags' : {", "}, 'supporters' : { '3DFX', '3DL', 'SGI' }, 'url' : 'extensions/EXT/EXT_shared_texture_palette.txt', }, 'GL_EXT_sparse_texture'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_present_video.txt', 'alias'", "}, 'GL_QCOM_extended_get' : { 'esnumber' : 62, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', }, 'GL_ANGLE_program_binary' : { 'esnumber' :", "'extensions/ARB/ARB_cl_event.txt', }, 'GL_ARB_clear_buffer_object' : { 'arbnumber' : 121, 'flags' : { 'public' },", "}, 'url' : 'extensions/EXT/EXT_paletted_texture.txt', }, 'GL_EXT_pixel_buffer_object' : { 'number' : 302, 'flags' :", "'extensions/EXT/EXT_stencil_wrap.txt', }, 'GL_EXT_subtexture' : { 'number' : 9, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_timer_query.txt', }, 'GL_EXT_transform_feedback'", "'url' : 'extensions/EXT/EXT_texture_object.txt', 
}, 'GL_EXT_texture_perturb_normal' : { 'number' : 147, 'flags' : {", "{ 'number' : 130, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "'number' : 313, 'flags' : { 'public' }, 'supporters' : { '3DL' },", "{ 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_object_space_tess.txt', }, 'GL_EXT_occlusion_query_boolean' : { 'esnumber' : 100,", "'GL_ARB_color_buffer_float' : { 'arbnumber' : 39, 'flags' : { 'public' }, 'supporters' :", "'extensions/ARB/ARB_texture_barrier.txt', }, 'GL_ARB_texture_border_clamp' : { 'arbnumber' : 13, 'flags' : { 'public' },", "'extensions/EXT/EXT_multiview_draw_buffers.txt', }, 'GLU_EXT_nurbs_tessellator' : { 'number' : 100, 'flags' : { 'public' },", "}, 'GL_OES_texture_compression_astc' : { 'esnumber' : 162, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/EXT/EXT_EGL_image_storage.txt', }, 'GL_EXT_memory_object' : { 'number' : 503, 'esnumber' :", "'url' : 'extensions/EXT/GLX_EXT_libglvnd.txt', }, 'GL_EXT_light_texture' : { 'number' : 117, 'flags' : {", ": { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_usage.txt', }, 'GL_ANGLE_translated_shader_source' : { 'esnumber' :", "}, 'url' : 'extensions/APPLE/APPLE_aux_depth_stencil.txt', }, 'GL_APPLE_client_storage' : { 'number' : 270, 'flags' :", "}, 'url' : 'extensions/NV/NV_read_buffer.txt', }, 'GL_NV_read_depth_stencil' : { 'esnumber' : 94, 'flags' :", "'number' : 211, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_filter_hint.txt', }, 'GL_NV_non_square_matrices' : { 'esnumber' : 160, 'flags'", "'GL_SGIX_texture_supersample' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_supersample.txt', }, 'GL_SGIX_vector_ops'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counters.txt', }, 'GL_ARB_shader_ballot' : { 'arbnumber' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_multicast.txt', }, 'GL_NV_gpu_program4' : { 'number' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 
'extensions/ARB/GLX_ARB_get_proc_address.txt', },", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', }, 'GL_SGIX_texture_multi_buffer' :", ": 'extensions/EXT/EXT_multisample_compatibility.txt', }, 'GL_EXT_multisampled_render_to_texture' : { 'esnumber' : 106, 'flags' : { 'public'", ": { 'IBM' }, 'url' : 'extensions/IBM/IBM_cull_vertex.txt', }, 'GL_IBM_multimode_draw_arrays' : { 'number' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_scale.txt', }, 'GL_SGIX_fog_texture' : {", "}, 'url' : 'extensions/INTEL/GLX_INTEL_swap_event.txt', }, 'GL_INTEL_texture_scissor' : { 'number' : 135, 'flags' :", ": 'extensions/WIN/WIN_scene_markerXXX.txt', }, 'GL_WIN_specular_fog' : { 'number' : 114, 'flags' : { 'public'", "'url' : 'extensions/NV/NV_vertex_attrib_integer_64bit.txt', }, 'GL_NV_vertex_buffer_unified_memory' : { 'number' : 380, 'flags' : {", "}, 'url' : 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', }, 'GL_IMG_bindless_texture' : { 'esnumber' : 270, 'flags' :", "'alias' : { 'GLX_NV_copy_image', 'WGL_NV_copy_image' }, }, 'GL_NV_coverage_sample' : { 'esnumber' : 72,", "}, 'url' : 'extensions/EXT/EXT_framebuffer_blit.txt', }, 'GL_EXT_framebuffer_multisample' : { 'number' : 317, 'flags' :", "'GL_NV_texture_compression_s3tc' : { 'esnumber' : 128, 'flags' : { 'public' }, 'url' :", "}, 'GL_NV_float_buffer' : { 'number' : 281, 'flags' : { 'public' }, 'supporters'", "'GL_NV_light_max_exponent' : { 'number' : 189, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_occlusion_instrument.txt', }, 'GL_SGIX_packed_6bytes' : { 'number'", "}, 'GL_ARB_shading_language_100' : { 'arbnumber' : 33, 'flags' : { 'public' }, 'supporters'", ": { 'arbnumber' : 9, 'flags' : { 'public' }, 'supporters' : {", "144, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_buffer_storage.txt', }, 'GL_ARB_cl_event' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program.txt', }, 
'GL_NV_vertex_program1_1' : { 'number' : 266, 'flags'", "'url' : 'extensions/ANGLE/ANGLE_translated_shader_source.txt', }, 'GL_APPLE_aux_depth_stencil' : { 'number' : 370, 'flags' : {", "}, 'GL_EXT_win32_keyed_mutex' : { 'number' : 506, 'esnumber' : 283, 'flags' : {", "}, 'GL_ATI_vertex_array_object' : { 'number' : 247, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_writeonly_rendering.txt', }, 'GL_QCOM_framebuffer_foveated' : { 'esnumber'", ": { 'number' : 82, 'flags' : { 'incomplete' }, 'supporters' : {", ": 192, 'flags' : { 'public' }, 'url' : 'extensions/DMP/DMP_program_binary.txt', }, 'GL_DMP_shader_binary' :", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_float_pixels.txt', },", "}, 'url' : 'extensions/SGIX/SGIX_polynomial_ffd.txt', }, 'GL_SGIX_quad_mesh' : { 'flags' : { 'incomplete' },", ": { 'number' : 399, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_extensions_string.txt', }, 'WGL_ARB_make_current_read' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_primitive_restart.txt', }, 'GL_NV_query_resource' : { 'number'", "'url' : 'extensions/IBM/IBM_vertex_array_lists.txt', }, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' : { 'flags' : { 'incomplete', 'obsolete' },", "'public' }, 'url' : 'extensions/NV/NV_shader_atomic_fp16_vector.txt', }, 'GL_NV_shader_atomic_int64' : { 'number' : 455, 'flags'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize.txt', }, 'GLX_SGIX_video_resize_float' :", "'extensions/EXT/EXT_blend_minmax.txt', }, 'GL_EXT_blend_subtract' : { 'number' : 38, 'flags' : { 'public' },", "'number' : 391, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'GL_EXT_stencil_wrap' : { 'number' : 176, 'flags' : { 'public' }, 'supporters'", "}, 'GL_ARB_conservative_depth' : { 'arbnumber' : 111, 'flags' : { 'public' }, 'url'", "{ 'NVIDIA' }, 'url' : 
'extensions/NV/WGL_NV_gpu_affinity.txt', }, 'WGL_NV_render_depth_texture' : { 'number' : 263,", "{ 'public' }, 'url' : 'extensions/OES/OES_vertex_array_object.txt', }, 'GL_OES_vertex_half_float' : { 'esnumber' : 38,", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_mirrored_repeat.txt', },", "'url' : 'extensions/AMD/AMD_vertex_shader_tessellator.txt', }, 'GL_AMD_vertex_shader_viewport_index' : { 'number' : 416, 'flags' : {", "{ 'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_interlace_read.txt', }, 'GL_INTEL_conservative_rasterization'", "'url' : 'extensions/INTEL/INTEL_fragment_shader_ordering.txt', }, 'GL_INTEL_framebuffer_CMAA' : { 'number' : 481, 'esnumber' : 246,", "'url' : 'drafts/MTK/MTK_program_binary.txt', }, 'GL_MTK_shader_binary' : { 'esnumber' : 244, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_compute_program5.txt', }, 'GL_NV_conditional_render' : { 'number' : 346, 'esnumber'", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex_type.txt', }, 'GLX_SGI_cushion'", "'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_map_texture.txt',", "}, 'GL_NV_vertex_program2_option' : { 'number' : 305, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 276, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/QCOM/QCOM_tiled_rendering.txt', }, 'GL_QCOM_writeonly_rendering' : { 'esnumber' : 61, 'flags' :", "352, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url'", "{ 'number' : 205, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "{ 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_array_formats.txt', }, 'GL_EXT_index_func' : { 'number' :", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_shader4.txt', }, 'GL_EXT_gpu_shader5' : { 'esnumber' : 178, 'flags'", "'extensions/OVR/OVR_multiview2.txt', }, 'GL_OVR_multiview_multisampled_render_to_texture' : { 'esnumber' : 250, 'flags' : { 'public' },", "}, 
'GL_VIV_shader_binary' : { 'esnumber' : 85, 'flags' : { 'public' }, 'url'", "'extensions/ARB/WGL_ARB_render_texture.txt', }, 'WGL_ARB_robustness_application_isolation' : { 'arbnumber' : 143, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/ARB/ARB_robustness.txt', }, 'GLX_ARB_robustness_application_isolation' : { 'arbnumber' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_shader.txt', }, 'GL_ARB_vertex_type_10f_11f_11f_rev' :", ": { 'arbnumber' : 105, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness.txt',", "'extensions/QCOM/QCOM_extended_get.txt', }, 'GL_QCOM_extended_get2' : { 'esnumber' : 63, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_spirv_extensions.txt', }, 'GL_ARB_stencil_texturing' : { 'arbnumber' : 138, 'flags' :", ": 111, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conservative_depth.txt', }, 'GL_ARB_copy_buffer' :", ": 36, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_float.txt', 'alias' : {", "'extensions/ARB/ARB_fragment_program.txt', }, 'GL_ARB_fragment_program_shadow' : { 'arbnumber' : 36, 'flags' : { 'public' },", "'url' : 'extensions/ARB/ARB_copy_image.txt', }, 'GLX_ARB_create_context' : { 'arbnumber' : 56, 'flags' : {", "'url' : 'extensions/EXT/EXT_external_objects.txt', 'alias' : { 'GL_EXT_semaphore' }, }, 'GL_EXT_memory_object_fd' : { 'number'", "}, 'url' : 'extensions/NV/NV_framebuffer_blit.txt', }, 'GL_NV_framebuffer_mixed_samples' : { 'number' : 469, 'esnumber' :", "arbnumber 55, WGL_ARB_create_context.', }, 'WGL_ARB_create_context_robustness' : { 'arbnumber' : 102, 'flags' : {", "}, 'url' : 'extensions/NV/NV_half_float.txt', }, 'GL_NV_image_formats' : { 'esnumber' : 200, 'flags' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_multiview_draw_buffers.txt', }, 'GLU_EXT_nurbs_tessellator' : { 'number' : 100,", "'extensions/ARB/ARB_shader_image_size.txt', }, 'GL_ARB_shader_objects' : { 'arbnumber' : 30, 'flags' : { 'public' },", "{ 'public' }, 'url' : 
'extensions/OES/OES_get_program_binary.txt', }, 'GL_OES_gpu_shader5' : { 'esnumber' : 211,", "'extensions/NV/NV_query_resource.txt', }, 'GL_NV_query_resource_tag' : { 'number' : 512, 'flags' : { 'public' },", ": 295, 'esnumber' : 17, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_AMD_shader_explicit_vertex_parameter' : { 'number' : 485, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_barrier.txt', }, 'GLX_SGIX_swap_group'", "196, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_internalformat_sample_query.txt', }, 'GL_NV_light_max_exponent' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_disjoint_timer_query.txt', }, 'GL_EXT_draw_buffers' : { 'esnumber' : 151,", "'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program4.txt', }, 'GL_NV_fragment_program_option' : { 'number' : 303, 'flags'", "}, 'GL_ARB_shader_clock' : { 'arbnumber' : 184, 'flags' : { 'public' }, 'url'", "'GL_ATI_vertex_attrib_array_object' : { 'number' : 290, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_2_compatibility.txt', }, 'GL_ARB_ES3_compatibility' : { 'arbnumber'", ": 'extensions/ARB/ARB_explicit_uniform_location.txt', }, 'GL_ARB_fragment_coord_conventions' : { 'arbnumber' : 63, 'flags' : { 'public'", ": 195, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_vertex_array_lists.txt', }, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' :", "'url' : 'extensions/NV/NV_non_square_matrices.txt', }, 'GL_NV_occlusion_query' : { 'number' : 261, 'flags' : {", "'public' }, 'url' : 'extensions/NV/NV_sample_locations.txt', }, 'GL_NV_sample_mask_override_coverage' : { 'number' : 473, 'esnumber'", ": { 'esnumber' : 38, 'flags' : { 'public' }, 'url' : 
'extensions/OES/OES_vertex_half_float.txt',", ": { 'esnumber' : 144, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_generate_mipmap_sRGB.txt',", "'ARB' }, 'url' : 'extensions/ARB/ARB_draw_instanced.txt', }, 'GL_ARB_enhanced_layouts' : { 'arbnumber' : 146, 'flags'", "{ 'HP', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture.txt', }, 'GL_EXT_texture3D' : {", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_present_video.txt', 'alias' : { 'GLX_NV_present_video', 'WGL_NV_present_video' },", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_query_buffer_object.txt', },", ": { 'number' : 298, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_float_pixels.txt', }, 'GL_APPLE_flush_buffer_range' : {", "'MESA' }, 'url' : 'extensions/AMD/AMD_shader_ballot.txt', }, 'GL_AMD_shader_explicit_vertex_parameter' : { 'number' : 485, 'flags'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_group_vote.txt', }, 'GL_ARB_shader_image_load_store' : { 'arbnumber' : 115,", "'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl' : 'extensions/EXT/EXT_separate_shader_objects.gles.txt', 'comments' : 'Different that the OpenGL extension with the", ": 'extensions/EXT/EXT_texture_storage.txt', }, 'GL_EXT_texture_swizzle' : { 'number' : 356, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'DEC', 'HP', 'IBM', 'INGR', 'KGC', 'SGI'", "'number' : 376, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_NV_packed_depth_stencil' : { 'number' : 226, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/NV/NV_texture_border_clamp.txt', }, 'GL_NV_texture_compression_latc' : { 'esnumber' : 130, 'flags' : {", ": 7, 'flags' : { 'public' }, 'supporters' : { 'KGC', 'SGI' },", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_add_env.txt', }, 'GL_SGIX_texture_coordinate_clamp' : { 'number' : 235,", "'url' : 'extensions/SGIX/SGIX_impact_pixel_texture.txt', }, 'GL_SGIX_instrument_error' : { 'flags' 
: { 'incomplete' }, 'url'", "45, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'public' }, 'url' : 'extensions/ARB/ARB_texture_gather.txt', }, 'GL_ARB_texture_mirror_clamp_to_edge' : { 'arbnumber' : 149, 'flags'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias' : { 'GL_SGIX_vertex_preclip_hint' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_stencil8.txt', }, 'GL_ARB_texture_storage' : { 'arbnumber' : 117,", "46, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'arbnumber' : 44, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "31, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_tessellator.txt', }, 'GL_AMD_vertex_shader_viewport_index'", "'esnumber' : 262, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', }, 'GL_NV_conservative_raster_underestimation'", ": 'extensions/OES/OES_depth24.txt', }, 'GL_OES_depth32' : { 'esnumber' : 25, 'flags' : { 'public'", "'esnumber' : 132, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_pack_subimage.txt', }, 'GL_NV_packed_depth_stencil'", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_fence.txt', }, 'GL_APPLE_float_pixels' : { 'number' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig.txt', }, 'GLX_SGIX_fbconfig_float' : {", "'GL_NV_platform_binary' : { 'esnumber' : 131, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fence.txt', }, 'GL_NV_fill_rectangle' : {", "'url' : 'extensions/WIN/WIN_phong_shading.txt', }, 'GL_WIN_scene_markerXXX' : { 'flags' : { 'obsolete' }, 'url'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_occlusion_query.txt', },", "{ 'arbnumber' : 82, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_bit_encoding.txt', },", ": { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_coverage_sample.txt', 
}, 'GL_NV_deep_texture3D' : { 'number' :", "}, 'GL_EXT_subtexture' : { 'number' : 9, 'flags' : { 'public' }, 'supporters'", ": 22, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "}, 'url' : 'extensions/EXT/EXT_vertex_shader.txt', }, 'GL_EXT_vertex_weighting' : { 'number' : 188, 'flags' :", ": 'extensions/NV/NV_texgen_reflection.txt', }, 'GL_NV_texture_array' : { 'esnumber' : 133, 'flags' : { 'public'", "}, 'GL_ATI_vertex_attrib_array_object' : { 'number' : 290, 'flags' : { 'public' }, 'supporters'", "47, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'SGI' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_render_snorm.txt', }, 'GL_EXT_rescale_normal' : { 'number'", "}, 'GL_OES_primitive_bounding_box' : { 'esnumber' : 212, 'flags' : { 'public' }, 'url'", ": 'extensions/ARB/ARB_clip_control.txt', }, 'GL_ARB_color_buffer_float' : { 'arbnumber' : 39, 'flags' : { 'public'", ": 151, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers.txt', }, 'GL_EXT_draw_buffers2' :", "'MESA', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', }, 'GL_EXT_texture_integer' : { 'number' : 343,", "'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_minmax.txt', }, 'GL_EXT_blend_subtract' : { 'number'", ": 94, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback3.txt', }, 'GL_ARB_transform_feedback_instanced' :", "324, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async.txt', }, 'GL_SGIX_async_histogram' : { 'number' :", "{ 'number' : 332, 'esnumber' : 286, 'flags' : { 'public' }, 'supporters'", "'extensions/ARB/ARB_texture_storage_multisample.txt', }, 'GL_ARB_texture_swizzle' : { 'arbnumber' : 84, 'flags' : { 'public' },", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async.txt', }, 'GL_SGIX_async_histogram' : { 'number' : 134,", "}, 'url' : 'extensions/NV/NV_shader_atomic_counters.txt', }, 'GL_NV_shader_atomic_float' : { 'number' : 419, 'flags' 
:", "'extensions/NV/NV_vdpau_interop.txt', }, 'GL_NV_vertex_array_range' : { 'number' : 190, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_array.txt', }, 'GL_EXT_EGL_image_external_wrap_modes' : { 'esnumber' : 298,", "'esnumber' : 243, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_no_error.txt', }, 'GL_KHR_parallel_shader_compile'", "'public' }, 'url' : 'extensions/ARB/ARB_sync.txt', }, 'GL_ARB_tessellation_shader' : { 'arbnumber' : 91, 'flags'", "{ 'GLX_EXT_framebuffer_sRGB', 'WGL_EXT_framebuffer_sRGB' }, }, 'GL_EXT_geometry_shader' : { 'esnumber' : 177, 'flags' :", "'url' : 'extensions/AMD/AMD_gpu_shader_half_float.txt', }, 'GL_AMD_gpu_shader_half_float_fetch' : { 'number' : 519, 'flags' : {", ": 'extensions/SGIX/SGIX_icc_texture.txt', }, 'GL_SGIX_igloo_interface' : { 'number' : 219, 'flags' : { 'incomplete'", "'url' : 'extensions/I3D/WGL_I3D_genlock.txt', }, 'WGL_I3D_image_buffer' : { 'number' : 253, 'flags' : {", "'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_tbuffer.txt', }, 'GL_3DFX_texture_compression_FXT1' :", "{ 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl' : 'extensions/EXT/EXT_separate_shader_objects.gles.txt', 'comments' : 'Different", "}, 'GL_EXT_shader_pixel_local_storage2' : { 'esnumber' : 253, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_lock.txt', }, 'WGL_I3D_swap_frame_usage' : { 'number' : 255, 'flags' :", "'GL_NV_texture_npot_2D_mipmap' : { 'esnumber' : 96, 'flags' : { 'public' }, 'url' :", "'GL_PGI_misc_hints' : { 'number' : 77, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_stencil_operation_extended.txt', }, 'GL_AMD_texture_gather_bias_lod'", "}, 'url' : 'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', }, 'GL_AMD_transform_feedback4' : { 'number' : 450, 'flags' :", "'extensions/NV/NV_fill_rectangle.txt', }, 
'GL_NV_float_buffer' : { 'number' : 281, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt',", "'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_texture_rectangle.txt', }, 'WGL_NV_swap_group' : { 'number' : 351, 'flags'", "'GL_ANGLE_framebuffer_multisample' : { 'esnumber' : 84, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_rgb8_rgba8.txt', }, 'GL_OES_sample_shading' : { 'esnumber'", ": 'extensions/QCOM/QCOM_writeonly_rendering.txt', }, 'GL_QCOM_framebuffer_foveated' : { 'esnumber' : 273, 'flags' : { 'public'", "}, 'GL_NV_evaluators' : { 'number' : 225, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_uniform_buffer_object.txt', }, 'GL_ARB_vertex_array_bgra' : { 'arbnumber' :", "20, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map.txt', }, 'GL_OES_texture_cube_map_array' : {", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_multi_draw_indirect.txt', }, 'GL_AMD_name_gen_delete' : { 'number'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_direct_state_access.txt', }, 'GL_ARB_draw_buffers' : { 'arbnumber' :", "'url' : 'extensions/OML/OML_resample.txt', }, 'GL_OML_subsample' : { 'number' : 240, 'flags' : {", ": 'extensions/SGIX/GLX_SGIX_hyperpipe.txt', }, 'GLU_SGIX_icc_compress' : { 'flags' : { 'incomplete' }, 'url' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_color_subtable.txt', }, 'GL_EXT_compiled_vertex_array' : { 'number' : 97,", "{ 'esnumber' : 72, 'flags' : { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_coverage_sample.txt', },", ": 462, 'esnumber' : 226, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_raster_multisample.txt',", ": 76, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_vulkan_image.txt', }, 'GL_NV_evaluators' : { 'number' : 225,", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 
'extensions/ATI/ATI_map_object_buffer.txt', },", "{ 'arbnumber' : 132, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_invalidate_subdata.txt', },", "'url' : 'extensions/EXT/EXT_gpu_program_parameters.txt', }, 'GL_EXT_gpu_shader4' : { 'number' : 326, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect.txt', }, 'GL_NV_bindless_multi_draw_indirect_count' :", ": 391, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'GL_ARB_texture_rg' : { 'arbnumber' : 53, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/ARB/ARB_depth_buffer_float.txt', }, 'GL_ARB_depth_clamp' : { 'arbnumber' : 61, 'flags' : {", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_slim.txt', }, 'GL_SGIX_spotlight_cutoff' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5_mem_extended.txt', }, 'GL_NV_gpu_shader5' : { 'number' : 389,", "'number' : 320, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA'", ": { 'esnumber' : 48, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_program_binary_Z400.txt',", "}, 'url' : 'extensions/SUN/SUN_mesh_array.txt', }, 'GL_SUN_slice_accum' : { 'number' : 258, 'flags' :", "'number' : 516, 'esnumber' : 294, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/NV/NV_copy_depth_to_color.txt', }, 'GL_NV_copy_image' : { 'number' : 376,", "'extensions/SUN/SUN_vertex.txt', }, 'GL_VIV_shader_binary' : { 'esnumber' : 85, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pbuffer.txt', },", "'GL_NV_texture_border_clamp' : { 'esnumber' : 149, 'flags' : { 'public' }, 'url' :", "{ 'arbnumber' : 143, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias'", "{ 'arbnumber' : 119, 'esnumber' : 118, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_cadd.txt', }, 'GL_SGIX_blend_cmultiply' : {", "}, 
'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_sprite.txt', }, 'GL_SGIX_subdiv_patch' : {", ": { 'public' }, 'url' : 'extensions/NV/NV_internalformat_sample_query.txt', }, 'GL_NV_light_max_exponent' : { 'number' :", "}, 'GL_EXT_texture_env_combine' : { 'number' : 158, 'flags' : { 'public' }, 'supporters'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program2.txt', }, 'GL_NV_fragment_program4' : { 'number' : 335,", ": { 'number' : 306, 'flags' : { 'public' }, 'supporters' : {", "161, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'IBM', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_info.txt', }, 'GLX_EXT_visual_rating' : { 'number'", "'ATI' }, 'url' : 'extensions/ATI/ATI_element_array.txt', }, 'GL_ATI_envmap_bumpmap' : { 'number' : 244, 'flags'", ": 'extensions/OES/OES_viewport_array.txt', }, 'GL_OML_interlace' : { 'number' : 239, 'flags' : { 'public'", "'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/EXT_422_pixels.txt', }, 'GL_EXT_YUV_target' :", "{ 'number' : 120, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_scene_marker.txt', 'alias'", "'extensions/NV/NV_texgen_emboss.txt', }, 'GL_NV_texgen_reflection' : { 'number' : 179, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_name_gen_delete.txt', },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_clock.txt', }, 'GL_ARB_shader_draw_parameters' : { 'arbnumber' : 156,", ": 'extensions/MESA/MESA_pack_invert.txt', }, 'GLX_MESA_pixmap_colormap' : { 'number' : 216, 'flags' : { 'public'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLU_SGIX_icc_compress.txt', }, 'GL_SGIX_icc_texture' : {", "'esnumber' : 74, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_multisampled_render_to_texture.txt', }, 'GL_IMG_program_binary'", "'extensions/SGIX/GLX_SGIX_hyperpipe.txt', }, 'GLU_SGIX_icc_compress' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLU_SGIX_icc_compress.txt',", "'NVIDIA', 
'TransGaming' }, 'url' : 'extensions/AMD/AMD_draw_buffers_blend.txt', }, 'GL_AMD_framebuffer_sample_positions' : { 'number' : 454,", ": { 'public' }, 'url' : 'extensions/EXT/EXT_vertex_shader.txt', }, 'GL_EXT_vertex_weighting' : { 'number' :", "23, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_blend_func_extended.txt', }, 'GL_EXT_blend_func_separate' : { 'number' : 173,", "'extensions/EXT/EXT_shader_implicit_conversions.txt', }, 'GL_EXT_shader_integer_mix' : { 'number' : 437, 'esnumber' : 161, 'flags' :", "}, 'url' : 'extensions/IBM/IBM_multimode_draw_arrays.txt', }, 'GL_IBM_rasterpos_clip' : { 'number' : 110, 'flags' :", "'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias' : { 'GL_SGIS_color_range' }, }, 'GL_SGIS_detail_texture' :", ": { 'esnumber' : 112, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_usage.txt',", "'supporters' : { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_swap_method.txt', }, 'GLX_OML_sync_control' : { 'number'", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_matrix_accuracy.txt', }, 'GL_SGIX_color_table_index_mode' : { 'flags' : { 'incomplete'", "}, }, 'GL_OES_texture_mirrored_repeat' : { 'esnumber' : 22, 'flags' : { 'public' },", "'GL_NV_shader_atomic_int64' : { 'number' : 455, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_point_sprite.txt', },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_storage.txt', }, 'GL_EXT_memory_object' : { 'number' : 503,", ": { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_sharpen_texture.txt', }, 'GL_SGIS_texture4D' : { 'number' :", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_blend_cmultiply.txt', }, 'GL_SGIX_calligraphic_fragment' : {", "}, 'WGL_I3D_digital_video_control' : { 'number' : 250, 'flags' : { 'public' }, 'supporters'", "{ 'arbnumber' : 53, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'SGI' }, 'url' : 
'extensions/SGIX/SGIX_calligraphic_fragment.txt', }, 'GL_SGIX_clipmap' : { 'number' : 33, 'flags'", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float.txt', },", ": 'extensions/EXT/EXT_texture_compression_s3tc.txt', }, 'GL_EXT_texture_compression_s3tc_srgb' : { 'esnumber' : 289, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/GLX_AMD_gpu_association.txt',", ": 30, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_rgb8_rgba8.txt', }, 'GL_OES_sample_shading' :", ": { 'number' : 199, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_primitive_restart.txt', }, 'GL_NV_query_resource' : {", "'url' : 'extensions/SUN/GLX_SUN_get_transparent_index.txt', }, 'GL_SUN_global_alpha' : { 'number' : 164, 'flags' : {", "'number' : 63, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_depth_texture.txt', }, 'WGL_NV_render_texture_rectangle' : { 'number' : 264,", ": { 'public' }, 'url' : 'extensions/ARB/ARB_parallel_shader_compile.txt', }, 'GL_ARB_pipeline_statistics_query' : { 'arbnumber' :", "'supporters' : { 'HP', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture.txt', }, 'GL_EXT_texture3D'", ": 'extensions/SGIX/SGIX_fragments_instrument.txt', }, 'GL_SGIX_framezoom' : { 'number' : 57, 'flags' : { 'public'", "}, 'GL_EXT_texture_env' : { 'number' : 146, 'flags' : { 'public' }, 'url'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_array_object.txt', }, 'GL_ARB_vertex_attrib_64bit' : { 'arbnumber' :", ": 'extensions/ARB/ARB_point_sprite.txt', }, 'GL_ARB_polygon_offset_clamp' : { 'arbnumber' : 193, 'flags' : { 'public'", "}, 'GL_EXT_transform_feedback2' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/EXT/EXT_transform_feedback2.txt',", "}, 'url' : 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', }, 'GL_ARB_separate_shader_objects' : { 'arbnumber' : 
97, 'flags' :", "'esnumber' : 186, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_primitive_bounding_box.txt', }, 'GL_EXT_protected_textures'", "'MS' }, 'url' : 'extensions/EXT/EXT_bgra.txt', }, 'GL_EXT_bindable_uniform' : { 'number' : 342, 'flags'", "'KGC', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_info.txt', }, 'GLX_EXT_visual_rating' : { 'number' : 44,", "'GL_EXT_texture_sRGB_decode' : { 'number' : 402, 'esnumber' : 152, 'flags' : { 'public'", "}, 'url' : 'extensions/ARB/ARB_copy_image.txt', }, 'GLX_ARB_create_context' : { 'arbnumber' : 56, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage_multisample.txt', }, 'GL_ARB_texture_swizzle' : { 'arbnumber'", ": 'extensions/ARB/ARB_sample_shading.txt', }, 'GL_ARB_sampler_objects' : { 'arbnumber' : 81, 'flags' : { 'public'", "{ 'number' : 84, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'number' : 414, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'GL_EXT_shader_implicit_conversions' : { 'esnumber' : 179, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get.txt', }, 'GL_QCOM_extended_get2' : { 'esnumber' : 63, 'flags'", ": { 'number' : 431, 'flags' : { 'public' }, 'supporters' : {", "423, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'arbnumber' : 95, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES2_compatibility.txt', },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_pixel_buffer_object.txt', }, 'GL_EXT_pixel_transform' : {", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_aux_depth_stencil.txt', }, 'GL_APPLE_client_storage' :", ": 152, 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_datapipe.txt', }, 'GL_SGIX_decimation' :", "'SGI' }, 'url' : 'extensions/SGI/SGI_fft.txt', }, 'GLU_SGI_filter4_parameters' : { 'number' : 85, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_fragment_shader_ordering.txt',", 
": { 'public' }, 'url' : 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', }, 'GL_ARB_compute_shader' : { 'arbnumber' :", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_buffers2.txt', }, 'GL_EXT_draw_buffers_indexed' : { 'esnumber' :", ": 'Draft extension which is referred to by some other vendor extensions, but", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_barrier.txt', },", "}, 'GL_OES_depth_texture' : { 'esnumber' : 43, 'flags' : { 'public' }, 'url'", "'extensions/OML/OML_resample.txt', }, 'GL_OML_subsample' : { 'number' : 240, 'flags' : { 'public' },", "{ 'number' : 257, 'flags' : { 'public' }, 'supporters' : { 'SUN'", "}, 'url' : 'extensions/OES/OES_point_size_array.txt', }, 'GL_OES_point_sprite' : { 'esnumber' : 15, 'flags' :", "'esnumber' : 46, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_type_10_10_10_2.txt', }, 'GL_OES_viewport_array'", "59, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_buffer.txt', }, 'GL_ARB_copy_image' : {", ": 'extensions/ARB/ARB_timer_query.txt', }, 'GL_ARB_transform_feedback2' : { 'arbnumber' : 93, 'flags' : { 'public'", "397, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_reference_plane.txt',", "'public' }, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform.txt', }, 'GL_EXT_pixel_transform_color_table'", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_table_index_mode.txt', }, 'GLX_SGIX_color_type' : { 'number' :", "'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', }, 'GL_SGIX_flush_raster' : { 'number' : 61, 'flags' : { 'public' },", "38, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC',", "{ 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pbuffer.txt', }, 'WGL_ARB_pixel_format' : { 'arbnumber' : 9,", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_R8.txt', }, 'GL_EXT_texture_sRGB_RG8' : { 'esnumber' : 223,", 
"'esnumber' : 231, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_mixed_samples.txt', }, 'GL_NV_framebuffer_multisample'", "278, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_array.txt', }, 'GL_EXT_EGL_image_external_wrap_modes' : {", ": { 'public' }, 'supporters' : { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_detail_texture.txt',", "'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', }, 'GL_ATI_draw_buffers' : { 'number' : 277, 'flags' : {", "}, 'url' : 'extensions/AMD/AMD_sparse_texture.txt', }, 'GL_AMD_stencil_operation_extended' : { 'number' : 413, 'flags' :", "{ 'public' }, 'url' : 'extensions/OES/OES_stencil8.txt', }, 'GL_OES_stencil_wrap' : { 'esnumber' : 19,", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_alpha_minmax.txt', }, 'GL_SGIX_blend_cadd' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_clamp.txt', }, 'GL_NV_depth_nonlinear'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_instanced.txt',", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_cadd.txt', }, 'GL_SGIX_blend_cmultiply' : { 'flags' : {", "'GL_EXT_texture_env_dot3' : { 'number' : 220, 'flags' : { 'public' }, 'supporters' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program4.txt', }, 'GL_NV_gpu_program5' : { 'number' : 388,", ": 'extensions/AMD/AMD_transform_feedback4.txt', }, 'GL_AMD_vertex_shader_layer' : { 'number' : 417, 'flags' : { 'public'", "'url' : 'extensions/APPLE/APPLE_vertex_program_evaluators.txt', }, 'GL_APPLE_ycbcr_422' : { 'number' : 275, 'flags' : {", ": { 'public' }, 'url' : 'extensions/KHR/KHR_robustness.txt', }, 'GL_KHR_texture_compression_astc_hdr' : { 'arbnumber' :", "'GL_NV_shader_thread_group' : { 'number' : 447, 'flags' : { 'public' }, 'url' :", "}, 'GL_SGIS_line_texgen' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_line_texgen.txt', },", "}, 'GL_ARB_half_float_vertex' : { 'arbnumber' : 48, 'flags' : { 
'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/NV/NV_copy_buffer.txt', }, 'GL_NV_copy_depth_to_color' : { 'number' : 243,", "{ 'number' : 69, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'arbnumber' : 171, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_pipeline_statistics_query.txt', }, 'GL_ARB_pixel_buffer_object'", ": { 'arbnumber' : 110, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt',", "}, 'url' : 'extensions/IBM/IBM_static_data.txt', }, 'GL_IBM_texture_mirrored_repeat' : { 'number' : 224, 'flags' :", "}, 'url' : 'extensions/SGIS/SGIS_pixel_texture.txt', }, 'GL_SGIS_point_line_texgen' : { 'number' : 213, 'flags' :", "{ 'public' }, 'supporters' : { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_filter4.txt', },", ": { 'public' }, 'url' : 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', }, 'GL_PGI_misc_hints' : { 'number' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_buffer_region.txt', }, 'WGL_ARB_create_context'", ": 'extensions/OES/OES_blend_subtract.txt', }, 'GL_OES_byte_coordinates' : { 'number' : 291, 'esnumber' : 4, 'flags'", "'extensions/ARB/ARB_geometry_shader4.txt', }, 'GLX_ARB_get_proc_address' : { 'arbnumber' : 2, 'flags' : { 'public' },", "'GL_NV_shader_buffer_store' : { 'number' : 390, 'flags' : { 'public' }, 'supporters' :", ": 206, 'flags' : { 'public' }, 'supporters' : { '3DFX' }, 'url'", ": 'extensions/EXT/EXT_read_format_bgra.txt', }, 'GL_EXT_render_snorm' : { 'esnumber' : 206, 'flags' : { 'public'", "{ 'SUN' }, 'url' : 'extensions/SUN/GLX_SUN_get_transparent_index.txt', }, 'GL_SUN_global_alpha' : { 'number' : 164,", "38, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'url' : 'extensions/AMD/WGL_AMD_gpu_association.txt', }, 'WGL_ARB_buffer_region' : { 'arbnumber' : 4, 'flags' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cylinder_texgen.txt', }, 'GL_SGIX_datapipe' : { 'number'", "'number' : 
9, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM',", ": 44, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'SGI'", "}, 'url' : 'extensions/EXT/EXT_index_material.txt', }, 'GL_EXT_index_texture' : { 'number' : 93, 'flags' :", ": 87, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external.txt', }, 'GL_OES_EGL_image_external_essl3' :", "{ 'esnumber' : 25, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth32.txt', },", ": { 'number' : 125, 'flags' : { 'incomplete' }, 'supporters' : {", "'extensions/EXT/EXT_copy_texture.txt', }, 'GLX_EXT_create_context_es2_profile' : { 'number' : 399, 'flags' : { 'public' },", "{ 'ES', 'HP', 'IBM', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_texture3D.txt', }, 'GL_EXT_texture_array' :", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_subsample.txt', },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_geometry_shader4.txt',", "}, 'url' : 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', }, 'GL_ANGLE_depth_texture' : { 'esnumber' : 138, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_viewport_array.txt', }, 'GL_ARB_window_pos' : { 'arbnumber'", "'arbnumber' : 140, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_levels.txt', }, 'GL_ARB_texture_query_lod'", "{ 'number' : 14, 'flags' : { 'public' }, 'supporters' : { 'HP',", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_delay_before_swap.txt', }, 'WGL_NV_gpu_affinity' : {", "'url' : 'extensions/EXT/EXT_blend_minmax.txt', }, 'GL_EXT_blend_subtract' : { 'number' : 38, 'flags' : {", "'number' : 242, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", "'public' }, 'url' : 'extensions/ARB/ARB_fragment_layer_viewport.txt', }, 'GL_ARB_fragment_program' : { 'arbnumber' : 27, 'flags'", "{ 'number' : 61, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "}, 'GL_SGIX_impact_pixel_texture' : { 'number' : 126, 'flags' : { 'incomplete' }, 'supporters'", 
": 'extensions/EXT/EXT_texture_env.txt', }, 'GL_EXT_texture_env_add' : { 'number' : 185, 'flags' : { 'public'", "}, 'GL_ATI_text_fragment_shader' : { 'number' : 269, 'flags' : { 'public' }, 'supporters'", "'extensions/OES/OES_geometry_shader.txt', }, 'GL_OES_get_program_binary' : { 'esnumber' : 47, 'flags' : { 'public' },", "'extensions/PGI/PGI_misc_hints.txt', }, 'GL_PGI_vertex_hints' : { 'number' : 76, 'flags' : { 'public' },", "'IdSoftware', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_swizzle.txt', }, 'GL_EXT_texture_type_2_10_10_10_REV' : { 'esnumber' : 42,", "289, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", "'GL_AMD_framebuffer_sample_positions' : { 'number' : 454, 'flags' : { 'public' }, 'url' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_map_buffer_range.txt', }, 'GL_ARB_matrix_palette' : { 'arbnumber'", "'extensions/IBM/IBM_cull_vertex.txt', }, 'GL_IBM_multimode_draw_arrays' : { 'number' : 200, 'flags' : { 'public' },", "{ 'number' : 394, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'supporters' : { 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_import_context.txt', }, 'GL_EXT_index_array_formats' : {", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_mirror_once.txt', },", ": 40, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_ATC_texture.txt', }, 'GL_AMD_conservative_depth' :", "'GL_EXT_shader_implicit_conversions' : { 'esnumber' : 179, 'flags' : { 'public' }, 'url' :", ": { 'number' : 72, 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt',", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_dvc.txt', }, 'GLX_SGIX_fbconfig' : { 'number' : 49, 'flags'", "}, 'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_draw_range_elements.txt', }, 'GL_EXT_draw_transform_feedback' : {", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg1.txt', }, 'GL_SGIX_mpeg2' : { 'flags' :", ": 'extensions/SGI/GLU_SGI_filter4_parameters.txt', }, 
'GLX_SGI_make_current_read' : { 'number' : 42, 'flags' : { 'public'", "'url' : 'extensions/ARB/ARB_sample_shading.txt', }, 'GL_ARB_sampler_objects' : { 'arbnumber' : 81, 'flags' : {", ": { 'MESA' }, 'url' : 'extensions/MESA/MESA_resize_buffers.txt', }, 'GLX_MESA_set_3dfx_mode' : { 'number' :", "'public' }, 'url' : 'extensions/EXT/EXT_multiview_draw_buffers.txt', }, 'GLU_EXT_nurbs_tessellator' : { 'number' : 100, 'flags'", "'extensions/ARB/ARB_invalidate_subdata.txt', }, 'GL_ARB_map_buffer_alignment' : { 'arbnumber' : 113, 'flags' : { 'public' },", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_occlusion_instrument.txt', },", ": { 'incomplete' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/SGIX/SGIX_igloo_interface.txt', },", ": { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_group.txt', }, 'GL_NV_shader_thread_shuffle' : { 'number' :", "}, 'url' : 'extensions/NV/NV_instanced_arrays.txt', }, 'GL_NV_internalformat_sample_query' : { 'number' : 475, 'esnumber' :", ": { 'esnumber' : 255, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_framebuffer_downsample.txt',", ": { 'public' }, 'url' : 'extensions/OES/OES_mapbuffer.txt', }, 'GL_OES_matrix_get' : { 'esnumber' :", "179, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_parallel_shader_compile.txt', }, 'GL_ARB_pipeline_statistics_query' : {", ": 'extensions/EXT/EXT_subtexture.txt', }, 'GL_EXT_swap_control' : { 'number' : 375, 'flags' : { 'public'", ": 175, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_copy_image.txt', }, 'GL_EXT_copy_texture' :", "}, 'url' : 'extensions/EXT/EXT_tessellation_shader.txt', 'alias' : { 'GL_EXT_tessellation_point_size' }, }, 'GL_EXT_texenv_op' : {", "'GL_EXT_sparse_texture' : { 'esnumber' : 240, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2.txt', }, 'GL_NV_vertex_program2_option'", "'GL_NV_depth_clamp' : { 'number' : 260, 'flags' : { 'public' }, 'supporters' :", "}, 
'GL_SGIX_image_compression' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_image_compression.txt', },", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_program.txt', },", "{ '3DFX' }, 'url' : 'extensions/3DFX/3DFX_texture_compression_FXT1.txt', }, 'GL_AMD_blend_minmax_factor' : { 'number' : 404,", "4, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", ": 22, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": { 'esnumber' : 278, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_array.txt',", ": 78, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_framebuffer_multisample.txt', }, 'GL_APPLE_object_purgeable' :", "'extensions/SGIX/SGIX_texture_phase.txt', }, 'GL_SGIX_texture_range' : { 'number' : 181, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_make_current_read.txt', }, 'GLX_SGI_swap_control'", ": { 'number' : 251, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/OES/OES_read_format.txt', }, 'GL_OES_required_internalformat' : { 'esnumber' : 115, 'flags' : { 'public'", "{ 'number' : 17, 'flags' : { 'public' }, 'supporters' : { 'ES',", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_blend_equation_separate.txt', }, 'GL_EXT_blend_func_extended'", "{ 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_texture_scissor.txt', }, 'GL_KHR_blend_equation_advanced' : { 'arbnumber' : 174,", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_rgb_422.txt', }, 'GL_APPLE_row_bytes' : { 'number' : 372, 'flags'", "'url' : 'extensions/NV/NV_vertex_program4.txt', }, 'GL_NV_video_capture' : { 'number' : 374, 'flags' : {", "'alias' : { 'GLX_ARB_vertex_buffer_object' }, }, 'GL_ARB_vertex_program' : { 'arbnumber' : 26, 'flags'", "}, 'GL_ARB_color_buffer_float' : { 'arbnumber' : 39, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', }, 
'GL_AMD_shader_atomic_counter_ops'", "'url' : 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', }, 'GLU_EXT_object_space_tess' : { 'number' : 75, 'flags' : {", ": 89, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_alpha_test.txt', }, 'GL_QCOM_binning_control' :", "}, 'url' : 'extensions/EXT/EXT_blend_minmax.txt', }, 'GL_EXT_blend_subtract' : { 'number' : 38, 'flags' :", "{ 'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/WGL_EXT_depth_float.txt', }, 'WGL_EXT_display_color_table'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_robustness_video_memory_purge.txt', }, 'GL_NV_sRGB_formats' : { 'esnumber'", "{ 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_histogram.txt', }, 'GLX_EXT_import_context' : {", "}, 'GL_AMD_compressed_3DC_texture' : { 'esnumber' : 39, 'flags' : { 'public' }, 'url'", "}, 'GL_IMG_shader_binary' : { 'esnumber' : 68, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', }, 'GL_AMD_gpu_shader_int16' : { 'number' : 507, 'flags' :", "'WGL_AMD_gpu_association' : { 'number' : 361, 'flags' : { 'public' }, 'supporters' :", "'GL_EXT_vertex_array_setXXX' : { 'flags' : { 'public' }, 'supporters' : { 'IBM' },", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_program_evaluators.txt', }, 'GL_APPLE_ycbcr_422' : { 'number' : 275, 'flags'", "'extensions/AMD/AMD_query_buffer_object.txt', }, 'GL_AMD_sample_positions' : { 'number' : 405, 'flags' : { 'public' },", ": 'drafts/MTK/MTK_shader_binary.txt', }, 'GL_NVX_blend_equation_advanced_multi_draw_buffers' : { 'number' : 492, 'esnumber' : 266, 'flags'", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_matrix_accuracy.txt', }, 'GL_SGIX_color_table_index_mode' : { 'flags' : {", "{ 'esnumber' : 141, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_instanced.txt', },", "'url' : 'extensions/ARB/ARB_cull_distance.txt', }, 'GL_ARB_debug_output' : { 'arbnumber' : 104, 'flags' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/NV/NV_fragment_program2.txt', }, 'GL_NV_fragment_program4' : {", "'extensions/EXT/EXT_disjoint_timer_query.txt', }, 'GL_EXT_draw_buffers' : { 'esnumber' : 151, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/ARB/ARB_clear_buffer_object.txt', }, 'GL_ARB_clear_texture' : { 'arbnumber' : 145, 'flags'", "'url' : 'extensions/INTEL/INTEL_conservative_rasterization.txt', }, 'GL_INTEL_fragment_shader_ordering' : { 'number' : 441, 'flags' : {", "'extensions/EXT/EXT_polygon_offset.txt', }, 'GL_EXT_polygon_offset_clamp' : { 'number' : 460, 'esnumber' : 252, 'flags' :", "{ 'number' : 419, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'flags' : { 'public' }, 'supporters' : { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' },", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_resample.txt', }, 'GL_SGIX_scalebias_hint' : { 'number'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow_ambient.txt', }, 'GL_ARB_sparse_buffer' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_raster_multisample.txt', }, 'GL_EXT_read_format_bgra' : { 'esnumber' : 66,", "'extensions/NV/GLX_NV_copy_buffer.txt', }, 'GL_NV_copy_buffer' : { 'esnumber' : 158, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias' : { 'GL_ARB_robustness_share_group_isolation' },", "'GL_SGIX_fragments_instrument' : { 'number' : 180, 'flags' : { 'incomplete' }, 'supporters' :", "'GL_ARB_robust_buffer_access_behavior' : { 'arbnumber' : 135, 'flags' : { 'public' }, 'url' :", "'extensions/INTEL/INTEL_performance_query.txt', }, 'GLX_INTEL_swap_event' : { 'number' : 384, 'flags' : { 'public' },", "'flags' : { 'incomplete' }, 'url' : 'extensions/EXT/EXT_texture_cube_map.txt', 'comments' : 'Extension shipped but", "{ 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc.txt', }, 'GL_IMG_texture_compression_pvrtc2' : { 'esnumber' : 140,", ": { 'SGI' }, 'url' : 
'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias' : { 'GL_SGIS_color_range' }, },", "'arbnumber' : 115, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_load_store.txt', }, 'GL_ARB_shader_image_size'", ": { 'arbnumber' : 8, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_NV_gpu_program5_mem_extended' : { 'number' : 434, 'flags' : { 'public' }, 'supporters'", "'extensions/NV/NV_vertex_program2.txt', }, 'GL_NV_vertex_program2_option' : { 'number' : 305, 'flags' : { 'public' },", "}, 'url' : 'extensions/NV/NV_query_resource_tag.txt', }, 'GL_NV_read_buffer' : { 'esnumber' : 93, 'flags' :", "'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_vertex_array_bgra.txt', }, 'GL_EXT_vertex_array_set' : { 'flags' : {", "{ 'public' }, 'url' : 'extensions/KHR/KHR_debug.txt', }, 'GL_KHR_no_error' : { 'arbnumber' : 175,", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_reference_plane.txt', }, 'GL_SGIX_resample'", ": { 'arbnumber' : 57, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_uniform_buffer_object.txt',", "'number' : 509, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_program_binary.txt', }, 'GL_IMG_read_format' : { 'esnumber'", "228, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 242, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview2.txt', }, 'GL_OVR_multiview_multisampled_render_to_texture' :", "164, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_direct_state_access.txt', }, 'GL_ARB_draw_buffers' : {", "'number' : 37, 'esnumber' : 65, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_OES_matrix_get' : { 'esnumber' : 11, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/NV/WGL_NV_gpu_affinity.txt', }, 'WGL_NV_render_depth_texture' : { 'number' : 263, 'flags' : {", ": 451, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int64.txt', }, 'GL_AMD_interleaved_elements' :", "144, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_separate_specular_color.txt', }, 'GL_EXT_shader_framebuffer_fetch' : {", "{ 'public' }, 'url' : 'extensions/OES/OES_surfaceless_context.txt', }, 'GL_OES_tessellation_shader' : { 'esnumber' : 214,", "'esnumber' : 220, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external_essl3.txt', }, 'GL_OES_EGL_sync'", "'GL_NV_shader_atomic_fp16_vector' : { 'number' : 474, 'esnumber' : 261, 'flags' : { 'public'", "'extensions/INTEL/INTEL_conservative_rasterization.txt', }, 'GL_INTEL_fragment_shader_ordering' : { 'number' : 441, 'flags' : { 'public' },", "'esnumber' : 195, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_packed_float.txt', }, 'GL_APPLE_texture_range'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_conditional_render.txt', },", "'number' : 468, 'esnumber' : 230, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_spirv_extensions.txt',", ": 73, 'flags' : { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', }, 'GL_NV_draw_buffers' :", ": { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', }, 'GLU_EXT_object_space_tess' : { 'number' :", "'extensions/NV/NV_framebuffer_multisample.txt', }, 'GL_NV_framebuffer_multisample_coverage' : { 'number' : 336, 'flags' : { 'public' },", ": { 'number' : 259, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_border_clamp.txt', }, 'GL_EXT_texture_buffer' : { 'esnumber' :", "'extensions/NV/NV_vertex_array_range2.txt', }, 'GL_NV_vertex_attrib_integer_64bit' : { 'number' : 392, 'flags' : { 'public' },", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_image_compression.txt', }, 'GL_SGIX_impact_pixel_texture' : { 'number' : 126,", "}, 'GL_ANDROID_extension_pack_es31a' : { 'esnumber' : 187, 'flags' : { 'public' }, 'url'", ": 134, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_program_interface_query.txt', }, 
'GL_ARB_provoking_vertex' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_fd.txt', 'alias' : { 'GL_EXT_semaphore_fd' },", "'esnumber' : 270, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_bindless_texture.txt', }, 'GL_IMG_framebuffer_downsample'", "198, 'esnumber' : 154, 'flags' : { 'public' }, 'supporters' : { 'INTEL',", ": 354, 'flags' : { 'public' }, 'supporters' : { 'Blizzard', 'NVIDIA', 'S3',", "'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias' : { 'GL_KHR_blend_equation_advanced_coherent' },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_storage_buffer_object.txt', }, 'GL_NV_shader_thread_group' : { 'number' : 447,", "63, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get2.txt', }, 'GL_QCOM_performance_monitor_global_mode' : {", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig.txt', }, 'GLX_SGIX_fbconfig_float'", ": 'extensions/SGIX/GLU_SGIX_icc_compress.txt', }, 'GL_SGIX_icc_texture' : { 'number' : 154, 'flags' : { 'incomplete'", ": { 'number' : 128, 'flags' : { 'incomplete' }, 'supporters' : {", "}, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_convolution_border_modes.txt', }, 'GLX_SUN_get_transparent_index' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_minmax_factor.txt', }, 'GL_NV_blend_square' : { 'number'", "{ 'public' }, 'url' : 'extensions/NV/NV_viewport_array.txt', }, 'GL_NV_viewport_array2' : { 'number' : 476,", ": 449, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_image_load_formatted.txt', }, 'GL_EXT_shader_image_load_store' :", "'esnumber' : 133, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_array.txt', }, 'GL_NV_texture_barrier'", "'GL_SGIX_scalebias_hint' : { 'number' : 236, 'flags' : { 'incomplete' }, 'supporters' :", "}, 'url' : 'extensions/EXT/EXT_robustness.txt', }, 'GL_EXT_sRGB' : { 'esnumber' : 105, 'flags' :", "'GL_OES_shader_multisample_interpolation' : { 
'esnumber' : 172, 'flags' : { 'public' }, 'url' :", ": 47, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'SGI' },", ": { 'esnumber' : 62, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get.txt',", ": { 'esnumber' : 100, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_occlusion_query_boolean.txt',", "'public' }, 'url' : 'extensions/EXT/EXT_YUV_target.txt', }, 'GL_EXT_abgr' : { 'number' : 1, 'flags'", "'number' : 32, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI'", "}, 'url' : 'extensions/EXT/EXT_texture_perturb_normal.txt', }, 'GL_EXT_texture_rg' : { 'esnumber' : 103, 'flags' :", "'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_video_sync.txt', }, 'GL_SUNX_constant_data' : { 'number' : 163, 'flags'", ": { 'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_multiple_textures.txt', }, 'GL_EXT_multisample_compatibility' :", "'extensions/SGIX/SGIX_line_quality_hint.txt', }, 'GL_SGIX_list_priority' : { 'number' : 80, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/NV/NV_fragment_coverage_to_color.txt', }, 'GL_NV_fragment_program' : { 'number' : 282,", ": { 'SUN' }, 'url' : 'extensions/SUN/SUN_global_alpha.txt', }, 'GL_SUN_mesh_array' : { 'number' :", "}, 'url' : 'extensions/ARB/ARB_shader_atomic_counters.txt', }, 'GL_ARB_shader_ballot' : { 'arbnumber' : 183, 'flags' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_video_output.txt', }, 'WGL_OML_sync_control' : { 'number' : 242,", "{ 'number' : 261, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'public' }, 'url' : 'extensions/NV/NV_viewport_swizzle.txt', }, 'GL_OES_EGL_image' : { 'esnumber' : 23, 'flags'", "'QCOM' }, 'url' : 'extensions/QCOM/QCOM_tiled_rendering.txt', }, 'GL_QCOM_writeonly_rendering' : { 'esnumber' : 61, 'flags'", "{ 'arbnumber' : 147, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_bind.txt', },", ": { 'public' }, 'supporters' : { '3DFX', 'NVIDIA', 'REND' }, 'url' :", "'GL_GREMEDY_frame_terminator' : { 'number' : 345, 'flags' : { 'public' }, 
'supporters' :", "'url' : 'extensions/NV/NV_texture_shader2.txt', }, 'GL_NV_texture_shader3' : { 'number' : 265, 'flags' : {", "'TGS' }, 'url' : 'extensions/PGI/PGI_vertex_hints.txt', }, 'GL_QCOM_alpha_test' : { 'esnumber' : 89, 'flags'", "'extensions/SGIX/SGIX_instruments.txt', }, 'GL_SGIX_interlace' : { 'number' : 45, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_element_array.txt', }, 'GL_APPLE_fence' :", "'url' : 'extensions/EXT/EXT_texture_view.txt', }, 'GL_EXT_timer_query' : { 'number' : 319, 'flags' : {", "'GL_NV_gpu_program5_mem_extended' : { 'number' : 434, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/OES/OES_depth32.txt', }, 'GL_OES_depth_texture' : { 'esnumber' : 43, 'flags' :", "'url' : 'extensions/ARB/ARB_texture_gather.txt', }, 'GL_ARB_texture_mirror_clamp_to_edge' : { 'arbnumber' : 149, 'flags' : {", "}, 'url' : 'extensions/SGIX/SGIX_spotlight_cutoff.txt', }, 'GL_SGIX_sprite' : { 'number' : 52, 'flags' :", "{ 'number' : 399, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float' }, }, 'GL_EXT_packed_pixels' : { 'number' : 23, 'flags' :", "}, 'GL_EXT_paletted_texture' : { 'number' : 78, 'flags' : { 'public' }, 'supporters'", "'GL_OES_EGL_image_external' : { 'esnumber' : 87, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_shading.txt', }, 'GL_OES_sample_variables' : { 'esnumber'", "'esnumber' : 216, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_buffer.txt', }, 'GL_OES_texture_compression_astc'", "'GL_EXT_multi_draw_indirect' : { 'esnumber' : 205, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/EXT/EXT_texture_env.txt', }, 'GL_EXT_texture_env_add' : { 'number' : 185, 'flags' : {", "'extensions/ARB/ARB_occlusion_query2.txt', }, 'GL_ARB_parallel_shader_compile' : { 'arbnumber' : 179, 'flags' : { 'public' },", "'public' }, 'url' : 
'extensions/ARB/ARB_shader_atomic_counters.txt', }, 'GL_ARB_shader_ballot' : { 'arbnumber' : 183, 'flags'", "{ 'esnumber' : 171, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_image_atomic.txt', },", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_multi_buffer.txt', }, 'GL_SGIX_texture_phase' : { 'flags' : { 'incomplete'", ": 'extensions/ARB/ARB_shader_objects.txt', }, 'GL_ARB_shader_precision' : { 'arbnumber' : 98, 'flags' : { 'public'", "'GL_SGIX_pixel_texture' : { 'number' : 499, 'flags' : { 'public' }, 'supporters' :", "189, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'url' : 'extensions/NV/NV_read_buffer.txt', }, 'GL_NV_read_depth_stencil' : { 'esnumber' : 94, 'flags' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_pixel_data_range.txt', },", "122, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias' : { 'GL_EXT_shader_framebuffer_fetch_non_coherent'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_vertex.txt', }, 'GL_ARB_indirect_parameters'", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_envmap_bumpmap.txt', }, 'GL_ATI_fragment_shader' : { 'number'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_instanced.txt', }, 'GL_EXT_draw_range_elements' : { 'number' : 112, 'flags'", ": 'extensions/OES/OES_compressed_paletted_texture.txt', }, 'GL_OES_copy_image' : { 'esnumber' : 208, 'flags' : { 'public'", "}, 'GL_ARB_texture_stencil8' : { 'arbnumber' : 150, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/ATI/ATI_text_fragment_shader.txt', }, 'GL_ATI_texture_env_combine3' :", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_view.txt', }, 'GL_OES_vertex_array_object' : { 'esnumber' :", "177, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_shader_interlock.txt', }, 'GL_ARB_framebuffer_no_attachments' : {", "'supporters' : { 'ARB' }, 'url' : 
'extensions/ARB/ARB_texture_env_crossbar.txt', }, 'GL_ARB_texture_env_dot3' : { 'arbnumber'", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_tessellator.txt', }, 'GL_AMD_vertex_shader_viewport_index' : {", "}, 'url' : 'extensions/NV/NV_texture_compression_s3tc_update.txt', }, 'GL_NV_texture_compression_vtc' : { 'number' : 228, 'flags' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_fragment_coord_conventions.txt', }, 'GL_ARB_fragment_layer_viewport' : { 'arbnumber' : 129,", ": { 'arbnumber' : 181, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_locations.txt',", "'number' : 421, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 453, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gcn_shader.txt', }, 'GLX_AMD_gpu_association' :", "'url' : 'extensions/NV/NV_parameter_buffer_object.txt', }, 'GL_NV_parameter_buffer_object2' : { 'number' : 378, 'flags' : {", ": { 'number' : 126, 'flags' : { 'incomplete' }, 'supporters' : {", "'public' }, 'url' : 'extensions/EXT/EXT_shader_image_load_formatted.txt', }, 'GL_EXT_shader_image_load_store' : { 'number' : 386, 'flags'", "'WGL_NV_video_capture' }, }, 'GLX_NV_video_out' : { 'number' : 348, 'flags' : { 'public'", "190, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robustness.txt', }, 'GL_KHR_texture_compression_astc_hdr' : {", "{ 'public' }, 'url' : 'extensions/OES/OES_extended_matrix_palette.txt', }, 'GL_OES_fbo_render_mipmap' : { 'esnumber' : 27,", ": { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_half_float.txt', }, 'GL_EXT_color_subtable' : { 'number' :", "{ 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external_essl3.txt', }, 'GL_OES_EGL_sync' : { 'esnumber' : 75,", "'url' : 'extensions/SGIX/SGIX_color_matrix_accuracy.txt', }, 'GL_SGIX_color_table_index_mode' : { 'flags' : { 'incomplete' }, 'url'", "'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_polygon_offset.txt', }, 'GL_EXT_polygon_offset_clamp' : {", "'url' : 
'extensions/ARB/ARB_shading_language_packing.txt', }, 'GL_ARB_shadow' : { 'arbnumber' : 23, 'flags' : {", "'esnumber' : 92, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fbo_color_attachments.txt', }, 'GL_NV_fence'", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcba.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_conservative_depth.txt', }, 'GL_EXT_convolution' : { 'number'", "'GL_NV_viewport_swizzle' : { 'number' : 483, 'esnumber' : 258, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN'", "{ 'number' : 297, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_clear_tag.txt', }, 'GL_EXT_stencil_two_side' : {", "'esnumber' : 261, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_fp16_vector.txt', }, 'GL_NV_shader_atomic_int64'", ": 'extensions/SGIX/SGIX_reference_plane.txt', }, 'GL_SGIX_resample' : { 'number' : 212, 'flags' : { 'public'", "'url' : 'extensions/SGIX/SGIX_image_compression.txt', }, 'GL_SGIX_impact_pixel_texture' : { 'number' : 126, 'flags' : {", "'supporters' : { 'SUN' }, 'url' : 'extensions/SUNX/SUNX_constant_data.txt', }, 'GL_SUN_convolution_border_modes' : { 'number'", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_scale.txt',", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_envmap_bumpmap.txt', }, 'GL_ATI_fragment_shader' : {", "'GLX_SGI_cushion' : { 'number' : 62, 'flags' : { 'public' }, 'supporters' :", ": 261, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_multicast.txt', }, 'GL_NV_gpu_program4' : { 'number'", "}, 'url' : 'extensions/NV/NV_viewport_swizzle.txt', }, 'GL_OES_EGL_image' : { 'esnumber' : 23, 'flags' :", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object.txt', }, 'GL_ARB_texture_buffer_object_rgb32' : { 'arbnumber' : 
92,", "'public' }, 'url' : 'extensions/EXT/EXT_gpu_shader5.txt', }, 'GL_EXT_histogram' : { 'number' : 11, 'flags'", "}, 'url' : 'extensions/HP/HP_image_transform.txt', }, 'GL_HP_occlusion_test' : { 'number' : 137, 'flags' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_cushion.txt', }, 'GL_SGI_fft' :", ": 481, 'esnumber' : 246, 'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_framebuffer_CMAA.txt',", ": { 'MS' }, 'url' : 'extensions/EXT/EXT_bgra.txt', }, 'GL_EXT_bindable_uniform' : { 'number' :", "'url' : 'extensions/GREMEDY/GREMEDY_string_marker.txt', }, 'GL_HP_convolution_border_modes' : { 'number' : 67, 'flags' : {", "}, 'GL_SGIX_depth_texture' : { 'number' : 63, 'flags' : { 'public' }, 'supporters'", ": { 'arbnumber' : 43, 'flags' : { 'public' }, 'supporters' : {", "'extensions/NV/NV_path_rendering_shared_edge.txt', }, 'GL_NV_pixel_buffer_object' : { 'esnumber' : 134, 'flags' : { 'public' },", ": 412, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture_clamp.txt', }, 'GL_ARB_spirv_extensions' : { 'arbnumber' : 194,", "'extensions/ARB/ARB_transform_feedback_instanced.txt', }, 'GL_ARB_transform_feedback_overflow_query' : { 'arbnumber' : 173, 'flags' : { 'public' },", "{ 'esnumber' : 159, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_explicit_attrib_location.txt', },", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_pack_invert.txt', }, 'GLX_MESA_pixmap_colormap' : {", "{ 'number' : 348, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'extensions/EXT/EXT_separate_shader_objects.gles.txt', 'comments' : 'Different that the OpenGL extension with the same name string.',", "8, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'url' : 'extensions/EXT/EXT_external_objects_fd.txt', 'alias' : { 'GL_EXT_semaphore_fd' }, }, 'GL_EXT_memory_object_win32' : { 'number'", "'url' : 'extensions/ARB/ARB_texture_swizzle.txt', }, 'GL_ARB_texture_view' : 
{ 'arbnumber' : 124, 'flags' : {", ": 'extensions/EXT/EXT_pixel_transform.txt', }, 'GL_EXT_pixel_transform_color_table' : { 'number' : 139, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_cube_map_array.txt', }, 'GL_EXT_texture_env' : { 'number' :", "}, 'url' : 'extensions/ATI/ATI_element_array.txt', }, 'GL_ATI_envmap_bumpmap' : { 'number' : 244, 'flags' :", "'IMG', 'SUN' }, 'url' : 'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias' : { 'GL_SUN_multi_draw_arrays' }, }, 'GL_EXT_multi_draw_indirect'", "'GL_EXT_texture_rg' : { 'esnumber' : 103, 'flags' : { 'public' }, 'url' :", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_generate_mipmap.txt', }, 'GL_SGIS_line_texgen' : { 'flags' : { 'incomplete'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rectangle.txt',", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_add.txt', }, 'GL_ARB_texture_env_combine' : { 'arbnumber' :", "'public' }, 'url' : 'extensions/OES/OES_texture_border_clamp.txt', }, 'GL_OES_texture_buffer' : { 'esnumber' : 216, 'flags'", "'extensions/SGIX/SGIX_color_table_index_mode.txt', }, 'GLX_SGIX_color_type' : { 'number' : 89, 'flags' : { 'incomplete' },", ": { 'number' : 495, 'flags' : { 'public' }, 'supporters' : {", "'extensions/NV/NV_bgr.txt', }, 'GL_NV_bindless_multi_draw_indirect' : { 'number' : 432, 'flags' : { 'public' },", ": 76, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_include.txt', }, 'GL_ARB_shading_language_packing' :", ": 'extensions/EXT/EXT_packed_depth_stencil.txt', }, 'GL_EXT_packed_float' : { 'number' : 328, 'flags' : { 'public'", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_pixmap_colormap.txt', }, 'GL_MESA_program_binary_formats' : {", "'url' : 'extensions/EXT/EXT_polygon_offset.txt', }, 'GL_EXT_polygon_offset_clamp' : { 'number' : 460, 'esnumber' : 252,", "}, 'GL_SGIX_icc_texture' : { 'number' : 154, 'flags' : { 'incomplete' }, 'supporters'", "320 / 540 only.', }, 
'GL_SGIX_ycrcba' : { 'number' : 203, 'flags' :", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_row_bytes.txt', }, 'GL_APPLE_specular_vector' : { 'number' : 159,", "'HP', 'IBM', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_texture3D.txt', }, 'GL_EXT_texture_array' : { 'number'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_store.txt', }, 'GL_NV_shader_noperspective_interpolation' : { 'esnumber' : 201,", "'GLX_EXT_texture_from_pixmap' : { 'number' : 344, 'flags' : { 'public' }, 'supporters' :", "431, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", ": 'extensions/ARB/ARB_multitexture.txt', }, 'GL_ARB_occlusion_query' : { 'arbnumber' : 29, 'flags' : { 'public'", ": 425, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'extensions/EXT/EXT_gpu_shader5.txt', }, 'GL_EXT_histogram' : { 'number' : 11, 'flags' : { 'public' },", ": { 'number' : 389, 'esnumber' : 260, 'flags' : { 'public' },", ": { 'number' : 61, 'flags' : { 'public' }, 'supporters' : {", "'arbnumber' : 39, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'GL_IMG_texture_env_enhanced_fixed_function' : { 'esnumber' : 58, 'flags' : { 'public' }, 'url' :", "'esnumber' : 44, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_packed_depth_stencil.txt', }, 'GL_OES_paletted_texture'", ": 103, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cl_event.txt', }, 'GL_ARB_clear_buffer_object' :", "{ 'public' }, 'url' : 'extensions/QCOM/QCOM_texture_foveated.txt', }, 'GL_QCOM_shader_framebuffer_fetch_noncoherent' : { 'esnumber' : 277,", "'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Included with arbnumber 55, WGL_ARB_create_context.', }, 'WGL_ARB_create_context_robustness' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_counters.txt', }, 'GL_NV_shader_atomic_float' : { 'number' : 419,", "{ 'HP', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_convolution.txt', }, 'GL_EXT_coordinate_frame' : {", "}, 'url' : 'extensions/ARB/ARB_shadow_ambient.txt', 
}, 'GL_ARB_sparse_buffer' : { 'arbnumber' : 172, 'flags' :", ": 341, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "150, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_viewport_index.txt', }, 'GL_ANDROID_extension_pack_es31a' : { 'esnumber' : 187, 'flags'", ": 'extensions/EXT/EXT_texture_compression_rgtc.txt', }, 'GL_EXT_texture_compression_s3tc' : { 'number' : 198, 'esnumber' : 154, 'flags'", "'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_combine.txt', }, 'GL_EXT_texture_env_dot3' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_video_out.txt', }, 'GL_NV_viewport_array' : { 'esnumber' : 202,", ": 10, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": 'extensions/EXT/EXT_draw_buffers2.txt', }, 'GL_EXT_draw_buffers_indexed' : { 'esnumber' : 176, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/AMD/AMD_program_binary_Z400.txt', }, 'GL_AMD_query_buffer_object' : { 'number' :", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_object.txt', }, 'GL_APPLE_vertex_array_range' : {", ": { 'arbnumber' : 146, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_enhanced_layouts.txt',", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_buffers.txt', }, 'GL_NV_draw_instanced' : { 'esnumber'", ": 419, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_envmap_bumpmap.txt', }, 'GL_ATI_fragment_shader' : { 'number' :", ": { 'number' : 376, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/NV/NV_robustness_video_memory_purge.txt', }, 'GL_NV_sRGB_formats' : { 'esnumber' : 148, 'flags'", "}, 'url' : 'extensions/NV/NV_fragment_coverage_to_color.txt', }, 'GL_NV_fragment_program' : { 'number' : 282, 'flags' :", "'flags' : { 'public' }, 'url' : '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', }, 'GL_OES_blend_equation_separate' : { 'esnumber'", 
"'AMD' }, 'url' : 'extensions/INTEL/GLX_INTEL_swap_event.txt', }, 'GL_INTEL_texture_scissor' : { 'number' : 135, 'flags'", "{ 'number' : 380, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_instanced.txt', }, 'GL_NV_draw_texture' : { 'number'", "{ 'number' : 161, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "209, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_buffers_indexed.txt', }, 'GL_OES_draw_elements_base_vertex' : {", "'url' : 'extensions/SGIX/SGIX_pixel_texture.txt', 'comments' : 'Previously shared extension number 15 with SGIS_pixel_texture.', },", "421, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'arbnumber' : 52, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/NV/NV_float_buffer.txt', 'alias' : { 'WGL_NV_float_buffer' }, }, 'GL_NV_fog_distance' : { 'number' :", "}, 'GL_OES_geometry_shader' : { 'esnumber' : 210, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/SGIS/SGIS_generate_mipmap.txt', }, 'GL_SGIS_line_texgen' : { 'flags' : { 'incomplete' }, 'url'", ": { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_max_level.txt', }, 'GL_APPLE_texture_packed_float' : { 'esnumber' :", "{ 'number' : 501, 'esnumber' : 274, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/NV/NV_read_depth_stencil.txt', }, 'GL_NV_register_combiners' : { 'number' : 191, 'flags' : {", "'url' : 'extensions/SGIX/SGIX_fog_patchy.txt', }, 'GL_SGIX_fog_scale' : { 'number' : 161, 'flags' : {", "'GL_SGIX_fragment_specular_lighting' : { 'flags' : { 'incomplete', 'public' }, 'url' : 'extensions/SGIX/SGIX_fragment_specular_lighting.txt', },", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture.txt', 'comments' : 'Previously shared extension number 15 with", "'public' }, 'supporters' : { 'ES', 'INGR', 'SGI' }, 'url' : 'extensions/EXT/EXT_packed_pixels.txt', },", "416, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "52, 'flags' : { 'public' }, 'supporters' 
: { 'SGI' }, 'url' :", "Workstation 320 / 540 only.', }, 'GL_SGIX_ycrcba' : { 'number' : 203, 'flags'", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_map_object_buffer.txt', }, 'GL_ATI_meminfo' :", "{ 'public' }, 'url' : 'extensions/NV/NV_viewport_array2.txt', }, 'GL_NV_viewport_swizzle' : { 'number' : 483,", "'number' : 83, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'public' }, 'url' : 'extensions/EXT/EXT_clip_cull_distance.txt', }, 'GL_EXT_clip_volume_hint' : { 'number' : 79, 'flags'", "}, 'url' : 'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias' : { 'GL_SUN_multi_draw_arrays' }, }, 'GL_EXT_multi_draw_indirect' : {", "}, 'url' : 'extensions/NV/NV_geometry_program4.txt', }, 'GL_NV_geometry_shader4' : { 'number' : 338, 'flags' :", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_client_storage.txt', }, 'GL_APPLE_clip_distance' : { 'esnumber' :", "}, 'url' : 'extensions/ATI/ATI_vertex_array_object.txt', }, 'GL_ATI_vertex_attrib_array_object' : { 'number' : 290, 'flags' :", "'CodeWeavers', 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_decode.txt', }, 'GL_EXT_texture_shared_exponent' : { 'number' :", "'number' : 348, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'public' }, 'url' : 'extensions/QCOM/QCOM_framebuffer_foveated.txt', }, 'GL_QCOM_texture_foveated' : { 'esnumber' : 293,", "'public' }, 'url' : 'extensions/ARB/ARB_debug_output.txt', }, 'GL_ARB_depth_buffer_float' : { 'arbnumber' : 43, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_tessellation_shader.txt', 'alias' : { 'GL_EXT_tessellation_point_size' },", "{ 'public' }, 'url' : 'extensions/NV/NV_pixel_buffer_object.txt', }, 'GL_NV_pixel_data_range' : { 'number' : 284,", "}, 'url' : 'extensions/EXT/EXT_texture.txt', }, 'GL_EXT_texture3D' : { 'number' : 6, 'flags' :", "57, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_user_clip_plane.txt', }, 'GL_INGR_color_clamp' : {", "{ 'public' }, 'url' : 
'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', }, 'GL_APPLE_texture_max_level' : { 'esnumber' : 80,", "'extensions/SGIS/SGIS_clip_band_hint.txt', }, 'GLX_SGIS_color_range' : { 'number' : 115, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_vertex.txt', }, 'GL_ARB_indirect_parameters' :", "'esnumber' : 208, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_copy_image.txt', }, 'GL_OES_depth24'", "'GL_ARB_sampler_objects' : { 'arbnumber' : 81, 'flags' : { 'public' }, 'url' :", ": 'extensions/3DFX/3DFX_tbuffer.txt', }, 'GL_3DFX_texture_compression_FXT1' : { 'number' : 206, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_resample.txt',", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_float.txt',", "}, 'url' : 'extensions/ARB/ARB_explicit_uniform_location.txt', }, 'GL_ARB_fragment_coord_conventions' : { 'arbnumber' : 63, 'flags' :", "'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', }, 'GL_ANGLE_instanced_arrays' : { 'esnumber' : 109, 'flags'", "'GL_ARB_sample_shading' : { 'arbnumber' : 70, 'flags' : { 'public' }, 'url' :", "'extensions/ARB/ARB_texture_cube_map.txt', }, 'GL_ARB_texture_cube_map_array' : { 'arbnumber' : 71, 'flags' : { 'public' },", "but shipped as ARB_transform_feedback2.', }, 'GL_EXT_unpack_subimage' : { 'esnumber' : 90, 'flags' :", "'GL_NV_fragment_shader_interlock' : { 'number' : 468, 'esnumber' : 230, 'flags' : { 'public'", "'extensions/QCOM/QCOM_binning_control.txt', }, 'GL_QCOM_driver_control' : { 'esnumber' : 55, 'flags' : { 'public' },", "'number' : 50, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'GL_EXT_multisample' }, }, 'WGL_EXT_pbuffer' : { 'number' : 171, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_io_blocks.txt', }, 'GL_OES_shader_multisample_interpolation' : { 'esnumber'", "{ 'SGI' }, 'url' : 
'extensions/SGIS/SGIS_shared_multisample.txt', 'alias' : { 'GLX_SGIS_shared_multisample' }, }, 'GL_SGIS_sharpen_texture'", "'extensions/SGI/SGI_color_table.txt', 'comments' : 'Partial HP support.', }, 'GL_SGI_complex' : { 'number' : 87,", "'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_swap_method.txt',", "{ 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_swap_control.txt', }, 'GL_MESA_tile_raster_order' : { 'number' : 515,", ": { 'esnumber' : 200, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_image_formats.txt',", "{ 'arbnumber' : 141, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage_multisample.txt', },", "}, 'url' : 'extensions/SGIS/SGIS_texture_lod.txt', }, 'GL_SGIS_texture_select' : { 'number' : 51, 'flags' :", ": { 'esnumber' : 277, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt',", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_decimation.txt', }, 'GL_SGIX_depth_pass_instrument' : {", "'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pbuffer.txt', }, 'WGL_EXT_pixel_format' : { 'number' : 170, 'flags'", ": { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multitexture.txt', }, 'GL_SGIS_pixel_texture' : { 'number' :", ": { 'arbnumber' : 142, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt',", "'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_blit.txt', }, 'GL_EXT_framebuffer_multisample' : { 'number' : 317, 'flags'", ": 'extensions/SGIX/SGIX_impact_pixel_texture.txt', }, 'GL_SGIX_instrument_error' : { 'flags' : { 'incomplete' }, 'url' :", "'extensions/ANGLE/ANGLE_framebuffer_blit.txt', }, 'GL_ANGLE_framebuffer_multisample' : { 'esnumber' : 84, 'flags' : { 'public' },", "WGL_ARB_create_context_no_error.', 'alias' : { 'WGL_ARB_create_context_no_error' }, }, 'GLX_ARB_create_context_profile' : { 'arbnumber' : 75,", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_video_output.txt', }, 
'WGL_OML_sync_control'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_video_output.txt', }, 'WGL_OML_sync_control' : {", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_window_pos.txt', }, 'GL_MESA_ycbcr_texture'", "}, 'GL_OES_mapbuffer' : { 'esnumber' : 29, 'flags' : { 'public' }, 'url'", "99, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_64bit.txt', }, 'GL_ARB_vertex_attrib_binding' : {", ": { 'esnumber' : 151, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers.txt',", "162, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_compression_astc.txt', }, 'GL_OES_texture_cube_map' : {", "'number' : 298, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'url' : 'extensions/APPLE/APPLE_element_array.txt', }, 'GL_APPLE_fence' : { 'number' : 272, 'flags' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_pixel.txt', }, 'GL_SGIX_bali_g_instruments' : { 'flags' : { 'incomplete'", "}, 'url' : 'extensions/ANGLE/ANGLE_program_binary.txt', }, 'GL_ANGLE_texture_compression_dxt3' : { 'esnumber' : 111, 'flags' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_buffer_object.txt', },", "}, 'supporters' : { 'ANGLE' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', }, 'GL_EXT_texture_cube_map' : {", "'extensions/NV/NV_vertex_program4.txt', }, 'GL_NV_video_capture' : { 'number' : 374, 'flags' : { 'public' },", ": { 'DEC', 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_vertex_array.txt', },", "'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', }, 'GL_ANGLE_depth_texture' : { 'esnumber' : 138, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_swap_control.txt',", ": 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias' : { 'WGL_ARB_robustness_share_group_isolation' }, }, 'WGL_ATI_pixel_format_float' : { 'number' :", "193, 'flags' : { 'public' }, 
'supporters' : { 'NVIDIA' }, 'url' :", "}, 'url' : 'extensions/APPLE/APPLE_float_pixels.txt', }, 'GL_APPLE_flush_buffer_range' : { 'number' : 321, 'flags' :", "{ 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_edge_clamp.txt', }, 'GL_SGIS_texture_filter4' : { 'number'", ": { 'public' }, 'url' : 'extensions/ARM/ARM_rgba8.txt', }, 'GL_ARM_shader_framebuffer_fetch' : { 'esnumber' :", "508, 'esnumber' : 284, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_buffer.txt', },", ": { 'number' : 57, 'flags' : { 'public' }, 'supporters' : {", ": 117, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage.txt', }, 'GL_ARB_texture_storage_multisample' :", "}, 'WGL_EXT_colorspace' : { 'number' : 498, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/ARB/ARB_arrays_of_arrays.txt', }, 'GL_ARB_base_instance' : { 'arbnumber' : 107, 'flags' : {", ": 'extensions/EXT/EXT_texture_sRGB_decode.txt', }, 'GL_EXT_texture_shared_exponent' : { 'number' : 333, 'flags' : { 'public'", ": { 'number' : 138, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_protected_textures.txt', }, 'GL_EXT_provoking_vertex' : { 'number' : 364,", "'extensions/SUN/SUN_mesh_array.txt', }, 'GL_SUN_slice_accum' : { 'number' : 258, 'flags' : { 'public' },", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_fragment_shader.txt', }, 'GL_ATI_map_object_buffer' : { 'number'", "}, 'url' : 'extensions/AMD/AMD_draw_buffers_blend.txt', }, 'GL_AMD_framebuffer_sample_positions' : { 'number' : 454, 'flags' :", "{ 'number' : 117, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'GL_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_sample_locations' : { 'arbnumber' : 181, 'flags' : {", "'GL_NV_stereo_view_rendering' : { 'number' : 489, 'esnumber' : 296, 'flags' : { 'public'", ": 1, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI',", "{ 'arbnumber' : 45, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": { 'public' }, 
'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_packed_float.txt', 'alias'", "'esnumber' : 283, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_win32_keyed_mutex.txt', }, 'GL_EXT_window_rectangles'", "}, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_compiled_vertex_array.txt', }, 'GL_EXT_compressed_ETC1_RGB8_sub_texture' :", "'url' : 'extensions/SGIX/SGIX_fog_texture.txt', }, 'GL_SGIX_fragment_lighting_space' : { 'number' : 118, 'flags' : {", "'GL_REND_screen_coordinates' : { 'number' : 155, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_color_matrix.txt', }, 'GL_SGI_color_table' : { 'number'", "{ 'public' }, 'url' : 'extensions/IMG/IMG_texture_filter_cubic.txt', }, 'GL_IMG_user_clip_plane' : { 'esnumber' : 57,", ": 461, 'esnumber' : 225, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_post_depth_coverage.txt',", "'number' : 60, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_base_instance.txt', }, 'GL_ARB_bindless_texture' : { 'arbnumber' : 152,", ": 'extensions/OES/OES_texture_float_linear.txt', 'alias' : { 'GL_OES_texture_half_float_linear' }, }, 'GL_OES_texture_mirrored_repeat' : { 'esnumber' :", "'SUN' }, 'url' : 'extensions/SUN/SUN_slice_accum.txt', }, 'GL_SUN_triangle_list' : { 'number' : 165, 'flags'", "53, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'GL_NV_parameter_buffer_object' : { 'number' : 339, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_convolution_accuracy.txt', }, 'GL_SGIX_cube_map' : { 'number'", "{ 'esnumber' : 77, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_texture_lod.txt', },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', },", ": { 'number' : 387, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/NV/NV_register_combiners.txt', }, 
'GL_NV_register_combiners2' : { 'number' : 227, 'flags' : {", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_shader_objects.txt', }, 'GL_ARB_shader_precision' : { 'arbnumber' : 98,", "}, 'url' : 'extensions/EXT/EXT_texture_swizzle.txt', }, 'GL_EXT_texture_type_2_10_10_10_REV' : { 'esnumber' : 42, 'flags' :", "'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_subtract.txt', }, 'GLX_EXT_buffer_age' : { 'number' :", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_texture_range.txt', }, 'GL_APPLE_transform_hint' : {", "{ 'number' : 365, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA',", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_spotlight_cutoff.txt', }, 'GL_SGIX_sprite' :", "'GL_ARB_texture_rg' : { 'arbnumber' : 53, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/EXT/EXT_bgra.txt', }, 'GL_EXT_bindable_uniform' : { 'number' : 342, 'flags' : { 'public'", "'GL_ARB_framebuffer_sRGB' : { 'arbnumber' : 46, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_pixel.txt', }, 'GL_SGIX_bali_g_instruments' : { 'flags'", ": 'extensions/ARB/ARB_get_texture_sub_image.txt', }, 'GL_ARB_gl_spirv' : { 'arbnumber' : 190, 'flags' : { 'public'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_float.txt', }, 'GL_NV_shader_atomic_float64' : { 'number'", ": 'extensions/NV/NV_vertex_buffer_unified_memory.txt', }, 'GL_NV_vertex_program' : { 'number' : 233, 'flags' : { 'public'", "'url' : 'extensions/SGIS/SGIS_texture_border_clamp.txt', }, 'GL_SGIS_texture_color_mask' : { 'number' : 214, 'flags' : {", ": 24, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "}, 'GL_MESA_resize_buffers' : { 'number' : 196, 'flags' : { 'public' }, 'supporters'", "'extensions/SGIX/SGIX_nurbs_eval.txt', }, 'GL_SGIX_occlusion_instrument' : { 'number' : 151, 'flags' : { 'incomplete' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program2.txt', }, 
'GL_NV_fragment_program4' : { 'number'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_shader.txt', }, 'GL_ARB_fragment_shader_interlock' : { 'arbnumber' : 177,", "}, 'GLX_EXT_visual_info' : { 'number' : 28, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/SGIX/SGIX_texture_multi_buffer.txt', }, 'GL_SGIX_texture_phase' : { 'flags' : { 'incomplete' },", "'url' : 'extensions/AMD/AMD_compressed_ATC_texture.txt', }, 'GL_AMD_conservative_depth' : { 'number' : 385, 'flags' : {", "}, 'url' : 'extensions/EXT/EXT_texture_rg.txt', }, 'GL_EXT_texture_sRGB' : { 'number' : 315, 'flags' :", "'HP' }, 'url' : 'extensions/HP/HP_texture_lighting.txt', }, 'GL_IBM_cull_vertex' : { 'number' : 199, 'flags'", "203, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_rg.txt', }, 'GL_EXT_texture_sRGB' : { 'number'", "{ 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_interlace.txt', }, 'GL_OML_resample'", "'ATI' }, 'url' : 'extensions/ATI/ATI_texture_env_combine3.txt', }, 'GL_ATI_texture_float' : { 'number' : 280, 'flags'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_timer_query.txt', }, 'GL_EXT_transform_feedback' : { 'number' :", "}, 'GL_NV_present_video' : { 'number' : 347, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/ARB/ARB_enhanced_layouts.txt', }, 'GL_ARB_explicit_attrib_location' : { 'arbnumber' : 79, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program1_1.txt', }, 'GL_NV_vertex_program2' : { 'number' : 287, 'flags'", ": 212, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_primitive_bounding_box.txt', }, 'GL_OES_query_matrix' :", ": { 'public' }, 'url' : 'extensions/OES/OES_stencil_wrap.txt', }, 'GL_OES_surfaceless_context' : { 'esnumber' :", "'url' : 'extensions/EXT/EXT_packed_float.txt', 'alias' : { 'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float' }, }, 'GL_EXT_packed_pixels' : {", "}, 'url' : 
'extensions/ARB/ARB_vertex_array_bgra.txt', }, 'GL_ARB_vertex_array_object' : { 'arbnumber' : 54, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness.txt', }, 'GLX_ARB_robustness_application_isolation' : { 'arbnumber'", "{ 'number' : 253, 'flags' : { 'public' }, 'supporters' : { 'I3D'", ": { 'public' }, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI' }, 'url'", ": { 'HP' }, 'url' : 'extensions/HP/HP_convolution_border_modes.txt', }, 'GL_HP_image_transform' : { 'number' :", "'number' : 422, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'url' : 'extensions/EXT/EXT_texture_compression_latc.txt', }, 'GL_EXT_texture_compression_rgtc' : { 'number' : 332, 'esnumber' : 286,", ": 384, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", ": 421, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "63, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url'", "}, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_image_buffer.txt', }, 'WGL_I3D_swap_frame_lock' : {", "'url' : 'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', }, 'GL_AMD_shader_atomic_counter_ops' : { 'number' : 435, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_fence.txt', }, 'GL_NV_fill_rectangle' : { 'number' : 466, 'esnumber'", ": { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_transform_feedback.txt', }, 'GL_EXT_transform_feedback2' : { 'flags'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_debug_output.txt', }, 'GL_AMD_depth_clamp_separate' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_perturb_normal.txt', }, 'GL_EXT_texture_rg' : { 'esnumber'", "'extensions/AMD/AMD_shader_atomic_counter_ops.txt', }, 'GL_AMD_shader_ballot' : { 'number' : 497, 'flags' : { 'public' },", "108, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_storage.txt', }, 'GL_EXT_texture_swizzle' : {", ": 'extensions/NV/NV_polygon_mode.txt', }, 'GL_NV_present_video' : { 'number' : 347, 'flags' : { 'public'", ": { 
'esnumber' : 192, 'flags' : { 'public' }, 'url' : 'extensions/DMP/DMP_program_binary.txt',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_clear_tag.txt',", ": { 'arbnumber' : 118, 'esnumber' : 117, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_texture_lighting.txt', }, 'GL_IBM_cull_vertex' :", "}, 'url' : 'extensions/ARB/ARB_shader_ballot.txt', }, 'GL_ARB_shader_bit_encoding' : { 'arbnumber' : 82, 'flags' :", "85, 'flags' : { 'public' }, 'url' : 'extensions/VIV/VIV_shader_binary.txt', }, 'WGL_3DL_stereo_control' : {", ": 'extensions/NV/NV_explicit_multisample.txt', }, 'GL_NV_fbo_color_attachments' : { 'esnumber' : 92, 'flags' : { 'public'", "}, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_texture_compression_FXT1.txt', }, 'GL_AMD_blend_minmax_factor' : {", "'arbnumber' : 138, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_stencil_texturing.txt', }, 'GL_ARB_sync'", "'GL_INTEL_performance_query' : { 'number' : 443, 'esnumber' : 164, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_filter_minmax.txt', }, 'GL_EXT_texture_format_BGRA8888' : { 'esnumber'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program.txt', }, 'GL_ARB_fragment_program_shadow'", "}, 'GL_EXT_secondary_color' : { 'number' : 145, 'flags' : { 'public' }, 'supporters'", "}, 'GL_EXT_texture_swizzle' : { 'number' : 356, 'flags' : { 'public' }, 'supporters'", ": 180, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_io_blocks.txt', }, 'GL_EXT_shader_non_constant_global_initializers' :", ": 'extensions/SGI/SGI_color_table.txt', 'comments' : 'Partial HP support.', }, 'GL_SGI_complex' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gl_spirv.txt', }, 'GL_ARB_gpu_shader5' : { 'arbnumber'", ": 'extensions/ARB/ARB_polygon_offset_clamp.txt', }, 'GL_ARB_post_depth_coverage' : { 'arbnumber' : 180, 'flags' : { 
'public'", ": { 'public' }, 'url' : 'extensions/OES/OES_geometry_shader.txt', }, 'GL_OES_get_program_binary' : { 'esnumber' :", "'extensions/ARM/ARM_mali_shader_binary.txt', }, 'GL_ARM_rgba8' : { 'esnumber' : 82, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/NV/NV_read_depth_stencil.txt', }, 'GL_NV_register_combiners' : { 'number' : 191,", "{ 'public' }, 'url' : 'extensions/KHR/KHR_no_error.txt', }, 'GL_KHR_parallel_shader_compile' : { 'arbnumber' : 192,", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_coverage.txt', }, 'GL_NV_multisample_filter_hint' : { 'number' :", "283, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_win32_keyed_mutex.txt', }, 'GL_EXT_window_rectangles' : {", ": 'extensions/NV/NV_pixel_data_range.txt', }, 'GL_NV_platform_binary' : { 'esnumber' : 131, 'flags' : { 'public'", "'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_border_clamp.txt', }, 'GL_SGIS_texture_color_mask' : { 'number' :", "{ 'number' : 321, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "}, }, 'GL_EXT_texture_compression_bptc' : { 'esnumber' : 287, 'flags' : { 'public' },", "'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_polygon_offset.txt', },", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_atomic_counter_ops.txt', }, 'GL_AMD_shader_ballot'", "{ 'SUN' }, 'url' : 'extensions/SUN/SUN_vertex.txt', }, 'GL_VIV_shader_binary' : { 'esnumber' : 85,", ": 'extensions/3DFX/3DFX_texture_compression_FXT1.txt', }, 'GL_AMD_blend_minmax_factor' : { 'number' : 404, 'flags' : { 'public'", ": { 'AMD' }, 'url' : 'extensions/AMD/GLX_AMD_gpu_association.txt', }, 'GL_AMD_gpu_shader_half_float' : { 'number' :", "'GL_ARB_gpu_shader_int64' : { 'arbnumber' : 178, 'flags' : { 'public' }, 'url' :", "}, 'GL_NV_deep_texture3D' : { 'number' : 424, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/ARB/ARB_clear_buffer_object.txt', }, 'GL_ARB_clear_texture' : { 'arbnumber' : 145, 'flags' :", 
"'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_blend_func_extended.txt', }, 'GL_ARB_buffer_storage' : { 'arbnumber'", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_pixel_texture.txt', }, 'GL_SGIS_point_line_texgen' : { 'number' : 213, 'flags'", "{ 'GLX_ARB_multisample', 'WGL_ARB_multisample' }, }, 'GL_ARB_multitexture' : { 'arbnumber' : 1, 'flags' :", "'number' : 304, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', }, 'GL_APPLE_texture_format_BGRA8888' : { 'esnumber' : 79, 'flags' : { 'public' },", ": 353, 'flags' : { 'public' }, 'supporters' : { 'Blizzard', 'NVIDIA', 'S3',", "'GL_ARB_texture_multisample' : { 'arbnumber' : 67, 'flags' : { 'public' }, 'url' :", "'number' : 492, 'esnumber' : 266, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/NV/NV_alpha_to_coverage_dither_control.txt', }, 'GL_NV_bgr' : { 'esnumber' : 135,", "'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5_mem_extended.txt', }, 'GL_NV_gpu_shader5' : { 'number' : 389, 'esnumber'", "'public' }, 'url' : 'extensions/EXT/EXT_tessellation_shader.txt', 'alias' : { 'GL_EXT_tessellation_point_size' }, }, 'GL_EXT_texenv_op' :", "'extensions/OES/OES_depth24.txt', }, 'GL_OES_depth32' : { 'esnumber' : 25, 'flags' : { 'public' },", "149, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_border_clamp.txt', }, 'GL_NV_texture_compression_latc' : {", "'esnumber' : 26, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_element_index_uint.txt', }, 'GL_OES_extended_matrix_palette'", ": { 'number' : 489, 'esnumber' : 296, 'flags' : { 'public' },", "'arbnumber' : 50, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/EXT/EXT_shader_integer_mix.txt', },", "{ 'esnumber' : 61, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_writeonly_rendering.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', }, 
'GL_ANGLE_depth_texture' : { 'esnumber'", "'alias' : { 'GL_EXT_semaphore_win32' }, }, 'GL_EXT_float_blend' : { 'esnumber' : 224, 'flags'", ": { 'number' : 320, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/EXT/EXT_blend_logic_op.txt', }, 'GL_EXT_blend_minmax' : { 'number' : 37, 'esnumber' : 65,", "}, 'GLX_ARB_create_context' : { 'arbnumber' : 56, 'flags' : { 'public' }, 'url'", "'extensions/ATI/ATI_vertex_attrib_array_object.txt', }, 'GL_ATI_vertex_streams' : { 'number' : 249, 'flags' : { 'public' },", ": { 'number' : 145, 'flags' : { 'public' }, 'supporters' : {", ": 97, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_separate_shader_objects.txt', }, 'GL_ARB_shader_atomic_counter_ops' :", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_buffers2.txt', }, 'GL_EXT_draw_buffers_indexed' : { 'esnumber' : 176,", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', }, 'GL_AMD_gpu_shader_int16' :", "{ 'number' : 381, 'esnumber' : 271, 'flags' : { 'public' }, 'supporters'", "'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', }, 'GL_IMG_bindless_texture' : { 'esnumber' : 270, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/NV/NV_texture_border_clamp.txt', }, 'GL_NV_texture_compression_latc' : { 'esnumber' : 130,", "'extensions/EXT/EXT_separate_specular_color.txt', }, 'GL_EXT_shader_framebuffer_fetch' : { 'number' : 520, 'esnumber' : 122, 'flags' :", ": 262, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_tiles.txt', }, 'GL_SGIX_polynomial_ffd' : {", "}, 'supporters' : { 'INGR', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_wrap.txt', }, 'GL_EXT_subtexture' :", "301, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", ": 'extensions/ARB/ARB_texture_env_combine.txt', }, 'GL_ARB_texture_env_crossbar' : { 'arbnumber' : 18, 'flags' : { 'public'", "'public' }, 'url' : 
'extensions/ARB/ARB_ES3_2_compatibility.txt', }, 'GL_ARB_ES3_compatibility' : { 'arbnumber' : 127, 'flags'", "{ 'number' : 42, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'supporters' : { 'HP', 'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_edge_clamp.txt', }, 'GL_SGIS_texture_filter4' :", ": { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_set.txt', }, 'GL_EXT_vertex_array_setXXX' : { 'flags' :", "'url' : 'extensions/EXT/GLX_EXT_swap_control_tear.txt', }, 'GL_EXT_tessellation_shader' : { 'esnumber' : 181, 'flags' : {", ": 'extensions/EXT/EXT_scene_marker.txt', 'alias' : { 'GLX_EXT_scene_marker' }, }, 'GL_EXT_secondary_color' : { 'number' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_pixel_buffer_object.txt', }, 'GL_EXT_pixel_transform' : { 'number'", "'extensions/ARB/ARB_draw_buffers_blend.txt', }, 'GL_ARB_draw_elements_base_vertex' : { 'arbnumber' : 62, 'flags' : { 'public' },", ": { 'KHR' }, 'url' : 'extensions/OES/OES_query_matrix.txt', }, 'GL_OES_read_format' : { 'number' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_reference_plane.txt', },", "}, 'GL_EXT_packed_depth_stencil' : { 'number' : 312, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/VIV/VIV_shader_binary.txt', }, 'WGL_3DL_stereo_control' : { 'number'", "'number' : 236, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow.txt', }, 'GL_SGIX_shadow_ambient' : {", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_tessellator.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_group.txt', }, 'GL_NV_shader_thread_shuffle' : { 'number'", ": 'extensions/EXT/EXT_misc_attribute.txt', }, 'GL_EXT_multi_draw_arrays' : { 'number' : 148, 'esnumber' : 69, 'flags'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_complex_polar.txt', }, 'GL_SGIX_convolution_accuracy' : { 'number'", 
"'url' : 'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', }, 'GL_REND_screen_coordinates' : { 'number' : 155, 'flags' : {", ": 'extensions/SGIS/SGIS_multitexture.txt', }, 'GL_SGIS_pixel_texture' : { 'number' : 15, 'flags' : { 'public'", "}, 'url' : 'extensions/IMG/IMG_texture_filter_cubic.txt', }, 'GL_IMG_user_clip_plane' : { 'esnumber' : 57, 'flags' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_conditional_render.txt', }, 'GL_NV_conservative_raster' : { 'number' : 465, 'esnumber'", "'number' : 263, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp.txt', }, 'GL_EXT_texture_mirror_clamp_to_edge' : { 'esnumber' : 291,", "{ 'number' : 393, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_instanced.txt', }, 'GL_EXT_draw_range_elements'", "}, 'url' : 'extensions/NV/NV_tessellation_program5.txt', }, 'GL_NV_texgen_emboss' : { 'number' : 193, 'flags' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_present_video.txt', 'alias' : { 'GLX_NV_present_video', 'WGL_NV_present_video'", "'url' : 'extensions/OES/OES_draw_buffers_indexed.txt', }, 'GL_OES_draw_elements_base_vertex' : { 'esnumber' : 219, 'flags' : {", "441, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' :", "'INTEL' }, 'url' : 'extensions/INTEL/INTEL_fragment_shader_ordering.txt', }, 'GL_INTEL_framebuffer_CMAA' : { 'number' : 481, 'esnumber'", "}, 'GL_EXT_separate_shader_objects' : { 'number' : 377, 'esnumber' : 101, 'flags' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_evaluators.txt', }, 'GL_NV_explicit_attrib_location' : { 'esnumber'", "'public' }, 'url' : 'extensions/OES/OES_framebuffer_object.txt', }, 'GL_OES_geometry_shader' : { 'esnumber' : 210, 'flags'", ": { '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_fog_coord.txt', }, 'GL_EXT_frag_depth' : {", "'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_protected_textures.txt', }, 'GL_EXT_provoking_vertex' : { 'number'", "{ 'esnumber' : 165, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_raster_multisample.txt', }, 'GL_EXT_read_format_bgra' : { 'esnumber'", ": 'extensions/NV/NV_texture_array.txt', }, 'GL_NV_texture_barrier' : { 'number' : 381, 'esnumber' : 271, 'flags'", "'ES', 'HP', 'SGI' }, 'url' : 'extensions/EXT/EXT_copy_texture.txt', }, 'GLX_EXT_create_context_es2_profile' : { 'number' :", ": 'extensions/EXT/EXT_sparse_texture2.txt', }, 'GL_EXT_static_vertex_array' : { 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLU_SGI_filter4_parameters.txt', }, 'GLX_SGI_make_current_read'", ": { 'public' }, 'url' : 'extensions/OES/OES_depth_texture_cube_map.txt', }, 'GL_OES_draw_buffers_indexed' : { 'esnumber' :", ": { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_depth_texture.txt', }, 'GL_ANGLE_framebuffer_blit' : { 'esnumber' :", "'extensions/HP/HP_image_transform.txt', }, 'GL_HP_occlusion_test' : { 'number' : 137, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_video_capture.txt', 'alias' : { 'GLX_NV_video_capture', 'WGL_NV_video_capture' }, }, 'GLX_NV_video_out'", ": { 'number' : 2, 'flags' : { 'public' }, 'supporters' : {", ": { 'arbnumber' : 165, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_texture_sub_image.txt',", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_draw_buffers.txt', }, 'GL_ATI_element_array'", "}, 'GL_ARB_clear_buffer_object' : { 'arbnumber' : 121, 'flags' : { 'public' }, 'url'", "'number' : 311, 'flags' : { 'public' }, 'supporters' : { 'GREMEDY' },", "'url' : 'extensions/APPLE/APPLE_clip_distance.txt', }, 'GL_APPLE_color_buffer_packed_float' : { 'esnumber' : 194, 'flags' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vdpau_interop.txt', }, 
'GL_NV_vertex_array_range'", "'number' : 188, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'url' : 'extensions/NV/WGL_NV_swap_group.txt', }, 'WGL_NV_video_output' : { 'number' : 349, 'flags' : {", "'extensions/ARB/ARB_half_float_vertex.txt', }, 'GL_ARB_indirect_parameters' : { 'arbnumber' : 154, 'flags' : { 'public' },", "'GL_SGIS_detail_texture' : { 'number' : 21, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_flush_raster.txt', }, 'GL_SGIX_fog_blend' : { 'flags'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_occlusion_query_boolean.txt', }, 'GL_EXT_packed_depth_stencil' : { 'number' : 312,", ": 'extensions/EXT/EXT_multiple_textures.txt', }, 'GL_EXT_multisample_compatibility' : { 'esnumber' : 248, 'flags' : { 'public'", "'extensions/SGIS/SGIS_fog_function.txt', }, 'GL_SGIS_generate_mipmap' : { 'number' : 32, 'flags' : { 'public' },", "}, 'GL_EXT_memory_object_win32' : { 'number' : 505, 'esnumber' : 282, 'flags' : {", "'public' }, 'url' : 'extensions/NV/NV_read_buffer.txt', }, 'GL_NV_read_depth_stencil' : { 'esnumber' : 94, 'flags'", "{ 'number' : 152, 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_datapipe.txt', },", ": 'extensions/ARB/ARB_texture_rgb10_a2ui.txt', }, 'GL_ARB_texture_stencil8' : { 'arbnumber' : 150, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_compatibility.txt', }, 'GL_ARB_arrays_of_arrays' : { 'arbnumber' :", "{ 'IBM' }, 'url' : 'extensions/EXT/EXT_static_vertex_array.txt', }, 'GL_EXT_stencil_clear_tag' : { 'number' : 314,", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_buffer_unified_memory.txt',", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_atomic_counter_ops.txt', },", "164, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' :", ": 'extensions/ARB/ARB_copy_buffer.txt', }, 'GL_ARB_copy_image' : { 'arbnumber' : 123, 'flags' : { 'public'", "309, 'esnumber' : 49, 'flags' : 
{ 'public' }, 'supporters' : { 'INTEL',", "385, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "460, 'esnumber' : 252, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_polygon_offset_clamp.txt', },", ": 471, 'esnumber' : 234, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_path_rendering_shared_edge.txt',", "}, 'url' : 'extensions/NV/NV_pixel_buffer_object.txt', }, 'GL_NV_pixel_data_range' : { 'number' : 284, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES2_compatibility.txt', }, 'GL_ARB_ES3_1_compatibility' : { 'arbnumber'", "'ATI' }, 'url' : 'extensions/ATI/ATI_texture_float.txt', }, 'GL_ATI_texture_mirror_once' : { 'number' : 221, 'flags'", "}, 'url' : 'extensions/SGIX/SGIX_shadow.txt', }, 'GL_SGIX_shadow_ambient' : { 'number' : 90, 'flags' :", "'extensions/EXT/WGL_EXT_colorspace.txt', }, 'WGL_EXT_create_context_es2_profile' : { 'number' : 400, 'flags' : { 'public' },", "'extensions/ARB/ARB_clear_buffer_object.txt', }, 'GL_ARB_clear_texture' : { 'arbnumber' : 145, 'flags' : { 'public' },", ": 109, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_instanced_arrays.txt', }, 'GL_ANGLE_pack_reverse_row_order' :", "'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_vertex_array.txt', }, 'GL_EXT_vertex_array_bgra' : { 'number' : 354,", "}, 'GL_ARB_texture_rectangle' : { 'arbnumber' : 38, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_bit_encoding.txt', }, 'GL_ARB_shader_clock' : { 'arbnumber' : 184,", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex.txt',", ": 224, 'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_texture_mirrored_repeat.txt', }, 'GL_IBM_vertex_array_lists' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_fp64.txt', }, 'GL_ARB_gpu_shader_int64' : { 'arbnumber' : 178,", "'url' : 'extensions/ARB/ARB_geometry_shader4.txt', }, 'GLX_ARB_get_proc_address' : { 'arbnumber' : 2, 'flags' : {", "405, 'flags' : { 'public' }, 
'supporters' : { 'AMD' }, 'url' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig.txt', }, 'GLX_SGIX_fbconfig_float' : { 'flags' :", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_tag_sample_buffer.txt', }, 'GL_SGIX_texture_add_env'", ": 156, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_instanced_arrays.txt', }, 'GLX_EXT_libglvnd' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', }, 'GL_EXT_shader_pixel_local_storage' : { 'esnumber' :", ": { 'esnumber' : 132, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_pack_subimage.txt',", "}, 'url' : 'extensions/ARM/ARM_rgba8.txt', }, 'GL_ARM_shader_framebuffer_fetch' : { 'esnumber' : 165, 'flags' :", ": { 'flags' : { 'obsolete' }, 'url' : 'extensions/SGIX/SGIX_vertex_array_object.txt', }, 'GL_SGIX_vertex_preclip' :", "{ 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_static_vertex_array.txt', }, 'GL_EXT_stencil_clear_tag'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_pixel.txt', },", "}, 'GL_NV_fbo_color_attachments' : { 'esnumber' : 92, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_gather.txt', }, 'GL_ARB_texture_mirror_clamp_to_edge' : { 'arbnumber' :", "}, 'url' : 'extensions/DMP/DMP_shader_binary.txt', }, 'GL_EXT_422_pixels' : { 'number' : 178, 'flags' :", "}, 'url' : 'extensions/INTEL/INTEL_framebuffer_CMAA.txt', }, 'GL_INTEL_map_texture' : { 'number' : 429, 'flags' :", "'esnumber' : 80, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_max_level.txt', }, 'GL_APPLE_texture_packed_float'", "{ 'esnumber' : 88, 'flags' : { 'public' }, 'url' : 'extensions/DMP/DMP_shader_binary.txt', },", "{ 'esnumber' : 95, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc_update.txt', },", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', },", ": { 'SGI' }, 'url' : 
'extensions/SGIX/SGIX_pixel_tiles.txt', }, 'GL_SGIX_polynomial_ffd' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers_indexed.txt', }, 'GL_EXT_draw_elements_base_vertex' : { 'esnumber'", "}, 'supporters' : { 'APPLE', 'CodeWeavers', 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_decode.txt', },", "{ 'number' : 373, 'esnumber' : 76, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', }, 'GL_MESAX_texture_stack' : { 'number'", "'number' : 58, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "136, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture_cube_map.txt', }, 'GL_OES_draw_buffers_indexed' : {", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async.txt', }, 'GL_SGIX_async_histogram' : {", "'number' : 270, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_slim.txt', }, 'GL_SGIX_spotlight_cutoff' : { 'number' :", ": 'extensions/EXT/EXT_gpu_program_parameters.txt', }, 'GL_EXT_gpu_shader4' : { 'number' : 326, 'flags' : { 'public'", "146, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_env.txt', }, 'GL_EXT_texture_env_add' : {", "}, 'GL_SGIX_resample' : { 'number' : 212, 'flags' : { 'public' }, 'supporters'", ": { 'esnumber' : 1, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_equation_separate.txt',", "{ 'public' }, 'url' : 'extensions/ARB/ARB_compressed_texture_pixel_storage.txt', }, 'GL_ARB_compute_shader' : { 'arbnumber' : 122,", "'supporters' : { 'HP', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_convolution.txt', }, 'GL_EXT_coordinate_frame'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_viewport_layer_array.txt', }, 'GL_ARB_shading_language_100' : { 'arbnumber' :", ": 'extensions/EXT/EXT_paletted_texture.txt', }, 'GL_EXT_pixel_buffer_object' : { 'number' : 302, 'flags' : { 'public'", "185, 'flags' : { 'public' 
}, 'url' : 'extensions/EXT/EXT_texture_view.txt', }, 'GL_EXT_timer_query' : {", "}, 'GL_NV_fill_rectangle' : { 'number' : 466, 'esnumber' : 232, 'flags' : {", ": 'extensions/ARB/ARB_base_instance.txt', }, 'GL_ARB_bindless_texture' : { 'arbnumber' : 152, 'flags' : { 'public'", "'extensions/EXT/EXT_vertex_array_bgra.txt', }, 'GL_EXT_vertex_array_set' : { 'flags' : { 'public' }, 'supporters' : {", "'GL_AMD_seamless_cubemap_per_texture' : { 'number' : 383, 'flags' : { 'public' }, 'supporters' :", "'number' : 486, 'esnumber' : 295, 'flags' : { 'public' }, 'url' :", "'public' }, 'supporters' : { 'HP', 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_rating.txt', },", "}, 'url' : 'extensions/NV/NV_shader_atomic_fp16_vector.txt', }, 'GL_NV_shader_atomic_int64' : { 'number' : 455, 'flags' :", "}, 'GL_KHR_robust_buffer_access_behavior' : { 'arbnumber' : 169, 'esnumber' : 189, 'flags' : {", "}, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_two_side.txt', }, 'GL_EXT_stencil_wrap' :", "}, 'GL_EXT_bgra' : { 'number' : 129, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_enhanced_layouts.txt', }, 'GL_ARB_explicit_attrib_location' : { 'arbnumber' :", ": { 'arbnumber' : 83, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_rgb10_a2ui.txt',", "'GL_OES_vertex_half_float' : { 'esnumber' : 38, 'flags' : { 'public' }, 'url' :", "}, 'GL_OES_standard_derivatives' : { 'esnumber' : 45, 'flags' : { 'public' }, 'url'", "}, 'GL_AMD_sample_positions' : { 'number' : 405, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/NV/NV_shadow_samplers_array.txt', }, 'GL_NV_shadow_samplers_cube' : { 'esnumber' : 147, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture.txt', }, 'GL_EXT_sparse_texture2' : { 'number'", "}, 'url' : 'extensions/NV/NV_vertex_program3.txt', }, 'GL_NV_vertex_program4' : { 'number' : 325, 'flags' :", "351, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 
'url' :", ": 440, 'esnumber' : 99, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_EXT_multiview_draw_buffers' : { 'esnumber' : 125, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'url' : 'extensions/KHR/KHR_debug.txt', }, 'GL_KHR_no_error' : { 'arbnumber' :", ": 288, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_parallel_shader_compile.txt', }, 'GL_KHR_robust_buffer_access_behavior' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_draw_parameters.txt', }, 'GL_ARB_shader_group_vote' : { 'arbnumber'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_buffer_float.txt', }, 'GL_NV_depth_clamp' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_rgb10_a2ui.txt', }, 'GL_ARB_texture_stencil8' : { 'arbnumber' : 150,", "{ 'arbnumber' : 177, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_shader_interlock.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc.txt', }, 'GL_IMG_texture_compression_pvrtc2' : { 'esnumber'", "'extensions/SGI/GLU_SGI_filter4_parameters.txt', }, 'GLX_SGI_make_current_read' : { 'number' : 42, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/ARB/ARB_explicit_uniform_location.txt', }, 'GL_ARB_fragment_coord_conventions' : { 'arbnumber' : 63, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_multisample_interpolation.txt', }, 'GL_OES_single_precision' : { 'number'", "{ 'esnumber' : 166, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', },", ": 'extensions/EXT/EXT_color_buffer_half_float.txt', }, 'GL_EXT_color_subtable' : { 'number' : 74, 'flags' : { 'public'", "'url' : 'extensions/EXT/EXT_provoking_vertex.txt', }, 'GL_EXT_pvrtc_sRGB' : { 'esnumber' : 155, 'flags' : {", ": { 'public' }, 'url' : 'extensions/OVR/OVR_multiview2.txt', }, 'GL_OVR_multiview_multisampled_render_to_texture' : { 'esnumber' :", ": { 'number' : 56, 'flags' : { 'public' }, 'supporters' : {", "{ 'esnumber' 
: 90, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_unpack_subimage.txt', },", "{ 'esnumber' : 287, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 196, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_internalformat_sample_query.txt', }, 'GL_NV_light_max_exponent' :", ": 292, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", "}, 'url' : 'extensions/SGIX/SGIX_quad_mesh.txt', }, 'GL_SGIX_reference_plane' : { 'number' : 60, 'flags' :", "{ 'arbnumber' : 104, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_debug_output.txt', },", ": { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_compiled_vertex_array.txt', }, 'GL_EXT_compressed_ETC1_RGB8_sub_texture' : { 'esnumber'", ": { 'number' : 303, 'flags' : { 'public' }, 'supporters' : {", ": 147, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_bind.txt', }, 'GL_ARB_multi_draw_indirect' :", ": { 'number' : 380, 'flags' : { 'public' }, 'supporters' : {", "{ 'arbnumber' : 108, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_420pack.txt', },", "}, 'url' : 'extensions/INTEL/INTEL_parallel_arrays.txt', }, 'GL_INTEL_performance_query' : { 'number' : 443, 'esnumber' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle_compressed.txt', }, 'GL_NV_texture_shader' : { 'number'", "}, 'GLX_AMD_gpu_association' : { 'number' : 398, 'flags' : { 'public' }, 'supporters'", "}, 'GL_ARB_depth_texture' : { 'arbnumber' : 22, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_streams.txt', },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_env_combine4.txt', }, 'GL_NV_texture_expand_normal' : { 'number' : 286, 'flags'", ": { 'esnumber' : 173, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_stencil8.txt',", "}, 'url' : 'extensions/ARB/WGL_ARB_create_context_robustness.txt', }, 'WGL_ARB_extensions_string' : { 'arbnumber' : 8, 'flags' :", ": { 'public' }, 'url' : 
'extensions/NV/NV_viewport_array.txt', }, 'GL_NV_viewport_array2' : { 'number' :", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_blend_cmultiply.txt', }, 'GL_SGIX_calligraphic_fragment' : { 'number' : 82,", "'GL_WIN_specular_fog' : { 'number' : 114, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 293, 'esnumber' : 18, 'flags' : { 'public' },", "274, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'public' }, 'url' : 'extensions/NV/NV_copy_buffer.txt', }, 'GL_NV_copy_depth_to_color' : { 'number' : 243, 'flags'", "'esnumber' : 142, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_blit.txt', }, 'GL_NV_framebuffer_mixed_samples'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_lod_bias.txt', }, 'GL_EXT_texture_mirror_clamp' : { 'number'", ": { 'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/WGL_EXT_depth_float.txt', },", "'extensions/AMD/AMD_compressed_3DC_texture.txt', }, 'GL_AMD_compressed_ATC_texture' : { 'esnumber' : 40, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_rgb10_a2ui.txt', }, 'GL_ARB_texture_stencil8' : { 'arbnumber'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_multisample.txt', }, 'GL_NV_framebuffer_multisample_coverage' : { 'number'", "'url' : 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', }, 'GL_ANGLE_instanced_arrays' : { 'esnumber' : 109, 'flags' : {", ": 118, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_debug.txt', }, 'GL_KHR_no_error' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object2.txt', }, 'GL_NV_path_rendering' : {", "SGIS_pixel_texture.', }, 'GL_SGIX_pixel_texture_bits' : { 'number' : 127, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_store.txt', }, 'GL_NV_shader_noperspective_interpolation' : { 'esnumber' : 201, 'flags'", "}, 'GL_ARB_vertex_buffer_object' : { 'arbnumber' : 28, 'flags' : { 'public' }, 'supporters'", "}, 
'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_add.txt', }, 'GL_EXT_texture_env_combine' :", "'flags' : { 'public' }, 'supporters' : { 'IBM', 'INGR' }, 'url' :", "}, 'url' : 'extensions/EXT/EXT_protected_textures.txt', }, 'GL_EXT_provoking_vertex' : { 'number' : 364, 'flags' :", "{ 'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_color_clamp.txt', }, 'GL_INGR_interlace_read'", "}, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_generate_mipmap.txt', }, 'GL_SGIS_line_texgen' :", "{ 'number' : 304, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture.txt', }, 'GL_EXT_multisampled_render_to_texture2' : { 'esnumber' :", "{ 'number' : 483, 'esnumber' : 258, 'flags' : { 'public' }, 'url'", "'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/WIN/WIN_specular_fog.txt', }, 'WGL_NV_DX_interop' :", "'extensions/INTEL/INTEL_parallel_arrays.txt', }, 'GL_INTEL_performance_query' : { 'number' : 443, 'esnumber' : 164, 'flags' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program_option.txt', }, 'GL_NV_fragment_shader_interlock' : { 'number' : 468,", "{ 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pixel_format.txt', }, 'WGL_ARB_render_texture' : { 'arbnumber' : 20,", "'number' : 395, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'public' }, 'url' : 'extensions/EXT/EXT_primitive_bounding_box.txt', }, 'GL_EXT_protected_textures' : { 'esnumber' : 256, 'flags'", ": 'extensions/MESA/MESA_tile_raster_order.txt', }, 'GL_MESA_window_pos' : { 'number' : 197, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_hyperpipe.txt', }, 'GLU_SGIX_icc_compress'", "181, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_tessellation_shader.txt', 'alias' : { 'GL_EXT_tessellation_point_size'", "'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_bgra.txt', }, 'GL_EXT_bindable_uniform' : { 'number'", 
": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_geometry_shader4.txt', },", "'url' : 'extensions/APPLE/APPLE_texture_packed_float.txt', }, 'GL_APPLE_texture_range' : { 'number' : 367, 'flags' : {", "{ 'arbnumber' : 56, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments'", "'GL_EXT_index_material' : { 'number' : 94, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/EXT/EXT_shader_pixel_local_storage.txt', }, 'GL_EXT_shader_pixel_local_storage2' : { 'esnumber' : 253, 'flags' : {", ": { 'GLX_SGIS_shared_multisample' }, }, 'GL_SGIS_sharpen_texture' : { 'number' : 22, 'flags' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_icc_texture.txt', }, 'GL_SGIX_igloo_interface' : { 'number' : 219,", "}, }, 'GL_EXT_memory_object_fd' : { 'number' : 504, 'esnumber' : 281, 'flags' :", ": { 'GL_EXT_geometry_point_size' }, }, 'GL_EXT_geometry_shader4' : { 'number' : 324, 'flags' :", ": 'extensions/IMG/IMG_framebuffer_downsample.txt', }, 'GL_IMG_multisampled_render_to_texture' : { 'esnumber' : 74, 'flags' : { 'public'", "'esnumber' : 201, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_noperspective_interpolation.txt', }, 'GL_NV_shader_storage_buffer_object'", "'url' : 'extensions/SGIS/SGIS_multisample.txt', 'alias' : { 'GLX_SGIS_multisample' }, }, 'GL_SGIS_multitexture' : { 'number'", "'GLX_EXT_framebuffer_sRGB', 'WGL_EXT_framebuffer_sRGB' }, }, 'GL_EXT_geometry_shader' : { 'esnumber' : 177, 'flags' : {", ": 'extensions/INTEL/INTEL_map_texture.txt', }, 'GL_INTEL_blackhole_render' : { 'number' : 521, 'esnumber' : 300, 'flags'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_copy_image.txt', 'alias' : { 'GLX_NV_copy_image', 'WGL_NV_copy_image' }, },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_x11_sync_object.txt', }, 'GL_FJ_shader_binary_GCCSO' : { 'esnumber'", "'GL_SGI_color_matrix' : { 'number' : 13, 'flags' : { 'public' }, 'supporters' :", "{ 'esnumber' : 112, 'flags' : { 'public' }, 'url' : 
'extensions/ANGLE/ANGLE_texture_usage.txt', },", "}, 'GL_HP_occlusion_test' : { 'number' : 137, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'MS', 'SGI' }, 'url' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_parameters.txt', }, 'GL_ARB_point_sprite' :", "{ 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc.txt', },", "'url' : 'extensions/NV/NV_shader_thread_group.txt', }, 'GL_NV_shader_thread_shuffle' : { 'number' : 448, 'flags' : {", "{ 'public' }, 'url' : 'extensions/OES/OES_draw_elements_base_vertex.txt', }, 'GL_OES_draw_texture' : { 'esnumber' : 7,", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_reference_plane.txt', }, 'GL_SGIX_resample' : { 'number' : 212, 'flags'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program4.txt', }, 'GL_NV_video_capture' : { 'number' : 374, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_size.txt', }, 'GL_ARB_shader_objects' : { 'arbnumber'", ": 'extensions/EXT/EXT_draw_range_elements.txt', }, 'GL_EXT_draw_transform_feedback' : { 'esnumber' : 272, 'flags' : { 'public'", "'SGI' }, 'url' : 'extensions/EXT/EXT_fragment_lighting.txt', }, 'GL_EXT_framebuffer_blit' : { 'number' : 316, 'flags'", "'extensions/S3/S3_s3tc.txt', }, 'GLX_SGIS_blended_overlay' : { 'number' : 142, 'flags' : { 'public' },", "33, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil8.txt', }, 'GL_OES_stencil_wrap' : {", ": { 'public' }, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI', 'SUN' },", "}, 'url' : 'extensions/APPLE/APPLE_framebuffer_multisample.txt', }, 'GL_APPLE_object_purgeable' : { 'number' : 371, 'flags' :", "'url' : 'extensions/SGIX/SGIX_instruments.txt', }, 'GL_SGIX_interlace' : { 'number' : 45, 'flags' : {", "288, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_ES3_compatibility.txt', }, 'GL_ARB_arrays_of_arrays' : { 'arbnumber' : 120,", "}, 
'GL_NV_fence' : { 'number' : 222, 'esnumber' : 52, 'flags' : {", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_blend_minmax_factor.txt', }, 'GL_AMD_compressed_3DC_texture'", "'url' : 'extensions/IMG/IMG_shader_binary.txt', }, 'GL_IMG_texture_compression_pvrtc' : { 'esnumber' : 54, 'flags' : {", "'url' : 'extensions/ARB/ARB_texture_query_levels.txt', }, 'GL_ARB_texture_query_lod' : { 'arbnumber' : 73, 'flags' : {", ": 433, 'esnumber' : 163, 'flags' : { 'public' }, 'supporters' : {", "'3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_fog_coord.txt', }, 'GL_EXT_frag_depth' : { 'esnumber' :", "}, }, 'GL_ANGLE_texture_usage' : { 'esnumber' : 112, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/EXT/EXT_raster_multisample.txt', }, 'GL_EXT_read_format_bgra' : { 'esnumber' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow.txt', }, 'GL_ARB_shadow_ambient'", "OpenGL extension with the same name string.', }, 'GL_EXT_separate_specular_color' : { 'number' :", "'GL_EXT_vertex_shader' : { 'number' : 248, 'flags' : { 'public' }, 'url' :", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_shared_multisample.txt', 'alias' :", ": 145, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_instanced_arrays.txt', }, 'GL_NV_internalformat_sample_query' :", "'WGL_EXT_make_current_read' : { 'number' : 169, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/ATI/ATI_texture_mirror_once.txt', }, 'GL_ATI_vertex_array_object' : { 'number' : 247, 'flags' :", "{ 'number' : 92, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "}, 'url' : 'extensions/ARB/ARB_cull_distance.txt', }, 'GL_ARB_debug_output' : { 'arbnumber' : 104, 'flags' :", "}, 'WGL_AMD_gpu_association' : { 'number' : 361, 'flags' : { 'public' }, 'supporters'", "'extensions/EXT/EXT_swap_control.txt', }, 'GLX_EXT_swap_control_tear' : { 'number' : 414, 'flags' : { 'public' },", "'extensions/EXT/GLU_EXT_object_space_tess.txt', }, 
'GL_EXT_occlusion_query_boolean' : { 'esnumber' : 100, 'flags' : { 'public' },", ": { 'esnumber' : 270, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_bindless_texture.txt',", ": { 'arbnumber' : 121, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_buffer_object.txt',", ": 179, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, }, 'GL_NV_coverage_sample' : { 'esnumber' : 72, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_evaluators.txt', }, 'GL_NV_explicit_attrib_location' : { 'esnumber' : 159, 'flags'", ": { 'arbnumber' : 188, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_filter_minmax.txt',", "'public' }, 'url' : 'extensions/EXT/EXT_vertex_shader.txt', }, 'GL_EXT_vertex_weighting' : { 'number' : 188, 'flags'", ": 177, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_shader_interlock.txt', }, 'GL_ARB_framebuffer_no_attachments' :", "'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_attrib_array_object.txt', }, 'GL_ATI_vertex_streams' : { 'number' : 249, 'flags'", ": { 'public' }, 'supporters' : { 'GOOGLE', 'NVIDIA', 'VMware' }, 'url' :", "'GL_OES_blend_equation_separate' : { 'esnumber' : 1, 'flags' : { 'public' }, 'url' :", "501, 'esnumber' : 274, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'arbnumber' : 169, 'esnumber' : 189, 'flags' : { 'public' }, 'url'", "{ 'number' : 471, 'esnumber' : 234, 'flags' : { 'public' }, 'url'", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_dot3.txt', }, 'GL_ARB_texture_filter_anisotropic' : { 'arbnumber' : 195, 'flags'", "{ 'number' : 271, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_type_10_10_10_2.txt', }, 'GL_OES_viewport_array' : { 'esnumber'", "'extensions/EXT/EXT_stencil_clear_tag.txt', }, 'GL_EXT_stencil_two_side' : { 'number' : 268, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/NV/NV_clip_space_w_scaling.txt', }, 'GL_NV_command_list' : { 'number' : 477, 'flags'", 
"'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_alpha_minmax.txt', }, 'GL_SGIX_blend_cadd' : { 'number'", ": 'extensions/OES/OES_texture_storage_multisample_2d_array.txt', }, 'GL_OES_texture_view' : { 'esnumber' : 218, 'flags' : { 'public'", ": { 'number' : 226, 'flags' : { 'public' }, 'supporters' : {", "'extensions/EXT/EXT_index_texture.txt', }, 'GL_EXT_instanced_arrays' : { 'esnumber' : 156, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_conditional_render.txt', },", "379, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'extensions/EXT/EXT_external_objects.txt', 'alias' : { 'GL_EXT_semaphore' }, }, 'GL_EXT_memory_object_fd' : { 'number' : 504,", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_make_current_read.txt', }, 'WGL_ARB_pbuffer' : { 'arbnumber'", "}, 'supporters' : { 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture_object.txt', },", ": 134, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_pixel_buffer_object.txt', }, 'GL_NV_pixel_data_range' :", "}, 'GL_SGIX_ycrcba' : { 'number' : 203, 'flags' : { 'incomplete' }, 'supporters'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_shader5.txt', },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_texture.txt', }, 'GL_NV_blend_equation_advanced' : { 'number' : 433, 'esnumber'", "{ 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_packed_float.txt', }, 'GL_APPLE_texture_range' : { 'number' : 367,", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_explicit_multisample.txt', }, 'GL_NV_fbo_color_attachments'", ": 'extensions/ARB/ARB_sampler_objects.txt', }, 'GL_ARB_seamless_cube_map' : { 'arbnumber' : 65, 'flags' : { 'public'", "'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB' }, }, 'GL_ARB_geometry_shader4' : { 'arbnumber' : 47, 'flags' : {", "'INGR' }, 'url' : 'extensions/INGR/INGR_color_clamp.txt', }, 'GL_INGR_interlace_read' : { 'number' : 
175, 'flags'", "}, 'url' : 'extensions/ARB/ARB_texture_rgb10_a2ui.txt', }, 'GL_ARB_texture_stencil8' : { 'arbnumber' : 150, 'flags' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_instanced.txt', }, 'GL_ARB_transform_feedback_overflow_query' : { 'arbnumber' : 173,", "'GL_EXT_texture_compression_rgtc' : { 'number' : 332, 'esnumber' : 286, 'flags' : { 'public'", "{ 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_lock.txt', }, 'WGL_I3D_swap_frame_usage' : { 'number' : 255,", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_border_clamp.txt', }, 'GL_OES_texture_buffer' : { 'esnumber' :", "'public' }, 'url' : 'extensions/IMG/IMG_multisampled_render_to_texture.txt', }, 'GL_IMG_program_binary' : { 'esnumber' : 67, 'flags'", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map.txt', }, 'GL_OES_texture_cube_map_array' : { 'esnumber' : 217,", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_sprite.txt', }, 'GL_SGIX_subdiv_patch'", "'extensions/ARB/ARB_ES3_2_compatibility.txt', }, 'GL_ARB_ES3_compatibility' : { 'arbnumber' : 127, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_instanced.txt', }, 'GL_ARB_transform_feedback_overflow_query' : { 'arbnumber'", "'extensions/EXT/WGL_EXT_multisample.txt', 'alias' : { 'GL_EXT_multisample' }, }, 'WGL_EXT_pbuffer' : { 'number' : 171,", "'number' : 233, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "42, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "152, 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_datapipe.txt', }, 'GL_SGIX_decimation' : {", "'arbnumber' : 63, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_coord_conventions.txt', }, 'GL_ARB_fragment_layer_viewport'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', }, 'GL_SGIX_fog_layers' : { 'flags'", ": { 'GLX_EXT_fbconfig_packed_float', 'WGL_EXT_pixel_format_packed_float' }, }, 
'GL_EXT_packed_pixels' : { 'number' : 23, 'flags'", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_interleaved_elements.txt', },", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_instanced_arrays.txt', }, 'GL_ARB_internalformat_query' : { 'arbnumber'", "'esnumber' : 51, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_BGRA8888.txt', }, 'GL_EXT_texture_format_sRGB_override'", "}, 'url' : 'extensions/EXT/EXT_texture_buffer_object.txt', }, 'GL_EXT_texture_compression_astc_decode_mode' : { 'esnumber' : 276, 'flags' :", "}, 'GL_AMD_gcn_shader' : { 'number' : 453, 'flags' : { 'public' }, 'url'", "79, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', }, 'GL_APPLE_texture_max_level' : {", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_object.txt', }, 'GL_ARB_framebuffer_sRGB' :", "'url' : 'extensions/ARB/ARB_texture_storage.txt', }, 'GL_ARB_texture_storage_multisample' : { 'arbnumber' : 141, 'flags' : {", "'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', }, 'GL_ATI_draw_buffers' : { 'number' : 277, 'flags' : { 'public' },", ": 'extensions/NV/NV_bindless_multi_draw_indirect.txt', }, 'GL_NV_bindless_multi_draw_indirect_count' : { 'number' : 456, 'flags' : { 'public'", ": { 'number' : 4, 'flags' : { 'public' }, 'supporters' : {", ": 288, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias' : { 'GL_SGIX_color_type' }, }, 'GLX_SGIX_color_typeXXX' :", "'number' : 465, 'esnumber' : 228, 'flags' : { 'public' }, 'url' :", "62, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_bgra.txt',", "276, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias' : { 'GL_EXT_texture_compression_astc_decode_mode_rgb9e5'", "420, 'flags' : { 'public' }, 
'supporters' : { 'AMD' }, 'url' :", ": 365, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_framebuffer_no_attachments.txt', }, 'GL_ARB_framebuffer_object' : { 'arbnumber'", "}, 'url' : 'extensions/NV/NV_texture_multisample.txt', }, 'GL_NV_texture_npot_2D_mipmap' : { 'esnumber' : 96, 'flags' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_tessellation_program5.txt', }, 'GL_NV_texgen_emboss' : { 'number' :", "}, 'GL_ARB_transform_feedback3' : { 'arbnumber' : 94, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch.txt', }, 'GL_ARM_shader_framebuffer_fetch_depth_stencil' : { 'esnumber' : 166,", "'esnumber' : 193, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_clip_distance.txt', }, 'GL_APPLE_color_buffer_packed_float'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_border_clamp.txt', },", "'extensions/EXT/EXT_index_func.txt', }, 'GL_EXT_index_material' : { 'number' : 94, 'flags' : { 'public' },", ": 29, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_mapbuffer.txt', }, 'GL_OES_matrix_get' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_tessellation_shader.txt', 'alias' : { 'GL_EXT_tessellation_point_size' }, }, 'GL_EXT_texenv_op'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_blend_func_extended.txt', }, 'GL_ARB_buffer_storage' : { 'arbnumber' : 144,", "}, 'GL_EXT_framebuffer_object' : { 'number' : 310, 'flags' : { 'public' }, 'supporters'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_buffer_object.txt', 'alias' : { 'GLX_ARB_vertex_buffer_object'", "'GL_ATI_map_object_buffer' : { 'number' : 288, 'flags' : { 'public' }, 'supporters' :", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_object.txt', }, 'GL_EXT_framebuffer_sRGB' : { 'number' : 337, 'flags'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_float.txt', }, 'GL_NV_shader_atomic_float64' : {", "}, 
'url' : 'extensions/NV/NV_vertex_buffer_unified_memory.txt', }, 'GL_NV_vertex_program' : { 'number' : 233, 'flags' :", "{ 'MS' }, 'url' : 'extensions/EXT/EXT_bgra.txt', }, 'GL_EXT_bindable_uniform' : { 'number' : 342,", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_depth_clamp_separate.txt', }, 'GL_AMD_draw_buffers_blend'", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_stencil_export.txt', }, 'GL_AMD_shader_stencil_value_export' : { 'number' : 444,", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range.txt', }, 'GL_NV_vertex_array_range2'", ": { 'esnumber' : 87, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external.txt',", "'extensions/APPLE/APPLE_specular_vector.txt', }, 'GL_APPLE_sync' : { 'esnumber' : 124, 'flags' : { 'public' },", "'GL_OES_tessellation_shader' : { 'esnumber' : 214, 'flags' : { 'public' }, 'url' :", "178, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_gpu_shader5.txt', }, 'GL_EXT_histogram' : {", "'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_genlock.txt', }, 'WGL_I3D_image_buffer' :", "{ 'number' : 52, 'flags' : { 'public' }, 'supporters' : { 'SGI'", ": 'extensions/NV/NV_fragment_program2.txt', }, 'GL_NV_fragment_program4' : { 'number' : 335, 'flags' : { 'public'", "}, 'GL_ARB_shader_image_size' : { 'arbnumber' : 136, 'flags' : { 'public' }, 'url'", "'esnumber' : 242, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview2.txt', }, 'GL_OVR_multiview_multisampled_render_to_texture'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', }, 'GL_EXT_texture_norm16' : { 'esnumber'", "}, 'GL_APPLE_texture_range' : { 'number' : 367, 'flags' : { 'public' }, 'supporters'", "}, }, 'GL_KHR_context_flush_control' : { 'arbnumber' : 168, 'esnumber' : 191, 'flags' :", ": 16, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_viewport_array.txt', }, 
'GL_ARB_window_pos' : { 'arbnumber' : 25,", "'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_resample.txt',", "'url' : 'extensions/IBM/IBM_static_data.txt', }, 'GL_IBM_texture_mirrored_repeat' : { 'number' : 224, 'flags' : {", "}, 'GL_ARB_separate_shader_objects' : { 'arbnumber' : 97, 'flags' : { 'public' }, 'url'", "469, 'esnumber' : 231, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_mixed_samples.txt', },", "'extensions/NV/NV_occlusion_query.txt', }, 'GL_NV_pack_subimage' : { 'esnumber' : 132, 'flags' : { 'public' },", "19, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil_wrap.txt', }, 'GL_OES_surfaceless_context' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_framezoom.txt', }, 'GLX_SGIX_hyperpipe' : { 'number'", "}, 'GL_ARB_texture_env_crossbar' : { 'arbnumber' : 18, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc_update.txt', }, 'GL_NV_texture_compression_vtc' : { 'number'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_swap_group.txt', },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_pixel_data_range.txt', }, 'GL_NV_platform_binary' : { 'esnumber' : 131,", "'extensions/ARB/ARB_program_interface_query.txt', }, 'GL_ARB_provoking_vertex' : { 'arbnumber' : 64, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_sampler_objects.txt', }, 'GL_ARB_seamless_cube_map' : { 'arbnumber' : 65, 'flags' :", "{ 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_map_texture.txt', }, 'GL_INTEL_blackhole_render'", "'GL_EXT_texture_buffer_object' : { 'number' : 330, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 34, 'flags' : { 'public' }, 'supporters' : { 'HP',", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_arrays_of_arrays.txt', }, 'GL_ARB_base_instance' : { 'arbnumber'", "'extensions/ARB/ARB_sparse_buffer.txt', }, 'GL_ARB_sparse_texture' : { 'arbnumber' : 158, 'flags' : 
{ 'public' },", ": { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform.txt', }, 'GL_EXT_pixel_transform_color_table' : { 'number'", "449, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_image_load_formatted.txt', }, 'GL_EXT_shader_image_load_store' : {", "'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_abgr.txt',", "'extensions/EXT/EXT_subtexture.txt', }, 'GL_EXT_swap_control' : { 'number' : 375, 'flags' : { 'public' },", "'esnumber' : 295, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_clip_space_w_scaling.txt', }, 'GL_NV_command_list'", ": 'extensions/QCOM/QCOM_binning_control.txt', }, 'GL_QCOM_driver_control' : { 'esnumber' : 55, 'flags' : { 'public'", "'extensions/ARB/WGL_ARB_pbuffer.txt', }, 'WGL_ARB_pixel_format' : { 'arbnumber' : 9, 'flags' : { 'public' },", "470, 'esnumber' : 233, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_geometry_shader_passthrough.txt', },", ": { 'esnumber' : 218, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_view.txt',", "'number' : 28, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'KGC',", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_occlusion_instrument.txt', }, 'GL_SGIX_packed_6bytes' : {", "'WGL_I3D_genlock' : { 'number' : 252, 'flags' : { 'public' }, 'supporters' :", "'GL_NV_read_depth_stencil' : { 'esnumber' : 94, 'flags' : { 'public' }, 'url' :", "{ 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_env_combine3.txt', }, 'GL_ATI_texture_float' : { 'number' : 280,", ": 154, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_indirect_parameters.txt', }, 'GL_ARB_instanced_arrays' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_extensions_string.txt', }, 'WGL_ARB_make_current_read' : {", "'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_texture_scissor.txt', }, 'GL_KHR_blend_equation_advanced' : { 'arbnumber'", "'esnumber' : 203, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_base_instance.txt', }, 
'GL_EXT_bgra'", ": 390, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'url' : 'extensions/OES/OES_blend_subtract.txt', }, 'GL_OES_byte_coordinates' : { 'number' : 291, 'esnumber' : 4,", "'number' : 150, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'WGL_ATI_pixel_format_float' : { 'number' : 278, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/NV/NV_conservative_raster_underestimation.txt', }, 'GLX_NV_copy_buffer' : { 'number' : 457, 'flags' :", "'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_swap_group.txt', }, 'WGL_NV_video_output' : { 'number' : 349, 'flags'", "'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_snorm.txt', }, 'GL_EXT_texture_storage' : { 'esnumber' : 108, 'flags'", ": { 'arbnumber' : 132, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_invalidate_subdata.txt',", "'GL_AMD_conservative_depth' : { 'number' : 385, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 174, 'flags' : { 'public' }, 'supporters' : { 'INGR'", "160, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "}, 'url' : 'extensions/EXT/EXT_texture_env_add.txt', }, 'GL_EXT_texture_env_combine' : { 'number' : 158, 'flags' :", ": 'extensions/MESA/MESA_resize_buffers.txt', }, 'GLX_MESA_set_3dfx_mode' : { 'number' : 218, 'flags' : { 'public'", "}, 'url' : 'extensions/ARB/WGL_ARB_make_current_read.txt', }, 'WGL_ARB_pbuffer' : { 'arbnumber' : 11, 'flags' :", "277, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", "'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_info.txt', }, 'GLX_EXT_visual_rating' : { 'number' : 44, 'flags'", "'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_render_texture.txt', }, 'WGL_ARB_robustness_application_isolation' : { 'arbnumber' : 143, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_parallel_shader_compile.txt', }, 'GL_ARB_pipeline_statistics_query' : { 'arbnumber'", "'arbnumber' : 43, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": 231, 'flags' : { 'public' }, 'url' : 
'extensions/NV/NV_framebuffer_mixed_samples.txt', }, 'GL_NV_framebuffer_multisample' :", "}, 'GL_OES_paletted_texture' : { 'esnumber' : 13, 'flags' : { 'incomplete', 'private' },", "support.', }, 'GL_SGI_complex' : { 'number' : 87, 'flags' : { 'incomplete' },", "44, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'GL_NV_conservative_raster_dilate' : { 'number' : 480, 'flags' : { 'public' }, 'url' :", ": 'extensions/OES/OES_point_size_array.txt', }, 'GL_OES_point_sprite' : { 'esnumber' : 15, 'flags' : { 'public'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_quad_mesh.txt', }, 'GL_SGIX_reference_plane' : {", ": { 'number' : 409, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/SGIX/SGIX_blend_cmultiply.txt', }, 'GL_SGIX_calligraphic_fragment' : { 'number' : 82, 'flags' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_barrier.txt', }, 'GL_ARB_texture_border_clamp' : { 'arbnumber' :", "'url' : 'extensions/OES/OES_fixed_point.txt', }, 'GL_OES_fragment_precision_high' : { 'esnumber' : 28, 'flags' : {", "'arbnumber' : 131, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query2.txt', }, 'GL_ARB_invalidate_subdata'", "'url' : 'extensions/QCOM/QCOM_extended_get2.txt', }, 'GL_QCOM_performance_monitor_global_mode' : { 'esnumber' : 56, 'flags' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_barrier.txt', }, 'GL_NV_texture_border_clamp'", ": { 'number' : 116, 'flags' : { 'obsolete' }, 'supporters' : {", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_query_buffer_object.txt', }, 'GL_AMD_sample_positions' : { 'number' : 405,", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/ATI/ATI_meminfo.txt', }, 'GL_ATI_pn_triangles' :", "}, 'url' : 'extensions/ARB/ARB_ES3_1_compatibility.txt', }, 'GL_ARB_ES3_2_compatibility' : { 'arbnumber' : 176, 'flags' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_deep_texture3D.txt', },", "'ARB' }, 'url' : 
'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Alias to WGL_ARB_create_context_profile not needed -", "'supporters' : { '3DL', 'ATI', 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_object.txt', }, 'GL_EXT_framebuffer_sRGB'", ": 33, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'extensions/ARB/WGL_ARB_buffer_region.txt', }, 'WGL_ARB_create_context' : { 'arbnumber' : 55, 'flags' : { 'public' },", ": 58, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compatibility.txt', }, 'GL_ARB_compressed_texture_pixel_storage' :", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_latc.txt', }, 'GL_EXT_texture_compression_rgtc' : { 'number' :", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_float_pixels.txt', }, 'GL_APPLE_flush_buffer_range' : { 'number' : 321,", ": { 'public' }, 'url' : 'extensions/OES/OES_gpu_shader5.txt', }, 'GL_OES_mapbuffer' : { 'esnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview.txt', }, 'GL_OVR_multiview2' : { 'number'", "}, 'GL_ARB_direct_state_access' : { 'arbnumber' : 164, 'flags' : { 'public' }, 'url'", "{ 'number' : 78, 'flags' : { 'public' }, 'supporters' : { 'MS',", "}, 'GL_ARB_shader_viewport_layer_array' : { 'arbnumber' : 185, 'flags' : { 'public' }, 'url'", "'extensions/3DFX/3DFX_texture_compression_FXT1.txt', }, 'GL_AMD_blend_minmax_factor' : { 'number' : 404, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', }, 'GL_EXT_framebuffer_object'", "10, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": { 'arbnumber' : 186, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture2.txt',", "with arbnumber 56, GLX_ARB_create_context.', }, 'GLX_ARB_create_context_robustness' : { 'arbnumber' : 101, 'flags' :", ": { 'public' }, 'supporters' : { '3DL', 'ATI', 'INTEL', 'NVIDIA' }, 'url'", "}, 'GL_EXT_texture_env_dot3' : { 'number' : 220, 'flags' : { 'public' }, 
'supporters'", "'number' : 501, 'esnumber' : 274, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_buffers2.txt', }, 'GL_EXT_draw_buffers_indexed'", "'public' }, 'url' : 'extensions/NV/NV_path_rendering_shared_edge.txt', }, 'GL_NV_pixel_buffer_object' : { 'esnumber' : 134, 'flags'", "{ 'number' : 43, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'number' : 310, 'flags' : { 'public' }, 'supporters' : { '3DL',", "'extensions/QCOM/QCOM_tiled_rendering.txt', }, 'GL_QCOM_writeonly_rendering' : { 'esnumber' : 61, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_multimode_draw_arrays.txt', }, 'GL_IBM_rasterpos_clip'", "{ 'arbnumber' : 175, 'esnumber' : 243, 'flags' : { 'public' }, 'url'", "{ 'TGS' }, 'url' : 'extensions/PGI/PGI_vertex_hints.txt', }, 'GL_QCOM_alpha_test' : { 'esnumber' : 89,", ": { 'arbnumber' : 194, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_fft.txt',", "'esnumber' : 124, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_sync.txt', }, 'GL_APPLE_texture_2D_limited_npot'", ": { 'arbnumber' : 107, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_base_instance.txt',", "'GL_OES_blend_func_separate' : { 'esnumber' : 2, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/ARB/ARB_transpose_matrix.txt', }, 'GL_ARB_uniform_buffer_object' : { 'arbnumber' : 57, 'flags' :", ": 'extensions/FJ/FJ_shader_binary_GCCSO.txt', }, 'GL_GREMEDY_frame_terminator' : { 'number' : 345, 'flags' : { 'public'", "'GL_NV_depth_buffer_float' : { 'number' : 334, 'flags' : { 'public' }, 'supporters' :", "'GL_ARB_texture_env_combine' : { 'arbnumber' : 17, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/ARB/ARB_texture_buffer_range.txt', }, 'GL_ARB_texture_compression' : { 'arbnumber' : 12, 'flags' : { 'public'", ": { 'APPLE' }, 'url' : 
'extensions/APPLE/APPLE_flush_buffer_range.txt', }, 'GL_APPLE_framebuffer_multisample' : { 'esnumber' :", "}, 'GL_EXT_EGL_image_storage' : { 'number' : 522, 'esnumber' : 301, 'flags' : {", "225, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'url' : 'extensions/NV/NV_geometry_shader_passthrough.txt', }, 'GL_NV_gpu_multicast' : { 'number' : 494, 'flags' :", "16, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'Evolved into EXT_texture_env_combine.', }, 'GL_EXT_texture' : { 'number' : 4, 'flags' : {", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_tile_raster_order.txt', }, 'GL_MESA_window_pos' : {", "}, 'GL_EXT_blend_minmax' : { 'number' : 37, 'esnumber' : 65, 'flags' : {", "}, 'url' : 'extensions/SGIX/GLX_SGIX_hyperpipe.txt', }, 'GLU_SGIX_icc_compress' : { 'flags' : { 'incomplete' },", "'GL_EXT_gpu_shader4' : { 'number' : 326, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/NV/NV_fog_distance.txt', }, 'GL_NV_fragment_coverage_to_color' : { 'number' : 467, 'esnumber' : 229, 'flags'", ": { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_linked_gpu_multicast.txt', }, 'GL_NV_3dvision_settings' : { 'esnumber' :", ": 278, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_array.txt', }, 'GL_EXT_EGL_image_external_wrap_modes' :", "'esurl' : 'extensions/EXT/EXT_separate_shader_objects.gles.txt', 'comments' : 'Different that the OpenGL extension with the same", "487, 'esnumber' : 262, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', },", "'url' : 'extensions/ARB/ARB_conservative_depth.txt', }, 'GL_ARB_copy_buffer' : { 'arbnumber' : 59, 'flags' : {", "'GL_SGIX_bali_g_instruments' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_g_instruments.txt', }, 'GL_SGIX_bali_r_instruments'", "'number' : 472, 'esnumber' : 235, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp.txt', }, 
'GL_EXT_texture_mirror_clamp_to_edge' : {", "'url' : 'extensions/OES/OES_required_internalformat.txt', }, 'GL_OES_rgb8_rgba8' : { 'esnumber' : 30, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_buffer.txt', }, 'GL_EXT_texture_buffer_object' : { 'number'", ": 'extensions/ARB/ARB_seamless_cube_map.txt', }, 'GL_ARB_seamless_cubemap_per_texture' : { 'arbnumber' : 155, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_shader_clock.txt', }, 'GL_ARB_shader_draw_parameters' : { 'arbnumber' : 156, 'flags'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias' : {", ": { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_marker.txt', }, 'GL_EXT_depth_bounds_test' : { 'number' :", "{ 'INGR' }, 'url' : 'extensions/EXT/WGL_EXT_depth_float.txt', }, 'WGL_EXT_display_color_table' : { 'number' : 167,", "'extensions/EXT/WGL_EXT_display_color_table.txt', }, 'WGL_EXT_extensions_string' : { 'number' : 168, 'flags' : { 'public' },", "'number' : 517, 'esnumber' : 297, 'flags' : { 'public' }, 'url' :", ": 'extensions/OES/OES_texture_compression_astc.txt', }, 'GL_OES_texture_cube_map' : { 'esnumber' : 20, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/NV/NV_pack_subimage.txt', }, 'GL_NV_packed_depth_stencil' : { 'number' : 226, 'flags'", ": { 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow.txt',", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rg.txt',", "267, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, }, 'WGL_EXT_depth_float' : { 'number' : 177, 'flags' : { 'public' },", "464, 'esnumber' : 227, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_filter_minmax.txt', },", "'url' : 'extensions/HP/HP_texture_lighting.txt', }, 'GL_IBM_cull_vertex' : { 'number' : 199, 'flags' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_win32_keyed_mutex.txt', }, 'GL_EXT_window_rectangles' : { 
'number' : 490,", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program3.txt', }, 'GL_NV_vertex_program4' : { 'number' :", "}, 'GL_OES_stencil_wrap' : { 'esnumber' : 19, 'flags' : { 'public' }, 'url'", "'extensions/EXT/EXT_shader_pixel_local_storage2.txt', }, 'GL_EXT_shader_texture_lod' : { 'esnumber' : 77, 'flags' : { 'public' },", ": 161, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url'", ": 'extensions/SGI/GLX_SGI_cushion.txt', }, 'GL_SGI_fft' : { 'number' : 99, 'flags' : { 'incomplete'", ": 'extensions/SGIX/SGIX_subsample.txt', }, 'GLX_SGIX_swap_barrier' : { 'number' : 92, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_framebuffer_multisample.txt', }, 'GL_APPLE_object_purgeable' : { 'number'", "'arbnumber' : 32, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "{ 'esnumber' : 148, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sRGB_formats.txt', },", "'GL_EXT_geometry_shader' : { 'esnumber' : 177, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'url' : 'extensions/OES/OES_vertex_half_float.txt', }, 'GL_OES_vertex_type_10_10_10_2' : { 'esnumber' : 46,", "}, 'GL_SGIX_vector_ops' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_vector_ops.txt', },", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_instrument_error.txt', }, 'GL_SGIX_instruments' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_texture.txt',", "'ATI' }, 'url' : 'extensions/ATI/ATI_separate_stencil.txt', }, 'GL_ATI_text_fragment_shader' : { 'number' : 269, 'flags'", "'extensions/SGIX/SGIX_blend_cadd.txt', }, 'GL_SGIX_blend_cmultiply' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_blend_cmultiply.txt',", "{ 'public' }, 'url' : 'extensions/NV/NV_sRGB_formats.txt', }, 'GL_NV_sample_locations' : { 'number' : 472,", "'arbnumber' : 2, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'url' : 'extensions/NV/NV_draw_vulkan_image.txt', }, 'GL_NV_evaluators' : { 'number' : 
225, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_robustness.txt', }, 'GLX_ARB_robustness_application_isolation' : { 'arbnumber' : 142, 'flags' :", "which is referred to by some other vendor extensions, but shipped as ARB_transform_feedback2.',", "}, 'url' : 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', }, 'GL_IGLOO_toggle_color_and_lightXXX' : { 'flags' : { 'incomplete', 'obsolete'", "'url' : 'extensions/KHR/KHR_robust_buffer_access_behavior.txt', }, 'GL_KHR_robustness' : { 'arbnumber' : 170, 'esnumber' : 190,", ": 336, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'url' : 'extensions/OES/OES_shader_multisample_interpolation.txt', }, 'GL_OES_single_precision' : { 'number' : 293,", "'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_driver_control.txt', }, 'GL_QCOM_extended_get' : { 'esnumber'", "}, }, 'GL_EXT_float_blend' : { 'esnumber' : 224, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_color_matrix.txt', }, 'GL_SGI_color_table' :", "'url' : 'extensions/EXT/EXT_conservative_depth.txt', }, 'GL_EXT_convolution' : { 'number' : 12, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_copy_buffer.txt', }, 'GL_NV_copy_buffer' : { 'esnumber' : 158, 'flags'", "'GL_NV_depth_nonlinear' : { 'esnumber' : 73, 'flags' : { 'public' }, 'url' :", "'GL_OES_depth_texture' : { 'esnumber' : 43, 'flags' : { 'public' }, 'url' :", "}, 'GL_QCOM_extended_get2' : { 'esnumber' : 63, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_video_sync.txt',", "}, 'GL_EXT_copy_image' : { 'esnumber' : 175, 'flags' : { 'public' }, 'url'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias' : {", "303, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 24, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth24.txt', }, 'GL_OES_depth32' :", "{ 'esnumber' : 124, 
'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_sync.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_minmax_factor.txt', }, 'GL_NV_blend_square'", "'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_triangle_list.txt', }, 'GL_SUN_vertex' :", "}, }, 'GL_ARB_geometry_shader4' : { 'arbnumber' : 47, 'flags' : { 'public' },", ": 125, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_binding.txt', }, 'GL_ARB_vertex_blend' :", "'url' : 'extensions/EXT/EXT_draw_elements_base_vertex.txt', }, 'GL_EXT_draw_instanced' : { 'number' : 327, 'esnumber' : 157,", "'extensions/OML/GLX_OML_sync_control.txt', }, 'GL_OVR_multiview' : { 'number' : 478, 'esnumber' : 241, 'flags' :", "{ 'public' }, 'url' : 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', }, 'GL_PGI_misc_hints' : { 'number' : 77,", "'3DL', 'ATI', 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_object.txt', }, 'GL_EXT_framebuffer_sRGB' : { 'number'", "264, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'url' : 'extensions/ARB/ARB_shader_clock.txt', }, 'GL_ARB_shader_draw_parameters' : { 'arbnumber' : 156, 'flags' : {", "}, 'GL_EXT_external_buffer' : { 'number' : 508, 'esnumber' : 284, 'flags' : {", "67, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_program_binary.txt', }, 'GL_IMG_read_format' : {", "{ 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_vertex.txt', }, 'GL_VIV_shader_binary'", "119, 'esnumber' : 118, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_debug.txt', },", "227, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_filter_minmax.txt', }, 'GL_EXT_texture_format_BGRA8888' : {", "84, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_swizzle.txt', }, 'GL_ARB_texture_view' : {", ": 223, 'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_static_data.txt', }, 'GL_IBM_texture_mirrored_repeat' :", "'url' : 
'extensions/SGIX/SGIX_polynomial_ffd.txt', }, 'GL_SGIX_quad_mesh' : { 'flags' : { 'incomplete' }, 'url'", "{ 'arbnumber' : 42, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'GL_SUN_convolution_border_modes' : { 'number' : 182, 'flags' : { 'public' }, 'supporters' :", ": { 'esnumber' : 85, 'flags' : { 'public' }, 'url' : 'extensions/VIV/VIV_shader_binary.txt',", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_buffer.txt', }, 'GL_EXT_texture_buffer_object' : { 'number' :", "}, 'url' : 'extensions/ARB/ARB_texture_mirrored_repeat.txt', }, 'GL_ARB_texture_multisample' : { 'arbnumber' : 67, 'flags' :", "}, 'GL_PGI_vertex_hints' : { 'number' : 76, 'flags' : { 'public' }, 'supporters'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_matrix_palette.txt', }, 'GL_ARB_multi_bind' : { 'arbnumber' :", "}, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_compressed_paletted_texture.txt', }, 'GL_OES_copy_image' : {", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb.txt', }, 'GL_SGIX_ycrcb_subsample' : { 'number' : 204,", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_unpack_subimage.txt', }, 'GL_EXT_vertex_array' : { 'number'", "}, 'url' : 'extensions/ARB/ARB_texture_filter_anisotropic.txt', }, 'GL_ARB_texture_filter_minmax' : { 'arbnumber' : 188, 'flags' :", "'extensions/MESA/GLX_MESA_release_buffers.txt', }, 'GL_MESA_resize_buffers' : { 'number' : 196, 'flags' : { 'public' },", "'GL_NV_explicit_attrib_location' : { 'esnumber' : 159, 'flags' : { 'public' }, 'url' :", "'number' : 476, 'esnumber' : 237, 'flags' : { 'public' }, 'url' :", "'extensions/SGIX/SGIX_subsample.txt', }, 'GLX_SGIX_swap_barrier' : { 'number' : 92, 'flags' : { 'public' },", ": { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pbuffer.txt', }, 'WGL_ARB_pixel_format' : { 'arbnumber' :", ": { 'public' }, 'url' : 'extensions/NV/GLX_NV_delay_before_swap.txt', }, 'GL_NV_depth_buffer_float' : { 'number' :", "'GL_ARB_tessellation_shader' : { 'arbnumber' : 91, 'flags' : { 'public' }, 'url' :", ": 'Extension 
shipped but was not fully specified. Similar to ARB_texture_cube_map.', }, 'GL_EXT_texture_cube_map_array'", "}, 'url' : 'extensions/QCOM/QCOM_writeonly_rendering.txt', }, 'GL_QCOM_framebuffer_foveated' : { 'esnumber' : 273, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_map_buffer_range.txt',", "'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_setXXX.txt', }, 'GL_EXT_vertex_attrib_64bit' :", ": 148, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_query_buffer_object.txt', }, 'GL_ARB_robust_buffer_access_behavior' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_array.txt', }, 'GL_EXT_texture_border_clamp'", "}, 'GL_APPLE_flush_buffer_range' : { 'number' : 321, 'flags' : { 'public' }, 'supporters'", ": 133, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_timer_query.txt', }, 'GL_EXT_transform_feedback' : { 'number'", "'extensions/EXT/EXT_debug_label.txt', }, 'GL_EXT_debug_marker' : { 'number' : 440, 'esnumber' : 99, 'flags' :", "'GL_SGIX_texture_add_env' : { 'number' : 69, 'flags' : { 'public' }, 'supporters' :", ": 119, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_binning_control.txt', }, 'GL_QCOM_driver_control' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_multisample.txt', }, 'GL_ARB_texture_non_power_of_two' : { 'arbnumber' : 34,", "181, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_draw_buffers.txt', }, 'GL_ATI_element_array' : { 'number'", "EXT_texture_env_combine.', }, 'GL_EXT_texture' : { 'number' : 4, 'flags' : { 'public' },", "{ 'number' : 204, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "'number' : 85, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "}, 'GL_EXT_sparse_texture' : { 'esnumber' : 240, 'flags' : { 'public' }, 'url'", ": { 'APPLE' }, 'url' : 
'extensions/APPLE/APPLE_transform_hint.txt', }, 'GL_APPLE_vertex_array_object' : { 'number' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_conditional_render.txt', }, 'GL_NV_conservative_raster'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_stencil8.txt', }, 'GL_ARB_texture_storage' : { 'arbnumber'", "{ 'esnumber' : 26, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_element_index_uint.txt', },", "'REND' }, 'url' : 'extensions/EXT/EXT_secondary_color.txt', }, 'GL_EXT_separate_shader_objects' : { 'number' : 377, 'esnumber'", "'extensions/SGIX/GLX_SGIX_dm_buffer.txt', }, 'GL_SGIX_dvc' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_dvc.txt',", "'GL_EXT_shader_framebuffer_fetch' : { 'number' : 520, 'esnumber' : 122, 'flags' : { 'public'", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragments_instrument.txt', }, 'GL_SGIX_framezoom' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_map_buffer_range.txt', }, 'GL_ARB_matrix_palette' :", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_fog_function.txt',", "}, 'GL_AMD_query_buffer_object' : { 'number' : 420, 'flags' : { 'public' }, 'supporters'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_texture_rectangle.txt', }, 'WGL_NV_swap_group' : {", ": 'extensions/AMD/AMD_debug_output.txt', }, 'GL_AMD_depth_clamp_separate' : { 'number' : 401, 'flags' : { 'public'", ": 226, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_raster_multisample.txt', }, 'GL_EXT_read_format_bgra' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_scale.txt', }, 'GL_SGIX_fog_texture' : { 'flags' :", ": { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_triangle_list.txt', },", "'extensions/AMD/AMD_interleaved_elements.txt', }, 'GL_AMD_multi_draw_indirect' : { 'number' : 408, 'flags' : { 'public' },", "}, 'GLX_ARB_create_context_profile' : { 'arbnumber' : 75, 'flags' 
: { 'public' }, 'url'", "'extensions/SGIS/SGIS_texture_border_clamp.txt', }, 'GL_SGIS_texture_color_mask' : { 'number' : 214, 'flags' : { 'incomplete', 'public'", "{ 'number' : 418, 'esnumber' : 197, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_draw_parameters.txt', }, 'GL_ARB_shader_group_vote' : { 'arbnumber' : 157,", "298, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', }, 'GL_EXT_EGL_image_storage' : {", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_float.txt', 'alias' : { 'GL_OES_texture_half_float' }, },", "98, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_precision.txt', }, 'GL_ARB_shader_stencil_export' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_texture_rectangle.txt',", "'url' : 'extensions/APPLE/APPLE_client_storage.txt', }, 'GL_APPLE_clip_distance' : { 'esnumber' : 193, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_string_marker.txt',", "extensions, but shipped as ARB_transform_feedback2.', }, 'GL_EXT_unpack_subimage' : { 'esnumber' : 90, 'flags'", "'url' : 'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', }, 'GL_AMD_transform_feedback4' : { 'number' : 450, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_weighting.txt', }, 'GLX_EXT_visual_info' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_crossbar.txt', }, 'GL_ARB_texture_env_dot3' :", ": 244, 'flags' : { 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_shader_binary.txt', }, 'GL_NVX_blend_equation_advanced_multi_draw_buffers'", ": 'extensions/EXT/EXT_texture_sRGB.txt', }, 'GL_EXT_texture_sRGB_R8' : { 'esnumber' : 221, 'flags' : { 'public'", "'url' : 'extensions/ARB/ARB_texture_storage_multisample.txt', }, 'GL_ARB_texture_swizzle' : { 'arbnumber' : 84, 'flags' : {", "}, 'GL_ARB_invalidate_subdata' 
: { 'arbnumber' : 132, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/ARB/ARB_provoking_vertex.txt', }, 'GL_ARB_query_buffer_object' : { 'arbnumber' : 148, 'flags' :", ": 14, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI', 'SUN'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression_rgtc.txt', }, 'GL_ARB_texture_cube_map'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_multisample.txt', }, 'GL_NV_texture_npot_2D_mipmap' : { 'esnumber' :", ": 'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias' : { 'GL_EXT_shader_framebuffer_fetch_non_coherent' }, }, 'GL_EXT_shader_group_vote' : { 'esnumber' :", ": { 'esnumber' : 150, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_disjoint_timer_query.txt',", "'extensions/ARB/ARB_vertex_blend.txt', }, 'GL_ARB_vertex_buffer_object' : { 'arbnumber' : 28, 'flags' : { 'public' },", ": 'extensions/NV/NV_fragment_coverage_to_color.txt', }, 'GL_NV_fragment_program' : { 'number' : 282, 'flags' : { 'public'", "{ 'number' : 149, 'flags' : { 'public' }, 'supporters' : { '3DFX',", ": { 'number' : 485, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt',", "}, 'url' : 'extensions/ARB/ARB_texture_barrier.txt', }, 'GL_ARB_texture_border_clamp' : { 'arbnumber' : 13, 'flags' :", "'number' : 154, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "{ 'esnumber' : 86, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_frag_depth.txt', },", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_video_sync.txt', }, 'GL_SUNX_constant_data' :", ": { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_gamma.txt', }, 'WGL_I3D_genlock' : { 'number' :", "208, 'flags' : { 'public' }, 'supporters' : { '3DFX' }, 'url' :", "12, 'flags' : { 'public' }, 'supporters' : { 'HP', 'KGC', 'SGI', 'SUN'", "'WGL_ARB_render_texture' : { 'arbnumber' : 20, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 
'extensions/ARB/ARB_texture_env_dot3.txt', }, 'GL_ARB_texture_filter_anisotropic' :", ": 244, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "'number' : 491, 'esnumber' : 265, 'flags' : { 'public' }, 'supporters' :", "107, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_robustness.txt', }, 'GL_EXT_sRGB' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_half_float.txt', }, 'GL_NV_image_formats' : { 'esnumber' : 200, 'flags'", "'SUN' }, 'url' : 'extensions/SUN/SUN_convolution_border_modes.txt', }, 'GLX_SUN_get_transparent_index' : { 'number' : 183, 'flags'", "'GL_SGIX_fog_offset' : { 'number' : 65, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_binding.txt', }, 'GL_ARB_vertex_blend' : { 'arbnumber'", ": { 'arbnumber' : 96, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_program_binary.txt',", ": 272, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_transform_feedback.txt', }, 'GL_EXT_external_buffer' :", ": 168, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias' : {", "'GL_ATI_draw_buffers' : { 'number' : 277, 'flags' : { 'public' }, 'supporters' :", "'extensions/EXT/EXT_framebuffer_blit.txt', }, 'GL_EXT_framebuffer_multisample' : { 'number' : 317, 'flags' : { 'public' },", ": { 'number' : 442, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'url' : 'extensions/ARB/ARB_sample_shading.txt', }, 'GL_ARB_sampler_objects' : { 'arbnumber' : 81,", "67, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_multisample.txt', }, 'GL_ARB_texture_non_power_of_two' : {", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rectangle.txt', },", ": 90, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_unpack_subimage.txt', }, 'GL_EXT_vertex_array' :", "228, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster.txt', }, 'GL_NV_conservative_raster_dilate' : {", "{ 'number' : 485, 'flags' : { 'public' 
}, 'url' : 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', },", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow_ambient.txt', }, 'GL_ARB_sparse_buffer' : { 'arbnumber' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_texture.txt', }, 'GL_ARB_derivative_control' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias' : { 'GLX_ARB_framebuffer_sRGB',", ": { 'number' : 462, 'esnumber' : 226, 'flags' : { 'public' },", "{ 'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float' }, }, 'GL_ARB_compatibility' : { 'arbnumber' : 58, 'flags' :", "'public' }, 'url' : 'extensions/ARB/ARB_shader_image_load_store.txt', }, 'GL_ARB_shader_image_size' : { 'arbnumber' : 136, 'flags'", "'esnumber' : 167, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage.txt', }, 'GL_EXT_shader_pixel_local_storage2'", "265, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_shader4.txt', },", "}, 'url' : 'extensions/NV/NV_shader_atomic_float64.txt', }, 'GL_NV_shader_atomic_fp16_vector' : { 'number' : 474, 'esnumber' :", ": 'extensions/NV/NV_sRGB_formats.txt', }, 'GL_NV_sample_locations' : { 'number' : 472, 'esnumber' : 235, 'flags'", ": 94, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_read_depth_stencil.txt', }, 'GL_NV_register_combiners' :", "}, 'GL_OES_texture_env_crossbar' : { 'esnumber' : 21, 'flags' : { 'public' }, 'url'", "'extensions/EXT/EXT_multisampled_render_to_texture.txt', }, 'GL_EXT_multisampled_render_to_texture2' : { 'esnumber' : 275, 'flags' : { 'public' },", "}, 'url' : 'extensions/SGIX/SGIX_tag_sample_buffer.txt', }, 'GL_SGIX_texture_add_env' : { 'number' : 69, 'flags' :", ": 139, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_range.txt', }, 'GL_ARB_texture_compression' :", ": 'extensions/3DL/WGL_3DL_stereo_control.txt', }, 'WGL_AMD_gpu_association' : { 'number' : 361, 
'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_load_store.txt', }, 'GL_ARB_shader_image_size' : { 'arbnumber' : 136,", ": 'extensions/NV/NV_fragment_shader_interlock.txt', }, 'GL_NV_framebuffer_blit' : { 'esnumber' : 142, 'flags' : { 'public'", "'esnumber' : 258, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_swizzle.txt', }, 'GL_OES_EGL_image'", "255, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_framebuffer_downsample.txt', }, 'GL_IMG_multisampled_render_to_texture' : {", "73, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_lod.txt', }, 'GL_ARB_texture_rectangle' : {", ": { 'number' : 296, 'esnumber' : 16, 'flags' : { 'public' },", "'extensions/EXT/EXT_map_buffer_range.txt', }, 'GL_EXT_misc_attribute' : { 'number' : 31, 'flags' : { 'public' },", "}, 'supporters' : { 'HP', 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_rating.txt', }, 'GL_EXT_win32_keyed_mutex'", "'extensions/QCOM/QCOM_extended_get2.txt', }, 'GL_QCOM_performance_monitor_global_mode' : { 'esnumber' : 56, 'flags' : { 'public' },", "'url' : 'extensions/SGI/GLX_SGI_make_current_read.txt', }, 'GLX_SGI_swap_control' : { 'number' : 40, 'flags' : {", "'url' : 'extensions/SGIX/SGIX_texture_multi_buffer.txt', }, 'GL_SGIX_texture_phase' : { 'flags' : { 'incomplete' }, 'url'", "'GL_ARB_shader_storage_buffer_object' : { 'arbnumber' : 137, 'flags' : { 'public' }, 'url' :", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/GLX_AMD_gpu_association.txt', }, 'GL_AMD_gpu_shader_half_float' : { 'number'", "}, 'GL_ARB_get_program_binary' : { 'arbnumber' : 96, 'flags' : { 'public' }, 'url'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_buffer_object.txt', }, 'GL_EXT_texture_compression_astc_decode_mode' : { 'esnumber'", "{ 'public' }, 'url' : 'extensions/NV/NV_instanced_arrays.txt', }, 'GL_NV_internalformat_sample_query' : { 'number' : 475,", "191, 'flags' : { 'public' }, 'url' : 
'extensions/ARB/ARB_create_context_no_error.txt', 'comments' : 'Shares extension", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_table_index_mode.txt', }, 'GLX_SGIX_color_type' : { 'number'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_decimation.txt', }, 'GL_SGIX_depth_pass_instrument' : { 'number' :", "}, 'url' : 'extensions/ARB/ARB_ES2_compatibility.txt', }, 'GL_ARB_ES3_1_compatibility' : { 'arbnumber' : 159, 'flags' :", "'number' : 127, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "23, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image.txt', }, 'GL_OES_EGL_image_external' : {", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_select.txt', }, 'GL_SGIX_async' :", ": 349, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_filter_anisotropic.txt', }, 'GL_EXT_texture_filter_minmax' :", "'GLX_ARB_create_context' : { 'arbnumber' : 56, 'flags' : { 'public' }, 'url' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/GLX_ARB_get_proc_address.txt', }, 'GL_ARB_get_program_binary' :", "42, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', }, 'GL_EXT_texture_view' : {", "'extensions/ARB/ARB_map_buffer_range.txt', }, 'GL_ARB_matrix_palette' : { 'arbnumber' : 16, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_video_output.txt', }, 'WGL_OML_sync_control' : { 'number' : 242, 'flags'", "'TGS' }, 'url' : 'extensions/PGI/PGI_misc_hints.txt', }, 'GL_PGI_vertex_hints' : { 'number' : 76, 'flags'", ": { 'public' }, 'url' : 'extensions/NV/NV_shader_noperspective_interpolation.txt', }, 'GL_NV_shader_storage_buffer_object' : { 'number' :", "}, 'url' : 'extensions/SGI/SGI_color_matrix.txt', }, 'GL_SGI_color_table' : { 'number' : 14, 'flags' :", "{ 'esnumber' : 222, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_YUV_target.txt', },", "'public' }, 'supporters' : { 
'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_buffer_object.txt', 'alias' : {", "'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_buffer_age.txt', }, 'GL_EXT_buffer_storage' : { 'esnumber' : 239,", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_slim.txt', }, 'GL_SGIX_spotlight_cutoff' : { 'number' : 131,", "292, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", ": { 'number' : 20, 'flags' : { 'public' }, 'supporters' : {", "'arbnumber' : 125, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_binding.txt', }, 'GL_ARB_vertex_blend'", "'esnumber' : 248, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisample_compatibility.txt', }, 'GL_EXT_multisampled_render_to_texture'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_mirrored_repeat.txt', }, 'GL_ARB_texture_multisample' : {", "}, 'url' : 'extensions/EXT/EXT_texture_env_dot3.txt', }, 'GL_EXT_texture_filter_anisotropic' : { 'number' : 187, 'esnumber' :", ": 'extensions/NV/NV_packed_float.txt', }, 'GL_NV_parameter_buffer_object' : { 'number' : 339, 'flags' : { 'public'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_bindable_uniform.txt', }, 'GL_EXT_blend_color' : { 'number' :", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_shader_integer_functions.txt', }, 'GLX_MESA_swap_control' : { 'number'", "}, 'supporters' : { 'INTEL' }, 'url' : 'extensions/EXT/EXT_shader_integer_mix.txt', }, 'GL_EXT_shader_io_blocks' : {", "'number' : 52, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", ": { 'number' : 513, 'flags' : { 'public' }, 'supporters' : {", "}, 'GLU_SGI_filter4_parameters' : { 'number' : 85, 'flags' : { 'public' }, 'supporters'", "'number' : 351, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'public' }, 'url' : 'extensions/EXT/EXT_shader_image_load_formatted.txt', }, 'GL_EXT_shader_image_load_store' : { 'number' :", "'extensions/ARB/ARB_multi_draw_indirect.txt', }, 'GL_ARB_multisample' : { 'arbnumber' : 5, 'flags' : { 'public' },", "}, 
'GL_OES_point_sprite' : { 'esnumber' : 15, 'flags' : { 'public' }, 'url'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object.txt', }, 'GL_ARB_texture_buffer_object_rgb32' : { 'arbnumber' :", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_flush_buffer_range.txt', }, 'GL_APPLE_framebuffer_multisample' :", "'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', }, 'GL_NV_draw_buffers' : { 'esnumber' : 91, 'flags'", ": { 'number' : 127, 'flags' : { 'public' }, 'supporters' : {", "{ 'INGR' }, 'url' : 'extensions/EXT/EXT_422_pixels.txt', }, 'GL_EXT_YUV_target' : { 'esnumber' : 222,", "'public' }, 'url' : 'extensions/IMG/IMG_read_format.txt', }, 'GL_IMG_shader_binary' : { 'esnumber' : 68, 'flags'", "'extensions/NV/NV_parameter_buffer_object2.txt', }, 'GL_NV_path_rendering' : { 'number' : 410, 'esnumber' : 199, 'flags' :", ": { 'number' : 257, 'flags' : { 'public' }, 'supporters' : {", "'ARB' }, 'url' : 'extensions/ARB/ARB_geometry_shader4.txt', }, 'GLX_ARB_get_proc_address' : { 'arbnumber' : 2, 'flags'", ": { 'number' : 430, 'esnumber' : 126, 'flags' : { 'public' },", ": 'extensions/NV/NV_shader_buffer_store.txt', }, 'GL_NV_shader_noperspective_interpolation' : { 'esnumber' : 201, 'flags' : { 'public'", ": 285, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_object.txt', },", ": 141, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_instanced.txt', }, 'GL_NV_draw_texture' :", "'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', }, 'GL_IMG_bindless_texture' : { 'esnumber' : 270, 'flags'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_integer.txt', }, 'GL_EXT_texture_lod_bias' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_texture.txt',", "'number' : 49, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "117, 'flags' : { 
'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias' : { 'GL_KHR_texture_compression_astc_ldr'", "{ 'number' : 391, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'url' : 'extensions/SGIX/SGIX_mpeg1.txt', }, 'GL_SGIX_mpeg2' : { 'flags' : { 'incomplete' }, 'url'", "'arbnumber' : 57, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_uniform_buffer_object.txt', }, 'GL_ARB_vertex_array_bgra'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_tessellation_program5.txt', }, 'GL_NV_texgen_emboss' :", "{ 'SUN' }, 'url' : 'extensions/SUN/SUN_slice_accum.txt', }, 'GL_SUN_triangle_list' : { 'number' : 165,", "'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_triangle_list.txt', }, 'GL_SUN_vertex' : { 'number'", "'url' : 'extensions/SUN/SUN_vertex.txt', }, 'GL_VIV_shader_binary' : { 'esnumber' : 85, 'flags' : {", "}, 'GL_NV_explicit_multisample' : { 'number' : 357, 'flags' : { 'public' }, 'supporters'", ": 500, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_alpha_to_coverage_dither_control.txt', }, 'GL_NV_bgr' :", "'arbnumber' : 58, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compatibility.txt', }, 'GL_ARB_compressed_texture_pixel_storage'", "'extensions/ARB/ARB_shader_ballot.txt', }, 'GL_ARB_shader_bit_encoding' : { 'arbnumber' : 82, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_extensions_string.txt', },", "}, 'url' : 'extensions/AMD/AMD_performance_monitor.txt', }, 'GL_AMD_pinned_memory' : { 'number' : 411, 'flags' :", ": 'extensions/ARB/ARB_texture_barrier.txt', }, 'GL_ARB_texture_border_clamp' : { 'arbnumber' : 13, 'flags' : { 'public'", "}, 'GL_EXT_blend_func_separate' : { 'number' : 173, 'flags' : { 'public' }, 'supporters'", "}, 'supporters' : { 'ES', 'HP', 'SGI' }, 'url' : 'extensions/EXT/EXT_copy_texture.txt', }, 'GLX_EXT_create_context_es2_profile'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 
'extensions/ARB/ARB_fragment_shader.txt', },", "{ 'arbnumber' : 32, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "{ '3DFX' }, 'url' : 'extensions/3DFX/3DFX_multisample.txt', }, 'GL_3DFX_tbuffer' : { 'number' : 208,", "{ 'public' }, 'url' : 'extensions/ARB/ARB_uniform_buffer_object.txt', }, 'GL_ARB_vertex_array_bgra' : { 'arbnumber' : 68,", "'number' : 225, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 130, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "'GL_QCOM_alpha_test' : { 'esnumber' : 89, 'flags' : { 'public' }, 'url' :", "'extensions/SGIX/GLX_SGIX_swap_group.txt', }, 'GL_SGIX_tag_sample_buffer' : { 'number' : 58, 'flags' : { 'public' },", ": 236, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "{ 'public' }, 'supporters' : { 'MESA', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', },", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_interleaved_elements.txt', }, 'GL_AMD_multi_draw_indirect'", "'public' }, 'url' : 'extensions/NV/NV_EGL_stream_consumer_external.txt', }, 'GL_NV_alpha_to_coverage_dither_control' : { 'number' : 500, 'flags'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_spirv_extensions.txt', }, 'GL_ARB_stencil_texturing' : { 'arbnumber' : 138,", ": 382, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "107, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_base_instance.txt', }, 'GL_ARB_bindless_texture' : {", "58, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compatibility.txt', }, 'GL_ARB_compressed_texture_pixel_storage' : {", ": 'extensions/ARB/ARB_draw_indirect.txt', }, 'GL_ARB_draw_instanced' : { 'arbnumber' : 44, 'flags' : { 'public'", "94, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url'", ": 438, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'url' : 'extensions/OES/OES_draw_texture.txt', }, 'GL_OES_element_index_uint' : { 'esnumber' : 26, 'flags' : {", "{ 'NVIDIA' }, 'url' : 
'extensions/NV/NV_register_combiners.txt', }, 'GL_NV_register_combiners2' : { 'number' : 227,", "{ 'esnumber' : 215, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_border_clamp.txt', },", "'extensions/NV/NV_texture_rectangle.txt', }, 'GL_NV_texture_rectangle_compressed' : { 'number' : 509, 'flags' : { 'public' },", "{ 'WGL_ARB_create_context_no_error' }, }, 'GLX_ARB_create_context_profile' : { 'arbnumber' : 75, 'flags' : {", "'number' : 424, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_ARB_shadow' : { 'arbnumber' : 23, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 228, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "201, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_noperspective_interpolation.txt', }, 'GL_NV_shader_storage_buffer_object' : {", "{ 'public' }, 'url' : 'extensions/SGIX/SGIX_fog_texture.txt', }, 'GL_SGIX_fragment_lighting_space' : { 'number' : 118,", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_instruments.txt', }, 'GL_SGIX_interlace' : { 'number'", "'url' : 'extensions/OES/OES_EGL_image.txt', }, 'GL_OES_EGL_image_external' : { 'esnumber' : 87, 'flags' : {", "'GL_SGIX_bali_r_instruments' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_r_instruments.txt', }, 'GL_SGIX_bali_timer_instruments'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_cube_map_array.txt', }, 'GL_EXT_texture_env' : { 'number'", ": { 'HP', 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_rating.txt', }, 'GL_EXT_win32_keyed_mutex' : {", "'WGL_EXT_framebuffer_sRGB' }, }, 'GL_EXT_geometry_shader' : { 'esnumber' : 177, 'flags' : { 'public'", "'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_histogram.txt', }, 'GLX_EXT_import_context' : { 'number' :", ": { 'number' : 30, 'flags' : { 'public' }, 'supporters' : {", "{ 'number' : 464, 'esnumber' : 227, 'flags' : { 'public' }, 'url'", "'GL_APPLE_element_array' : { 'number' : 271, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 
368, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_pixel_texture.txt', }, 'GL_SGIS_point_line_texgen' : { 'number'", ": 69, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_buffers_blend.txt', }, 'GL_ARB_draw_elements_base_vertex' :", "{ 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_material.txt', }, 'GL_EXT_index_texture' : { 'number' :", "245, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", "}, 'url' : 'extensions/AMD/AMD_name_gen_delete.txt', }, 'GL_AMD_occlusion_query_event' : { 'number' : 442, 'flags' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_float_buffer.txt', 'alias' : {", ": 126, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", ": 52, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "284, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc.txt',", "{ 'number' : 451, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int64.txt', },", ": { 'number' : 22, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get2.txt', }, 'GL_QCOM_performance_monitor_global_mode' : { 'esnumber' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_swap_group.txt',", ": { 'number' : 396, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_framebuffer_object.txt', }, 'GL_OES_geometry_shader' : { 'esnumber'", "'GL_ARB_texture_env_add' : { 'arbnumber' : 6, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_ycbcr_texture.txt', }, 'GL_MTK_program_binary' : { 'esnumber'", "'extensions/ARB/ARB_shader_precision.txt', }, 'GL_ARB_shader_stencil_export' : { 'arbnumber' : 106, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 
'extensions/ARB/ARB_compute_shader.txt', }, 'GL_ARB_compute_variable_group_size' : { 'arbnumber'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_read_format_bgra.txt', }, 'GL_EXT_render_snorm' : { 'esnumber' :", "'public' }, 'url' : 'extensions/NV/NV_stereo_view_rendering.txt', }, 'GLX_NV_swap_group' : { 'number' : 350, 'flags'", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_fragment_lighting.txt', },", "56, GLX_ARB_create_context.', }, 'GLX_ARB_create_context_robustness' : { 'arbnumber' : 101, 'flags' : { 'public'", "342, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_resample.txt', }, 'GL_SGIX_scalebias_hint' : { 'number' : 236, 'flags'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_clipmap.txt', }, 'GL_SGIX_color_matrix_accuracy' :", "'GL_OES_draw_elements_base_vertex' : { 'esnumber' : 219, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/SGIX/SGIX_pixel_texture_bits.txt', }, 'GL_SGIX_pixel_texture_lod' : { 'number' : 128, 'flags' : {", "}, 'GL_ARB_shader_stencil_export' : { 'arbnumber' : 106, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/EXT/EXT_texture_compression_latc.txt', }, 'GL_EXT_texture_compression_rgtc' : { 'number' : 332, 'esnumber' :", "{ 'public' }, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/HP/HP_image_transform.txt', },", "}, 'url' : 'extensions/NV/NV_blend_square.txt', }, 'GL_NV_clip_space_w_scaling' : { 'number' : 486, 'esnumber' :", ": 'extensions/EXT/EXT_texture_filter_minmax.txt', }, 'GL_EXT_texture_format_BGRA8888' : { 'esnumber' : 51, 'flags' : { 'public'", "{ 'ES', 'HP', 'SGI' }, 'url' : 'extensions/EXT/EXT_copy_texture.txt', }, 'GLX_EXT_create_context_es2_profile' : { 'number'", "50, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'GL_GREMEDY_string_marker' : { 'number' : 311, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_parallel_arrays.txt', 
}, 'GL_INTEL_performance_query' : { 'number'", "'GL_KHR_texture_compression_astc_hdr' : { 'arbnumber' : 118, 'esnumber' : 117, 'flags' : { 'public'", "}, 'GL_OES_packed_depth_stencil' : { 'esnumber' : 44, 'flags' : { 'public' }, 'url'", "registry = { 'GL_3DFX_multisample' : { 'number' : 207, 'flags' : { 'public'", ": 297, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap.txt', }, 'GL_NV_conservative_raster_pre_snap_triangles' :", ": 'extensions/NV/NV_gpu_program5.txt', }, 'GL_NV_gpu_program5_mem_extended' : { 'number' : 434, 'flags' : { 'public'", "}, 'GL_SGIX_fog_patchy' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_patchy.txt', },", "'url' : 'extensions/SGIX/SGIX_bali_r_instruments.txt', }, 'GL_SGIX_bali_timer_instruments' : { 'flags' : { 'incomplete' }, 'url'", "}, 'GL_ANGLE_framebuffer_multisample' : { 'esnumber' : 84, 'flags' : { 'public' }, 'url'", ": { 'number' : 323, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/NV/NV_fragment_program_option.txt', }, 'GL_NV_fragment_shader_interlock' : { 'number' : 468, 'esnumber' : 230,", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_border_clamp.txt', }, 'GL_NV_texture_compression_latc' : { 'esnumber'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb.txt', }, 'GL_SGIX_ycrcb_subsample'", "'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Alias to WGL_ARB_create_context_profile not needed - see arbnumber", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_wait_group.txt', }, 'GL_SGIX_ycrcb' :", "'extensions/MESA/MESA_shader_integer_functions.txt', }, 'GLX_MESA_swap_control' : { 'number' : 514, 'flags' : { 'public' },", "'extensions/ATI/ATI_map_object_buffer.txt', }, 'GL_ATI_meminfo' : { 'number' : 359, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_clear_tag.txt', }, 'GL_EXT_stencil_two_side' : { 'number' : 268, 'flags'", "'public' }, 'url' : 
'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Alias to GLX_ARB_create_context_profile not needed -", "{ 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_set.txt', }, 'GL_EXT_vertex_array_setXXX'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_colorspace.txt', }, 'WGL_EXT_create_context_es2_profile'", "}, 'GL_EXT_scene_marker' : { 'number' : 120, 'flags' : { 'public' }, 'url'", "'GL_EXT_texture_compression_bptc' : { 'esnumber' : 287, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/EXT/WGL_EXT_swap_control.txt', }, 'WGL_EXT_swap_control_tear' : { 'number' : 415, 'flags' : {", ": 99, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_64bit.txt', }, 'GL_ARB_vertex_attrib_binding' :", ": 360, 'esnumber' : 50, 'flags' : { 'public' }, 'supporters' : {", "'GL_EXT_tessellation_shader' : { 'esnumber' : 181, 'flags' : { 'public' }, 'url' :", "}, 'GL_ARB_internalformat_query2' : { 'arbnumber' : 131, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/EXT/EXT_texture.txt', }, 'GL_EXT_texture3D' : { 'number' : 6, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_sparse_buffer.txt', }, 'GL_ARB_sparse_texture' : { 'arbnumber' : 158, 'flags' :", "'GL_OES_framebuffer_object' : { 'esnumber' : 10, 'flags' : { 'public' }, 'url' :", ": 'extensions/NV/NV_shader_noperspective_interpolation.txt', }, 'GL_NV_shader_storage_buffer_object' : { 'number' : 422, 'flags' : { 'public'", "'SGI' }, 'url' : 'extensions/SGI/SGI_complex_type.txt', }, 'GLX_SGI_cushion' : { 'number' : 62, 'flags'", "'GL_EXT_win32_keyed_mutex' : { 'number' : 506, 'esnumber' : 283, 'flags' : { 'public'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_flush_raster.txt', }, 'GL_SGIX_fog_blend' : { 'flags' :", ": 182, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_border_clamp.txt', }, 'GL_EXT_texture_buffer' :", "3, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'flags' : { 'public' }, 'url' : 
'extensions/NV/NV_sample_mask_override_coverage.txt', }, 'GL_NV_shader_atomic_counters' : { 'number'", "'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/S3/S3_s3tc.txt', }, 'GLX_SGIS_blended_overlay' : {", ": 488, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_float64.txt', }, 'GL_NV_shader_atomic_fp16_vector' :", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_texture_range.txt', },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_shared_multisample.txt', 'alias' : { 'GLX_SGIS_shared_multisample'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_multi_buffer.txt', }, 'GL_SGIX_texture_phase' : { 'flags'", ": { 'number' : 349, 'flags' : { 'public' }, 'supporters' : {", "{ 'esnumber' : 129, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_3dvision_settings.txt', },", "'url' : 'extensions/HP/HP_convolution_border_modes.txt', }, 'GL_HP_image_transform' : { 'number' : 66, 'flags' : {", "}, 'url' : 'extensions/NV/NV_texture_rectangle_compressed.txt', }, 'GL_NV_texture_shader' : { 'number' : 230, 'flags' :", "'url' : 'extensions/EXT/EXT_shader_pixel_local_storage2.txt', }, 'GL_EXT_shader_texture_lod' : { 'esnumber' : 77, 'flags' : {", "}, 'GL_EXT_blend_subtract' : { 'number' : 38, 'flags' : { 'public' }, 'supporters'", ": 'drafts/MTK/MTK_program_binary.txt', }, 'GL_MTK_shader_binary' : { 'esnumber' : 244, 'flags' : { 'incomplete',", "{ 'MESA', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', }, 'GL_EXT_texture_integer' : { 'number' :", "}, 'GL_OES_stencil4' : { 'esnumber' : 32, 'flags' : { 'public' }, 'url'", ": 474, 'esnumber' : 261, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_fp16_vector.txt',", ": 'extensions/ARB/ARB_fragment_coord_conventions.txt', }, 'GL_ARB_fragment_layer_viewport' : { 'arbnumber' : 129, 'flags' : { 'public'", "'extensions/KHR/KHR_debug.txt', }, 'GL_KHR_no_error' : { 'arbnumber' : 175, 'esnumber' : 243, 'flags' :", "17, 
'flags' : { 'public' }, 'supporters' : { 'ES', 'HP', 'SGI', 'SUN'", ": { 'SGI' }, 'url' : 'extensions/SGI/GLU_SGI_filter4_parameters.txt', }, 'GLX_SGI_make_current_read' : { 'number' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_crossbar.txt', },", "'url' : 'extensions/ARB/ARB_texture_query_lod.txt', }, 'GL_ARB_texture_rectangle' : { 'arbnumber' : 38, 'flags' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_copy_image.txt', 'alias'", "}, 'GL_NV_fragment_program2' : { 'number' : 304, 'flags' : { 'public' }, 'supporters'", "'GL_NV_bindless_texture' : { 'number' : 418, 'esnumber' : 197, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pbuffer.txt',", ": 'extensions/EXT/EXT_abgr.txt', }, 'GL_EXT_base_instance' : { 'esnumber' : 203, 'flags' : { 'public'", "'GL_NV_alpha_to_coverage_dither_control' : { 'number' : 500, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'url' : 'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', }, 'GL_REND_screen_coordinates' : { 'number' : 155,", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_object_space_tess.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_lod_bias.txt', }, 'GL_EXT_texture_mirror_clamp'", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_packed_6bytes.txt',", "'number' : 479, 'esnumber' : 242, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_packed_float.txt', }, 'GL_NV_parameter_buffer_object' : { 'number'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias' : {", "'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments' : 'Supported on Visual Workstation 320 / 540 only.', }, 'GL_SGIX_ycrcba'", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_fence.txt',", "156, 
'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_instanced_arrays.txt', }, 'GLX_EXT_libglvnd' : {", ": { 'number' : 223, 'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_static_data.txt',", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_calligraphic_fragment.txt', }, 'GL_SGIX_clipmap' : {", "}, 'GL_OES_blend_equation_separate' : { 'esnumber' : 1, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', }, 'GL_OES_compressed_paletted_texture' : { 'number' : 294, 'esnumber' : 6,", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_g_instruments.txt', }, 'GL_SGIX_bali_r_instruments' : { 'flags' : { 'incomplete'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shading_language_100.txt', }, 'GL_ARB_shading_language_420pack' :", "'arbnumber' : 165, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_texture_sub_image.txt', }, 'GL_ARB_gl_spirv'", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_ycbcr_422.txt', }, 'GL_ARB_ES2_compatibility' : { 'arbnumber' : 95, 'flags'", "{ 'public' }, 'url' : 'extensions/NV/NV_fragment_shader_interlock.txt', }, 'GL_NV_framebuffer_blit' : { 'esnumber' : 142,", "'url' : 'extensions/ARB/ARB_clear_buffer_object.txt', }, 'GL_ARB_clear_texture' : { 'arbnumber' : 145, 'flags' : {", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_packed_6bytes.txt', }, 'GLX_SGIX_pbuffer' : { 'number' :", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_occlusion_instrument.txt', }, 'GL_SGIX_packed_6bytes'", "'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_digital_video_control.txt', }, 'WGL_I3D_gamma' : { 'number'", ": 234, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_path_rendering_shared_edge.txt', }, 'GL_NV_pixel_buffer_object' :", "{ 'number' : 160, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", ": 515, 'esnumber' : 292, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/ARB/ARB_timer_query.txt', }, 
'GL_ARB_transform_feedback2' : { 'arbnumber' : 93, 'flags'", ": { 'number' : 290, 'flags' : { 'public' }, 'supporters' : {", "'extensions/EXT/EXT_post_depth_coverage.txt', }, 'GL_EXT_primitive_bounding_box' : { 'esnumber' : 186, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_compression_astc.txt', }, 'GL_OES_texture_cube_map' : { 'esnumber' :", "'GL_OML_interlace' : { 'number' : 239, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/EXT/EXT_texture_lod_bias.txt', }, 'GL_EXT_texture_mirror_clamp' : { 'number' : 298, 'flags' :", "'number' : 205, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nurbs_eval.txt', }, 'GL_SGIX_occlusion_instrument' : { 'number' :", "'number' : 364, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming'", "{ 'arbnumber' : 55, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "59, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', }, 'GL_APPLE_texture_format_BGRA8888' : {", "116, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_surfaceless_context.txt', }, 'GL_OES_tessellation_shader' : {", "}, 'url' : 'extensions/EXT/EXT_disjoint_timer_query.txt', }, 'GL_EXT_draw_buffers' : { 'esnumber' : 151, 'flags' :", "235, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program1_1.txt', }, 'GL_NV_vertex_program2' :", "}, 'GL_ARB_shading_language_packing' : { 'arbnumber' : 116, 'flags' : { 'public' }, 'url'", "}, 'GL_NV_fragment_program' : { 'number' : 282, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 241, 'flags' : { 'public' }, 'supporters' : {", ": 218, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_view.txt', }, 'GL_OES_vertex_array_object' :", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_r_instruments.txt', }, 'GL_SGIX_bali_timer_instruments' : { 'flags' :", "'number' : 474, 
'esnumber' : 261, 'flags' : { 'public' }, 'url' :", ": 'extensions/ARB/ARB_vertex_attrib_64bit.txt', }, 'GL_ARB_vertex_attrib_binding' : { 'arbnumber' : 125, 'flags' : { 'public'", "}, 'GL_ARB_cull_distance' : { 'arbnumber' : 162, 'flags' : { 'public' }, 'url'", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_type.txt',", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_bptc.txt', },", "'3DFX' }, 'url' : 'extensions/3DFX/3DFX_tbuffer.txt', }, 'GL_3DFX_texture_compression_FXT1' : { 'number' : 206, 'flags'", "'url' : 'extensions/ARB/ARB_explicit_attrib_location.txt', }, 'GL_ARB_explicit_uniform_location' : { 'arbnumber' : 128, 'flags' : {", ": 'extensions/ARB/ARB_shader_precision.txt', }, 'GL_ARB_shader_stencil_export' : { 'arbnumber' : 106, 'flags' : { 'public'", "'extensions/NV/NV_bindless_texture.txt', }, 'GL_NV_blend_equation_advanced' : { 'number' : 433, 'esnumber' : 163, 'flags' :", "{ 'esnumber' : 150, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_disjoint_timer_query.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_point_sprite.txt', }, 'GL_NV_polygon_mode'", "'extensions/APPLE/APPLE_color_buffer_packed_float.txt', }, 'GL_APPLE_copy_texture_levels' : { 'esnumber' : 123, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/EXT/EXT_copy_image.txt', }, 'GL_EXT_copy_texture' : { 'number' : 10, 'flags'", ": 200, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_image_formats.txt', }, 'GL_NV_instanced_arrays' :", "}, 'url' : 'extensions/INTEL/INTEL_performance_query.txt', }, 'GLX_INTEL_swap_event' : { 'number' : 384, 'flags' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback2.txt', }, 'GL_NV_uniform_buffer_unified_memory'", "'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/EXT/EXT_transform_feedback2.txt', 'comments' : 'Draft extension", "175, 'esnumber' : 243, 'flags' : { 
'public' }, 'url' : 'extensions/KHR/KHR_no_error.txt', },", ": 'extensions/SGIS/SGIS_pixel_texture.txt', }, 'GL_SGIS_point_line_texgen' : { 'number' : 213, 'flags' : { 'public'", "'WGL_I3D_digital_video_control' : { 'number' : 250, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robustness.txt', }, 'GL_KHR_texture_compression_astc_hdr' : { 'arbnumber'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_buffer_object.txt', }, 'GL_EXT_texture_compression_astc_decode_mode' :", "'number' : 287, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_packed_depth_stencil.txt', }, 'GL_NV_packed_float' : { 'esnumber'", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int16.txt', }, 'GL_AMD_gpu_shader_int64' :", ": 111, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias' : {", ": 371, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", "149, 'flags' : { 'public' }, 'supporters' : { '3DFX', 'NVIDIA', 'REND' },", "{ 'number' : 446, 'flags' : { 'public' }, 'url' : 'extensions/MESA/GLX_MESA_query_renderer.txt', },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader.txt', }, 'GL_NV_texture_shader2' : { 'number' : 231, 'flags'", "'extensions/SGIS/SGIS_texture_edge_clamp.txt', }, 'GL_SGIS_texture_filter4' : { 'number' : 7, 'flags' : { 'public' },", "195, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 'extensions/NV/NV_framebuffer_mixed_samples.txt', }, 'GL_NV_framebuffer_multisample' : { 'esnumber' : 143, 'flags' : { 'public'", "{ 'number' : 284, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'number' : 222, 'esnumber' : 52, 'flags' : { 'public' },", "'extensions/IBM/IBM_texture_mirrored_repeat.txt', }, 'GL_IBM_vertex_array_lists' : { 'number' : 201, 'flags' : { 'public' },", ": { 'number' : 274, 'flags' : { 'public' }, 'supporters' : {", "{ 
'arbnumber' : 85, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_timer_query.txt', },", "}, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_single_precision.txt', }, 'GL_OES_standard_derivatives' : {", "'extensions/OES/OES_shader_io_blocks.txt', }, 'GL_OES_shader_multisample_interpolation' : { 'esnumber' : 172, 'flags' : { 'public' },", "}, }, 'GL_ARB_multitexture' : { 'arbnumber' : 1, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_string_marker.txt', }, 'GL_HP_convolution_border_modes' :", ": 299, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_sRGB_override.txt', }, 'GLX_EXT_texture_from_pixmap' :", "{ 'number' : 496, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'SUN' }, 'url' : 'extensions/EXT/EXT_texture3D.txt', }, 'GL_EXT_texture_array' : { 'number' : 329, 'flags'", "{ 'esnumber' : 223, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_RG8.txt', },", "{ 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_string_marker.txt', }, 'GL_HP_convolution_border_modes' : { 'number' : 67,", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_name_gen_delete.txt', }, 'GL_AMD_occlusion_query_event' : {", ": 'extensions/APPLE/APPLE_specular_vector.txt', }, 'GL_APPLE_sync' : { 'esnumber' : 124, 'flags' : { 'public'", "}, 'url' : 'extensions/NV/NV_packed_depth_stencil.txt', }, 'GL_NV_packed_float' : { 'esnumber' : 127, 'flags' :", "'number' : 197, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader.txt', }, 'GL_NV_texture_shader2' :", "'public' }, 'url' : 'extensions/ARB/ARB_explicit_attrib_location.txt', }, 'GL_ARB_explicit_uniform_location' : { 'arbnumber' : 128, 'flags'", "269, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url'", "'esnumber' : 106, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture.txt', }, 
'GL_EXT_multisampled_render_to_texture2'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_shader5.txt', }, 'GL_NV_half_float'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_get_program_binary.txt', }, 'GL_ARB_get_texture_sub_image' : { 'arbnumber' : 165,", "'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_generate_mipmap.txt', }, 'GL_SGIS_line_texgen' : {", "}, 'url' : 'extensions/ARB/ARB_point_parameters.txt', }, 'GL_ARB_point_sprite' : { 'arbnumber' : 35, 'flags' :", "{ 'number' : 409, 'flags' : { 'public' }, 'supporters' : { 'APPLE',", "'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_instanced_arrays.txt', }, 'GL_ANGLE_pack_reverse_row_order' : { 'esnumber'", "'number' : 477, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_command_list.txt', }, 'GL_NV_compute_program5'", ": 89, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_fp64.txt', }, 'GL_ARB_gpu_shader_int64' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression_rgtc.txt', }, 'GL_ARB_texture_cube_map' :", ": 320, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' },", "'GL_NV_shadow_samplers_cube' : { 'esnumber' : 147, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_separate_stencil.txt', },", "'public' }, 'url' : 'extensions/OES/OES_sample_shading.txt', }, 'GL_OES_sample_variables' : { 'esnumber' : 170, 'flags'", "'GL_NV_transform_feedback' : { 'number' : 341, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/NV/NV_instanced_arrays.txt', }, 'GL_NV_internalformat_sample_query' : { 'number' : 475, 'esnumber' : 196, 'flags'", "39, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "}, 'url' : 'extensions/OES/OES_copy_image.txt', }, 'GL_OES_depth24' : { 'esnumber' : 24, 'flags' :", "'url' : 'extensions/APPLE/APPLE_transform_hint.txt', }, 'GL_APPLE_vertex_array_object' : { 'number' : 273, 'flags' : {", ": 75, 'flags' : { 'public' }, 
'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Included", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_point_line_texgen.txt',", "353, 'flags' : { 'public' }, 'supporters' : { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_cl_event.txt', }, 'GL_ARB_clear_buffer_object' : { 'arbnumber' : 121,", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program4.txt', }, 'GL_NV_fragment_program_option' : {", "'url' : 'extensions/NV/NV_sample_locations.txt', }, 'GL_NV_sample_mask_override_coverage' : { 'number' : 473, 'esnumber' : 236,", ": { 'public' }, 'url' : 'extensions/NV/NV_image_formats.txt', }, 'GL_NV_instanced_arrays' : { 'esnumber' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_shader4.txt', }, 'GL_NV_geometry_shader_passthrough' : {", ": 'extensions/OES/OES_extended_matrix_palette.txt', }, 'GL_OES_fbo_render_mipmap' : { 'esnumber' : 27, 'flags' : { 'public'", ": 266, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": 44, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_packed_depth_stencil.txt', }, 'GL_OES_paletted_texture' :", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt',", ": 434, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'public' }, 'url' : 'extensions/QCOM/QCOM_alpha_test.txt', }, 'GL_QCOM_binning_control' : { 'esnumber' : 119, 'flags'", "'url' : 'extensions/ARB/ARB_shader_viewport_layer_array.txt', }, 'GL_ARB_shading_language_100' : { 'arbnumber' : 33, 'flags' : {", "}, 'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_frame_terminator.txt', }, 'GL_GREMEDY_string_marker' : {", "'extensions/ARB/ARB_sparse_texture.txt', }, 'GL_ARB_sparse_texture2' : { 'arbnumber' : 186, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_expand_normal.txt', }, 'GL_NV_texture_multisample' : { 'number' : 403, 
'flags'", ": { 'esnumber' : 202, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array.txt',", "'esnumber' : 135, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_bgr.txt', }, 'GL_NV_bindless_multi_draw_indirect'", "'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', }, 'GL_ATI_draw_buffers' : { 'number'", ": { 'number' : 182, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/ARB/ARB_get_texture_sub_image.txt', }, 'GL_ARB_gl_spirv' : { 'arbnumber' : 190, 'flags' :", ": { 'esnumber' : 276, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt',", "'extensions/ARB/ARB_fragment_layer_viewport.txt', }, 'GL_ARB_fragment_program' : { 'arbnumber' : 27, 'flags' : { 'public' },", "'GL_ARB_multisample' : { 'arbnumber' : 5, 'flags' : { 'public' }, 'supporters' :", "'number' : 130, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "{ 'number' : 224, 'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_texture_mirrored_repeat.txt', },", "'GL_EXT_texture_env_combine' : { 'number' : 158, 'flags' : { 'public' }, 'supporters' :", "'extensions/AMD/AMD_pinned_memory.txt', }, 'GL_AMD_program_binary_Z400' : { 'esnumber' : 48, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'IdSoftware', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_swizzle.txt', },", "'esnumber' : 198, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pixel_format.txt', },", ": { 'esnumber' : 36, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_float.txt',", ": 'extensions/INTEL/INTEL_blackhole_render.txt', }, 'GL_INTEL_parallel_arrays' : { 'number' : 136, 'flags' : { 'public'", "{ 'KHR' }, 'url' : 'extensions/OML/GLX_OML_sync_control.txt', }, 'GL_OVR_multiview' : { 'number' : 478,", "'extensions/ARB/ARB_instanced_arrays.txt', }, 'GL_ARB_internalformat_query' : { 'arbnumber' : 112, 'flags' : { 'public' 
},", ": 269, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clear_texture.txt', }, 'GL_EXT_clip_cull_distance' :", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_r_instruments.txt', }, 'GL_SGIX_bali_timer_instruments' : { 'flags' : { 'incomplete'", "{ 'arbnumber' : 149, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', },", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_list_priority.txt', }, 'GL_SGIX_mpeg1'", "'esnumber' : 152, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'CodeWeavers',", "245, 'flags' : { 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_program_binary.txt', }, 'GL_MTK_shader_binary' :", "'GL_OES_EGL_image_external_essl3' : { 'esnumber' : 220, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_multisample.txt', }, 'GL_NV_texture_npot_2D_mipmap' : {", ": { 'number' : 507, 'flags' : { 'public' }, 'supporters' : {", "185, 'flags' : { 'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url'", "'GL_HP_texture_lighting' : { 'number' : 111, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'url' : 'extensions/APPLE/APPLE_sync.txt', }, 'GL_APPLE_texture_2D_limited_npot' : { 'esnumber' : 59, 'flags'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_add.txt', }, 'GL_ARB_texture_env_combine'", ": 'extensions/SGIX/SGIX_instrument_error.txt', }, 'GL_SGIX_instruments' : { 'number' : 55, 'flags' : { 'public'", "'esnumber' : 218, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_view.txt', }, 'GL_OES_vertex_array_object'", "'GL_EXT_blend_minmax' : { 'number' : 37, 'esnumber' : 65, 'flags' : { 'public'", "}, 'GLU_EXT_object_space_tess' : { 'number' : 75, 'flags' : { 'public' }, 'supporters'", ": 'extensions/SGIX/SGIX_dvc.txt', }, 'GLX_SGIX_fbconfig' : { 'number' : 49, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_shader.txt', },", ": 59, 
'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_buffer.txt', }, 'GL_ARB_copy_image' :", "'url' : 'extensions/ARB/ARB_base_instance.txt', }, 'GL_ARB_bindless_texture' : { 'arbnumber' : 152, 'flags' : {", "'extensions/KHR/KHR_robust_buffer_access_behavior.txt', }, 'GL_KHR_robustness' : { 'arbnumber' : 170, 'esnumber' : 190, 'flags' :", ": 'extensions/EXT/EXT_sRGB.txt', }, 'GL_EXT_sRGB_write_control' : { 'esnumber' : 153, 'flags' : { 'public'", ": 233, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_geometry_shader_passthrough.txt', }, 'GL_NV_gpu_multicast' :", ": { 'arbnumber' : 111, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_conservative_depth.txt',", "'public' }, 'url' : 'extensions/EXT/EXT_buffer_storage.txt', }, 'GL_EXT_clear_texture' : { 'esnumber' : 269, 'flags'", "}, 'GL_NV_pack_subimage' : { 'esnumber' : 132, 'flags' : { 'public' }, 'url'", "'number' : 285, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_fixed_point.txt', }, 'GL_OES_fragment_precision_high'", ": { 'public' }, 'url' : 'extensions/OES/OES_get_program_binary.txt', }, 'GL_OES_gpu_shader5' : { 'esnumber' :", "44, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_packed_depth_stencil.txt', }, 'GL_OES_paletted_texture' : {", "'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_swap_method.txt', }, 'GLX_OML_sync_control' :", "}, 'url' : 'extensions/EXT/EXT_pixel_buffer_object.txt', }, 'GL_EXT_pixel_transform' : { 'number' : 138, 'flags' :", "'ES', 'HP', 'IBM', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_texture3D.txt', }, 'GL_EXT_texture_array' : {", "118, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_debug.txt', }, 'GL_KHR_no_error' : {", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_query_buffer_object.txt', }, 'GL_ARB_robust_buffer_access_behavior' : { 'arbnumber'", "'number' : 77, 'flags' : { 'public' }, 'supporters' : { 'TGS' },", ": 238, 'flags' : { 'public' }, 'url' 
: 'extensions/NV/NV_polygon_mode.txt', }, 'GL_NV_present_video' :", "}, 'url' : 'extensions/ARB/ARB_shader_group_vote.txt', }, 'GL_ARB_shader_image_load_store' : { 'arbnumber' : 115, 'flags' :", ": 175, 'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url'", "{ 'arbnumber' : 170, 'esnumber' : 190, 'flags' : { 'public' }, 'url'", "383, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "223, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_RG8.txt', }, 'GL_EXT_texture_sRGB_decode' : {", "'GL_ARB_ES3_2_compatibility' : { 'arbnumber' : 176, 'flags' : { 'public' }, 'url' :", ": 163, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'url' : 'extensions/ARB/ARB_conditional_render_inverted.txt', }, 'GL_ARB_conservative_depth' : { 'arbnumber' : 111, 'flags' : {", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_fragment_shader.txt', }, 'GL_ATI_map_object_buffer' : { 'number' :", "'extensions/QCOM/QCOM_texture_foveated.txt', }, 'GL_QCOM_shader_framebuffer_fetch_noncoherent' : { 'esnumber' : 277, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/ARB/ARB_draw_buffers_blend.txt', }, 'GL_ARB_draw_elements_base_vertex' : { 'arbnumber' : 62, 'flags'", "137, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_float.txt', }, 'GL_EXT_color_buffer_half_float' : {", "'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/ATI/ATI_text_fragment_shader.txt', }, 'GL_ATI_texture_env_combine3'", ": { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture4D.txt', }, 'GL_SGIS_texture_border_clamp' : { 'number' :", ": { 'number' : 191, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'url' : 'extensions/NV/NV_sample_locations.txt', }, 'GL_NV_sample_mask_override_coverage' : { 'number' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_storage_buffer_object.txt', },", ": 123, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_copy_texture_levels.txt', }, 'GL_APPLE_element_array' :", 
"'number' : 305, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'url' : 'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias' : { 'GL_KHR_texture_compression_astc_ldr' }, }, 'GL_KHR_texture_compression_astc_sliced_3d' : {", "}, 'url' : 'extensions/SGIX/SGIX_depth_texture.txt', }, 'GLX_SGIX_dm_buffer' : { 'number' : 86, 'flags' :", ": 'extensions/SGIX/GLX_SGIX_swap_group.txt', }, 'GL_SGIX_tag_sample_buffer' : { 'number' : 58, 'flags' : { 'public'", "}, 'GL_EXT_framebuffer_multisample' : { 'number' : 317, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/ARB/ARB_transform_feedback_overflow_query.txt', }, 'GL_ARB_transpose_matrix' : { 'arbnumber' : 3, 'flags' :", ": { 'arbnumber' : 62, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_elements_base_vertex.txt',", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_attrib_array_object.txt', },", ": 68, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_shader_binary.txt', }, 'GL_IMG_texture_compression_pvrtc' :", "'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_shader.txt', }, 'GL_ARB_vertex_type_10f_11f_11f_rev' : { 'arbnumber' : 151, 'flags'", "'number' : 357, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_buffer.txt', }, 'GL_ARB_sparse_texture' : { 'arbnumber' :", "{ 'number' : 491, 'esnumber' : 265, 'flags' : { 'public' }, 'supporters'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_convolution_accuracy.txt', }, 'GL_SGIX_cube_map' : { 'number' :", ": { 'number' : 449, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_image_load_formatted.txt',", "}, 'url' : 'extensions/EXT/EXT_conservative_depth.txt', }, 'GL_EXT_convolution' : { 'number' : 12, 'flags' :", "}, 'url' : 'extensions/ARB/ARB_sparse_texture_clamp.txt', }, 'GL_ARB_spirv_extensions' : { 'arbnumber' : 194, 'flags' :", "223, 'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_static_data.txt', }, 'GL_IBM_texture_mirrored_repeat' : {", 
"'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_fragment_shader_ordering.txt', }, 'GL_INTEL_framebuffer_CMAA' : { 'number'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_conservative_depth.txt', }, 'GL_AMD_debug_output' : { 'number' :", "'number' : 518, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_underestimation.txt', }, 'GLX_NV_copy_buffer'", "}, 'GL_EXT_shader_pixel_local_storage' : { 'esnumber' : 167, 'flags' : { 'public' }, 'url'", "}, 'GL_EXT_texture_compression_bptc' : { 'esnumber' : 287, 'flags' : { 'public' }, 'supporters'", "'WGL_NV_render_texture_rectangle' : { 'number' : 264, 'flags' : { 'public' }, 'supporters' :", "'GL_NVX_gpu_memory_info' : { 'number' : 438, 'flags' : { 'public' }, 'supporters' :", ": { 'arbnumber' : 185, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_viewport_layer_array.txt',", "}, 'GL_ARB_vertex_array_object' : { 'arbnumber' : 54, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_fragment_shader.txt', }, 'GL_ATI_map_object_buffer'", "'extensions/NV/NV_vertex_program.txt', }, 'GL_NV_vertex_program1_1' : { 'number' : 266, 'flags' : { 'public' },", ": { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_hyperpipe.txt', }, 'GLU_SGIX_icc_compress' : { 'flags' :", "214, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_tessellation_shader.txt', }, 'GL_OES_texture_3D' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_counters.txt', },", "'url' : 'extensions/ARB/ARB_framebuffer_object.txt', }, 'GL_ARB_framebuffer_sRGB' : { 'arbnumber' : 46, 'flags' : {", ": 247, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_blend_func_extended.txt', }, 'GL_EXT_blend_func_separate' :", "'number' : 324, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_IBM_vertex_array_lists' : { 'number' : 201, 'flags' : { 'public' }, 'supporters' :", ": { 'esnumber' : 59, 'flags' : { 'public' }, 'url' : 
'extensions/APPLE/APPLE_texture_2D_limited_npot.txt',", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_array_bgra.txt', }, 'GL_ARB_vertex_array_object' : { 'arbnumber'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_compression_vtc.txt', }, 'GL_NV_texture_env_combine4' : { 'number' : 195,", ": { 'number' : 374, 'flags' : { 'public' }, 'supporters' : {", ": 70, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_shading.txt', }, 'GL_ARB_sampler_objects' :", "'number' : 293, 'esnumber' : 18, 'flags' : { 'public' }, 'supporters' :", "{ 'esnumber' : 162, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_compression_astc.txt', },", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rectangle.txt', }, 'GL_ARB_texture_rg'", "'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_source.txt', }, 'GLX_SGIX_visual_select_group' : { 'number' : 234, 'flags'", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias' : { 'WGL_EXT_create_context_es_profile' }, }, 'WGL_EXT_depth_float'", "{ 'number' : 506, 'esnumber' : 283, 'flags' : { 'public' }, 'url'", "'esnumber' : 241, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview.txt', }, 'GL_OVR_multiview2'", ": { 'arbnumber' : 151, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt',", "{ 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/GLX_SUN_get_transparent_index.txt', }, 'GL_SUN_global_alpha'", "'public' }, 'url' : 'extensions/ARB/ARB_depth_clamp.txt', }, 'GL_ARB_depth_texture' : { 'arbnumber' : 22, 'flags'", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex_type.txt', }, 'GLX_SGI_cushion' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_buffers2.txt', }, 'GL_EXT_draw_buffers_indexed' : {", "}, 'url' : 'extensions/EXT/EXT_texture_integer.txt', }, 'GL_EXT_texture_lod_bias' : { 'number' : 186, 'esnumber' :", "'public' }, 'supporters' : { 'ARB' 
}, 'url' : 'extensions/ARB/ARB_vertex_blend.txt', }, 'GL_ARB_vertex_buffer_object' :", ": { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/AMD/AMD_draw_buffers_blend.txt',", "{ 'public' }, 'url' : 'extensions/EXT/EXT_primitive_bounding_box.txt', }, 'GL_EXT_protected_textures' : { 'esnumber' : 256,", "{ 'arbnumber' : 69, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_buffers_blend.txt', },", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_attrib_array_object.txt', }, 'GL_ATI_vertex_streams' :", "{ 'public' }, 'url' : 'extensions/KHR/KHR_robustness.txt', }, 'GL_KHR_texture_compression_astc_hdr' : { 'arbnumber' : 118,", "'extensions/EXT/EXT_transform_feedback.txt', }, 'GL_EXT_transform_feedback2' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' :", "'extensions/OES/OES_vertex_half_float.txt', }, 'GL_OES_vertex_type_10_10_10_2' : { 'esnumber' : 46, 'flags' : { 'public' },", "433, 'esnumber' : 163, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_row_bytes.txt', },", "'url' : 'extensions/EXT/EXT_EGL_image_array.txt', }, 'GL_EXT_EGL_image_external_wrap_modes' : { 'esnumber' : 298, 'flags' : {", ": 'extensions/OES/OES_vertex_half_float.txt', }, 'GL_OES_vertex_type_10_10_10_2' : { 'esnumber' : 46, 'flags' : { 'public'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_map_buffer_range.txt', }, 'GL_ARB_matrix_palette' : { 'arbnumber' :", ": { 'number' : 148, 'esnumber' : 69, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_shadow.txt', }, 'GL_ARB_shadow_ambient' : { 'arbnumber' : 24, 'flags' :", "}, 'GL_EXT_multisample_compatibility' : { 'esnumber' : 248, 'flags' : { 'public' }, 'url'", "'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pixel_format.txt', }, 'WGL_EXT_swap_control' : { 'number' : 172,", "{ 'HP', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_color.txt', }, 'GL_EXT_blend_equation_separate' :", "{ 
'number' : 200, 'flags' : { 'public' }, 'supporters' : { 'IBM'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow.txt', },", ": 101, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context_robustness.txt', }, 'GL_ARB_cull_distance' :", ": 'extensions/OES/OES_mapbuffer.txt', }, 'GL_OES_matrix_get' : { 'esnumber' : 11, 'flags' : { 'public'", "'url' : 'extensions/OES/OES_texture_float.txt', 'alias' : { 'GL_OES_texture_half_float' }, }, 'GL_OES_texture_float_linear' : { 'esnumber'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_layer.txt', }, 'GL_AMD_vertex_shader_tessellator' : { 'number' :", ": { 'arbnumber' : 50, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/EXT/EXT_color_buffer_half_float.txt', }, 'GL_EXT_color_subtable' : { 'number' : 74, 'flags' :", "{ 'number' : 155, 'flags' : { 'public' }, 'supporters' : { 'REND'", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_timer_instruments.txt', }, 'GL_SGIX_blend_alpha_minmax' : { 'number' : 119,", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex_type.txt', },", "'number' : 88, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "}, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_triangle_list.txt', }, 'GL_SUN_vertex' : {", "}, 'GL_SGIS_pixel_texture' : { 'number' : 15, 'flags' : { 'public' }, 'supporters'", ": 220, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", ": { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_parallel_arrays.txt', },", ": 'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', }, 'GL_IMG_texture_filter_cubic' : { 'esnumber' : 251, 'flags' : { 'public'", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_specular_vector.txt', }, 'GL_APPLE_sync' :", "'extensions/NV/NV_float_buffer.txt', 'alias' : { 'WGL_NV_float_buffer' }, }, 'GL_NV_fog_distance' : { 'number' : 192,", ": 118, 'esnumber' : 117, 'flags' : { 'public' }, 
'url' : 'extensions/KHR/KHR_texture_compression_astc_hdr.txt',", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_color_matrix.txt', }, 'GL_SGI_color_table'", "'esnumber' : 165, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch.txt', }, 'GL_ARM_shader_framebuffer_fetch_depth_stencil'", "'GL_ARB_draw_indirect' : { 'arbnumber' : 87, 'flags' : { 'public' }, 'url' :", "'GL_EXT_read_format_bgra' : { 'esnumber' : 66, 'flags' : { 'public' }, 'url' :", ": { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', }, 'GL_IGLOO_viewport_offsetXXX'", "}, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_texture_scissor.txt', }, 'GL_KHR_blend_equation_advanced' : {", "{ 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_program_binary.txt', }, 'GL_MTK_shader_binary' : { 'esnumber' :", ": 339, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_extensions_string.txt', }, 'WGL_EXT_make_current_read' : { 'number'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program1_1.txt',", ": { 'number' : 12, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/NV/NV_transform_feedback.txt', }, 'GL_NV_transform_feedback2' : { 'number' : 358, 'flags' :", "}, 'GL_IBM_multimode_draw_arrays' : { 'number' : 200, 'flags' : { 'public' }, 'supporters'", ": 108, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_420pack.txt', }, 'GL_ARB_shading_language_include' :", "}, 'GL_SGIS_point_line_texgen' : { 'number' : 213, 'flags' : { 'public' }, 'supporters'", "}, 'GL_ARB_sampler_objects' : { 'arbnumber' : 81, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_ycbcr_422.txt', },", ": { 'esnumber' : 24, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth24.txt',", "}, 'supporters' : { 'ES', 'SGI' }, 'url' : 
'extensions/EXT/EXT_cmyka.txt', }, 'GL_EXT_color_buffer_float' :", "181, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_locations.txt', }, 'GL_ARB_sample_shading' : {", "'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_two_side.txt', }, 'GL_EXT_stencil_wrap' : {", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_bits.txt', },", ": { 'arbnumber' : 76, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_include.txt',", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_buffer_object.txt', 'alias' : { 'GLX_ARB_vertex_buffer_object' },", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_gather_bias_lod.txt', }, 'GL_AMD_texture_texture4'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object.txt', }, 'GL_ARB_texture_buffer_object_rgb32' :", "{ 'number' : 349, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'GL_ARB_gpu_shader5' : { 'arbnumber' : 88, 'flags' : { 'public' }, 'url' :", "'comments' : 'Draft spec location unknown.', }, 'GL_OES_point_size_array' : { 'esnumber' : 14,", "'INGR' }, 'url' : 'extensions/EXT/EXT_blend_func_separate.txt', }, 'GL_EXT_blend_logic_op' : { 'number' : 39, 'flags'", "123, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_image.txt', }, 'GLX_ARB_create_context' : {", ": { 'number' : 151, 'flags' : { 'incomplete' }, 'supporters' : {", ": { 'flags' : { 'obsolete' }, 'url' : 'extensions/WIN/WIN_scene_markerXXX.txt', }, 'GL_WIN_specular_fog' :", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_polynomial_ffd.txt', },", "'GL_ARB_texture_rectangle' : { 'arbnumber' : 38, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', }, 'GL_EXT_texture_view' : { 'esnumber' : 185, 'flags' : {", "'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_storage.txt', }, 'GL_EXT_memory_object' : { 'number' : 503, 
'esnumber'", ": { 'GLX_NV_present_video', 'WGL_NV_present_video' }, }, 'GL_NV_primitive_restart' : { 'number' : 285, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle_compressed.txt',", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_pixel_buffer_object.txt', }, 'GL_EXT_pixel_transform'", "'alias' : { 'GLX_SGIS_multisample' }, }, 'GL_SGIS_multitexture' : { 'number' : 116, 'flags'", "}, 'GL_NV_query_resource' : { 'number' : 511, 'flags' : { 'public' }, 'supporters'", "'extensions/SGIX/SGIX_polynomial_ffd.txt', }, 'GL_SGIX_quad_mesh' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_quad_mesh.txt',", ": { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_static_vertex_array.txt', },", ": 'extensions/NV/NV_vertex_program.txt', }, 'GL_NV_vertex_program1_1' : { 'number' : 266, 'flags' : { 'public'", "'GL_IMG_texture_compression_pvrtc' : { 'esnumber' : 54, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_video_out.txt',", ": 'extensions/SGIS/SGIS_texture_filter4.txt', }, 'GL_SGIS_texture_lod' : { 'number' : 24, 'flags' : { 'public'", ": { 'esnumber' : 8, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_extended_matrix_palette.txt',", "205, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multi_draw_indirect.txt', }, 'GL_EXT_multiple_textures' : {", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_texture_range.txt', }, 'GL_APPLE_transform_hint' : { 'number' : 160, 'flags'", "'number' : 369, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "'number' : 331, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_setXXX.txt', }, 'GL_EXT_vertex_attrib_64bit' : { 'number' : 387,", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback.txt', }, 'GL_NV_transform_feedback2' : { 'number' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 
'url' : 'extensions/EXT/EXT_texture_mirror_clamp.txt', },", "98, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shading_language_100.txt',", "'GLX_SGI_video_sync' : { 'number' : 41, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_SGIX_bali_r_instruments' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_r_instruments.txt', },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_video_capture.txt', 'alias' : { 'GLX_NV_video_capture', 'WGL_NV_video_capture' },", ": 306, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_layer.txt', }, 'GL_AMD_vertex_shader_tessellator' : { 'number'", "'extensions/EXT/GLX_EXT_visual_rating.txt', }, 'GL_EXT_win32_keyed_mutex' : { 'number' : 506, 'esnumber' : 283, 'flags' :", ": 'extensions/ARB/ARB_internalformat_query2.txt', }, 'GL_ARB_invalidate_subdata' : { 'arbnumber' : 132, 'flags' : { 'public'", "'number' : 271, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "'extensions/EXT/EXT_read_format_bgra.txt', }, 'GL_EXT_render_snorm' : { 'esnumber' : 206, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 'alias' :", "'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize.txt', }, 'GLX_SGIX_video_resize_float' : { 'number' : 184, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_view.txt', }, 'GL_OES_vertex_array_object' : { 'esnumber'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_compression_vtc.txt', }, 'GL_NV_texture_env_combine4' : { 'number' :", "}, 'GL_APPLE_client_storage' : { 'number' : 270, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/ARB/ARB_gl_spirv.txt', }, 'GL_ARB_gpu_shader5' : { 'arbnumber' : 88, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/NV/NV_vertex_program4.txt',", "'extensions/ARB/ARB_seamless_cube_map.txt', }, 'GL_ARB_seamless_cubemap_per_texture' : { 'arbnumber' : 155, 'flags' : { 'public' },", ": { 'number' : 242, 'flags' : { 'public' }, 'supporters' : {", "'esnumber' : 66, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_read_format_bgra.txt', }, 'GL_EXT_render_snorm'", "'public' }, 'supporters' : { 'MS', 'SGI' }, 'url' : 'extensions/EXT/EXT_paletted_texture.txt', }, 'GL_EXT_pixel_buffer_object'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_object.txt', }, 'GL_ARB_framebuffer_sRGB'", "'GL_NV_pixel_data_range' : { 'number' : 284, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/EXT/EXT_shader_pixel_local_storage2.txt', }, 'GL_EXT_shader_texture_lod' : { 'esnumber' : 77, 'flags' : { 'public'", "'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_slice_accum.txt', }, 'GL_SUN_triangle_list' : { 'number'", "{ 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/AMD/AMD_draw_buffers_blend.txt', },", "'extensions/ARB/ARB_depth_buffer_float.txt', }, 'GL_ARB_depth_clamp' : { 'arbnumber' : 61, 'flags' : { 'public' },", "}, 'GL_OVR_multiview' : { 'number' : 478, 'esnumber' : 241, 'flags' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_gpu_memory_info.txt', },", ": 254, 'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url'", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg1.txt', }, 'GL_SGIX_mpeg2' : { 'flags' : {", "{ 'number' : 12, 'flags' : { 'public' }, 'supporters' : { 'HP',", "'esnumber' : 130, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_latc.txt', }, 'GL_NV_texture_compression_s3tc'", ": { 'arbnumber' : 92, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt',", "'url' : 'extensions/AMD/AMD_gpu_shader_int16.txt', }, 'GL_AMD_gpu_shader_int64' : { 'number' : 451, 'flags' : {", ": { 'number' : 88, 'flags' : { 'incomplete' 
}, 'supporters' : {", "45, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_standard_derivatives.txt', }, 'GL_OES_stencil1' : {", "}, }, 'GL_EXT_secondary_color' : { 'number' : 145, 'flags' : { 'public' },", "'arbnumber' : 62, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_elements_base_vertex.txt', }, 'GL_ARB_draw_indirect'", "{ 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_subtexture.txt', }, 'GL_EXT_swap_control' :", "'KHR' }, 'url' : 'extensions/OES/OES_read_format.txt', }, 'GL_OES_required_internalformat' : { 'esnumber' : 115, 'flags'", "105, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness.txt', }, 'GLX_ARB_robustness_application_isolation' : {", ": 'extensions/EXT/EXT_sRGB_write_control.txt', }, 'GL_EXT_scene_marker' : { 'number' : 120, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig.txt',", "'number' : 347, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_ycbcr_422.txt', }, 'GL_ARB_ES2_compatibility' : {", ": { 'number' : 7, 'flags' : { 'public' }, 'supporters' : {", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_depth_bounds_test.txt', }, 'GL_EXT_direct_state_access' : { 'number' :", ": 137, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_float.txt', }, 'GL_EXT_color_buffer_half_float' :", "'public' }, 'url' : 'extensions/NV/NV_framebuffer_blit.txt', }, 'GL_NV_framebuffer_mixed_samples' : { 'number' : 469, 'esnumber'", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_byte_coordinates.txt', },", "'esnumber' : 119, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_binning_control.txt', }, 'GL_QCOM_driver_control'", "}, 'GLX_ARB_get_proc_address' : { 'arbnumber' : 2, 'flags' : { 'public' }, 'supporters'", "'GL_ARB_vertex_attrib_64bit' : { 'arbnumber' : 99, 'flags' : { 'public' }, 'url' :", ": { 'flags' : { 'incomplete' }, 'url' : 
'extensions/SGIX/SGIX_fog_layers.txt', }, 'GL_SGIX_fog_offset' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex_type.txt', }, 'GLX_SGI_cushion' : { 'number'", "'public' }, 'url' : 'extensions/AMD/AMD_framebuffer_sample_positions.txt', }, 'GL_AMD_gcn_shader' : { 'number' : 453, 'flags'", ": { 'SUN' }, 'url' : 'extensions/SUN/GLX_SUN_get_transparent_index.txt', }, 'GL_SUN_global_alpha' : { 'number' :", ": 'extensions/ARB/ARB_vertex_attrib_binding.txt', }, 'GL_ARB_vertex_blend' : { 'arbnumber' : 15, 'flags' : { 'public'", "'GL_NV_occlusion_query' : { 'number' : 261, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'url' : 'extensions/OES/OES_draw_texture.txt', }, 'GL_OES_element_index_uint' : { 'esnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_env_crossbar.txt', }, 'GL_OES_texture_float' : { 'esnumber'", "'extensions/EXT/EXT_window_rectangles.txt', }, 'GL_EXT_x11_sync_object' : { 'number' : 406, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_instanced.txt', }, 'GL_ARB_enhanced_layouts' :", "was not fully specified. 
Similar to ARB_texture_cube_map.', }, 'GL_EXT_texture_cube_map_array' : { 'esnumber' :", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_resize_buffers.txt', },", "'public' }, 'supporters' : { 'QCOM' }, 'url' : 'extensions/QCOM/QCOM_tiled_rendering.txt', }, 'GL_QCOM_writeonly_rendering' :", ": { 'number' : 456, 'flags' : { 'public' }, 'supporters' : {", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_shared_multisample.txt', 'alias' : { 'GLX_SGIS_shared_multisample' }, }, 'GL_SGIS_sharpen_texture' :", "'arbnumber' : 84, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_swizzle.txt', }, 'GL_ARB_texture_view'", "}, 'GL_APPLE_aux_depth_stencil' : { 'number' : 370, 'flags' : { 'public' }, 'supporters'", "'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_gpu_affinity.txt', }, 'WGL_NV_render_depth_texture' : { 'number' : 263, 'flags'", "'extensions/ARB/ARB_separate_shader_objects.txt', }, 'GL_ARB_shader_atomic_counter_ops' : { 'arbnumber' : 182, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_viewport_array.txt', }, 'GL_ARB_window_pos' : { 'arbnumber' : 25, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_packed_depth_stencil.txt', }, 'GL_OES_paletted_texture' : { 'esnumber'", "}, 'url' : 'extensions/NV/NV_vdpau_interop.txt', }, 'GL_NV_vertex_array_range' : { 'number' : 190, 'flags' :", "442, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_geometry_shader4.txt', }, 'GLX_EXT_stereo_tree' :", "}, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', }, 'GL_NV_conservative_raster_underestimation' : { 'number' : 518, 'flags' :", "'number' : 46, 'flags' : { 'obsolete' }, 'supporters' : { 'SGI' },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program2.txt', }, 'GL_NV_fragment_program4' : { 'number' : 335, 'flags'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_attrib_64bit.txt', }, 'GL_EXT_vertex_shader' : { 'number' :", ": 
27, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fbo_render_mipmap.txt', }, 'GL_OES_fixed_point' :", "'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias' : { 'GL_EXT_shader_framebuffer_fetch_non_coherent' }, }, 'GL_EXT_shader_group_vote' : { 'esnumber' : 254,", "'GL_EXT_blend_logic_op' : { 'number' : 39, 'flags' : { 'public' }, 'supporters' :", "'GL_EXT_window_rectangles' : { 'number' : 490, 'esnumber' : 263, 'flags' : { 'public'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_sharpen_texture.txt', }, 'GL_SGIS_texture4D' : {", ": 'extensions/NV/NV_shader_atomic_fp16_vector.txt', }, 'GL_NV_shader_atomic_int64' : { 'number' : 455, 'flags' : { 'public'", ": 'extensions/SGIX/GLX_SGIX_wait_group.txt', }, 'GL_SGIX_ycrcb' : { 'number' : 101, 'flags' : { 'public'", "'GL_SGIS_texture_select' : { 'number' : 51, 'flags' : { 'public' }, 'supporters' :", "436, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'extensions/SGI/SGI_color_matrix.txt', }, 'GL_SGI_color_table' : { 'number' : 14, 'flags' : { 'public' },", ": 'extensions/NV/NV_read_depth_stencil.txt', }, 'GL_NV_register_combiners' : { 'number' : 191, 'flags' : { 'public'", "'arbnumber' : 114, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counters.txt', }, 'GL_ARB_shader_ballot'", "}, 'url' : 'extensions/IMG/IMG_shader_binary.txt', }, 'GL_IMG_texture_compression_pvrtc' : { 'esnumber' : 54, 'flags' :", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_row_bytes.txt', }, 'GL_APPLE_specular_vector' : { 'number' : 159, 'flags'", ": 'extensions/ARB/ARB_separate_shader_objects.txt', }, 'GL_ARB_shader_atomic_counter_ops' : { 'arbnumber' : 182, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/AMD/AMD_transform_feedback4.txt', }, 'GL_AMD_vertex_shader_layer' : { 'number' : 417,", "'GL_OES_sample_variables' : { 'esnumber' : 170, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/NV/NV_alpha_to_coverage_dither_control.txt', }, 'GL_NV_bgr' : { 'esnumber' : 135, 
'flags' : {", "{ 'esnumber' : 174, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_storage_multisample_2d_array.txt', },", "'number' : 180, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg2.txt', }, 'GL_SGIX_nonlinear_lighting_pervertex' : {", "'arbnumber' : 121, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_buffer_object.txt', }, 'GL_ARB_clear_texture'", "'extensions/DMP/DMP_shader_binary.txt', }, 'GL_EXT_422_pixels' : { 'number' : 178, 'flags' : { 'public' },", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_atomic_counter_ops.txt', }, 'GL_AMD_shader_ballot' : { 'number' : 497,", "{ 'number' : 94, 'flags' : { 'public' }, 'supporters' : { 'INTEL',", "36, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5_mem_extended.txt', }, 'GL_NV_gpu_shader5'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_texture_sub_image.txt', }, 'GL_ARB_gl_spirv' : { 'arbnumber'", "'public' }, 'url' : 'extensions/ARM/ARM_mali_program_binary.txt', }, 'GL_ARM_mali_shader_binary' : { 'esnumber' : 81, 'flags'", "66, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_read_format_bgra.txt', }, 'GL_EXT_render_snorm' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle.txt', }, 'GL_NV_texture_rectangle_compressed' : {", ": 104, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_debug_output.txt', }, 'GL_ARB_depth_buffer_float' :", "with SGIS_pixel_texture.', }, 'GL_SGIX_pixel_texture_bits' : { 'number' : 127, 'flags' : { 'public'", ": 'extensions/IMG/IMG_bindless_texture.txt', }, 'GL_IMG_framebuffer_downsample' : { 'esnumber' : 255, 'flags' : { 'public'", "'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_extensions_string.txt', }, 'WGL_ARB_make_current_read' : { 'arbnumber' : 10, 'flags'", ": 103, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_rg.txt', }, 
'GL_EXT_texture_sRGB' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_make_current_read.txt', }, 'WGL_ARB_pbuffer' :", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_layers.txt', }, 'GL_SGIX_fog_offset' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_explicit_multisample.txt', }, 'GL_NV_fbo_color_attachments' : { 'esnumber' : 92, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_texture_filter_cubic.txt', }, 'GL_IMG_user_clip_plane' : { 'esnumber'", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_tessellator.txt', }, 'GL_AMD_vertex_shader_viewport_index' : { 'number' : 416,", "'number' : 22, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'GL_AMD_shader_ballot' : { 'number' : 497, 'flags' : { 'public' }, 'supporters' :", ": 57, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_uniform_buffer_object.txt', }, 'GL_ARB_vertex_array_bgra' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_color_buffer_float.txt', 'alias' :", "'NVIDIA' }, 'url' : 'extensions/NVX/NVX_gpu_memory_info.txt', }, 'GL_NVX_linked_gpu_multicast' : { 'number' : 493, 'flags'", "'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', }, 'GL_REND_screen_coordinates' : { 'number' : 155, 'flags' : { 'public' },", ": 90, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI' },", "'GL_ARB_texture_stencil8' : { 'arbnumber' : 150, 'flags' : { 'public' }, 'url' :", "'SUN' }, 'url' : 'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias' : { 'GL_SUN_multi_draw_arrays' }, }, 'GL_EXT_multi_draw_indirect' :", ": { '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_secondary_color.txt', }, 'GL_EXT_separate_shader_objects' : {", "'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_polygon_offset.txt', }, 'GL_EXT_polygon_offset_clamp' : { 'number' :", "{ 'number' : 196, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'GL_EXT_compressed_ETC1_RGB8_sub_texture' : { 'esnumber' : 188, 'flags' : { 'public' }, 'url' :", 
"}, 'GL_ARB_point_sprite' : { 'arbnumber' : 35, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5.txt', },", "}, 'GL_OES_sample_shading' : { 'esnumber' : 169, 'flags' : { 'public' }, 'url'", ": 104, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_EGL_stream_consumer_external.txt', }, 'GL_NV_alpha_to_coverage_dither_control' :", ": 'extensions/ARB/ARB_shader_draw_parameters.txt', }, 'GL_ARB_shader_group_vote' : { 'arbnumber' : 157, 'flags' : { 'public'", ": 'extensions/REND/REND_screen_coordinates.txt', }, 'GL_S3_s3tc' : { 'number' : 276, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_add.txt', },", "'INTEL' }, 'url' : 'extensions/INTEL/INTEL_blackhole_render.txt', }, 'GL_INTEL_parallel_arrays' : { 'number' : 136, 'flags'", ": { 'KHR' }, 'url' : 'extensions/OML/OML_resample.txt', }, 'GL_OML_subsample' : { 'number' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object2.txt', }, 'GL_NV_path_rendering' : { 'number' :", "'extensions/OES/OES_texture_compression_astc.txt', }, 'GL_OES_texture_cube_map' : { 'esnumber' : 20, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_extensions_string.txt',", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_convolution_accuracy.txt', }, 'GL_SGIX_cube_map' :", "'url' : 'extensions/SGIX/GLX_SGIX_dm_buffer.txt', }, 'GL_SGIX_dvc' : { 'flags' : { 'incomplete' }, 'url'", "'extensions/MESA/GLX_MESA_agp_offset.txt', }, 'GLX_MESA_copy_sub_buffer' : { 'number' : 215, 'flags' : { 'public' },", "{ 'esnumber' : 290, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'GL_ATI_vertex_array_object' : { 'number' : 247, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader2.txt', }, 'GL_NV_texture_shader3' : { 'number'", 
"'extensions/NV/NV_fragment_program4.txt', }, 'GL_NV_fragment_program_option' : { 'number' : 303, 'flags' : { 'public' },", "{ 'esnumber' : 13, 'flags' : { 'incomplete', 'private' }, 'comments' : 'Draft", "'extensions/ARB/ARB_texture_filter_anisotropic.txt', }, 'GL_ARB_texture_filter_minmax' : { 'arbnumber' : 188, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/EXT/EXT_shadow_samplers.txt', }, 'GL_EXT_shared_texture_palette' : { 'number' : 141, 'flags'", "'GL_AMD_debug_output' : { 'number' : 395, 'flags' : { 'public' }, 'supporters' :", "'extensions/NV/NV_EGL_stream_consumer_external.txt', }, 'GL_NV_alpha_to_coverage_dither_control' : { 'number' : 500, 'flags' : { 'public' },", "}, 'GL_NVX_linked_gpu_multicast' : { 'number' : 493, 'flags' : { 'public' }, 'supporters'", ": 201, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_noperspective_interpolation.txt', }, 'GL_NV_shader_storage_buffer_object' :", "'esnumber' : 101, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming'", ": 127, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_compatibility.txt', }, 'GL_ARB_arrays_of_arrays' :", "'extensions/NV/NV_conditional_render.txt', }, 'GL_NV_conservative_raster' : { 'number' : 465, 'esnumber' : 228, 'flags' :", "{ 'arbnumber' : 130, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_framebuffer_no_attachments.txt', },", "{ 'esnumber' : 71, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_vertex_array_object.txt', },", "'number' : 246, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", "the same name string.', }, 'GL_EXT_separate_specular_color' : { 'number' : 144, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', }, 'GL_EXT_conservative_depth' : { 'esnumber'", ": 193, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_clip_distance.txt', }, 'GL_APPLE_color_buffer_packed_float' :", "}, 'url' : 'extensions/ANGLE/ANGLE_texture_usage.txt', }, 
'GL_ANGLE_translated_shader_source' : { 'esnumber' : 113, 'flags' :", "}, 'GL_MESA_program_binary_formats' : { 'number' : 516, 'esnumber' : 294, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_blended_overlay.txt',", "'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_barrier.txt', }, 'GLX_SGIX_swap_group' : { 'number' : 91, 'flags'", "'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', }, 'GL_IGLOO_toggle_color_and_lightXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' :", "}, 'url' : 'extensions/WIN/WIN_specular_fog.txt', }, 'WGL_NV_DX_interop' : { 'number' : 407, 'flags' :", "172, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_multisample_interpolation.txt', }, 'GL_OES_single_precision' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_video_sync.txt', }, 'GL_SUNX_constant_data' : { 'number'", "{ 'arbnumber' : 145, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_texture.txt', },", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_convolution_accuracy.txt', },", "'url' : 'extensions/ATI/ATI_separate_stencil.txt', }, 'GL_ATI_text_fragment_shader' : { 'number' : 269, 'flags' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_attrib_location.txt', }, 'GL_ARB_explicit_uniform_location' : { 'arbnumber' :", ": 49, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fence.txt', }, 'GL_NV_fill_rectangle' : { 'number' :", ": 'extensions/SGIX/SGIX_texture_multi_buffer.txt', }, 'GL_SGIX_texture_phase' : { 'flags' : { 'incomplete' }, 'url' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_framezoom.txt', },", "}, 'GL_SGIX_nurbs_eval' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nurbs_eval.txt', },", "'extensions/EXT/EXT_texture_border_clamp.txt', }, 'GL_EXT_texture_buffer' : { 'esnumber' : 183, 'flags' : { 'public' },", "{ 'NVIDIA' }, 'url' : 
'extensions/EXT/EXT_texture_shared_exponent.txt', }, 'GL_EXT_texture_snorm' : { 'number' : 365,", "'esnumber' : 78, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_framebuffer_multisample.txt', }, 'GL_APPLE_object_purgeable'", "}, 'url' : 'extensions/EXT/EXT_texture_filter_minmax.txt', }, 'GL_EXT_texture_format_BGRA8888' : { 'esnumber' : 51, 'flags' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_cl_event.txt', }, 'GL_ARB_clear_buffer_object' : { 'arbnumber' :", ": { 'arbnumber' : 116, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_packing.txt',", "'public' }, 'url' : 'extensions/KHR/KHR_robust_buffer_access_behavior.txt', }, 'GL_KHR_robustness' : { 'arbnumber' : 170, 'esnumber'", "'GL_SGIS_texture_filter4' : { 'number' : 7, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/EXT/EXT_EGL_image_storage.txt', }, 'GL_EXT_memory_object' : { 'number' : 503, 'esnumber' : 280, 'flags'", "125, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_binding.txt', }, 'GL_ARB_vertex_blend' : {", "'arbnumber' : 112, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query.txt', }, 'GL_ARB_internalformat_query2'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_integer.txt', }, 'GL_EXT_texture_lod_bias' : { 'number' : 186, 'esnumber'", "'url' : 'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments' : 'Supported on Visual Workstation 320 / 540 only.',", "362, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_gpu_affinity.txt', }, 'WGL_NV_render_depth_texture' : {", "80, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_occlusion_query2.txt', }, 'GL_ARB_parallel_shader_compile' : {", ": { 'public' }, 'url' : 'extensions/OES/OES_matrix_get.txt', }, 'GL_OES_matrix_palette' : { 'esnumber' :", "}, 'GL_ARB_texture_multisample' : { 'arbnumber' : 67, 'flags' : { 'public' }, 'url'", "'NVIDIA', 'TransGaming' }, 'url' : 
'extensions/EXT/EXT_texture_snorm.txt', }, 'GL_EXT_texture_storage' : { 'esnumber' : 108,", "'public' }, 'url' : 'extensions/OES/OES_draw_elements_base_vertex.txt', }, 'GL_OES_draw_texture' : { 'esnumber' : 7, 'flags'", "'url' : 'extensions/ARB/ARB_internalformat_query2.txt', }, 'GL_ARB_invalidate_subdata' : { 'arbnumber' : 132, 'flags' : {", "'GL_APPLE_framebuffer_multisample' : { 'esnumber' : 78, 'flags' : { 'public' }, 'url' :", "{ 'number' : 469, 'esnumber' : 231, 'flags' : { 'public' }, 'url'", ": { 'number' : 401, 'flags' : { 'public' }, 'supporters' : {", "169, 'esnumber' : 189, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robust_buffer_access_behavior.txt', },", "{ 'public' }, 'supporters' : { 'INGR', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_wrap.txt', },", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_mirrored_repeat.txt', }, 'GL_ARB_texture_multisample' : { 'arbnumber'", "}, 'GL_HP_image_transform' : { 'number' : 66, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 275, 'flags' : { 'public' }, 'supporters' : {", "{ 'esnumber' : 75, 'flags' : { 'public' }, 'url' : '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', },", ": { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_translated_shader_source.txt', }, 'GL_APPLE_aux_depth_stencil' : { 'number' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_float.txt',", "{ 'arbnumber' : 165, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_texture_sub_image.txt', },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program.txt',", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pbuffer.txt', }, 'WGL_ARB_pixel_format'", ": { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', }, 'GL_ANGLE_instanced_arrays' : { 'esnumber' :", "'GL_ARB_texture_cube_map' : { 'arbnumber' : 7, 'flags' : { 'public' }, 'supporters' :", "'NVIDIA' }, 'url' : 
'extensions/EXT/EXT_bindable_uniform.txt', }, 'GL_EXT_blend_color' : { 'number' : 2, 'flags'", "'Draft extension which is referred to by some other vendor extensions, but shipped", "'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_read_format.txt', }, 'GL_IMG_shader_binary' : { 'esnumber'", "}, 'GL_EXT_discard_framebuffer' : { 'esnumber' : 64, 'flags' : { 'public' }, 'url'", ": { 'number' : 101, 'flags' : { 'public' }, 'supporters' : {", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_calligraphic_fragment.txt', }, 'GL_SGIX_clipmap' : { 'number' : 33,", ": 'extensions/SGIX/GLX_SGIX_fbconfig.txt', }, 'GLX_SGIX_fbconfig_float' : { 'flags' : { 'incomplete' }, 'url' :", "}, 'url' : 'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', }, 'GL_APPLE_texture_format_BGRA8888' : { 'esnumber' : 79, 'flags' :", "'public' }, 'url' : 'extensions/ARB/ARB_shader_subroutine.txt', }, 'GL_ARB_shader_texture_image_samples' : { 'arbnumber' : 166, 'flags'", "'WGL_EXT_display_color_table' : { 'number' : 167, 'flags' : { 'public' }, 'url' :", ": 90, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_subroutine.txt', }, 'GL_ARB_shader_texture_image_samples' :", ": 100, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "{ 'public' }, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform.txt', },", "'arbnumber' : 118, 'esnumber' : 117, 'flags' : { 'public' }, 'url' :", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_mirrored_repeat.txt', }, 'GL_ARB_texture_multisample' : { 'arbnumber' : 67,", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect.txt', },", "}, 'url' : 'extensions/NV/NV_gpu_program5.txt', }, 'GL_NV_gpu_program5_mem_extended' : { 'number' : 434, 'flags' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig.txt', }, 'GLX_SGIX_fbconfig_float' : { 'flags'", "468, 'esnumber' : 230, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_shader_interlock.txt', },", ": { 'TGS' 
}, 'url' : 'extensions/PGI/PGI_misc_hints.txt', }, 'GL_PGI_vertex_hints' : { 'number' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_tessellation_program5.txt', }, 'GL_NV_texgen_emboss' : { 'number' : 193, 'flags'", ": { 'MS' }, 'url' : 'extensions/EXT/EXT_draw_range_elements.txt', }, 'GL_EXT_draw_transform_feedback' : { 'esnumber' :", ": 91, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_buffers.txt', }, 'GL_NV_draw_instanced' :", ": { 'number' : 239, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/ARB/ARB_texture_rg.txt', }, 'GL_ARB_texture_rgb10_a2ui' : { 'arbnumber' : 83, 'flags' : {", ": { 'number' : 98, 'flags' : { 'public' }, 'supporters' : {", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_transform_hint.txt', }, 'GL_APPLE_vertex_array_object' : { 'number' : 273,", ": { 'number' : 156, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_coordinate_frame.txt',", "}, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_read_format.txt', }, 'GL_OES_required_internalformat' : {", "{ 'public' }, 'url' : 'extensions/NV/NV_sample_mask_override_coverage.txt', }, 'GL_NV_shader_atomic_counters' : { 'number' : 423,", ": { 'public' }, 'url' : 'extensions/APPLE/APPLE_sync.txt', }, 'GL_APPLE_texture_2D_limited_npot' : { 'esnumber' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_draw_elements_base_vertex.txt', }, 'GL_EXT_draw_instanced' : { 'number' : 327,", ": { 'obsolete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_tiles.txt', },", "'arbnumber' : 90, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_subroutine.txt', }, 'GL_ARB_shader_texture_image_samples'", "}, 'GL_ARB_framebuffer_sRGB' : { 'arbnumber' : 46, 'flags' : { 'public' }, 'supporters'", "'public' }, 'url' : 'extensions/ARB/ARB_texture_stencil8.txt', }, 'GL_ARB_texture_storage' : { 'arbnumber' : 117, 'flags'", ": 88, 'flags' : { 'public' }, 'url' : 'extensions/DMP/DMP_shader_binary.txt', }, 'GL_EXT_422_pixels' :", "}, 'GL_ANGLE_texture_compression_dxt3' : { 
'esnumber' : 111, 'flags' : { 'public' }, 'url'", ": 'extensions/IMG/IMG_program_binary.txt', }, 'GL_IMG_read_format' : { 'esnumber' : 53, 'flags' : { 'public'", ": 'extensions/NV/NV_vertex_array_range.txt', }, 'GL_NV_vertex_array_range2' : { 'number' : 232, 'flags' : { 'public'", ": 130, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_latc.txt', }, 'GL_NV_texture_compression_s3tc' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_swap_control_tear.txt',", "'esnumber' : 4, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", ": { 'GL_OES_texture_half_float' }, }, 'GL_OES_texture_float_linear' : { 'esnumber' : 35, 'flags' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_texture_rectangle.txt', }, 'WGL_NV_swap_group' :", ": { 'number' : 102, 'flags' : { 'public' }, 'supporters' : {", ": 82, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_bit_encoding.txt', }, 'GL_ARB_shader_clock' :", "}, 'GL_EXT_gpu_program_parameters' : { 'number' : 320, 'flags' : { 'public' }, 'supporters'", ": 'extensions/SGIS/SGIS_sharpen_texture.txt', }, 'GL_SGIS_texture4D' : { 'number' : 16, 'flags' : { 'public'", "'number' : 475, 'esnumber' : 196, 'flags' : { 'public' }, 'url' :", "{ 'number' : 408, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "'url' : 'extensions/AMD/AMD_shader_stencil_value_export.txt', }, 'GL_AMD_shader_trinary_minmax' : { 'number' : 428, 'flags' : {", "'url' : 'extensions/ARB/ARB_shading_language_100.txt', }, 'GL_ARB_shading_language_420pack' : { 'arbnumber' : 108, 'flags' : {", ": 'extensions/EXT/EXT_post_depth_coverage.txt', }, 'GL_EXT_primitive_bounding_box' : { 'esnumber' : 186, 'flags' : { 'public'", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_sharpen_texture.txt', }, 'GL_SGIS_texture4D' : { 'number' : 16, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow_ambient.txt',", "'public' }, 'url' : 
'extensions/EXT/EXT_multisampled_render_to_texture.txt', }, 'GL_EXT_multisampled_render_to_texture2' : { 'esnumber' : 275, 'flags'", "'public' }, 'url' : 'extensions/ARB/ARB_shader_viewport_layer_array.txt', }, 'GL_ARB_shading_language_100' : { 'arbnumber' : 33, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt',", ": { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_set.txt', },", "'extensions/OES/OES_texture_3D.txt', }, 'GL_OES_texture_border_clamp' : { 'esnumber' : 215, 'flags' : { 'public' },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_shader4.txt', }, 'GL_EXT_gpu_shader5' : {", "}, 'GL_OES_texture_float' : { 'esnumber' : 36, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_sample_shading.txt', }, 'GL_ARB_sampler_objects' : { 'arbnumber' :", "'extensions/ARB/ARB_fragment_shader_interlock.txt', }, 'GL_ARB_framebuffer_no_attachments' : { 'arbnumber' : 130, 'flags' : { 'public' },", "'GLX_SGIX_pbuffer' : { 'number' : 50, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_image_compression.txt', }, 'GL_SGIX_impact_pixel_texture' : { 'number'", "}, 'url' : 'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', }, 'GL_AMD_shader_atomic_counter_ops' : { 'number' : 435, 'flags' :", "{ 'arbnumber' : 48, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "{ 'esnumber' : 181, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_tessellation_shader.txt', 'alias'", "'number' : 343, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'MS' }, 'url' : 'extensions/WIN/WIN_specular_fog.txt', }, 'WGL_NV_DX_interop' : { 'number' : 407,", ": 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', }, 'GL_IGLOO_toggle_color_and_lightXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url'", "{ 'number' : 374, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'ARB' }, 'url' 
: 'extensions/ARB/ARB_shader_objects.txt', }, 'GL_ARB_shader_precision' : { 'arbnumber' :", ": 511, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture.txt',", "{ 'number' : 473, 'esnumber' : 236, 'flags' : { 'public' }, 'url'", ": { 'number' : 450, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_transform_feedback4.txt',", "'extensions/SGIX/SGIX_framezoom.txt', }, 'GLX_SGIX_hyperpipe' : { 'number' : 307, 'flags' : { 'public' },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_copy_image.txt', 'alias' : { 'GLX_NV_copy_image', 'WGL_NV_copy_image' },", "'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range.txt', }, 'GL_NV_vertex_array_range2' :", ": { 'number' : 118, 'flags' : { 'incomplete' }, 'supporters' : {", "'WGL_ARB_pixel_format' : { 'arbnumber' : 9, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_EXT_coordinate_frame' : { 'number' : 156, 'flags' : { 'public' }, 'url'", "'public' }, 'supporters' : { '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_fog_coord.txt', },", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_colorspace.txt', }, 'WGL_EXT_create_context_es2_profile' : { 'number' : 400,", "'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_lock.txt', }, 'WGL_I3D_swap_frame_usage' : { 'number' : 255, 'flags'", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_object_purgeable.txt', }, 'GL_APPLE_rgb_422' :", "}, 'url' : 'extensions/EXT/EXT_misc_attribute.txt', }, 'GL_EXT_multi_draw_arrays' : { 'number' : 148, 'esnumber' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_draw_indirect.txt', }, 'GL_ARB_draw_instanced' : { 'arbnumber' : 44,", "'extensions/ARB/ARB_occlusion_query.txt', }, 'GL_ARB_occlusion_query2' : { 'arbnumber' : 80, 'flags' : { 'public' },", ": { 'public' 
}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_instruments.txt', },", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_blended_overlay.txt', },", "'GL_ARB_vertex_shader' : { 'arbnumber' : 31, 'flags' : { 'public' }, 'supporters' :", "115, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", ": { 'public' }, 'url' : 'extensions/MESA/GLX_MESA_query_renderer.txt', }, 'GLX_MESA_release_buffers' : { 'number' :", "'public' }, 'url' : 'extensions/IMG/IMG_shader_binary.txt', }, 'GL_IMG_texture_compression_pvrtc' : { 'esnumber' : 54, 'flags'", "{ 'esnumber' : 79, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', },", "'GLU_EXT_nurbs_tessellator' : { 'number' : 100, 'flags' : { 'public' }, 'supporters' :", "424, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'arbnumber' : 164, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_direct_state_access.txt',", "'public' }, 'url' : 'extensions/EXT/EXT_float_blend.txt', }, 'GL_EXT_fog_coord' : { 'number' : 149, 'flags'", "'public' }, 'url' : 'extensions/ARB/ARB_seamless_cube_map.txt', }, 'GL_ARB_seamless_cubemap_per_texture' : { 'arbnumber' : 155, 'flags'", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_separate_stencil.txt', }, 'GL_ATI_text_fragment_shader'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_draw_elements_base_vertex.txt', }, 'GL_EXT_draw_instanced' : { 'number' :", "'GL_EXT_shader_pixel_local_storage' : { 'esnumber' : 167, 'flags' : { 'public' }, 'url' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_sprite.txt', }, 'GL_SGIX_subdiv_patch' : { 'flags'", "'url' : 'extensions/NV/NV_fill_rectangle.txt', }, 'GL_NV_float_buffer' : { 'number' : 281, 'flags' : {", ": 281, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/AMD/AMD_draw_buffers_blend.txt', }, 'GL_AMD_framebuffer_sample_positions' : { 'number'", 
"'GL_NV_texgen_reflection' : { 'number' : 179, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_combine.txt', },", "'GL_EXT_multisample_compatibility' : { 'esnumber' : 248, 'flags' : { 'public' }, 'url' :", "'GLX_EXT_stereo_tree' : { 'number' : 452, 'flags' : { 'public' }, 'url' :", "'extensions/IMG/IMG_program_binary.txt', }, 'GL_IMG_read_format' : { 'esnumber' : 53, 'flags' : { 'public' },", "}, 'url' : 'extensions/NV/NV_explicit_multisample.txt', }, 'GL_NV_fbo_color_attachments' : { 'esnumber' : 92, 'flags' :", "'GL_AMD_interleaved_elements' : { 'number' : 431, 'flags' : { 'public' }, 'supporters' :", "'esnumber' : 238, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_polygon_mode.txt', }, 'GL_NV_present_video'", ": 281, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_fd.txt', 'alias' : {", "'public' }, 'url' : 'extensions/ARB/ARB_ES3_1_compatibility.txt', }, 'GL_ARB_ES3_2_compatibility' : { 'arbnumber' : 176, 'flags'", "{ 'number' : 59, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get.txt', }, 'GL_QCOM_extended_get2' : { 'esnumber'", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int16.txt', }, 'GL_AMD_gpu_shader_int64'", "'public' }, 'url' : 'extensions/NV/NV_platform_binary.txt', }, 'GL_NV_point_sprite' : { 'number' : 262, 'flags'", "}, 'url' : 'extensions/ATI/ATI_meminfo.txt', }, 'GL_ATI_pn_triangles' : { 'number' : 246, 'flags' :", "'url' : 'extensions/ATI/ATI_map_object_buffer.txt', }, 'GL_ATI_meminfo' : { 'number' : 359, 'flags' : {", ": 'extensions/EXT/EXT_texture_format_sRGB_override.txt', }, 'GLX_EXT_texture_from_pixmap' : { 'number' : 344, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_pipeline_statistics_query.txt', }, 'GL_ARB_pixel_buffer_object' : { 'arbnumber' :", "{ 'esnumber' : 8, 'flags' : { 'public' }, 'url' : 
'extensions/OES/OES_extended_matrix_palette.txt', },", ": 'extensions/ARB/ARB_texture_storage_multisample.txt', }, 'GL_ARB_texture_swizzle' : { 'arbnumber' : 84, 'flags' : { 'public'", "'extensions/INTEL/INTEL_texture_scissor.txt', }, 'GL_KHR_blend_equation_advanced' : { 'arbnumber' : 174, 'esnumber' : 168, 'flags' :", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_stencil_operation_extended.txt', }, 'GL_AMD_texture_gather_bias_lod' :", "'GL_EXT_texture_swizzle' : { 'number' : 356, 'flags' : { 'public' }, 'supporters' :", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_wait_group.txt', }, 'GL_SGIX_ycrcb' : {", "}, 'GL_NV_instanced_arrays' : { 'esnumber' : 145, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_alpha_test.txt', }, 'GL_QCOM_binning_control' : { 'esnumber'", "'public' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int64.txt', }, 'GL_AMD_interleaved_elements' : { 'number' : 431, 'flags'", ": 'extensions/ARB/ARB_texture_compression_rgtc.txt', }, 'GL_ARB_texture_cube_map' : { 'arbnumber' : 7, 'flags' : { 'public'", ": { 'KHR' }, 'url' : 'extensions/OES/OES_compressed_paletted_texture.txt', }, 'GL_OES_copy_image' : { 'esnumber' :", "164, 'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_performance_query.txt', }, 'GLX_INTEL_swap_event' : {", "'number' : 6, 'flags' : { 'public' }, 'supporters' : { 'ES', 'HP',", "}, 'GL_EXT_clip_control' : { 'esnumber' : 290, 'flags' : { 'public' }, 'supporters'", ": 178, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_int64.txt', }, 'GL_ARB_half_float_pixel' :", ": 'extensions/EXT/EXT_texture_compression_latc.txt', }, 'GL_EXT_texture_compression_rgtc' : { 'number' : 332, 'esnumber' : 286, 'flags'", "}, 'GL_ARB_texture_view' : { 'arbnumber' : 124, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/NV/NV_copy_depth_to_color.txt', }, 'GL_NV_copy_image' : { 'number' : 376, 'flags' : {", "'GL_AMD_transform_feedback4' : { 'number' : 450, 
'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/NV/NV_vertex_program.txt', }, 'GL_NV_vertex_program1_1' : { 'number' : 266, 'flags' :", "254, 'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' :", ": { 'arbnumber' : 85, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_timer_query.txt',", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_depth_bounds_test.txt', }, 'GL_EXT_direct_state_access' :", "}, 'url' : 'extensions/IBM/IBM_cull_vertex.txt', }, 'GL_IBM_multimode_draw_arrays' : { 'number' : 200, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR', 'SGI' }, 'url'", "'number' : 119, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'GL_ARB_copy_buffer' : { 'arbnumber' : 59, 'flags' : { 'public' }, 'url' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_shader5.txt', }, 'GL_NV_half_float' : { 'number' : 283,", "'number' : 112, 'flags' : { 'public' }, 'supporters' : { 'MS' },", "'esnumber' : 129, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_3dvision_settings.txt', }, 'GL_NV_EGL_stream_consumer_external'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multisample.txt', 'alias' : { 'GLX_SGIS_multisample' },", "'url' : 'extensions/NV/WGL_NV_DX_interop.txt', }, 'WGL_NV_DX_interop2' : { 'number' : 412, 'flags' : {", "'GL_APPLE_vertex_array_range' : { 'number' : 274, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_NV_texgen_emboss' : { 'number' : 193, 'flags' : { 'public' }, 'supporters'", "'GL_EXT_texture_env_add' : { 'number' : 185, 'flags' : { 'public' }, 'supporters' :", "'esnumber' : 163, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'GL_AMD_gpu_shader_int64' : { 'number' : 451, 'flags' : { 'public' }, 'url'", "}, 'GL_ARB_shader_group_vote' : { 'arbnumber' : 157, 'flags' : { 'public' }, 'url'", ": { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_point_line_texgen.txt', }, 'GL_SGIS_shared_multisample' : { 'number' :", "'extensions/AMD/AMD_framebuffer_sample_positions.txt', }, 
'GL_AMD_gcn_shader' : { 'number' : 453, 'flags' : { 'public' },", "{ 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_hyperpipe.txt', }, 'GLU_SGIX_icc_compress' : { 'flags' : {", "'GL_ARB_texture_compression_bptc' : { 'arbnumber' : 77, 'flags' : { 'public' }, 'url' :", "{ 'number' : 23, 'flags' : { 'public' }, 'supporters' : { 'ES',", "{ 'number' : 194, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' : 'Alias to GLX_ARB_create_context_profile not needed", "12, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": { 'public' }, 'url' : 'extensions/OES/OES_stencil1.txt', }, 'GL_OES_stencil4' : { 'esnumber' :", "{ 'number' : 181, 'flags' : { 'public' }, 'supporters' : { 'SGI'", ": 'extensions/QCOM/QCOM_alpha_test.txt', }, 'GL_QCOM_binning_control' : { 'esnumber' : 119, 'flags' : { 'public'", "'GL_EXT_convolution' : { 'number' : 12, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_array_object.txt', }, 'GL_ATI_vertex_attrib_array_object'", "'GL_EXT_fragment_lighting' : { 'number' : 102, 'flags' : { 'public' }, 'supporters' :", "37, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_npot.txt', }, 'GL_OES_texture_stencil8' : {", "{ 'SUN' }, 'url' : 'extensions/SUN/SUN_mesh_array.txt', }, 'GL_SUN_slice_accum' : { 'number' : 258,", "}, 'url' : 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', }, 'GL_ARB_texture_buffer_range' : { 'arbnumber' : 139, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster.txt', }, 'GL_NV_conservative_raster_dilate' : { 'number'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_multi_buffer.txt', }, 'GL_SGIX_texture_phase' : { 'flags' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5_mem_extended.txt', }, 'GL_NV_gpu_shader5' : { 'number'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil1.txt', }, 'GL_OES_stencil4' 
: { 'esnumber'", ": 'extensions/ARB/ARB_sample_locations.txt', }, 'GL_ARB_sample_shading' : { 'arbnumber' : 70, 'flags' : { 'public'", "'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_occlusion_test.txt', }, 'GL_HP_texture_lighting' :", ": 'extensions/KHR/KHR_parallel_shader_compile.txt', }, 'GL_KHR_robust_buffer_access_behavior' : { 'arbnumber' : 169, 'esnumber' : 189, 'flags'", ": { 'public' }, 'url' : 'extensions/OES/OES_blend_equation_separate.txt', }, 'GL_OES_blend_func_separate' : { 'esnumber' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop.txt', }, 'WGL_NV_DX_interop2' : {", "40, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'GL_EXT_texture_compression_s3tc' : { 'number' : 198, 'esnumber' : 154, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_filter_hint.txt', }, 'GL_NV_non_square_matrices' :", "'KHR' }, 'url' : 'extensions/OML/OML_resample.txt', }, 'GL_OML_subsample' : { 'number' : 240, 'flags'", "'url' : 'extensions/NV/NV_conservative_raster_dilate.txt', }, 'GL_NV_conservative_raster_pre_snap' : { 'number' : 517, 'esnumber' : 297,", "}, 'GL_NV_texture_shader' : { 'number' : 230, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize.txt',", ": { 'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_combine.txt',", "'esnumber' : 120, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_program_binary.txt', }, 'GL_ARM_mali_shader_binary'", "}, 'GL_SGIX_texture_coordinate_clamp' : { 'number' : 235, 'flags' : { 'public' }, 'supporters'", "'number' : 493, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_AMD_gpu_shader_int16' : { 'number' : 507, 'flags' : { 'public' }, 'supporters' :", ": 121, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_buffer_object.txt', }, 'GL_ARB_clear_texture' :", ": 47, 'flags' : { 'public' 
}, 'url' : 'extensions/OES/OES_get_program_binary.txt', }, 'GL_OES_gpu_shader5' :", ": { 'number' : 494, 'flags' : { 'public' }, 'supporters' : {", "{ 'esnumber' : 183, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_buffer.txt', },", "'url' : 'extensions/OES/OES_framebuffer_object.txt', }, 'GL_OES_geometry_shader' : { 'esnumber' : 210, 'flags' : {", "'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_fragment_shader_ordering.txt', }, 'GL_INTEL_framebuffer_CMAA' :", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_sparse_texture.txt', }, 'GL_AMD_stencil_operation_extended' : { 'number' : 413,", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/EXT/EXT_clip_control.txt', }, 'WGL_I3D_digital_video_control' : {", "369, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression_rgtc.txt',", "}, 'url' : 'extensions/SGI/GLX_SGI_swap_control.txt', }, 'GL_SGI_texture_color_table' : { 'number' : 17, 'flags' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_polynomial_ffd.txt', }, 'GL_SGIX_quad_mesh' : { 'flags' : {", "}, 'GL_SUN_slice_accum' : { 'number' : 258, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/EXT/EXT_422_pixels.txt', }, 'GL_EXT_YUV_target' : { 'esnumber' : 222, 'flags' :", "249, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", ": 'extensions/NV/NV_explicit_attrib_location.txt', }, 'GL_NV_explicit_multisample' : { 'number' : 357, 'flags' : { 'public'", ": 27, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'SUN' },", "'GL_ARB_query_buffer_object' : { 'arbnumber' : 148, 'flags' : { 'public' }, 'url' :", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_window_pos.txt', }, 'GL_ARM_mali_program_binary' : { 'esnumber' : 120,", "'extensions/EXT/EXT_texture_cube_map_array.txt', }, 'GL_EXT_texture_env' : { 'number' : 146, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/OES/OES_texture_3D.txt', }, 
'GL_OES_texture_border_clamp' : { 'esnumber' : 215, 'flags'", "'extensions/SGIX/SGIX_cube_map.txt', }, 'GL_SGIX_cylinder_texgen' : { 'number' : 140, 'flags' : { 'incomplete' },", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_icc_texture.txt', }, 'GL_SGIX_igloo_interface' : { 'number' : 219, 'flags'", "'GL_APPLE_texture_2D_limited_npot' : { 'esnumber' : 59, 'flags' : { 'public' }, 'url' :", "'arbnumber' : 13, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'url' : 'extensions/AMD/AMD_debug_output.txt', }, 'GL_AMD_depth_clamp_separate' : { 'number' : 401, 'flags' : {", "147, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_bind.txt', }, 'GL_ARB_multi_draw_indirect' : {", "'GL_ARB_robustness' : { 'arbnumber' : 105, 'flags' : { 'public' }, 'url' :", ": { 'number' : 158, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_buffer_unified_memory.txt', }, 'GL_NV_vertex_program'", "'extensions/OES/OES_stencil8.txt', }, 'GL_OES_stencil_wrap' : { 'esnumber' : 19, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/ARB/ARB_debug_output.txt', }, 'GL_ARB_depth_buffer_float' : { 'arbnumber' : 43,", "{ 'public' }, 'url' : 'extensions/ARB/ARB_program_interface_query.txt', }, 'GL_ARB_provoking_vertex' : { 'arbnumber' : 64,", "'number' : 300, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", ": 430, 'esnumber' : 126, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_half_float.txt',", ": 482, 'flags' : { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_libglvnd.txt', }, 'GL_EXT_light_texture' :", "85, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", ": { 'esnumber' : 55, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_driver_control.txt',", "}, 'url' : 'extensions/NV/NV_internalformat_sample_query.txt', }, 'GL_NV_light_max_exponent' : { 'number' : 189, 'flags' :", "'url' : 'extensions/OES/OES_texture_3D.txt', }, 
'GL_OES_texture_border_clamp' : { 'esnumber' : 215, 'flags' : {", "}, 'GL_NV_depth_nonlinear' : { 'esnumber' : 73, 'flags' : { 'public' }, 'url'", "252, 'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' :", "'url' : 'extensions/SGIS/SGIS_texture4D.txt', }, 'GL_SGIS_texture_border_clamp' : { 'number' : 36, 'flags' : {", ": 'extensions/OES/OES_sample_shading.txt', }, 'GL_OES_sample_variables' : { 'esnumber' : 170, 'flags' : { 'public'", "'GL_SGIX_flush_raster' : { 'number' : 61, 'flags' : { 'public' }, 'supporters' :", "113, 'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url' :", "224, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_float_blend.txt', }, 'GL_EXT_fog_coord' : {", ": { 'number' : 79, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clip_volume_hint.txt',", ": { 'public' }, 'url' : 'extensions/EXT/EXT_buffer_storage.txt', }, 'GL_EXT_clear_texture' : { 'esnumber' :", ": 137, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_storage_buffer_object.txt', }, 'GL_ARB_shader_subroutine' :", "'KHR' }, 'url' : 'extensions/OML/GLX_OML_sync_control.txt', }, 'GL_OVR_multiview' : { 'number' : 478, 'esnumber'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_transpose_matrix.txt', },", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_swap_control.txt',", "'public' }, 'supporters' : { 'GOOGLE', 'NVIDIA', 'VMware' }, 'url' : 'extensions/EXT/EXT_window_rectangles.txt', },", ": 486, 'esnumber' : 295, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_clip_space_w_scaling.txt',", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_object_space_tess.txt', }, 'GL_EXT_occlusion_query_boolean'", ": 200, 'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_draw_buffers_blend.txt', }, 'GL_ARB_draw_elements_base_vertex' : { 'arbnumber' :", "'supporters' : { 'AMD' }, 'url' : 
'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', }, 'GL_AMD_transform_feedback4' : { 'number'", ": 189, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', }, 'GL_SGIX_texture_lod_bias'", "'supporters' : { 'AMD' }, 'url' : 'extensions/INTEL/GLX_INTEL_swap_event.txt', }, 'GL_INTEL_texture_scissor' : { 'number'", ": { 'arbnumber' : 163, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_derivative_control.txt',", "'url' : 'extensions/EXT/EXT_unpack_subimage.txt', }, 'GL_EXT_vertex_array' : { 'number' : 30, 'flags' : {", "'url' : 'extensions/ATI/ATI_pn_triangles.txt', }, 'GL_ATI_separate_stencil' : { 'number' : 289, 'flags' : {", ": { 'MS' }, 'url' : 'extensions/WIN/WIN_specular_fog.txt', }, 'WGL_NV_DX_interop' : { 'number' :", "'GL_EXT_texture_compression_latc' : { 'number' : 331, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_specular_vector.txt', }, 'GL_APPLE_sync'", ": { 'arbnumber' : 89, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_fp64.txt',", "{ 'esnumber' : 216, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_buffer.txt', },", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_framezoom.txt',", "'extensions/ATI/ATI_text_fragment_shader.txt', }, 'GL_ATI_texture_env_combine3' : { 'number' : 279, 'flags' : { 'public' },", "'INTEL' }, 'url' : 'extensions/INTEL/INTEL_texture_scissor.txt', }, 'GL_KHR_blend_equation_advanced' : { 'arbnumber' : 174, 'esnumber'", ": { 'number' : 453, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gcn_shader.txt',", "'ARB' }, 'url' : 'extensions/ARB/ARB_instanced_arrays.txt', }, 'GL_ARB_internalformat_query' : { 'arbnumber' : 112, 'flags'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_blend_cmultiply.txt', }, 'GL_SGIX_calligraphic_fragment' : { 'number'", 
"'GL_3DFX_texture_compression_FXT1' : { 'number' : 206, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI', 'SUN' }, 'url'", "{ 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_rasterpos_clip.txt', }, 'GL_IBM_static_data'", "'extensions/NV/NV_half_float.txt', }, 'GL_NV_image_formats' : { 'esnumber' : 200, 'flags' : { 'public' },", "'arbnumber' : 15, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "}, 'url' : 'extensions/OES/OES_blend_func_separate.txt', }, 'GL_OES_blend_subtract' : { 'esnumber' : 3, 'flags' :", "'number' : 247, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", "}, 'GL_OES_matrix_palette' : { 'esnumber' : 12, 'flags' : { 'public' }, 'url'", ": { 'number' : 395, 'flags' : { 'public' }, 'supporters' : {", "arbnumber 74.', }, 'WGL_ARB_create_context_profile' : { 'arbnumber' : 74, 'flags' : { 'public'", "444, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_shader_stencil_value_export.txt', }, 'GL_AMD_shader_trinary_minmax' : {", "'url' : 'extensions/SGIS/SGIS_fog_function.txt', }, 'GL_SGIS_generate_mipmap' : { 'number' : 32, 'flags' : {", "}, 'GL_EXT_index_material' : { 'number' : 94, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize_float.txt', }, 'GLX_SGIX_video_source' : { 'number'", "{ 'arbnumber' : 40, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'url' : 'extensions/SGIX/SGIX_fragments_instrument.txt', }, 'GL_SGIX_framezoom' : { 'number' : 57, 'flags' : {", ": { 'number' : 131, 'flags' : { 'incomplete' }, 'supporters' : {", ": 251, 'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url'", "'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_shader.txt', }, 'GL_ARB_fragment_shader_interlock' : { 'arbnumber' : 177, 'flags'", ": { 'public' }, 'url' : 'extensions/NV/NV_copy_buffer.txt', }, 'GL_NV_copy_depth_to_color' : { 'number' :", "'GL_ARB_draw_elements_base_vertex' : { 'arbnumber' : 62, 'flags' : { 'public' }, 'url' 
:", "'GL_ARB_fragment_program' : { 'arbnumber' : 27, 'flags' : { 'public' }, 'supporters' :", ": { 'esnumber' : 160, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_non_square_matrices.txt',", ": { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_group.txt', }, 'GL_SGIX_tag_sample_buffer' : { 'number' :", "{ 'number' : 331, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle.txt', }, 'GL_NV_texture_rectangle_compressed' :", "{ 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', }, 'GL_ATI_draw_buffers' : { 'number' : 277,", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_blit.txt', }, 'GL_EXT_framebuffer_multisample' : { 'number'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', }, 'GL_AMD_gpu_shader_int16' : { 'number' :", "}, 'url' : 'extensions/AMD/AMD_gpu_shader_int16.txt', }, 'GL_AMD_gpu_shader_int64' : { 'number' : 451, 'flags' :", "163, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_derivative_control.txt', }, 'GL_ARB_direct_state_access' : {", "8, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_extended_matrix_palette.txt', }, 'GL_OES_fbo_render_mipmap' : {", ": 'extensions/SGIS/GLX_SGIS_blended_overlay.txt', }, 'GL_SGIS_clip_band_hint' : { 'flags' : { 'incomplete' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_mapbuffer.txt', }, 'GL_OES_matrix_get' : { 'esnumber'", "'arbnumber' : 37, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_buffer.txt', }, 'GL_EXT_EGL_image_array' : { 'esnumber'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_lod.txt', }, 'GL_ARB_shader_viewport_layer_array' : { 'arbnumber' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_copy_image.txt', 'alias' : { 'GLX_NV_copy_image', 'WGL_NV_copy_image'", "258, 'flags' : { 'public' }, 'url' : 
'extensions/NV/NV_viewport_swizzle.txt', }, 'GL_OES_EGL_image' : {", ": 276, 'flags' : { 'public' }, 'supporters' : { 'ATI', 'NVIDIA' },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_attrib_64bit.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_alpha_to_coverage_dither_control.txt', }, 'GL_NV_bgr' : { 'esnumber'", ": 'extensions/OES/OES_byte_coordinates.txt', }, 'GL_OES_compressed_ETC1_RGB8_texture' : { 'esnumber' : 5, 'flags' : { 'public'", "'url' : 'extensions/SGIX/SGIX_sprite.txt', }, 'GL_SGIX_subdiv_patch' : { 'flags' : { 'incomplete' }, 'url'", ": { 'GL_KHR_blend_equation_advanced_coherent' }, }, 'GL_KHR_context_flush_control' : { 'arbnumber' : 168, 'esnumber' :", "}, 'url' : 'extensions/ARB/ARB_robust_buffer_access_behavior.txt', }, 'GL_ARB_robustness' : { 'arbnumber' : 105, 'flags' :", "'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_static_vertex_array.txt', }, 'GL_EXT_stencil_clear_tag' :", "}, 'GL_NV_shadow_samplers_cube' : { 'esnumber' : 147, 'flags' : { 'public' }, 'url'", "}, 'GL_EXT_timer_query' : { 'number' : 319, 'flags' : { 'public' }, 'supporters'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_buffer_object.txt', }, 'GL_EXT_texture_compression_astc_decode_mode' : { 'esnumber' : 276, 'flags'", "{ 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', }, 'GL_ANGLE_instanced_arrays' : { 'esnumber' : 109,", "'GL_SGIX_fragment_lighting_space' : { 'number' : 118, 'flags' : { 'incomplete' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' },", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_combine.txt', }, 'GL_ARB_texture_env_crossbar' : { 'arbnumber' :", "'extensions/ARB/ARB_vertex_program.txt', }, 'GL_ARB_vertex_shader' : { 'arbnumber' : 31, 'flags' : { 'public' },", "151, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers.txt', }, 'GL_EXT_draw_buffers2' : {", "}, 'url' : 
'extensions/EXT/EXT_draw_buffers2.txt', }, 'GL_EXT_draw_buffers_indexed' : { 'esnumber' : 176, 'flags' :", ": 235, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sample_locations.txt', }, 'GL_NV_sample_mask_override_coverage' :", ": { 'esnumber' : 19, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil_wrap.txt',", ": { 'public' }, 'url' : 'extensions/QCOM/QCOM_writeonly_rendering.txt', }, 'GL_QCOM_framebuffer_foveated' : { 'esnumber' :", "}, 'GL_NV_conservative_raster' : { 'number' : 465, 'esnumber' : 228, 'flags' : {", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg2.txt', }, 'GL_SGIX_nonlinear_lighting_pervertex' : { 'flags'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_combine.txt', }, 'GL_ARB_texture_env_crossbar' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias' : { 'GLX_EXT_framebuffer_sRGB', 'WGL_EXT_framebuffer_sRGB'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb.txt', }, 'GL_SGIX_ycrcb_subsample' : {", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_range.txt', },", ": 'extensions/ARB/ARB_shading_language_include.txt', }, 'GL_ARB_shading_language_packing' : { 'arbnumber' : 116, 'flags' : { 'public'", "226, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_raster_multisample.txt', }, 'GL_EXT_read_format_bgra' : {", "{ 'number' : 330, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_client_storage.txt',", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_clipmap.txt', }, 'GL_SGIX_color_matrix_accuracy' : {", "'extensions/NVX/NVX_conditional_render.txt', }, 'GL_NVX_gpu_memory_info' : { 'number' : 438, 'flags' : { 'public' },", "'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_blend_logic_op.txt', }, 
'GL_EXT_blend_minmax' : { 'number' : 37,", ": 186, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_primitive_bounding_box.txt', }, 'GL_EXT_protected_textures' :", "'extensions/EXT/EXT_texture_sRGB_decode.txt', }, 'GL_EXT_texture_shared_exponent' : { 'number' : 333, 'flags' : { 'public' },", "{ 'SUN' }, 'url' : 'extensions/SUN/SUN_global_alpha.txt', }, 'GL_SUN_mesh_array' : { 'number' : 257,", "'public' }, 'url' : 'extensions/ARB/ARB_shader_stencil_export.txt', }, 'GL_ARB_shader_storage_buffer_object' : { 'arbnumber' : 137, 'flags'", "'GL_IBM_cull_vertex' : { 'number' : 199, 'flags' : { 'public' }, 'supporters' :", "'GL_NV_deep_texture3D' : { 'number' : 424, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow.txt', }, 'GL_ARB_shadow_ambient' :", ": { 'arbnumber' : 91, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_tessellation_shader.txt',", "{ 'public' }, 'url' : 'extensions/NV/NV_texture_compression_latc.txt', }, 'GL_NV_texture_compression_s3tc' : { 'esnumber' : 128,", "113, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_translated_shader_source.txt', }, 'GL_APPLE_aux_depth_stencil' : {", "}, 'GL_NV_occlusion_query' : { 'number' : 261, 'flags' : { 'public' }, 'supporters'", "}, 'GL_ARB_gl_spirv' : { 'arbnumber' : 190, 'flags' : { 'public' }, 'url'", ": 325, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'GL_SGI_texture_color_table' : { 'number' : 17, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader3.txt',", "{ 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_genlock.txt', }, 'WGL_I3D_image_buffer'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_sharpen_texture.txt', }, 'GL_SGIS_texture4D' :", "142, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias' : { 
'GLX_ARB_robustness_share_group_isolation'", "'GL_NV_fragment_program2' : { 'number' : 304, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 209, 'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_multisample.txt', 'alias'", "'arbnumber' : 101, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context_robustness.txt', }, 'GL_ARB_cull_distance'", "'extensions/ATI/ATI_vertex_streams.txt', }, 'GL_DMP_program_binary' : { 'esnumber' : 192, 'flags' : { 'public' },", ": { 'number' : 429, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_select.txt',", "{ 'MS' }, 'url' : 'extensions/WIN/WIN_phong_shading.txt', }, 'GL_WIN_scene_markerXXX' : { 'flags' : {", "'esnumber' : 237, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array2.txt', }, 'GL_NV_viewport_swizzle'", "267, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_viewport_array.txt', }, 'GL_OML_interlace' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_conditional_render.txt', }, 'GL_NV_conservative_raster' :", ": { 'esnumber' : 136, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture_cube_map.txt',", ": { 'esnumber' : 171, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_image_atomic.txt',", ": { 'number' : 234, 'flags' : { 'public' }, 'supporters' : {", "'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', }, 'GL_ANGLE_program_binary' : { 'esnumber' : 139, 'flags' : { 'public' },", "}, 'GLX_EXT_swap_control_tear' : { 'number' : 414, 'flags' : { 'public' }, 'supporters'", "'esnumber' : 56, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', }, 'GL_QCOM_tiled_rendering'", ": 81, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_shader_binary.txt', }, 'GL_ARM_rgba8' :", ": { 'esnumber' : 222, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_YUV_target.txt',", "}, 'GL_EXT_compiled_vertex_array' : { 
'number' : 97, 'flags' : { 'public' }, 'supporters'", "65, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cube_map.txt', }, 'GL_ARB_seamless_cubemap_per_texture' : {", "}, 'url' : 'extensions/ATI/ATI_vertex_streams.txt', }, 'GL_DMP_program_binary' : { 'esnumber' : 192, 'flags' :", ": 167, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage.txt', }, 'GL_EXT_shader_pixel_local_storage2' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_load.txt', }, 'GL_NV_shader_buffer_store' : { 'number' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_copy_buffer.txt', }, 'GL_NV_copy_buffer' : {", "22, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_render_texture.txt', }, 'WGL_ARB_robustness_application_isolation' : {", "'public' }, 'supporters' : { 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_texture_object.txt',", "'extensions/SGI/GLX_SGI_video_sync.txt', }, 'GL_SUNX_constant_data' : { 'number' : 163, 'flags' : { 'public' },", "497, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg1.txt', }, 'GL_SGIX_mpeg2' : {", ": { 'arbnumber' : 73, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_lod.txt',", "}, 'GL_ARB_map_buffer_alignment' : { 'arbnumber' : 113, 'flags' : { 'public' }, 'url'", "116, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_packing.txt', }, 'GL_ARB_shadow' : {", ": { 'number' : 400, 'flags' : { 'public' }, 'supporters' : {", ": { 'arbnumber' : 134, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_program_interface_query.txt',", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias' : { 'WGL_EXT_create_context_es_profile' }, },", "'GLX_ARB_create_context_no_error' : { 'arbnumber' : 191, 'flags' : { 'public' }, 'url' :", ": 
'extensions/ARB/ARB_shader_atomic_counter_ops.txt', }, 'GL_ARB_shader_atomic_counters' : { 'arbnumber' : 114, 'flags' : { 'public'", "'arbnumber' : 126, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias' :", "{ 'esnumber' : 135, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_bgr.txt', },", "'number' : 36, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fog_distance.txt',", "'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_digital_video_control.txt',", "'extensions/ARB/ARB_timer_query.txt', }, 'GL_ARB_transform_feedback2' : { 'arbnumber' : 93, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sample_locations.txt', }, 'GL_NV_sample_mask_override_coverage' : { 'number'", "'number' : 362, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "{ 'public' }, 'url' : 'extensions/NV/GLX_NV_delay_before_swap.txt', }, 'GL_NV_depth_buffer_float' : { 'number' : 334,", "{ 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', }, 'GLU_EXT_object_space_tess' : { 'number' : 75,", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_border_clamp.txt', }, 'GL_EXT_texture_buffer' : { 'esnumber' : 183,", "54, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_filter_minmax.txt', }, 'GL_EXT_texture_format_BGRA8888' : { 'esnumber' : 51,", "}, 'GL_EXT_clip_volume_hint' : { 'number' : 79, 'flags' : { 'public' }, 'url'", "'arbnumber' : 178, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_int64.txt', }, 'GL_ARB_half_float_pixel'", "{ 'esnumber' : 100, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_occlusion_query_boolean.txt', },", "'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_set.txt', }, 'GL_EXT_vertex_array_setXXX' : { 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 
'extensions/ARB/ARB_texture_swizzle.txt', }, 'GL_ARB_texture_view' : { 'arbnumber'", "'esnumber' : 276, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias' :", "}, 'GL_ARB_blend_func_extended' : { 'arbnumber' : 78, 'flags' : { 'public' }, 'url'", "'extensions/EXT/EXT_texture_sRGB_R8.txt', }, 'GL_EXT_texture_sRGB_RG8' : { 'esnumber' : 223, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' :", "'extensions/EXT/EXT_buffer_storage.txt', }, 'GL_EXT_clear_texture' : { 'esnumber' : 269, 'flags' : { 'public' },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_vdpau_interop.txt', }, 'GL_NV_vertex_array_range' : { 'number' : 190,", ": 145, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_texture.txt', }, 'GL_ARB_clip_control' :", "'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', }, 'GL_QCOM_tiled_rendering' : { 'esnumber'", "'Shares extension spec with WGL_ARB_create_context_no_error.', 'alias' : { 'WGL_ARB_create_context_no_error' }, }, 'GLX_ARB_create_context_profile' :", "'esnumber' : 301, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_storage.txt', }, 'GL_EXT_memory_object'", "'number' : 268, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_stencil_operation_extended.txt', }, 'GL_AMD_texture_gather_bias_lod' : { 'number' :", "{ 'esnumber' : 94, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_read_depth_stencil.txt', },", "'GL_NV_vertex_program4' : { 'number' : 325, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 77, 'flags' : { 'public' }, 'supporters' : {", "'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_scale_bias.txt', }, 'GL_SGIX_texture_supersample' : { 'flags' : {", "'GL_NV_conservative_raster' : { 'number' : 465, 'esnumber' : 228, 'flags' : { 'public'", "}, 'url' : 'extensions/SGIX/GLX_SGIX_dm_buffer.txt', }, 'GL_SGIX_dvc' : { 'flags' : { 
'incomplete' },", ": 153, 'flags' : { 'obsolete' }, 'url' : 'extensions/SGI/GLX_SGI_transparent_pixel.txt', }, 'GLX_SGI_video_sync' :", "{ 'esnumber' : 277, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_shader_framebuffer_fetch_noncoherent.txt', },", "{ 'public' }, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_multisample.txt', }, 'GL_3DFX_tbuffer'", "}, 'supporters' : { 'MS' }, 'url' : 'extensions/WIN/WIN_phong_shading.txt', }, 'GL_WIN_scene_markerXXX' : {", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_blend_minmax_factor.txt', }, 'GL_AMD_compressed_3DC_texture' : {", "}, 'GLX_ARB_robustness_application_isolation' : { 'arbnumber' : 142, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/SGIX/SGIX_vertex_array_object.txt', }, 'GL_SGIX_vertex_preclip' : { 'number' : 210, 'flags' :", ": { 'number' : 207, 'flags' : { 'public' }, 'supporters' : {", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression_rgtc.txt', }, 'GL_ARB_texture_cube_map' : { 'arbnumber' : 7,", "'extensions/EXT/EXT_shared_texture_palette.txt', }, 'GL_EXT_sparse_texture' : { 'esnumber' : 240, 'flags' : { 'public' },", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', }, 'GL_SGIX_nurbs_eval' : { 'flags' : {", ": 'extensions/ARB/ARB_texture_cube_map.txt', }, 'GL_ARB_texture_cube_map_array' : { 'arbnumber' : 71, 'flags' : { 'public'", "}, 'url' : 'extensions/ARB/ARB_vertex_attrib_binding.txt', }, 'GL_ARB_vertex_blend' : { 'arbnumber' : 15, 'flags' :", "{ 'KHR' }, 'url' : 'extensions/OES/OES_compressed_paletted_texture.txt', }, 'GL_OES_copy_image' : { 'esnumber' : 208,", ": { 'number' : 17, 'flags' : { 'public' }, 'supporters' : {", "60, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", ": 212, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback.txt', }, 'GL_NV_transform_feedback2' : { 'number' : 358, 'flags'", "'number' : 507, 'flags' : { 'public' }, 
'supporters' : { 'AMD' },", "{ 'MESA' }, 'url' : 'extensions/MESA/MESA_window_pos.txt', }, 'GL_MESA_ycbcr_texture' : { 'number' : 301,", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_resample.txt', },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_attrib_integer_64bit.txt', }, 'GL_NV_vertex_buffer_unified_memory' :", "{ 'number' : 351, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_occlusion_query_event.txt', },", "{ 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pbuffer.txt', },", "'esnumber' : 196, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_internalformat_sample_query.txt', }, 'GL_NV_light_max_exponent'", ": 'extensions/SGIS/SGIS_texture4D.txt', }, 'GL_SGIS_texture_border_clamp' : { 'number' : 36, 'flags' : { 'public'", "}, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_subtexture.txt',", "'SGI' }, 'url' : 'extensions/EXT/EXT_cull_vertex.txt', }, 'GL_EXT_debug_label' : { 'number' : 439, 'esnumber'", ": { 'esnumber' : 86, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_frag_depth.txt',", "'GL_EXT_sRGB_write_control' : { 'esnumber' : 153, 'flags' : { 'public' }, 'url' :", "{ 'esnumber' : 188, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', },", "}, 'url' : 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', }, 'GL_EXT_framebuffer_object' : { 'number' : 310, 'flags' :", "'number' : 218, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", ": { 'public' }, 'url' : 'extensions/NV/NV_fbo_color_attachments.txt', }, 'GL_NV_fence' : { 'number' :", ": 'extensions/NV/NV_parameter_buffer_object2.txt', }, 'GL_NV_path_rendering' : { 'number' : 410, 'esnumber' : 199, 'flags'", "{ 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_blended_overlay.txt', }, 'GL_SGIS_clip_band_hint' : { 'flags' : {", "'public' }, 'url' : 
'extensions/ARB/ARB_texture_storage.txt', }, 'GL_ARB_texture_storage_multisample' : { 'arbnumber' : 141, 'flags'", "}, 'GL_EXT_blend_equation_separate' : { 'number' : 299, 'flags' : { 'public' }, 'supporters'", ": { 'esnumber' : 105, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB.txt',", ": { 'public' }, 'url' : 'extensions/EXT/EXT_primitive_bounding_box.txt', }, 'GL_EXT_protected_textures' : { 'esnumber' :", ": 'extensions/EXT/EXT_texture_env_combine.txt', }, 'GL_EXT_texture_env_dot3' : { 'number' : 220, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/NV/NV_explicit_attrib_location.txt', }, 'GL_NV_explicit_multisample' : { 'number' : 357, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_sharpen_texture.txt',", "}, 'GL_SGIX_ycrcb' : { 'number' : 101, 'flags' : { 'public' }, 'supporters'", "'WGL_EXT_multisample' : { 'number' : 209, 'flags' : { 'public' }, 'url' :", ": { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_lock.txt', }, 'WGL_I3D_swap_frame_usage' : { 'number' :", ": 240, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", "'extensions/OES/OES_primitive_bounding_box.txt', }, 'GL_OES_query_matrix' : { 'number' : 296, 'esnumber' : 16, 'flags' :", "'public' }, 'url' : 'extensions/EXT/EXT_texture_buffer.txt', }, 'GL_EXT_texture_buffer_object' : { 'number' : 330, 'flags'", "'extensions/NV/NV_conservative_raster_pre_snap.txt', }, 'GL_NV_conservative_raster_pre_snap_triangles' : { 'number' : 487, 'esnumber' : 262, 'flags' :", "'url' : 'extensions/ARB/ARB_explicit_uniform_location.txt', }, 'GL_ARB_fragment_coord_conventions' : { 'arbnumber' : 63, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 'alias' : { 'GLX_EXT_create_context_es_profile' }, }, 'GL_EXT_cull_vertex' :", "}, 'GLX_EXT_stereo_tree' : { 'number' : 452, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_debug_output.txt', }, 'GL_ARB_depth_buffer_float' : { 
'arbnumber'", "'extensions/NV/NV_instanced_arrays.txt', }, 'GL_NV_internalformat_sample_query' : { 'number' : 475, 'esnumber' : 196, 'flags' :", "'url' : 'extensions/NV/NV_vertex_program1_1.txt', }, 'GL_NV_vertex_program2' : { 'number' : 287, 'flags' : {", "}, 'url' : 'extensions/OES/OES_stencil8.txt', }, 'GL_OES_stencil_wrap' : { 'esnumber' : 19, 'flags' :", "'public' }, 'url' : 'extensions/OES/OES_tessellation_shader.txt', }, 'GL_OES_texture_3D' : { 'esnumber' : 34, 'flags'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture2.txt', }, 'GL_EXT_multiview_draw_buffers' : { 'esnumber' : 125,", ": 77, 'flags' : { 'public' }, 'supporters' : { 'TGS' }, 'url'", "'public' }, 'url' : 'extensions/OES/OES_draw_texture.txt', }, 'GL_OES_element_index_uint' : { 'esnumber' : 26, 'flags'", ": 'extensions/NV/NV_conservative_raster_underestimation.txt', }, 'GLX_NV_copy_buffer' : { 'number' : 457, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query.txt', }, 'GL_ARB_internalformat_query2' : { 'arbnumber' : 131, 'flags'", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_3D.txt', }, 'GL_OES_texture_border_clamp' : { 'esnumber' : 215,", "102, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_gamma.txt', }, 'WGL_I3D_genlock' : { 'number' : 252, 'flags'", "'url' : 'extensions/EXT/EXT_blend_equation_separate.txt', }, 'GL_EXT_blend_func_extended' : { 'esnumber' : 247, 'flags' : {", ": { 'number' : 472, 'esnumber' : 235, 'flags' : { 'public' },", "'obsolete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multitexture.txt', }, 'GL_SGIS_pixel_texture' :", "'GL_SUN_vertex' : { 'number' : 166, 'flags' : { 'public' }, 'supporters' :", "'number' : 21, 'flags' : { 'public' }, 'supporters' : { 'KGC', 'SGI'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_offset.txt', }, 'GL_SGIX_fog_patchy'", ": { 'SGI' }, 'url' : 
'extensions/EXT/EXT_fragment_lighting.txt', }, 'GL_EXT_framebuffer_blit' : { 'number' :", "'IBM' }, 'url' : 'extensions/IBM/IBM_multimode_draw_arrays.txt', }, 'GL_IBM_rasterpos_clip' : { 'number' : 110, 'flags'", "'GLX_ARB_vertex_buffer_object' }, }, 'GL_ARB_vertex_program' : { 'arbnumber' : 26, 'flags' : { 'public'", "}, 'GL_EXT_texture_format_sRGB_override' : { 'esnumber' : 299, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/ARB/ARB_post_depth_coverage.txt', }, 'GL_ARB_program_interface_query' : { 'arbnumber' : 134, 'flags' : {", "'extensions/NV/NV_fragment_program2.txt', }, 'GL_NV_fragment_program4' : { 'number' : 335, 'flags' : { 'public' },", ": 245, 'flags' : { 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_program_binary.txt', }, 'GL_MTK_shader_binary'", ": { 'number' : 304, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/GLX_SUN_get_transparent_index.txt', }, 'GL_SUN_global_alpha' : { 'number'", "207, 'flags' : { 'public' }, 'supporters' : { '3DFX' }, 'url' :", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_viewport_index.txt', }, 'GL_ANDROID_extension_pack_es31a'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_parallel_shader_compile.txt', }, 'GL_ARB_pipeline_statistics_query' : { 'arbnumber' : 171,", ": { 'number' : 325, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 179, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/SGIX/SGIX_subdiv_patch.txt', }, 'GL_SGIX_subsample' : { 'number' : 202, 'flags' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_storage_buffer_object.txt', }, 'GL_NV_shader_thread_group' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_multi_bind.txt', }, 'GL_ARB_multi_draw_indirect' : { 'arbnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_stencil_export.txt', }, 'GL_ARB_shader_storage_buffer_object' : { 'arbnumber'", "{ 'public' }, 'url' : 
'extensions/KHR/KHR_robust_buffer_access_behavior.txt', }, 'GL_KHR_robustness' : { 'arbnumber' : 170,", ": { 'number' : 51, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_r_instruments.txt', }, 'GL_SGIX_bali_timer_instruments' : { 'flags'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_colorspace.txt',", "'arbnumber' : 4, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_specular_vector.txt', }, 'GL_APPLE_sync' : { 'esnumber' :", "'url' : 'extensions/IMG/IMG_program_binary.txt', }, 'GL_IMG_read_format' : { 'esnumber' : 53, 'flags' : {", "348, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_resize_buffers.txt', }, 'GLX_MESA_set_3dfx_mode' : { 'number'", "{ 'number' : 216, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'GL_NV_gpu_shader5' : { 'number' : 389, 'esnumber' : 260, 'flags' : { 'public'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program1_1.txt', }, 'GL_NV_vertex_program2' : { 'number'", ": { 'number' : 268, 'flags' : { 'public' }, 'supporters' : {", ": 186, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture2.txt', }, 'GL_ARB_sparse_texture_clamp' :", ": { 'number' : 356, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_icc_texture.txt',", "'GL_OES_geometry_shader' : { 'esnumber' : 210, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_bindable_uniform.txt', }, 'GL_EXT_blend_color' : {", "}, 'url' : 'extensions/IMG/IMG_bindless_texture.txt', }, 'GL_IMG_framebuffer_downsample' : { 'esnumber' : 255, 'flags' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_get_program_binary.txt', }, 'GL_ARB_get_texture_sub_image' : { 'arbnumber' :", ": { 'number' : 288, 'flags' : { 'public' }, 
'supporters' : {", ": { 'number' : 354, 'flags' : { 'public' }, 'supporters' : {", "69, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_buffers_blend.txt', }, 'GL_ARB_draw_elements_base_vertex' : {", "'url' : 'extensions/EXT/EXT_x11_sync_object.txt', }, 'GL_FJ_shader_binary_GCCSO' : { 'esnumber' : 114, 'flags' : {", "}, 'url' : 'extensions/SGIX/SGIX_line_quality_hint.txt', }, 'GL_SGIX_list_priority' : { 'number' : 80, 'flags' :", "'number' : 72, 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', }, 'GL_SGIX_complex_polar'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', }, 'GL_EXT_shader_pixel_local_storage' : { 'esnumber' : 167,", "{ 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize.txt', }, 'GLX_SGIX_video_resize_float' : { 'number' : 184,", ": 255, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_framebuffer_downsample.txt', }, 'GL_IMG_multisampled_render_to_texture' :", "512, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'GL_NV_conservative_raster_pre_snap' : { 'number' : 517, 'esnumber' : 297, 'flags' : {", ": 315, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'ARB' }, 'url' : 'extensions/ARB/ARB_window_pos.txt', }, 'GL_ARM_mali_program_binary' : { 'esnumber' : 120, 'flags'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_filter_anisotropic.txt', },", "}, 'url' : 'extensions/WIN/WIN_phong_shading.txt', }, 'GL_WIN_scene_markerXXX' : { 'flags' : { 'obsolete' },", "'public' }, 'url' : 'extensions/ARB/ARB_texture_storage_multisample.txt', }, 'GL_ARB_texture_swizzle' : { 'arbnumber' : 84, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture_clamp.txt', }, 'GL_ARB_spirv_extensions' : { 'arbnumber'", ": 'extensions/NVX/NVX_conditional_render.txt', }, 'GL_NVX_gpu_memory_info' : { 'number' : 438, 'flags' : { 'public'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cylinder_texgen.txt', }, 
'GL_SGIX_datapipe' : { 'number' :", "{ 'public' }, 'url' : 'extensions/OES/OES_viewport_array.txt', }, 'GL_OML_interlace' : { 'number' : 239,", "}, 'url' : 'extensions/EXT/EXT_compiled_vertex_array.txt', }, 'GL_EXT_compressed_ETC1_RGB8_sub_texture' : { 'esnumber' : 188, 'flags' :", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_occlusion_query_event.txt', }, 'GL_AMD_performance_monitor' : { 'number'", ": 368, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", "'extensions/AMD/AMD_sparse_texture.txt', }, 'GL_AMD_stencil_operation_extended' : { 'number' : 413, 'flags' : { 'public' },", "'extensions/EXT/EXT_cmyka.txt', }, 'GL_EXT_color_buffer_float' : { 'esnumber' : 137, 'flags' : { 'public' },", "'GL_NV_transform_feedback2' : { 'number' : 358, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_scale_bias.txt', },", "'url' : 'extensions/NV/NV_texgen_reflection.txt', }, 'GL_NV_texture_array' : { 'esnumber' : 133, 'flags' : {", "'esnumber' : 278, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_array.txt', }, 'GL_EXT_EGL_image_external_wrap_modes'", "'url' : 'extensions/SGIX/SGIX_subdiv_patch.txt', }, 'GL_SGIX_subsample' : { 'number' : 202, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_copy_image.txt', }, 'GL_EXT_copy_texture' : { 'number'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', }, 'GL_SGIX_texture_multi_buffer' : { 'number'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_lod_bias.txt', }, 'GL_EXT_texture_mirror_clamp' : { 'number' :", "}, 'GL_INTEL_performance_query' : { 'number' : 443, 'esnumber' : 164, 'flags' : {", "'GL_SGIX_vertex_preclip_hint' }, }, 'GLX_SGIX_video_resize' : { 'number' : 83, 'flags' : { 'public'", "'url' : 'extensions/ARB/ARB_instanced_arrays.txt', }, 'GL_ARB_internalformat_query' : { 'arbnumber' : 112, 'flags' : {", "}, 'url' : 
'extensions/APPLE/APPLE_row_bytes.txt', }, 'GL_APPLE_specular_vector' : { 'number' : 159, 'flags' :", "'url' : 'extensions/EXT/EXT_texture_border_clamp.txt', }, 'GL_EXT_texture_buffer' : { 'esnumber' : 183, 'flags' : {", "'arbnumber' : 69, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_buffers_blend.txt', }, 'GL_ARB_draw_elements_base_vertex'", "{ 'number' : 387, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'extensions/ARB/ARB_draw_elements_base_vertex.txt', }, 'GL_ARB_draw_indirect' : { 'arbnumber' : 87, 'flags' : { 'public' },", "}, 'GL_ARM_shader_framebuffer_fetch_depth_stencil' : { 'esnumber' : 166, 'flags' : { 'public' }, 'url'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_rgtc.txt', }, 'GL_EXT_texture_compression_s3tc' :", "'esnumber' : 291, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', }, 'GL_EXT_texture_norm16'", "}, 'GL_ARB_depth_buffer_float' : { 'arbnumber' : 43, 'flags' : { 'public' }, 'supporters'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_polygon_offset_clamp.txt', }, 'GL_ARB_post_depth_coverage' :", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_shared_exponent.txt', }, 'GL_EXT_texture_snorm' : { 'number' :", ": 'extensions/ARB/ARB_shader_subroutine.txt', }, 'GL_ARB_shader_texture_image_samples' : { 'arbnumber' : 166, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clear_buffer_object.txt', }, 'GL_ARB_clear_texture' : { 'arbnumber'", "'arbnumber' : 60, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_lod.txt', }, 'GL_ARB_shader_viewport_layer_array'", "'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_compressed_paletted_texture.txt',", ": 80, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "{ 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_program_parameters.txt', },", "'alias' : { 
'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' }, }, 'GL_EXT_texture_compression_bptc' : { 'esnumber' : 287, 'flags'", ": 53, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "426, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_row_bytes.txt', }, 'GL_APPLE_specular_vector' : { 'number'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_float.txt', }, 'GL_EXT_color_buffer_half_float' : { 'esnumber' :", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_phase.txt', }, 'GL_SGIX_texture_range' : { 'number' : 181,", "83, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_blit.txt', }, 'GL_ANGLE_framebuffer_multisample' : {", "}, 'GL_IBM_cull_vertex' : { 'number' : 199, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/OES/OES_stencil1.txt', }, 'GL_OES_stencil4' : { 'esnumber' : 32, 'flags' : {", "'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_provoking_vertex.txt', }, 'GL_EXT_pvrtc_sRGB' : { 'esnumber' : 155,", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_fragment_lighting.txt',", "{ 'public' }, 'url' : 'extensions/EXT/EXT_separate_specular_color.txt', }, 'GL_EXT_shader_framebuffer_fetch' : { 'number' : 520,", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_load.txt', }, 'GL_NV_shader_buffer_store' :", "'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_array_object.txt', }, 'GL_ARB_vertex_attrib_64bit' : { 'arbnumber' : 99, 'flags'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pixel_format.txt', }, 'WGL_ARB_render_texture' : { 'arbnumber'", ": 'extensions/NV/NV_parameter_buffer_object.txt', }, 'GL_NV_parameter_buffer_object2' : { 'number' : 378, 'flags' : { 'public'", "'url' : 'extensions/NV/NV_query_resource_tag.txt', }, 'GL_NV_read_buffer' : { 'esnumber' : 93, 'flags' : {", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 
'extensions/ARB/WGL_ARB_create_context.txt', 'comments'", "'arbnumber' : 3, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_visual_select_group.txt', }, 'GLX_SGIX_wait_group' : {", ": { 'public' }, 'url' : 'extensions/IBM/IBM_static_data.txt', }, 'GL_IBM_texture_mirrored_repeat' : { 'number' :", "}, 'url' : 'extensions/IMG/IMG_program_binary.txt', }, 'GL_IMG_read_format' : { 'esnumber' : 53, 'flags' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_copy_buffer.txt', }, 'GL_NV_copy_buffer' : { 'esnumber' :", ": 'extensions/EXT/EXT_texture_format_BGRA8888.txt', }, 'GL_EXT_texture_format_sRGB_override' : { 'esnumber' : 299, 'flags' : { 'public'", "{ 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_select.txt', }, 'GL_SGIX_async' : { 'number' : 132,", "}, 'GL_AMD_gpu_shader_half_float_fetch' : { 'number' : 519, 'flags' : { 'public' }, 'supporters'", "'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_group.txt', }, 'GL_SGIX_tag_sample_buffer' : { 'number' : 58, 'flags'", "'public' }, 'url' : 'extensions/ARB/ARB_stencil_texturing.txt', }, 'GL_ARB_sync' : { 'arbnumber' : 66, 'flags'", "}, 'GL_EXT_vertex_shader' : { 'number' : 248, 'flags' : { 'public' }, 'url'", "}, 'GL_ARB_compute_shader' : { 'arbnumber' : 122, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/EXT/EXT_texture_filter_anisotropic.txt', }, 'GL_EXT_texture_filter_minmax' : { 'number' : 464, 'esnumber' : 227,", ": 'extensions/EXT/EXT_raster_multisample.txt', }, 'GL_EXT_read_format_bgra' : { 'esnumber' : 66, 'flags' : { 'public'", "}, 'url' : 'extensions/NV/WGL_NV_swap_group.txt', }, 'WGL_NV_video_output' : { 'number' : 349, 'flags' :", "extension with the same name string.', }, 'GL_EXT_separate_specular_color' : { 'number' : 144,", "'GL_ARB_shader_stencil_export' : { 'arbnumber' : 106, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_layer.txt',", "'SGI', 'SUN' }, 
'url' : 'extensions/EXT/EXT_texture3D.txt', }, 'GL_EXT_texture_array' : { 'number' : 329,", "296, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_stereo_view_rendering.txt', }, 'GLX_NV_swap_group' : {", "'GL_EXT_texture_integer' : { 'number' : 343, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/EXT/EXT_point_parameters.txt', }, 'GL_EXT_polygon_offset' : { 'number' : 3, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/NV/NV_shader_noperspective_interpolation.txt', }, 'GL_NV_shader_storage_buffer_object' : { 'number' : 422, 'flags'", "'url' : 'extensions/SGI/GLU_SGI_filter4_parameters.txt', }, 'GLX_SGI_make_current_read' : { 'number' : 42, 'flags' : {", "{ 'public' }, 'url' : 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias' : { 'WGL_ARB_robustness_share_group_isolation' }, }, 'WGL_ATI_pixel_format_float'", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_non_power_of_two.txt', }, 'GL_ARB_texture_query_levels' : { 'arbnumber' : 140, 'flags'", "'url' : 'extensions/3DFX/3DFX_multisample.txt', }, 'GL_3DFX_tbuffer' : { 'number' : 208, 'flags' : {", "16, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'number' : 495, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "}, 'url' : 'extensions/NV/NV_vertex_array_range2.txt', }, 'GL_NV_vertex_attrib_integer_64bit' : { 'number' : 392, 'flags' :", "'INTEL' }, 'url' : 'extensions/INTEL/INTEL_parallel_arrays.txt', }, 'GL_INTEL_performance_query' : { 'number' : 443, 'esnumber'", "'url' : 'extensions/AMD/AMD_depth_clamp_separate.txt', }, 'GL_AMD_draw_buffers_blend' : { 'number' : 366, 'flags' : {", "'public' }, 'url' : 'extensions/EXT/EXT_render_snorm.txt', }, 'GL_EXT_rescale_normal' : { 'number' : 27, 'flags'", "'obsolete' }, 'url' : 'extensions/EXT/EXT_texenv_op.txt', 'comments' : 'Evolved into EXT_texture_env_combine.', }, 'GL_EXT_texture' :", "'extensions/NV/NV_tessellation_program5.txt', }, 'GL_NV_texgen_emboss' : { 'number' : 193, 'flags' : { 'public' },", "'url' : 
'extensions/MESA/MESA_pack_invert.txt', }, 'GLX_MESA_pixmap_colormap' : { 'number' : 216, 'flags' : {", "'number' : 303, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'url' : 'extensions/ARB/ARB_arrays_of_arrays.txt', }, 'GL_ARB_base_instance' : { 'arbnumber' : 107, 'flags' :", "'GL_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_sample_locations' : { 'arbnumber' : 181, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_post_depth_coverage.txt', }, 'GL_EXT_primitive_bounding_box' : { 'esnumber'", ": 'extensions/SGIX/SGIX_instruments.txt', }, 'GL_SGIX_interlace' : { 'number' : 45, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias'", "'number' : 511, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fog_distance.txt', }, 'GL_NV_fragment_coverage_to_color' :", "'esnumber' : 27, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fbo_render_mipmap.txt', }, 'GL_OES_fixed_point'", "}, 'GL_SGIX_bali_timer_instruments' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_timer_instruments.txt', },", ": 'extensions/SGIX/SGIX_image_compression.txt', }, 'GL_SGIX_impact_pixel_texture' : { 'number' : 126, 'flags' : { 'incomplete'", ": 272, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", ": 'extensions/SGIX/SGIX_vector_ops.txt', }, 'GL_SGIX_vertex_array_object' : { 'flags' : { 'obsolete' }, 'url' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_video_sync.txt', },", "'esnumber' : 232, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fill_rectangle.txt', }, 'GL_NV_float_buffer'", "'extensions/SGIX/SGIX_color_matrix_accuracy.txt', }, 'GL_SGIX_color_table_index_mode' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_table_index_mode.txt',", "'extensions/NV/NV_register_combiners2.txt', }, 
'GL_NV_robustness_video_memory_purge' : { 'number' : 484, 'flags' : { 'public' },", "}, 'WGL_NV_DX_interop2' : { 'number' : 412, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_occlusion_query.txt', }, 'GL_ARB_occlusion_query2'", "}, 'url' : 'extensions/AMD/AMD_program_binary_Z400.txt', }, 'GL_AMD_query_buffer_object' : { 'number' : 420, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_clear_texture.txt', }, 'GL_EXT_clip_cull_distance' : { 'esnumber' : 257, 'flags' :", ": 176, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'NVIDIA' },", "'esnumber' : 36, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_float.txt', 'alias' :", "'esnumber' : 19, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil_wrap.txt', }, 'GL_OES_surfaceless_context'", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_supersample.txt', }, 'GL_SGIX_vector_ops' : { 'flags' : {", "'extensions/EXT/EXT_framebuffer_object.txt', }, 'GL_EXT_framebuffer_sRGB' : { 'number' : 337, 'flags' : { 'public' },", "'url' : 'extensions/ARB/ARB_sparse_texture2.txt', }, 'GL_ARB_sparse_texture_clamp' : { 'arbnumber' : 187, 'flags' : {", "'GLX_NV_present_video', 'WGL_NV_present_video' }, }, 'GL_NV_primitive_restart' : { 'number' : 285, 'flags' : {", "'number' : 428, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "'esnumber' : 17, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", "{ 'public' }, 'supporters' : { 'MS', 'SGI' }, 'url' : 'extensions/EXT/EXT_paletted_texture.txt', },", "'extensions/ARB/ARB_conditional_render_inverted.txt', }, 'GL_ARB_conservative_depth' : { 'arbnumber' : 111, 'flags' : { 'public' },", "{ 'INGR', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_wrap.txt', }, 'GL_EXT_subtexture' : { 'number' :", "{ 'number' : 508, 'esnumber' : 284, 'flags' : { 'public' }, 'url'", "'number' : 176, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'NVIDIA'", "'number' : 405, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", 
": 'extensions/SGIX/SGIX_occlusion_instrument.txt', }, 'GL_SGIX_packed_6bytes' : { 'number' : 162, 'flags' : { 'incomplete'", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_storage_multisample_2d_array.txt', }, 'GL_OES_texture_view' : { 'esnumber' : 218,", "}, 'url' : 'extensions/SGIX/SGIX_fragment_specular_lighting.txt', }, 'GL_SGIX_fragments_instrument' : { 'number' : 180, 'flags' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_array_object.txt', },", "}, 'GL_IMG_texture_filter_cubic' : { 'esnumber' : 251, 'flags' : { 'public' }, 'url'", "'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', }, 'GL_AMD_gpu_shader_int16' : { 'number' : 507, 'flags' : { 'public' },", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_blend.txt', }, 'GL_ARB_vertex_buffer_object' : {", "197, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_provoking_vertex.txt', }, 'GL_ARB_query_buffer_object' : { 'arbnumber' : 148,", ": 327, 'esnumber' : 157, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/EXT/EXT_polygon_offset_clamp.txt', }, 'GL_EXT_post_depth_coverage' : { 'number' : 461, 'esnumber' : 225,", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_expand_normal.txt', }, 'GL_NV_texture_multisample' : { 'number' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_extensions_string.txt',", "'extensions/EXT/EXT_depth_bounds_test.txt', }, 'GL_EXT_direct_state_access' : { 'number' : 353, 'flags' : { 'public' },", ": 52, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": { 'number' : 83, 'flags' : { 'public' }, 'supporters' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', }, 'GL_NV_bindless_texture' : { 'number' : 418, 'esnumber'", "'public' }, 'url' : 'extensions/ARB/ARB_copy_buffer.txt', }, 'GL_ARB_copy_image' : { 'arbnumber' : 123, 'flags'", "389, 'esnumber' : 260, 'flags' : { 'public' 
}, 'supporters' : { 'NVIDIA'", ": { 'number' : 43, 'flags' : { 'public' }, 'supporters' : {", "'GL_ARB_spirv_extensions' : { 'arbnumber' : 194, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 81, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_object.txt', }, 'GL_APPLE_vertex_array_range' : { 'number'", "'arbnumber' : 93, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback2.txt', }, 'GL_ARB_transform_feedback3'", ": 'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', }, 'GL_APPLE_texture_max_level' : { 'esnumber' : 80, 'flags' : { 'public'", "}, 'GL_EXT_pvrtc_sRGB' : { 'esnumber' : 155, 'flags' : { 'public' }, 'url'", ": { 'number' : 457, 'flags' : { 'public' }, 'supporters' : {", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_occlusion_instrument.txt', }, 'GL_SGIX_packed_6bytes' : { 'number' : 162, 'flags'", "'extensions/EXT/EXT_color_subtable.txt', }, 'GL_EXT_compiled_vertex_array' : { 'number' : 97, 'flags' : { 'public' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias' : { 'WGL_EXT_create_context_es_profile' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fog_distance.txt', }, 'GL_NV_fragment_coverage_to_color'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_geometry_shader4.txt', }, 'GLX_ARB_get_proc_address' :", "'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_histogram.txt', }, 'GLX_EXT_import_context' : { 'number' : 47,", ": 132, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_pack_subimage.txt', }, 'GL_NV_packed_depth_stencil' :", ": 'extensions/OES/OES_depth_texture_cube_map.txt', }, 'GL_OES_draw_buffers_indexed' : { 'esnumber' : 209, 'flags' : { 'public'", "{ 'number' : 81, 'flags' : { 'public' }, 'supporters' : { 'SGI'", ": 55, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": 'extensions/ARB/ARB_compute_variable_group_size.txt', }, 
'GL_ARB_conditional_render_inverted' : { 'arbnumber' : 161, 'flags' : { 'public'", "}, 'GL_EXT_light_texture' : { 'number' : 117, 'flags' : { 'public' }, 'supporters'", "'number' : 272, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_range.txt', },", ": { 'esnumber' : 299, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_sRGB_override.txt',", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_vulkan_image.txt', }, 'GL_NV_evaluators' :", "'INTEL' }, 'url' : 'extensions/INTEL/INTEL_conservative_rasterization.txt', }, 'GL_INTEL_fragment_shader_ordering' : { 'number' : 441, 'flags'", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_fft.txt', }, 'GLU_SGI_filter4_parameters'", ": 45, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "302, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback2.txt', }, 'GL_ARB_transform_feedback3' : { 'arbnumber' : 94,", "}, 'url' : 'extensions/SGI/GLX_SGI_cushion.txt', }, 'GL_SGI_fft' : { 'number' : 99, 'flags' :", "{ 'number' : 39, 'flags' : { 'public' }, 'supporters' : { 'HP',", "{ 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_buffer_age.txt', }, 'GL_EXT_buffer_storage' : { 'esnumber' :", "'extensions/OES/OES_point_sprite.txt', }, 'GL_OES_primitive_bounding_box' : { 'esnumber' : 212, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_uniform_buffer_object.txt', }, 'GL_ARB_vertex_array_bgra' : { 'arbnumber'", "'url' : 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', }, 'GL_SGIX_nurbs_eval' : { 'flags' : { 'incomplete' }, 'url'", "'extensions/EXT/EXT_texture_compression_rgtc.txt', }, 'GL_EXT_texture_compression_s3tc' : { 'number' : 198, 'esnumber' : 154, 'flags' :", "'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pixel_format.txt', }, 'WGL_EXT_swap_control' : { 
'number' : 172, 'flags'", ": 'extensions/APPLE/APPLE_rgb_422.txt', }, 'GL_APPLE_row_bytes' : { 'number' : 372, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_explicit_uniform_location.txt', }, 'GL_ARB_fragment_coord_conventions' : { 'arbnumber' : 63,", ": { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture_clamp.txt', }, 'GL_ARB_spirv_extensions' : { 'arbnumber' :", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', }, 'GL_MESA_shader_integer_functions' : {", "}, 'url' : '../EGL/extensions/NV/EGL_NV_coverage_sample.txt', }, 'GL_NV_deep_texture3D' : { 'number' : 424, 'flags' :", "}, 'url' : 'extensions/MESA/GLX_MESA_release_buffers.txt', }, 'GL_MESA_resize_buffers' : { 'number' : 196, 'flags' :", "{ 'number' : 317, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample.txt', },", ": { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Included with arbnumber 55,", "'url' : 'extensions/NV/NV_generate_mipmap_sRGB.txt', }, 'GL_NV_geometry_program4' : { 'number' : 323, 'flags' : {", "'arbnumber' : 45, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'url' : 'extensions/EXT/EXT_packed_pixels.txt', }, 'GL_EXT_paletted_texture' : { 'number' : 78, 'flags' : {", "'number' : 390, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'url' : 'extensions/NV/NV_robustness_video_memory_purge.txt', }, 'GL_NV_sRGB_formats' : { 'esnumber' : 148, 'flags' : {", "}, 'GL_AMD_interleaved_elements' : { 'number' : 431, 'flags' : { 'public' }, 'supporters'", "'arbnumber' : 94, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback3.txt', }, 'GL_ARB_transform_feedback_instanced'", "'number' : 186, 'esnumber' : 60, 'flags' : { 'public' }, 'supporters' :", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_pixel_buffer_object.txt', }, 'GL_ARB_point_parameters' : { 'arbnumber' : 14,", "'esnumber' : 67, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_program_binary.txt', }, 
'GL_IMG_read_format'", "171, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url'", "}, 'url' : 'extensions/EXT/EXT_draw_elements_base_vertex.txt', }, 'GL_EXT_draw_instanced' : { 'number' : 327, 'esnumber' :", "'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_texture.txt', }, 'GL_EXT_instanced_arrays'", ": 'extensions/EXT/EXT_debug_label.txt', }, 'GL_EXT_debug_marker' : { 'number' : 440, 'esnumber' : 99, 'flags'", "'INGR', 'SGI' }, 'url' : 'extensions/EXT/EXT_packed_pixels.txt', }, 'GL_EXT_paletted_texture' : { 'number' : 78,", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_transpose_matrix.txt', }, 'GL_ARB_uniform_buffer_object' : { 'arbnumber' : 57,", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', }, 'GL_MESA_shader_integer_functions' : { 'number'", ": 'extensions/NV/NV_generate_mipmap_sRGB.txt', }, 'GL_NV_geometry_program4' : { 'number' : 323, 'flags' : { 'public'", ": 'extensions/IBM/IBM_static_data.txt', }, 'GL_IBM_texture_mirrored_repeat' : { 'number' : 224, 'flags' : { 'public'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_attrib_64bit.txt', }, 'GL_EXT_vertex_shader' : {", ": 88, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader5.txt', }, 'GL_ARB_gpu_shader_fp64' :", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_transform_hint.txt', }, 'GL_APPLE_vertex_array_object'", ": { 'number' : 377, 'esnumber' : 101, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_separate_stencil.txt', }, 'GL_ATI_text_fragment_shader' :", "'extensions/IMG/IMG_user_clip_plane.txt', }, 'GL_INGR_color_clamp' : { 'number' : 174, 'flags' : { 'public' },", "'extensions/EXT/EXT_vertex_array_set.txt', }, 'GL_EXT_vertex_array_setXXX' : { 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_packed_depth_stencil.txt', }, 'GL_NV_packed_float' : {", "'flags' : { 'public' 
}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_draw_buffers.txt',", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range2.txt', }, 'GL_NV_vertex_attrib_integer_64bit' : { 'number' : 392,", "}, 'url' : 'extensions/ARB/ARB_program_interface_query.txt', }, 'GL_ARB_provoking_vertex' : { 'arbnumber' : 64, 'flags' :", "126, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias' : { 'GL_ARB_robustness_share_group_isolation'", "'GL_ARB_texture_storage' : { 'arbnumber' : 117, 'flags' : { 'public' }, 'url' :", "'public' }, 'url' : 'extensions/EXT/EXT_shader_io_blocks.txt', }, 'GL_EXT_shader_non_constant_global_initializers' : { 'esnumber' : 264, 'flags'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program.txt', }, 'GL_NV_vertex_program1_1' :", "86, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', }, 'GL_ARB_viewport_array' : {", "{ 'AMD' }, 'url' : 'extensions/INTEL/GLX_INTEL_swap_event.txt', }, 'GL_INTEL_texture_scissor' : { 'number' : 135,", "'GL_ARB_buffer_storage' : { 'arbnumber' : 144, 'flags' : { 'public' }, 'url' :", ": 'extensions/ARB/ARB_debug_output.txt', }, 'GL_ARB_depth_buffer_float' : { 'arbnumber' : 43, 'flags' : { 'public'", "'number' : 470, 'esnumber' : 233, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/NV/WGL_NV_render_texture_rectangle.txt', }, 'WGL_NV_swap_group' : { 'number' : 351, 'flags' : {", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/EXT/EXT_clip_control.txt', }, 'WGL_I3D_digital_video_control'", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rg.txt', }, 'GL_ARB_texture_rgb10_a2ui' : { 'arbnumber' : 83, 'flags'", "'extensions/NV/NV_framebuffer_mixed_samples.txt', }, 'GL_NV_framebuffer_multisample' : { 'esnumber' : 143, 'flags' : { 'public' },", "'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_attrib_array_object.txt', }, 'GL_ATI_vertex_streams' : { 'number'", ": 294, 'flags' : { 
'public' }, 'supporters' : { 'MESA' }, 'url'", "}, 'GL_QCOM_alpha_test' : { 'esnumber' : 89, 'flags' : { 'public' }, 'url'", ": { 'esnumber' : 244, 'flags' : { 'incomplete', 'private' }, 'url' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_packed_depth_stencil.txt', }, 'GL_NV_packed_float' :", "'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_tessellator.txt', }, 'GL_AMD_vertex_shader_viewport_index' : { 'number' : 416, 'flags'", ": 'extensions/ARB/ARB_shader_image_size.txt', }, 'GL_ARB_shader_objects' : { 'arbnumber' : 30, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counter_ops.txt', }, 'GL_ARB_shader_atomic_counters' : { 'arbnumber' : 114, 'flags'", "'arbnumber' : 23, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_two_side.txt', }, 'GL_EXT_stencil_wrap'", ": 9, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR',", ": { 'number' : 230, 'flags' : { 'public' }, 'supporters' : {", "= { 'GL_3DFX_multisample' : { 'number' : 207, 'flags' : { 'public' },", "'url' : 'extensions/OES/OES_texture_view.txt', }, 'GL_OES_vertex_array_object' : { 'esnumber' : 71, 'flags' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program3.txt', }, 'GL_NV_vertex_program4'", "'url' : 'extensions/NV/NV_vdpau_interop.txt', }, 'GL_NV_vertex_array_range' : { 'number' : 190, 'flags' : {", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_agp_offset.txt', }, 'GLX_MESA_copy_sub_buffer' :", "'public' }, 'url' : 'extensions/ARB/ARB_bindless_texture.txt', }, 'GL_ARB_blend_func_extended' : { 'arbnumber' : 78, 'flags'", "}, 'GL_KHR_debug' : { 'arbnumber' : 119, 'esnumber' : 118, 'flags' : {", "{ 'number' : 275, 'flags' : { 'public' }, 'supporters' : { 'APPLE'", ": 'extensions/AMD/AMD_gpu_shader_int64.txt', }, 'GL_AMD_interleaved_elements' : { 'number' : 431, 'flags' : { 'public'", 
"'extensions/ARB/ARB_texture_env_crossbar.txt', }, 'GL_ARB_texture_env_dot3' : { 'arbnumber' : 19, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_array_object.txt',", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_rgtc.txt', }, 'GL_EXT_texture_compression_s3tc' : { 'number' : 198,", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_half_float_pixel.txt', }, 'GL_ARB_half_float_vertex'", "'GLX_EXT_visual_info' : { 'number' : 28, 'flags' : { 'public' }, 'supporters' :", "'number' : 393, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'esnumber' : 202, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array.txt', }, 'GL_NV_viewport_array2'", ": 51, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_line_quality_hint.txt', }, 'GL_SGIX_list_priority' : {", ": 307, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'INGR', 'KGC', 'SGI', 'SUN' },", "}, 'url' : 'extensions/ARB/ARB_tessellation_shader.txt', }, 'GL_ARB_texture_barrier' : { 'arbnumber' : 167, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_locations.txt', }, 'GL_ARB_sample_shading' : { 'arbnumber'", "'number' : 439, 'esnumber' : 98, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_ARB_shadow' : { 'arbnumber' : 23, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', }, 'GL_MESA_pack_invert'", ": { 'public' }, 'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_vertex_hints.txt', },", "'url' : 'extensions/EXT/WGL_EXT_colorspace.txt', }, 'WGL_EXT_create_context_es2_profile' : { 'number' : 400, 'flags' : {", "'number' : 503, 'esnumber' : 280, 'flags' : { 'public' }, 'url' :", "{ 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_vertex_array_bgra.txt', 
}, 'GL_EXT_vertex_array_set' : {", "{ 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_usage.txt', }, 'GL_ANGLE_translated_shader_source' : { 'esnumber' : 113,", "{ 'public' }, 'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_frame_terminator.txt', }, 'GL_GREMEDY_string_marker'", "}, 'GL_MESA_window_pos' : { 'number' : 197, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'url' : 'extensions/FJ/FJ_shader_binary_GCCSO.txt', }, 'GL_GREMEDY_frame_terminator' : { 'number'", "'number' : 426, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", ": 301, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", "109, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_instanced.txt', }, 'GL_ARB_transform_feedback_overflow_query' : {", "'url' : 'extensions/ARB/ARB_shader_precision.txt', }, 'GL_ARB_shader_stencil_export' : { 'arbnumber' : 106, 'flags' : {", "}, 'GL_NV_light_max_exponent' : { 'number' : 189, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'supporters' : { 'GOOGLE', 'NVIDIA', 'VMware' }, 'url' : 'extensions/EXT/EXT_window_rectangles.txt',", "'GL_ARB_fragment_layer_viewport' : { 'arbnumber' : 129, 'flags' : { 'public' }, 'url' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program4.txt', }, 'GL_NV_fragment_program_option' : { 'number'", ": 'extensions/OES/OES_packed_depth_stencil.txt', }, 'GL_OES_paletted_texture' : { 'esnumber' : 13, 'flags' : { 'incomplete',", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', }, 'GL_SGIX_fog_layers' : { 'flags' : {", "{ 'number' : 422, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'WGL_NV_delay_before_swap' : { 'number' : 436, 'flags' : { 'public' }, 'supporters'", "88, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader5.txt', }, 'GL_ARB_gpu_shader_fp64' : {", "367, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", ": 'extensions/EXT/EXT_422_pixels.txt', }, 'GL_EXT_YUV_target' : { 
'esnumber' : 222, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_ycbcr_422.txt', }, 'GL_ARB_ES2_compatibility'", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sparse_texture.txt',", ": { 'arbnumber' : 193, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/ARB/ARB_shader_texture_image_samples.txt', }, 'GL_ARB_shader_texture_lod' : { 'arbnumber' : 60, 'flags' : {", "'number' : 499, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_barrier.txt',", "}, 'url' : 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', }, 'GL_OES_compressed_paletted_texture' : { 'number' : 294, 'esnumber' :", "'url' : 'extensions/EXT/EXT_draw_range_elements.txt', }, 'GL_EXT_draw_transform_feedback' : { 'esnumber' : 272, 'flags' : {", "'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_multimode_draw_arrays.txt', }, 'GL_IBM_rasterpos_clip' : { 'number'", "241, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', }, 'GL_SGIX_fog_layers' : { 'flags' :", "'flags' : { 'incomplete' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/SGIX/SGIX_igloo_interface.txt',", ": 'extensions/NV/NV_internalformat_sample_query.txt', }, 'GL_NV_light_max_exponent' : { 'number' : 189, 'flags' : { 'public'", "}, 'GL_OES_EGL_image_external_essl3' : { 'esnumber' : 220, 'flags' : { 'public' }, 'url'", "'GL_NV_copy_buffer' : { 'esnumber' : 158, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/NV/NV_framebuffer_mixed_samples.txt', }, 'GL_NV_framebuffer_multisample' : { 'esnumber' : 143, 'flags' :", ": 160, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_non_square_matrices.txt', }, 'GL_NV_occlusion_query' :", ": 'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl' : 'extensions/EXT/EXT_separate_shader_objects.gles.txt', 
'comments' : 'Different that the OpenGL extension with", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multitexture.txt', }, 'GL_ARB_occlusion_query'", ": { 'esnumber' : 174, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_storage_multisample_2d_array.txt',", "'extensions/ARB/ARB_shader_clock.txt', }, 'GL_ARB_shader_draw_parameters' : { 'arbnumber' : 156, 'flags' : { 'public' },", "'number' : 437, 'esnumber' : 161, 'flags' : { 'public' }, 'supporters' :", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_packed_6bytes.txt', }, 'GLX_SGIX_pbuffer' :", "{ 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_dxt1.txt', },", "'extensions/NV/NV_conservative_raster.txt', }, 'GL_NV_conservative_raster_dilate' : { 'number' : 480, 'flags' : { 'public' },", "'extensions/NV/NV_vertex_program2_option.txt', }, 'GL_NV_vertex_program3' : { 'number' : 306, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/EXT/EXT_occlusion_query_boolean.txt', }, 'GL_EXT_packed_depth_stencil' : { 'number' :", ": { 'arbnumber' : 171, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_pipeline_statistics_query.txt',", "}, 'url' : 'extensions/ARB/ARB_shader_image_load_store.txt', }, 'GL_ARB_shader_image_size' : { 'arbnumber' : 136, 'flags' :", ": { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_fbconfig_float.txt', }, 'GL_SGIX_flush_raster' : { 'number' :", ": 170, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI' },", "'extensions/EXT/WGL_EXT_pbuffer.txt', }, 'WGL_EXT_pixel_format' : { 'number' : 170, 'flags' : { 'public' },", ": 'extensions/ARB/ARB_texture_compression_bptc.txt', }, 'GL_ARB_texture_compression_rgtc' : { 'arbnumber' : 52, 'flags' : { 'public'", "}, 'GL_ARB_program_interface_query' : { 'arbnumber' : 134, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', }, 'GL_EXT_framebuffer_object' : { 'number' : 310, 'flags' 
: {", "{ 'number' : 75, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'GL_ARB_get_texture_sub_image' : { 'arbnumber' : 165, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/ARB/ARB_texture_compression.txt', }, 'GL_ARB_texture_compression_bptc' : { 'arbnumber' : 77, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_buffer_storage.txt', }, 'GL_ARB_cl_event' : { 'arbnumber' : 103, 'flags' :", "{ 'public' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int64.txt', }, 'GL_AMD_interleaved_elements' : { 'number' : 431,", ": 26, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "{ 'arbnumber' : 103, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cl_event.txt', },", "{ 'number' : 249, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', }, 'GL_IGLOO_toggle_color_and_lightXXX' : { 'flags' : { 'incomplete',", "}, 'url' : 'extensions/SGIX/SGIX_fog_patchy.txt', }, 'GL_SGIX_fog_scale' : { 'number' : 161, 'flags' :", "216, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_buffer.txt', }, 'GL_OES_texture_compression_astc' : {", ": { 'number' : 394, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/EXT/EXT_external_objects_win32.txt', 'alias' : { 'GL_EXT_semaphore_win32' }, }, 'GL_EXT_float_blend' : {", ": 'extensions/NV/NV_transform_feedback2.txt', }, 'GL_NV_uniform_buffer_unified_memory' : { 'number' : 459, 'flags' : { 'public'", "}, 'GL_AMD_vertex_shader_layer' : { 'number' : 417, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_render_depth_texture.txt',", "}, 'GL_SGIX_spotlight_cutoff' : { 'number' : 131, 'flags' : { 'incomplete' }, 'supporters'", "43, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "'GL_AMD_shader_stencil_value_export' : { 'number' : 444, 'flags' : { 'public' }, 'url' :", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_instanced.txt', }, 
'GL_ARB_enhanced_layouts' : { 'arbnumber' : 146,", "{ 'arbnumber' : 134, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_program_interface_query.txt', },", "'NVIDIA' }, 'url' : 'extensions/ATI/ATI_text_fragment_shader.txt', }, 'GL_ATI_texture_env_combine3' : { 'number' : 279, 'flags'", "{ 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_func.txt', }, 'GL_EXT_index_material' : { 'number' :", "'GL_ARB_texture_view' : { 'arbnumber' : 124, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_copy_image.txt', }, 'GL_EXT_copy_texture' : { 'number' : 10,", "'url' : 'extensions/ARB/ARB_texture_barrier.txt', }, 'GL_ARB_texture_border_clamp' : { 'arbnumber' : 13, 'flags' : {", ": { 'arbnumber' : 195, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/NV/NV_pixel_buffer_object.txt', }, 'GL_NV_pixel_data_range' : { 'number' : 284, 'flags' : { 'public'", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_instrument_error.txt', }, 'GL_SGIX_instruments' : { 'number' :", ": { 'number' : 166, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', }, 'GL_MESA_pack_invert' : { 'number' : 300, 'flags' : {", ": { 'esnumber' : 124, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_sync.txt',", "222, 'esnumber' : 52, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'url' : 'extensions/AMD/AMD_query_buffer_object.txt', }, 'GL_AMD_sample_positions' : { 'number' : 405, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', }, 'GL_AMD_shader_image_load_store_lod' : { 'number'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_store.txt', }, 'GL_NV_shader_noperspective_interpolation' :", ": 'Supported on Visual Workstation 320 / 540 only.', }, 'GL_SGIX_ycrcba' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_bindable_uniform.txt', }, 'GL_EXT_blend_color' :", "'supporters' : { 'AMD' }, 'url' 
: 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', }, 'GL_AMD_gpu_shader_int16' : { 'number'", "'esnumber' : 5, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', }, 'GL_OES_compressed_paletted_texture'", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_multi_draw_indirect.txt', }, 'GL_AMD_name_gen_delete' : { 'number' : 394,", "'GL_NV_vertex_program3' : { 'number' : 306, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_path_rendering.txt', }, 'GL_NV_path_rendering_shared_edge'", "'public' }, 'supporters' : { 'IBM', 'SUN' }, 'url' : 'extensions/EXT/EXT_rescale_normal.txt', }, 'GL_EXT_robustness'", "'url' : 'extensions/NV/NV_gpu_program5.txt', }, 'GL_NV_gpu_program5_mem_extended' : { 'number' : 434, 'flags' : {", ": { 'esnumber' : 32, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil4.txt',", "'number' : 161, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'number' : 148, 'esnumber' : 69, 'flags' : { 'public' }, 'supporters' :", "}, 'supporters' : { 'ES', 'INGR', 'SGI' }, 'url' : 'extensions/EXT/EXT_packed_pixels.txt', }, 'GL_EXT_paletted_texture'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_robustness.txt', }, 'GL_EXT_sRGB' : { 'esnumber'", "'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Included with arbnumber 55, WGL_ARB_create_context.', }, 'WGL_ARB_create_context_robustness' : { 'arbnumber'", "'url' : 'extensions/ARB/ARB_ES3_1_compatibility.txt', }, 'GL_ARB_ES3_2_compatibility' : { 'arbnumber' : 176, 'flags' : {", ": { 'public' }, 'url' : 'extensions/APPLE/APPLE_clip_distance.txt', }, 'GL_APPLE_color_buffer_packed_float' : { 'esnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Included with arbnumber", ": 126, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'GL_ATI_element_array' : { 'number' : 256, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' 
}, 'url' : 'extensions/DMP/DMP_program_binary.txt', }, 'GL_DMP_shader_binary' : { 'esnumber'", "'number' : 450, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_transform_feedback4.txt', }, 'GL_AMD_vertex_shader_layer'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_compression_astc.txt', }, 'GL_OES_texture_cube_map' : { 'esnumber'", "'supporters' : { 'QCOM' }, 'url' : 'extensions/QCOM/QCOM_tiled_rendering.txt', }, 'GL_QCOM_writeonly_rendering' : { 'esnumber'", ": { 'public' }, 'supporters' : { 'MS', 'SGI' }, 'url' : 'extensions/EXT/EXT_paletted_texture.txt',", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_lod_bias.txt', }, 'GL_SGIX_texture_mipmap_anisotropic' : { 'flags' : {", "'number' : 237, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", "}, 'url' : 'extensions/EXT/EXT_index_texture.txt', }, 'GL_EXT_instanced_arrays' : { 'esnumber' : 156, 'flags' :", "'GL_SGIX_texture_mipmap_anisotropic' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', }, 'GL_SGIX_texture_multi_buffer'", "108, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_420pack.txt', }, 'GL_ARB_shading_language_include' : {", "{ 'number' : 35, 'flags' : { 'public' }, 'supporters' : { 'HP',", "'url' : 'extensions/NV/NV_uniform_buffer_unified_memory.txt', }, 'GL_NV_vdpau_interop' : { 'number' : 396, 'flags' : {", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg2.txt', }, 'GL_SGIX_nonlinear_lighting_pervertex' : { 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_depth_clamp.txt', }, 'GL_ARB_depth_texture' : { 'arbnumber'", "{ 'number' : 425, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": 'extensions/SGI/GLX_SGI_transparent_pixel.txt', }, 'GLX_SGI_video_sync' : { 'number' : 41, 'flags' : { 'public'", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_array.txt', }, 'GL_EXT_texture_border_clamp' : { 'esnumber' : 182,", "'public' }, 'url' : 
'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', }, 'GL_ARB_texture_mirrored_repeat' : { 'arbnumber' : 21, 'flags'", "'url' : 'extensions/ARB/ARB_shader_storage_buffer_object.txt', }, 'GL_ARB_shader_subroutine' : { 'arbnumber' : 90, 'flags' : {", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', }, 'GLU_EXT_object_space_tess'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_shared_multisample.txt', 'alias' : { 'GLX_SGIS_shared_multisample' },", "}, 'url' : 'extensions/ARB/ARB_fragment_program.txt', }, 'GL_ARB_fragment_program_shadow' : { 'arbnumber' : 36, 'flags' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_tessellation_shader.txt', }, 'GL_ARB_texture_barrier' : { 'arbnumber' : 167,", "'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_rasterpos_clip.txt', }, 'GL_IBM_static_data' :", ": 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', }, 'GL_EXT_shader_pixel_local_storage' : { 'esnumber' : 167, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_shader_precision.txt', }, 'GL_ARB_shader_stencil_export' : { 'arbnumber' : 106, 'flags'", "'url' : 'extensions/NV/NV_texture_compression_s3tc_update.txt', }, 'GL_NV_texture_compression_vtc' : { 'number' : 228, 'flags' : {", "'GL_ATI_vertex_streams' : { 'number' : 249, 'flags' : { 'public' }, 'supporters' :", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_phase.txt', }, 'GL_SGIX_texture_range' :", "'esnumber' : 187, 'flags' : { 'public' }, 'url' : 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', }, 'GL_ANGLE_depth_texture'", ": { 'arbnumber' : 125, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_binding.txt',", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_pinned_memory.txt', }, 'GL_AMD_program_binary_Z400' : {", "'GL_INTEL_map_texture' : { 'number' : 429, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_EXT_post_depth_coverage' : { 'number' : 461, 'esnumber' 
: 225, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_array.txt', }, 'GL_NV_texture_barrier' : { 'number'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_geometry_shader.txt', 'alias' : { 'GL_EXT_geometry_point_size' }, }, 'GL_EXT_geometry_shader4'", "'url' : 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', }, 'GL_EXT_shader_pixel_local_storage' : { 'esnumber' : 167, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fence.txt',", "224, 'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_texture_mirrored_repeat.txt', }, 'GL_IBM_vertex_array_lists' : {", "'url' : 'extensions/QCOM/QCOM_writeonly_rendering.txt', }, 'GL_QCOM_framebuffer_foveated' : { 'esnumber' : 273, 'flags' : {", "'supporters' : { 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_subtexture.txt', },", "'GL_EXT_transform_feedback' : { 'number' : 352, 'flags' : { 'public' }, 'supporters' :", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_conservative_depth.txt', }, 'GL_AMD_debug_output' : {", ": 'extensions/ARB/ARB_transform_feedback_instanced.txt', }, 'GL_ARB_transform_feedback_overflow_query' : { 'arbnumber' : 173, 'flags' : { 'public'", "'arbnumber' : 173, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_overflow_query.txt', }, 'GL_ARB_transpose_matrix'", "'url' : 'extensions/SGI/SGI_complex.txt', }, 'GL_SGI_complex_type' : { 'number' : 88, 'flags' : {", "218, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shader_image_load_store.txt', }, 'GL_EXT_shader_implicit_conversions' :", "'GLX_SGIX_visual_select_group' : { 'number' : 234, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'url' : 'extensions/EXT/EXT_texture_view.txt', }, 'GL_EXT_timer_query' : { 'number' : 319, 'flags'", "'number' : 156, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_coordinate_frame.txt', }, 
'GL_EXT_copy_image'", "'url' : 'extensions/ARB/ARB_texture_filter_minmax.txt', }, 'GL_ARB_texture_float' : { 'arbnumber' : 41, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp.txt', }, 'GL_EXT_texture_mirror_clamp_to_edge' : { 'esnumber' : 291, 'flags'", "'url' : 'extensions/SGIX/SGIX_bali_timer_instruments.txt', }, 'GL_SGIX_blend_alpha_minmax' : { 'number' : 119, 'flags' : {", "}, 'url' : 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', }, 'GL_PGI_misc_hints' : { 'number' : 77, 'flags' :", "143, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias' : { 'WGL_ARB_robustness_share_group_isolation'", "}, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', }, 'GL_EXT_texture_norm16' : { 'esnumber' : 207, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/IBM/IBM_static_data.txt', }, 'GL_IBM_texture_mirrored_repeat' : { 'number'", ": 293, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_texture_foveated.txt', }, 'GL_QCOM_shader_framebuffer_fetch_noncoherent' :", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_image_load_store_lod.txt',", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fence.txt', },", ": { 'number' : 475, 'esnumber' : 196, 'flags' : { 'public' },", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg2.txt', }, 'GL_SGIX_nonlinear_lighting_pervertex' :", ": { 'esnumber' : 28, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fragment_precision_high.txt',", ": { 'arbnumber' : 24, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/QCOM/QCOM_framebuffer_foveated.txt', }, 'GL_QCOM_texture_foveated' : { 'esnumber' : 293, 'flags' :", "}, 'GL_AMD_shader_image_load_store_lod' : { 'number' : 513, 'flags' : { 'public' }, 'supporters'", ": 'extensions/NV/NV_bgr.txt', }, 'GL_NV_bindless_multi_draw_indirect' : { 'number' : 432, 'flags' : { 'public'", "{ 
'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_shader4.txt', }, 'GL_EXT_gpu_shader5'", ": 'Different that the OpenGL extension with the same name string.', }, 'GL_EXT_separate_specular_color'", "'number' : 521, 'esnumber' : 300, 'flags' : { 'public' }, 'supporters' :", "298, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'esnumber' : 156, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_instanced_arrays.txt', }, 'GLX_EXT_libglvnd'", "375, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'public' }, 'url' : 'extensions/APPLE/APPLE_clip_distance.txt', }, 'GL_APPLE_color_buffer_packed_float' : { 'esnumber' : 194,", ": 'extensions/EXT/EXT_secondary_color.txt', }, 'GL_EXT_separate_shader_objects' : { 'number' : 377, 'esnumber' : 101, 'flags'", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_single_precision.txt', },", "'esnumber' : 59, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', }, 'GL_APPLE_texture_format_BGRA8888'", "'GL_ARB_window_pos' : { 'arbnumber' : 25, 'flags' : { 'public' }, 'supporters' :", ": 221, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_R8.txt', }, 'GL_EXT_texture_sRGB_RG8' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_fft.txt', }, 'GLU_SGI_filter4_parameters' : {", "'public' }, 'url' : 'extensions/ARB/ARB_clip_control.txt', }, 'GL_ARB_color_buffer_float' : { 'arbnumber' : 39, 'flags'", "}, 'GL_ARB_robust_buffer_access_behavior' : { 'arbnumber' : 135, 'flags' : { 'public' }, 'url'", "'MESA' }, 'url' : 'extensions/MESA/MESA_program_binary_formats.txt', }, 'GLX_MESA_query_renderer' : { 'number' : 446, 'flags'", "'extensions/ARB/ARB_texture_env_combine.txt', }, 'GL_ARB_texture_env_crossbar' : { 'arbnumber' : 18, 'flags' : { 'public' },", "'url' : 'extensions/EXT/EXT_index_array_formats.txt', }, 'GL_EXT_index_func' : { 'number' : 95, 'flags' : {", "}, 'GL_SGIX_igloo_interface' : { 'number' : 219, 
'flags' : { 'incomplete' }, 'supporters'", "'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_global_alpha.txt', }, 'GL_SUN_mesh_array' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_post_depth_coverage.txt', }, 'GL_ARB_program_interface_query' : { 'arbnumber' : 134,", "}, 'GL_EXT_blend_color' : { 'number' : 2, 'flags' : { 'public' }, 'supporters'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp.txt', }, 'GL_EXT_texture_mirror_clamp_to_edge' :", "}, }, 'GL_ARB_sample_locations' : { 'arbnumber' : 181, 'flags' : { 'public' },", ": { 'arbnumber' : 27, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_blit.txt', }, 'GL_ANGLE_framebuffer_multisample' : { 'esnumber' : 84,", "{ 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample_blit_scaled.txt', }, 'GL_EXT_framebuffer_object' : { 'number' :", "'flags' : { 'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_occlusion_test.txt',", "'extensions/EXT/EXT_occlusion_query_boolean.txt', }, 'GL_EXT_packed_depth_stencil' : { 'number' : 312, 'flags' : { 'public' },", ": 'extensions/SGIX/SGIX_texture_phase.txt', }, 'GL_SGIX_texture_range' : { 'number' : 181, 'flags' : { 'public'", "'number' : 371, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "'extensions/ARB/ARB_enhanced_layouts.txt', }, 'GL_ARB_explicit_attrib_location' : { 'arbnumber' : 79, 'flags' : { 'public' },", "}, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_draw_buffers.txt', }, 'GL_ATI_element_array' : {", "{ 'arbnumber' : 49, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "{ 'public' }, 'url' : 'extensions/VIV/VIV_shader_binary.txt', }, 'WGL_3DL_stereo_control' : { 'number' : 313,", "349, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'esnumber' : 284, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_buffer.txt', }, 'GL_EXT_EGL_image_array'", "'esnumber' : 244, 'flags' : { 
'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_shader_binary.txt', },", ": 'extensions/EXT/EXT_rescale_normal.txt', }, 'GL_EXT_robustness' : { 'esnumber' : 107, 'flags' : { 'public'", ": { 'esnumber' : 75, 'flags' : { 'public' }, 'url' : '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt',", ": { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_ATC_texture.txt', }, 'GL_AMD_conservative_depth' : { 'number' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_compression_vtc.txt', }, 'GL_NV_texture_env_combine4' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_rgtc.txt',", "{ 'number' : 344, 'flags' : { 'public' }, 'supporters' : { 'MESA',", ": 62, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": 361, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_rating.txt', }, 'GL_EXT_win32_keyed_mutex' : { 'number' : 506, 'esnumber'", "'url' : 'extensions/ARB/ARB_fragment_layer_viewport.txt', }, 'GL_ARB_fragment_program' : { 'arbnumber' : 27, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program4.txt', }, 'GL_NV_gpu_program5' :", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_tile_raster_order.txt', }, 'GL_MESA_window_pos'", "}, 'url' : 'extensions/ARB/ARB_ES3_compatibility.txt', }, 'GL_ARB_arrays_of_arrays' : { 'arbnumber' : 120, 'flags' :", "'GL_OML_subsample' : { 'number' : 240, 'flags' : { 'public' }, 'supporters' :", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_patchy.txt', }, 'GL_SGIX_fog_scale' : { 'number' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_weighting.txt', }, 'GLX_EXT_visual_info' : {", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_non_power_of_two.txt', }, 'GL_ARB_texture_query_levels' : { 'arbnumber' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_parameters.txt', }, 'GL_ARB_point_sprite' : { 
'arbnumber' :", "'public' }, 'url' : 'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias' : { 'GL_KHR_blend_equation_advanced_coherent' }, }, 'GL_KHR_context_flush_control' :", "{ 'esnumber' : 175, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_copy_image.txt', },", ": 'extensions/AMD/AMD_sparse_texture.txt', }, 'GL_AMD_stencil_operation_extended' : { 'number' : 413, 'flags' : { 'public'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource_tag.txt', }, 'GL_NV_read_buffer' : { 'esnumber'", ": { 'arbnumber' : 87, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_indirect.txt',", "{ 'public' }, 'url' : 'extensions/QCOM/QCOM_driver_control.txt', }, 'GL_QCOM_extended_get' : { 'esnumber' : 62,", "'GL_EXT_blend_equation_separate' : { 'number' : 299, 'flags' : { 'public' }, 'supporters' :", ": { 'number' : 258, 'flags' : { 'public' }, 'supporters' : {", "'GL_AMD_draw_buffers_blend' : { 'number' : 366, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_EXT_draw_buffers2' : { 'number' : 340, 'flags' : { 'public' }, 'supporters'", "}, 'GL_OVR_multiview_multisampled_render_to_texture' : { 'esnumber' : 250, 'flags' : { 'public' }, 'url'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program4.txt', }, 'GL_NV_video_capture' : { 'number' :", "'extensions/ANGLE/ANGLE_texture_compression_dxt.txt', 'alias' : { 'GL_ANGLE_texture_compression_dxt1', 'GL_ANGLE_texture_compression_dxt5' }, }, 'GL_ANGLE_texture_usage' : { 'esnumber' :", "}, 'url' : 'extensions/EXT/EXT_texture_compression_bptc.txt', }, 'GL_EXT_texture_compression_dxt1' : { 'number' : 309, 'esnumber' :", "}, 'url' : 'extensions/ARM/ARM_mali_shader_binary.txt', }, 'GL_ARM_rgba8' : { 'esnumber' : 82, 'flags' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_range.txt', }, 'GL_SGIX_texture_scale_bias' : { 'number' : 56, 'flags'", "}, 'GL_EXT_abgr' : { 'number' : 1, 'flags' : { 'public' }, 'supporters'", "'GL_EXT_color_buffer_half_float' : { 'esnumber' : 97, 'flags' : { 'public' }, 'url' 
:", "{ 'esnumber' : 51, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_BGRA8888.txt', },", "}, 'url' : 'extensions/NV/NV_fragment_program2.txt', }, 'GL_NV_fragment_program4' : { 'number' : 335, 'flags' :", ": { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_3DC_texture.txt', }, 'GL_AMD_compressed_ATC_texture' : { 'esnumber' :", "}, 'GL_ANGLE_pack_reverse_row_order' : { 'esnumber' : 110, 'flags' : { 'public' }, 'url'", "'GL_EXT_packed_pixels' : { 'number' : 23, 'flags' : { 'public' }, 'supporters' :", "'extensions/NV/NV_viewport_swizzle.txt', }, 'GL_OES_EGL_image' : { 'esnumber' : 23, 'flags' : { 'public' },", ": { 'arbnumber' : 168, 'esnumber' : 191, 'flags' : { 'public' },", "'esnumber' : 226, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_raster_multisample.txt', }, 'GL_EXT_read_format_bgra'", "}, 'GL_IMG_user_clip_plane' : { 'esnumber' : 57, 'flags' : { 'public' }, 'url'", ": 410, 'esnumber' : 199, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 484, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_robustness_video_memory_purge.txt',", "'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/S3/S3_s3tc.txt', }, 'GLX_SGIS_blended_overlay'", "'GL_ARB_texture_compression_rgtc' : { 'arbnumber' : 52, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/ARB/ARB_pipeline_statistics_query.txt', }, 'GL_ARB_pixel_buffer_object' : { 'arbnumber' : 42, 'flags' :", "}, 'url' : 'extensions/EXT/GLX_EXT_swap_control_tear.txt', }, 'GL_EXT_tessellation_shader' : { 'esnumber' : 181, 'flags' :", ": 'extensions/SGIS/SGIS_fog_function.txt', }, 'GL_SGIS_generate_mipmap' : { 'number' : 32, 'flags' : { 'public'", "}, 'url' : 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', }, 'GL_EXT_conservative_depth' : { 'esnumber' : 268, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_platform_binary.txt', }, 'GL_NV_point_sprite' : { 'number'", "'GL_HP_image_transform' : { 'number' : 66, 'flags' : { 'public' }, 
'supporters' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcba.txt', }, 'GL_SGI_color_matrix' : { 'number' : 13,", "{ 'WGL_ARB_robustness_share_group_isolation' }, }, 'WGL_ATI_pixel_format_float' : { 'number' : 278, 'flags' : {", "'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', }, 'GL_SGIX_nurbs_eval' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nurbs_eval.txt',", ": { 'number' : 427, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/I3D/WGL_I3D_digital_video_control.txt', }, 'WGL_I3D_gamma' : { 'number' : 251, 'flags' : {", "}, 'url' : 'extensions/EXT/EXT_texture_compression_rgtc.txt', }, 'GL_EXT_texture_compression_s3tc' : { 'number' : 198, 'esnumber' :", ": 415, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', }, 'GL_SGIX_nurbs_eval' : { 'flags'", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_specular_vector.txt', }, 'GL_APPLE_sync' : { 'esnumber' : 124,", "287, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_texture.txt',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shader_image_load_store.txt',", "492, 'esnumber' : 266, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'url' : 'extensions/ARB/ARB_texture_env_crossbar.txt', }, 'GL_ARB_texture_env_dot3' : { 'arbnumber' : 19, 'flags' :", "'extensions/ARB/ARB_transform_feedback3.txt', }, 'GL_ARB_transform_feedback_instanced' : { 'arbnumber' : 109, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_window_pos.txt', },", "'GL_SGIX_texture_scale_bias' : { 'number' : 56, 'flags' : { 'public' }, 'supporters' :", "'url' : 'extensions/SGIS/SGIS_pixel_texture.txt', }, 'GL_SGIS_point_line_texgen' : { 'number' : 213, 'flags' : {", "'NVIDIA', 'VMware' }, 'url' : 
'extensions/EXT/EXT_window_rectangles.txt', }, 'GL_EXT_x11_sync_object' : { 'number' : 406,", ": 499, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": 49, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'alias' : { 'GLX_ARB_multisample', 'WGL_ARB_multisample' }, }, 'GL_ARB_multitexture' : { 'arbnumber' : 1,", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcba.txt', }, 'GL_SGI_color_matrix' :", "{ 'public' }, 'url' : 'extensions/OVR/OVR_multiview2.txt', }, 'GL_OVR_multiview_multisampled_render_to_texture' : { 'esnumber' : 250,", "}, 'GL_APPLE_element_array' : { 'number' : 271, 'flags' : { 'public' }, 'supporters'", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_layer.txt', }, 'GL_AMD_vertex_shader_tessellator' : { 'number' : 363,", ": { 'number' : 40, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_SUNX_constant_data' : { 'number' : 163, 'flags' : { 'public' }, 'supporters'", "'extensions/ARB/ARB_query_buffer_object.txt', }, 'GL_ARB_robust_buffer_access_behavior' : { 'arbnumber' : 135, 'flags' : { 'public' },", "}, 'url' : 'extensions/EXT/EXT_multisample_compatibility.txt', }, 'GL_EXT_multisampled_render_to_texture' : { 'esnumber' : 106, 'flags' :", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_multi_buffer.txt', }, 'GL_SGIX_texture_phase'", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_standard_derivatives.txt', }, 'GL_OES_stencil1' : { 'esnumber'", ": { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch_depth_stencil.txt', }, 'GL_ATI_draw_buffers' : { 'number' :", ": 269, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' },", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_range.txt', }, 'GL_SGIX_texture_scale_bias' : { 'number' :", ": 367, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_buffer_float.txt', },", ": 469, 'esnumber' : 231, 'flags' : { 
'public' }, 'url' : 'extensions/NV/NV_framebuffer_mixed_samples.txt',", "'url' : 'extensions/QCOM/QCOM_tiled_rendering.txt', }, 'GL_QCOM_writeonly_rendering' : { 'esnumber' : 61, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_DX_interop.txt',", "'GL_EXT_texture_norm16' : { 'esnumber' : 207, 'flags' : { 'public' }, 'url' :", "'GL_ARB_ES2_compatibility' : { 'arbnumber' : 95, 'flags' : { 'public' }, 'url' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_buffers.txt', }, 'GL_ARB_draw_buffers_blend' : {", ": { 'public' }, 'url' : 'extensions/NV/NV_generate_mipmap_sRGB.txt', }, 'GL_NV_geometry_program4' : { 'number' :", "'GL_NV_bindless_multi_draw_indirect' : { 'number' : 432, 'flags' : { 'public' }, 'supporters' :", ": { 'arbnumber' : 166, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_image_samples.txt',", "'GL_NV_texture_env_combine4' : { 'number' : 195, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/SGIX/SGIX_tag_sample_buffer.txt', }, 'GL_SGIX_texture_add_env' : { 'number' : 69, 'flags' : { 'public'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ir_instrument1.txt', }, 'GL_SGIX_line_quality_hint' : { 'flags' : {", "{ 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_parallel_arrays.txt', }, 'GL_INTEL_performance_query' : { 'number' : 443,", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_deep_texture3D.txt', }, 'GLX_NV_delay_before_swap'", "'GLU_SGI_filter4_parameters' : { 'number' : 85, 'flags' : { 'public' }, 'supporters' :", ": 54, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'number' : 289, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_env_crossbar.txt', }, 'GL_OES_texture_float' : { 'esnumber' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_framezoom.txt', }, 'GLX_SGIX_hyperpipe' : { 'number' : 307,", "}, 'GL_NV_bindless_texture' : { 'number' : 418, 'esnumber' : 197, 'flags' : {", 
"}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pixel_format.txt', }, 'WGL_ARB_render_texture' : {", "'public' }, 'url' : 'extensions/EXT/EXT_misc_attribute.txt', }, 'GL_EXT_multi_draw_arrays' : { 'number' : 148, 'esnumber'", ": { 'esnumber' : 187, 'flags' : { 'public' }, 'url' : 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt',", "'extensions/SUN/SUN_convolution_border_modes.txt', }, 'GLX_SUN_get_transparent_index' : { 'number' : 183, 'flags' : { 'public' },", "'url' : 'extensions/SGIX/SGIX_subsample.txt', }, 'GLX_SGIX_swap_barrier' : { 'number' : 92, 'flags' : {", "'url' : 'extensions/EXT/EXT_texture_sRGB.txt', }, 'GL_EXT_texture_sRGB_R8' : { 'esnumber' : 221, 'flags' : {", "'extensions/OES/OES_draw_elements_base_vertex.txt', }, 'GL_OES_draw_texture' : { 'esnumber' : 7, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_two_side.txt', }, 'GL_EXT_stencil_wrap' : { 'number' : 176, 'flags'", "202, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array.txt', }, 'GL_NV_viewport_array2' : {", "}, 'url' : 'extensions/NV/NV_3dvision_settings.txt', }, 'GL_NV_EGL_stream_consumer_external' : { 'esnumber' : 104, 'flags' :", "'number' : 245, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", "331, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 15, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_sprite.txt', }, 'GL_OES_primitive_bounding_box' :", "'url' : 'extensions/OES/OES_fbo_render_mipmap.txt', }, 'GL_OES_fixed_point' : { 'number' : 292, 'esnumber' : 9,", "'GL_EXT_texture_perturb_normal' : { 'number' : 147, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/ARB/ARB_get_program_binary.txt', }, 'GL_ARB_get_texture_sub_image' : { 'arbnumber' : 165, 'flags' : {", "{ 'public' }, 'url' : 'extensions/ARB/ARB_draw_elements_base_vertex.txt', }, 'GL_ARB_draw_indirect' : { 'arbnumber' : 87,", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counters.txt', }, 'GL_ARB_shader_ballot' 
: { 'arbnumber'", "{ 'public' }, 'url' : 'extensions/KHR/KHR_context_flush_control.txt', 'alias' : { 'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control' }, },", "{ 'public' }, 'url' : 'extensions/NV/NV_read_buffer.txt', }, 'GL_NV_read_depth_stencil' : { 'esnumber' : 94,", ": { 'public' }, 'url' : 'extensions/OES/OES_matrix_palette.txt', }, 'GL_OES_packed_depth_stencil' : { 'esnumber' :", ": 35, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_float_linear.txt', 'alias' : {", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragment_lighting_space.txt',", "{ 'arbnumber' : 11, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_separate_specular_color.txt', }, 'GL_EXT_shader_framebuffer_fetch' : { 'number' :", "'url' : 'extensions/ARB/ARB_vertex_attrib_64bit.txt', }, 'GL_ARB_vertex_attrib_binding' : { 'arbnumber' : 125, 'flags' : {", "'GL_NV_draw_texture' : { 'number' : 430, 'esnumber' : 126, 'flags' : { 'public'", "{ 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_snorm.txt', }, 'GL_EXT_texture_storage' : { 'esnumber' :", ": { 'number' : 112, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/ARB/ARB_shader_atomic_counter_ops.txt', }, 'GL_ARB_shader_atomic_counters' : { 'arbnumber' : 114, 'flags' :", ": { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_swap_method.txt', }, 'GLX_OML_sync_control' : { 'number' :", "{ 'esnumber' : 85, 'flags' : { 'public' }, 'url' : 'extensions/VIV/VIV_shader_binary.txt', },", "'url' : 'extensions/ATI/ATI_vertex_streams.txt', }, 'GL_DMP_program_binary' : { 'esnumber' : 192, 'flags' : {", ": 'extensions/EXT/EXT_float_blend.txt', }, 'GL_EXT_fog_coord' : { 'number' : 149, 'flags' : { 'public'", "'Previously shared extension number 15 with SGIS_pixel_texture.', }, 'GL_SGIX_pixel_texture_bits' : { 'number' :", "{ 'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' :", "'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/NV/NV_video_capture.txt', 'alias' : { 'GLX_NV_video_capture', 'WGL_NV_video_capture'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader3.txt', }, 'GL_NV_transform_feedback' :", "spec with WGL_ARB_create_context_no_error.', 'alias' : { 'WGL_ARB_create_context_no_error' }, }, 'GLX_ARB_create_context_profile' : { 'arbnumber'", "'extensions/ARB/GLX_ARB_get_proc_address.txt', }, 'GL_ARB_get_program_binary' : { 'arbnumber' : 96, 'flags' : { 'public' },", "}, 'url' : 'extensions/AMD/AMD_shader_ballot.txt', }, 'GL_AMD_shader_explicit_vertex_parameter' : { 'number' : 485, 'flags' :", "'url' : 'extensions/EXT/EXT_draw_instanced.txt', }, 'GL_EXT_draw_range_elements' : { 'number' : 112, 'flags' : {", "'GL_EXT_index_func' : { 'number' : 95, 'flags' : { 'public' }, 'supporters' :", "'extensions/MESA/MESA_window_pos.txt', }, 'GL_MESA_ycbcr_texture' : { 'number' : 301, 'flags' : { 'public' },", "{ 'esnumber' : 240, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture.txt', },", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_quad_mesh.txt', }, 'GL_SGIX_reference_plane' : { 'number' : 60, 'flags'", "'GL_ARB_sparse_buffer' : { 'arbnumber' : 172, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/EXT/EXT_shader_group_vote.txt', }, 'GL_EXT_shader_image_load_formatted' : { 'number' : 449, 'flags' : {", "'GL_OES_texture_float_linear' : { 'esnumber' : 35, 'flags' : { 'public' }, 'url' :", "201, 'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' :", "{ 'arbnumber' : 93, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback2.txt', },", ": 484, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_robustness_video_memory_purge.txt', }, 'GL_NV_sRGB_formats' :", ": { 'esnumber' : 10, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_framebuffer_object.txt',", "'esnumber' : 179, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_implicit_conversions.txt', }, 'GL_EXT_shader_integer_mix'", "'url' : 
'extensions/ARB/ARB_occlusion_query.txt', }, 'GL_ARB_occlusion_query2' : { 'arbnumber' : 80, 'flags' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shadow_funcs.txt', }, 'GL_EXT_shadow_samplers' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shadow_samplers.txt', }, 'GL_EXT_shared_texture_palette' : { 'number'", "132, 'flags' : { 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url'", "}, 'url' : 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', }, 'GL_SGIX_texture_multi_buffer' : { 'number' : 53, 'flags' :", ": { 'obsolete' }, 'url' : 'extensions/SGIX/SGIX_vertex_array_object.txt', }, 'GL_SGIX_vertex_preclip' : { 'number' :", ": { 'number' : 204, 'flags' : { 'incomplete' }, 'supporters' : {", ": 'extensions/EXT/EXT_YUV_target.txt', }, 'GL_EXT_abgr' : { 'number' : 1, 'flags' : { 'public'", "}, 'GL_SGIX_async' : { 'number' : 132, 'flags' : { 'incomplete', 'public' },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range.txt', }, 'GL_NV_vertex_array_range2' : { 'number' : 232,", "'GL_AMD_shader_atomic_counter_ops' : { 'number' : 435, 'flags' : { 'public' }, 'supporters' :", "'esnumber' : 114, 'flags' : { 'public' }, 'url' : 'extensions/FJ/FJ_shader_binary_GCCSO.txt', }, 'GL_GREMEDY_frame_terminator'", "'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_vertex.txt',", "'GLX_SGI_transparent_pixel' : { 'number' : 153, 'flags' : { 'obsolete' }, 'url' :", "}, 'WGL_ARB_extensions_string' : { 'arbnumber' : 8, 'flags' : { 'public' }, 'supporters'", "'public' }, 'supporters' : { 'APPLE', 'CodeWeavers', 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_decode.txt',", "'supporters' : { 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_filter4.txt', }, 'GL_SGIS_texture_lod' : {", ": 188, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'public' }, 'url' : 'extensions/NV/NV_read_depth_stencil.txt', }, 'GL_NV_register_combiners' : { 'number' : 191, 'flags'", 
"'GL_EXT_texture_mirror_clamp' : { 'number' : 298, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader3.txt', },", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_float.txt', }, 'GL_ATI_texture_mirror_once' : { 'number' :", "'arbnumber' : 106, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_stencil_export.txt', }, 'GL_ARB_shader_storage_buffer_object'", "'url' : 'extensions/EXT/EXT_texture_lod_bias.txt', }, 'GL_EXT_texture_mirror_clamp' : { 'number' : 298, 'flags' : {", "'url' : 'extensions/NV/NV_shader_atomic_int64.txt', }, 'GL_NV_shader_buffer_load' : { 'number' : 379, 'flags' : {", "}, 'url' : 'extensions/SGIX/SGIX_ir_instrument1.txt', }, 'GL_SGIX_line_quality_hint' : { 'flags' : { 'incomplete' },", "}, 'url' : 'extensions/ARB/ARB_gpu_shader_fp64.txt', }, 'GL_ARB_gpu_shader_int64' : { 'arbnumber' : 178, 'flags' :", "'url' : 'extensions/NV/NV_vertex_program2_option.txt', }, 'GL_NV_vertex_program3' : { 'number' : 306, 'flags' : {", ": 457, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_ycbcr_422.txt', }, 'GL_ARB_ES2_compatibility' : { 'arbnumber'", "}, 'url' : 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias' : { 'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' }, }, 'GL_EXT_texture_compression_bptc' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners.txt', },", "}, 'url' : 'extensions/NV/NV_alpha_to_coverage_dither_control.txt', }, 'GL_NV_bgr' : { 'esnumber' : 135, 'flags' :", "101, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context_robustness.txt', }, 'GL_ARB_cull_distance' : {", ": { 'number' : 172, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/NV/NV_blend_equation_advanced.txt', 'alias' : { 'GL_NV_blend_equation_advanced_coherent' }, }, 'GL_NV_blend_minmax_factor' : { 'number' :", "'supporters' : { 
'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_dxt1.txt', }, 'GL_EXT_texture_compression_latc' : {", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_alpha_minmax.txt', }, 'GL_SGIX_blend_cadd' : { 'number' : 150, 'flags'", "'extensions/NV/NV_copy_buffer.txt', }, 'GL_NV_copy_depth_to_color' : { 'number' : 243, 'flags' : { 'public' },", "'arbnumber' : 162, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cull_distance.txt', }, 'GL_ARB_debug_output'", "'url' : 'extensions/KHR/KHR_robustness.txt', }, 'GL_KHR_texture_compression_astc_hdr' : { 'arbnumber' : 118, 'esnumber' : 117,", "{ 'esnumber' : 187, 'flags' : { 'public' }, 'url' : 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', },", "}, 'GLX_MESA_set_3dfx_mode' : { 'number' : 218, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_image_buffer.txt', },", ": { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_viewport_offsetXXX.txt', }, 'GL_IMG_bindless_texture'", ": 'extensions/MESA/MESA_program_binary_formats.txt', }, 'GLX_MESA_query_renderer' : { 'number' : 446, 'flags' : { 'public'", ": { 'esnumber' : 67, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_program_binary.txt',", "{ 'arbnumber' : 195, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'GL_KHR_robustness' : { 'arbnumber' : 170, 'esnumber' : 190, 'flags' : { 'public'", "'url' : 'extensions/SGI/SGI_complex_type.txt', }, 'GLX_SGI_cushion' : { 'number' : 62, 'flags' : {", "{ 'GL_KHR_texture_compression_astc_ldr' }, }, 'GL_KHR_texture_compression_astc_sliced_3d' : { 'arbnumber' : 189, 'esnumber' : 249,", "'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_swap_group.txt', }, 'GL_NV_tessellation_program5' : { 'number' : 391, 'flags'", ": { 'public' }, 'url' : 'extensions/OVR/OVR_multiview.txt', }, 'GL_OVR_multiview2' : { 'number' :", ": { 'HP', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_generate_mipmap.txt', }, 'GL_SGIS_line_texgen' : { 'flags'", ": 
'extensions/SGIX/SGIX_fog_scale.txt', }, 'GL_SGIX_fog_texture' : { 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/SGIX/SGIX_instruments.txt', }, 'GL_SGIX_interlace' : { 'number' : 45, 'flags' :", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_datapipe.txt', }, 'GL_SGIX_decimation' : { 'number' :", "{ 'flags' : { 'obsolete' }, 'url' : 'extensions/WIN/WIN_scene_markerXXX.txt', }, 'GL_WIN_specular_fog' : {", "'extensions/ARB/ARB_texture_compression.txt', }, 'GL_ARB_texture_compression_bptc' : { 'arbnumber' : 77, 'flags' : { 'public' },", ": 141, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage_multisample.txt', }, 'GL_ARB_texture_swizzle' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_swap_control.txt',", "}, 'url' : 'extensions/OES/OES_blend_subtract.txt', }, 'GL_OES_byte_coordinates' : { 'number' : 291, 'esnumber' :", ": { 'esnumber' : 103, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_rg.txt',", "498, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": 'extensions/OES/OES_standard_derivatives.txt', }, 'GL_OES_stencil1' : { 'esnumber' : 31, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_image_samples.txt', }, 'GL_ARB_shader_texture_lod' : { 'arbnumber' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_multiview_draw_buffers.txt', }, 'GLU_EXT_nurbs_tessellator' : { 'number' :", "}, 'url' : 'extensions/AMD/AMD_shader_trinary_minmax.txt', }, 'GL_AMD_sparse_texture' : { 'number' : 426, 'flags' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_blend.txt', }, 'GL_ARB_vertex_buffer_object' : { 'arbnumber'", ": 'extensions/EXT/EXT_clear_texture.txt', }, 'GL_EXT_clip_cull_distance' : { 'esnumber' : 257, 'flags' : { 'public'", ": 'extensions/APPLE/APPLE_clip_distance.txt', }, 'GL_APPLE_color_buffer_packed_float' : { 'esnumber' : 194, 'flags' : { 'public'", ": 'extensions/EXT/EXT_pvrtc_sRGB.txt', }, 'GL_EXT_raster_multisample' 
: { 'number' : 462, 'esnumber' : 226, 'flags'", "'url' : 'extensions/EXT/EXT_texture_buffer_object.txt', }, 'GL_EXT_texture_compression_astc_decode_mode' : { 'esnumber' : 276, 'flags' : {", "'extensions/SGIX/SGIX_igloo_interface.txt', }, 'GL_SGIX_image_compression' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_image_compression.txt',", ": 3, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'url' : 'extensions/SGIS/SGIS_sharpen_texture.txt', }, 'GL_SGIS_texture4D' : { 'number' : 16, 'flags' : {", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_spirv_extensions.txt', }, 'GL_ARB_stencil_texturing' : {", "'extensions/EXT/EXT_convolution.txt', }, 'GL_EXT_coordinate_frame' : { 'number' : 156, 'flags' : { 'public' },", ": 25, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth32.txt', }, 'GL_OES_depth_texture' :", "{ 'public' }, 'supporters' : { 'ES', 'HP', 'SGI', 'SUN' }, 'url' :", "'url' : 'extensions/ARB/ARB_get_texture_sub_image.txt', }, 'GL_ARB_gl_spirv' : { 'arbnumber' : 190, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_map_object_buffer.txt',", "'url' : 'extensions/OES/OES_depth32.txt', }, 'GL_OES_depth_texture' : { 'esnumber' : 43, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clear_texture.txt', }, 'GL_EXT_clip_cull_distance' : { 'esnumber'", "513, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'extensions/ARB/ARB_compute_shader.txt', }, 'GL_ARB_compute_variable_group_size' : { 'arbnumber' : 153, 'flags' : { 'public' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object.txt', }, 'GL_NV_parameter_buffer_object2' : { 'number'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rectangle.txt', }, 'GL_ARB_texture_rg' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_light_texture.txt', }, 'GL_EXT_map_buffer_range' : { 'esnumber'", "'number' : 381, 'esnumber' : 271, 'flags' : { 
'public' }, 'supporters' :", "'url' : 'extensions/EXT/GLX_EXT_visual_rating.txt', }, 'GL_EXT_win32_keyed_mutex' : { 'number' : 506, 'esnumber' : 283,", ": 3, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_subtract.txt', }, 'GL_OES_byte_coordinates' :", "461, 'esnumber' : 225, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_post_depth_coverage.txt', },", "'extensions/OES/OES_surfaceless_context.txt', }, 'GL_OES_tessellation_shader' : { 'esnumber' : 214, 'flags' : { 'public' },", "'url' : 'extensions/ARB/ARB_sampler_objects.txt', }, 'GL_ARB_seamless_cube_map' : { 'arbnumber' : 65, 'flags' : {", ": { 'esnumber' : 21, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_env_crossbar.txt',", "'url' : 'extensions/EXT/EXT_draw_buffers2.txt', }, 'GL_EXT_draw_buffers_indexed' : { 'esnumber' : 176, 'flags' : {", "'GL_SGIX_polynomial_ffd' : { 'number' : 59, 'flags' : { 'incomplete' }, 'supporters' :", "'GL_NV_tessellation_program5' : { 'number' : 391, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/NV/NV_clip_space_w_scaling.txt', }, 'GL_NV_command_list' : { 'number' : 477, 'flags' : { 'public'", "'ARB' }, 'url' : 'extensions/ARB/ARB_multisample.txt', 'alias' : { 'GLX_ARB_multisample', 'WGL_ARB_multisample' }, }, 'GL_ARB_multitexture'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_texture.txt', }, 'GL_NV_draw_vulkan_image' : { 'number' : 501,", "'url' : 'extensions/INTEL/INTEL_framebuffer_CMAA.txt', }, 'GL_INTEL_map_texture' : { 'number' : 429, 'flags' : {", "'url' : 'extensions/NV/NV_polygon_mode.txt', }, 'GL_NV_present_video' : { 'number' : 347, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_gamma.txt',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt',", "'extensions/EXT/EXT_sRGB_write_control.txt', }, 'GL_EXT_scene_marker' : { 'number' : 120, 'flags' : { 'public' },", "'url' : 'extensions/NV/NV_command_list.txt', }, 
'GL_NV_compute_program5' : { 'number' : 421, 'flags' : {", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program.txt', }, 'GL_ARB_fragment_program_shadow' : {", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_float_buffer.txt', 'alias' :", "}, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_material.txt', }, 'GL_EXT_index_texture' :", "'number' : 288, 'flags' : { 'public' }, 'supporters' : { 'ATI' },", ": 136, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture_cube_map.txt', }, 'GL_OES_draw_buffers_indexed' :", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_performance_monitor.txt', }, 'GL_AMD_pinned_memory' : { 'number' : 411,", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_lod.txt', }, 'GL_ARB_texture_rectangle' : { 'arbnumber' :", "{ 'number' : 283, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'number' : 311, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/ARB/ARB_half_float_vertex.txt', }, 'GL_ARB_indirect_parameters' : { 'arbnumber' : 154, 'flags' :", ": { 'public' }, 'url' : 'extensions/NV/NV_copy_depth_to_color.txt', }, 'GL_NV_copy_image' : { 'number' :", "'esnumber' : 41, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc_update.txt', }, 'GL_NV_texture_compression_vtc' : { 'number' : 228,", "'number' : 114, 'flags' : { 'public' }, 'supporters' : { 'MS' },", "'url' : 'extensions/ARB/ARB_shading_language_420pack.txt', }, 'GL_ARB_shading_language_include' : { 'arbnumber' : 76, 'flags' : {", ": { 'number' : 249, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/NV/NV_light_max_exponent.txt', }, 'GL_NV_multisample_coverage' : { 'number' : 393, 'flags' : { 'public'", "'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/EXT_422_pixels.txt', }, 'GL_EXT_YUV_target' : { 'esnumber'", ": 'extensions/OML/GLX_OML_sync_control.txt', }, 'GL_OVR_multiview' : { 'number' : 478, 
'esnumber' : 241, 'flags'", "}, 'GL_EXT_fog_coord' : { 'number' : 149, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'IBM', 'SUN' }, 'url' :", "'url' : 'extensions/IMG/IMG_multisampled_render_to_texture.txt', }, 'GL_IMG_program_binary' : { 'esnumber' : 67, 'flags' : {", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_point_line_texgen.txt', },", "'GL_OES_texture_half_float_linear' }, }, 'GL_OES_texture_mirrored_repeat' : { 'esnumber' : 22, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_cube_map.txt', }, 'GL_ARB_texture_cube_map_array'", "'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_material.txt', }, 'GL_EXT_index_texture' : { 'number' : 93,", "{ 'public' }, 'url' : 'extensions/EXT/EXT_external_buffer.txt', }, 'GL_EXT_EGL_image_array' : { 'esnumber' : 278,", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp.txt', }, 'GL_EXT_texture_mirror_clamp_to_edge' : { 'esnumber'", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_bali_r_instruments.txt', }, 'GL_SGIX_bali_timer_instruments' : { 'flags' : {", "'extensions/ARB/ARB_texture_border_clamp.txt', }, 'GL_ARB_texture_buffer_object' : { 'arbnumber' : 51, 'flags' : { 'public' },", ": { 'esnumber' : 145, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_instanced_arrays.txt',", "}, 'url' : 'extensions/ARB/ARB_fragment_program_shadow.txt', }, 'GL_ARB_fragment_shader' : { 'arbnumber' : 32, 'flags' :", "}, 'url' : 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias' : { 'WGL_ARB_robustness_share_group_isolation' }, }, 'WGL_ATI_pixel_format_float' : {", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_object.txt',", ": 'extensions/EXT/EXT_vertex_array.txt', }, 'GL_EXT_vertex_array_bgra' : { 'number' : 354, 'flags' : { 'public'", ": 'extensions/SGIX/SGIX_blend_cadd.txt', }, 'GL_SGIX_blend_cmultiply' : { 'flags' : { 
'incomplete' }, 'url' :", "'KHR' }, 'url' : 'extensions/OML/GLX_OML_swap_method.txt', }, 'GLX_OML_sync_control' : { 'number' : 238, 'flags'", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program4.txt', }, 'GL_NV_video_capture' : {", "'arbnumber' : 74, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' :", "{ 'arbnumber' : 59, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_buffer.txt', },", "{ 'number' : 164, 'flags' : { 'public' }, 'supporters' : { 'SUN'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_cube_map.txt', }, 'GL_ARB_texture_cube_map_array' : { 'arbnumber' : 71,", "'GL_EXT_texture_buffer' : { 'esnumber' : 183, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_shuffle.txt', }, 'GL_NV_shadow_samplers_array' : { 'esnumber' :", "{ 'arbnumber' : 18, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_counters.txt', }, 'GL_NV_shader_atomic_float'", "}, 'GL_KHR_texture_compression_astc_sliced_3d' : { 'arbnumber' : 189, 'esnumber' : 249, 'flags' : {", ": { 'esnumber' : 108, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_storage.txt',", "{ 'arbnumber' : 129, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_layer_viewport.txt', },", ": 33, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'url' : 'extensions/NV/NV_explicit_attrib_location.txt', }, 'GL_NV_explicit_multisample' : { 'number' : 357, 'flags' : {", ": 246, 'flags' : { 'public' }, 'url' : 'extensions/INTEL/INTEL_framebuffer_CMAA.txt', }, 'GL_INTEL_map_texture' :", ": { 'public' }, 'url' : 'extensions/OES/OES_tessellation_shader.txt', }, 'GL_OES_texture_3D' : { 'esnumber' :", "'extensions/EXT/EXT_clip_cull_distance.txt', }, 'GL_EXT_clip_volume_hint' : { 'number' : 79, 'flags' : { 
'public' },", "}, 'url' : 'extensions/NV/NV_bindless_texture.txt', }, 'GL_NV_blend_equation_advanced' : { 'number' : 433, 'esnumber' :", "}, 'url' : 'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias' : { 'GL_KHR_blend_equation_advanced_coherent' }, }, 'GL_KHR_context_flush_control' : {", "'extensions/NV/NV_geometry_shader_passthrough.txt', }, 'GL_NV_gpu_multicast' : { 'number' : 494, 'flags' : { 'public' },", "}, }, 'GL_EXT_cull_vertex' : { 'number' : 98, 'flags' : { 'public' },", ": { 'number' : 514, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array.txt', }, 'GL_NV_viewport_array2' : { 'number'", "}, 'url' : 'extensions/SGIS/SGIS_fog_function.txt', }, 'GL_SGIS_generate_mipmap' : { 'number' : 32, 'flags' :", ": 'extensions/SGIX/SGIX_resample.txt', }, 'GL_SGIX_scalebias_hint' : { 'number' : 236, 'flags' : { 'incomplete'", "'url' : 'extensions/OES/OES_EGL_image_external.txt', }, 'GL_OES_EGL_image_external_essl3' : { 'esnumber' : 220, 'flags' : {", "505, 'esnumber' : 282, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_win32.txt', 'alias'", ": 'extensions/OES/OES_shader_image_atomic.txt', }, 'GL_OES_shader_io_blocks' : { 'esnumber' : 213, 'flags' : { 'public'", ": 444, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_shader_stencil_value_export.txt', }, 'GL_AMD_shader_trinary_minmax' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_ballot.txt', }, 'GL_ARB_shader_bit_encoding' : { 'arbnumber' :", "'number' : 137, 'flags' : { 'public' }, 'supporters' : { 'HP' },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_fog_distance.txt', }, 'GL_NV_fragment_coverage_to_color' : { 'number' : 467,", ": 133, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_array.txt', }, 'GL_NV_texture_barrier' :", "366, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url'", ": 'extensions/NV/NV_texture_multisample.txt', }, 'GL_NV_texture_npot_2D_mipmap' : { 'esnumber' : 96, 
'flags' : { 'public'", "}, 'GL_EXT_shader_image_load_formatted' : { 'number' : 449, 'flags' : { 'public' }, 'url'", "{ 'number' : 172, 'flags' : { 'public' }, 'supporters' : { 'INGR',", "{ 'number' : 229, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'extensions/OES/OES_EGL_image_external.txt', }, 'GL_OES_EGL_image_external_essl3' : { 'esnumber' : 220, 'flags' : { 'public' },", "'GL_ARB_texture_rgb10_a2ui' : { 'arbnumber' : 83, 'flags' : { 'public' }, 'url' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fog_distance.txt', }, 'GL_NV_fragment_coverage_to_color' : { 'number' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5.txt',", "1, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI', 'SUN'", "}, 'supporters' : { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_direct_state_access.txt', },", ": 'extensions/EXT/EXT_index_array_formats.txt', }, 'GL_EXT_index_func' : { 'number' : 95, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_multisample_compatibility.txt', }, 'GL_EXT_multisampled_render_to_texture' : { 'esnumber' : 106,", "{ 'number' : 352, 'flags' : { 'public' }, 'supporters' : { 'APPLE',", ": 472, 'esnumber' : 235, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sample_locations.txt',", ": { 'public' }, 'url' : 'extensions/ARB/ARB_arrays_of_arrays.txt', }, 'GL_ARB_base_instance' : { 'arbnumber' :", ": 522, 'esnumber' : 301, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_storage.txt',", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_separate_specular_color.txt', }, 'GL_EXT_shader_framebuffer_fetch' : { 'number'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture.txt', }, 'GL_EXT_multisampled_render_to_texture2' : { 'esnumber'", "{ 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_slice_accum.txt', }, 'GL_SUN_triangle_list'", "{ 'esnumber' : 56, 'flags' : { 'public' }, 'url' : 
'extensions/QCOM/QCOM_performance_monitor_global_mode.txt', },", "'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_make_current_read.txt', }, 'WGL_ARB_pbuffer' : { 'arbnumber' : 11, 'flags'", "{ 'arbnumber' : 3, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "}, 'url' : 'extensions/AMD/AMD_vertex_shader_tessellator.txt', }, 'GL_AMD_vertex_shader_viewport_index' : { 'number' : 416, 'flags' :", ": 350, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": 'extensions/ARB/WGL_ARB_pixel_format.txt', }, 'WGL_ARB_render_texture' : { 'arbnumber' : 20, 'flags' : { 'public'", "}, 'GLX_SGIX_color_typeXXX' : { 'number' : 72, 'flags' : { 'incomplete' }, 'url'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_query_lod.txt', }, 'GL_ARB_texture_rectangle' : { 'arbnumber' : 38,", "'arbnumber' : 181, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_locations.txt', }, 'GL_ARB_sample_shading'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_dot3.txt', }, 'GL_ARB_texture_filter_anisotropic'", ": 37, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_npot.txt', }, 'GL_OES_texture_stencil8' :", "'GLX_SGIX_wait_group' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_wait_group.txt', }, 'GL_SGIX_ycrcb'", "407, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'url' : 'extensions/EXT/EXT_texture_compression_rgtc.txt', }, 'GL_EXT_texture_compression_s3tc' : { 'number' : 198, 'esnumber' : 154,", "'extensions/SGIX/SGIX_fragment_lighting_space.txt', }, 'GL_SGIX_fragment_specular_lighting' : { 'flags' : { 'incomplete', 'public' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_stereo_tree.txt', }, 'GL_EXT_gpu_program_parameters' : { 'number'", "'arbnumber' : 95, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES2_compatibility.txt', }, 'GL_ARB_ES3_1_compatibility'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_swap_group.txt', }, 'GL_SGIX_tag_sample_buffer' : { 
'number'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_precision.txt', }, 'GL_ARB_shader_stencil_export' : { 'arbnumber'", "{ 'GOOGLE', 'NVIDIA', 'VMware' }, 'url' : 'extensions/EXT/EXT_window_rectangles.txt', }, 'GL_EXT_x11_sync_object' : { 'number'", ": { 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_make_current_read.txt',", "388, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'GL_ARB_pixel_buffer_object' : { 'arbnumber' : 42, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/EXT/EXT_stencil_clear_tag.txt', }, 'GL_EXT_stencil_two_side' : { 'number' : 268, 'flags' :", "'extensions/OES/OES_shader_multisample_interpolation.txt', }, 'GL_OES_single_precision' : { 'number' : 293, 'esnumber' : 18, 'flags' :", "{ 'number' : 33, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "}, 'GL_OES_draw_texture' : { 'esnumber' : 7, 'flags' : { 'public' }, 'url'", "'arbnumber' : 29, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'extensions/NV/NV_blend_equation_advanced.txt', 'alias' : { 'GL_NV_blend_equation_advanced_coherent' }, }, 'GL_NV_blend_minmax_factor' : { 'number' : 510,", "'url' : 'extensions/NV/NV_multisample_coverage.txt', }, 'GL_NV_multisample_filter_hint' : { 'number' : 259, 'flags' : {", "'extensions/NV/GLX_NV_swap_group.txt', }, 'GL_NV_tessellation_program5' : { 'number' : 391, 'flags' : { 'public' },", "'number' : 294, 'esnumber' : 6, 'flags' : { 'public' }, 'supporters' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_polynomial_ffd.txt', }, 'GL_SGIX_quad_mesh' : { 'flags' : { 'incomplete'", "'extensions/ARB/ARB_compute_variable_group_size.txt', }, 'GL_ARB_conditional_render_inverted' : { 'arbnumber' : 161, 'flags' : { 'public' },", "'GL_EXT_stencil_wrap' : { 'number' : 176, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_ARB_texture_non_power_of_two' : { 'arbnumber' : 34, 'flags' : { 'public' }, 'supporters'", "{ 'esnumber' : 102, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_shadow_samplers.txt', },", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_stencil_operation_extended.txt', }, 'GL_AMD_texture_gather_bias_lod' : { 'number' : 502,", "}, 'GL_MTK_shader_binary' : { 'esnumber' : 244, 'flags' : { 'incomplete', 'private' },", "'url' : 'extensions/EXT/EXT_buffer_storage.txt', }, 'GL_EXT_clear_texture' : { 'esnumber' : 269, 'flags' : {", ": { 'public' }, 'url' : 'extensions/NV/NV_sample_mask_override_coverage.txt', }, 'GL_NV_shader_atomic_counters' : { 'number' :", "145, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_instanced_arrays.txt', }, 'GL_NV_internalformat_sample_query' : {", "'extensions/SGIX/SGIX_fog_scale.txt', }, 'GL_SGIX_fog_texture' : { 'flags' : { 'public' }, 'url' : 'extensions/SGIX/SGIX_fog_texture.txt',", "{ 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_video_sync.txt', }, 'GL_SUNX_constant_data' : { 'number' : 163,", "'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform.txt', }, 'GL_EXT_pixel_transform_color_table' : { 'number' : 139, 'flags'", "'url' : 'extensions/SGIX/SGIX_reference_plane.txt', }, 'GL_SGIX_resample' : { 'number' : 212, 'flags' : {", "'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers.txt', }, 'GL_EXT_draw_buffers2' : { 'number' : 340, 'flags'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_expand_normal.txt', }, 'GL_NV_texture_multisample' : {", ": 258, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_swizzle.txt', }, 'GL_OES_EGL_image' :", "'url' : 'extensions/SGIX/SGIX_async_pixel.txt', }, 'GL_SGIX_bali_g_instruments' : { 'flags' : { 'incomplete' }, 'url'", "{ 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow_ambient.txt', }, 'GL_SGIX_slim' : { 'flags' :", "}, }, 'GL_SGIS_multitexture' : { 'number' : 116, 'flags' : { 'obsolete' },", "}, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pbuffer.txt', }, 'WGL_EXT_pixel_format' :", "'GL_AMD_shader_image_load_store_lod' : { 'number' : 513, 'flags' : { 'public' }, 'supporters' :", ": 
'extensions/ARB/ARB_texture_cube_map_array.txt', }, 'GL_ARB_texture_env_add' : { 'arbnumber' : 6, 'flags' : { 'public'", "{ 'esnumber' : 1, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_equation_separate.txt', },", "{ 'arbnumber' : 27, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "}, 'GL_SUN_mesh_array' : { 'number' : 257, 'flags' : { 'public' }, 'supporters'", "not needed - see arbnumber 74.', }, 'WGL_ARB_create_context_profile' : { 'arbnumber' : 74,", ": 263, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'GLX_EXT_scene_marker' }, }, 'GL_EXT_secondary_color' : { 'number' : 145, 'flags' :", "229, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_coverage_to_color.txt', }, 'GL_NV_fragment_program' : {", ": 'extensions/SGIX/SGIX_fog_layers.txt', }, 'GL_SGIX_fog_offset' : { 'number' : 65, 'flags' : { 'public'", ": 120, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_program_binary.txt', }, 'GL_ARM_mali_shader_binary' :", "{ 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_material.txt', },", ": 270, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_bindless_texture.txt', }, 'GL_IMG_framebuffer_downsample' :", "{ 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', }, 'GL_NVX_conditional_render' : { 'number' : 425,", "'esnumber' : 146, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_array.txt', }, 'GL_NV_shadow_samplers_cube'", "'GL_APPLE_sync' : { 'esnumber' : 124, 'flags' : { 'public' }, 'url' :", "'GL_OES_EGL_sync' : { 'esnumber' : 75, 'flags' : { 'public' }, 'url' :", "'public' }, 'url' : 'extensions/OES/OES_sample_variables.txt', }, 'GL_OES_shader_image_atomic' : { 'esnumber' : 171, 'flags'", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/EXT/EXT_clip_control.txt', }, 'WGL_I3D_digital_video_control' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cube_map.txt', }, 'GL_SGIX_cylinder_texgen' : { 'number' : 140,", ": 
74, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_subtable.txt', }, 'GL_EXT_compiled_vertex_array' :", ": 'extensions/APPLE/APPLE_float_pixels.txt', }, 'GL_APPLE_flush_buffer_range' : { 'number' : 321, 'flags' : { 'public'", ": 'extensions/APPLE/APPLE_aux_depth_stencil.txt', }, 'GL_APPLE_client_storage' : { 'number' : 270, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_viewport_array.txt', }, 'GL_ARB_window_pos' : { 'arbnumber' : 25, 'flags'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_dm_buffer.txt', }, 'GL_SGIX_dvc' : { 'flags'", ": { 'number' : 117, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias' : { 'GL_SGIX_color_type' }, }, 'GLX_SGIX_color_typeXXX' : { 'number' :", "'extensions/NV/NV_draw_buffers.txt', }, 'GL_NV_draw_instanced' : { 'esnumber' : 141, 'flags' : { 'public' },", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias' :", "'extensions/ARB/ARB_texture_view.txt', }, 'GL_ARB_timer_query' : { 'arbnumber' : 85, 'flags' : { 'public' },", "'url' : 'extensions/EXT/EXT_shader_integer_mix.txt', }, 'GL_EXT_shader_io_blocks' : { 'esnumber' : 180, 'flags' : {", "'GL_EXT_texture_compression_dxt1' : { 'number' : 309, 'esnumber' : 49, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_draw_elements_base_vertex.txt', }, 'GL_ARB_draw_indirect' : { 'arbnumber' : 87, 'flags'", "'GL_ARB_debug_output' : { 'arbnumber' : 104, 'flags' : { 'public' }, 'url' :", ": 'extensions/KHR/KHR_robustness.txt', }, 'GL_KHR_texture_compression_astc_hdr' : { 'arbnumber' : 118, 'esnumber' : 117, 'flags'", "'public' }, 'supporters' : { 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_histogram.txt',", ": 'extensions/SGIS/SGIS_shared_multisample.txt', 'alias' : { 'GLX_SGIS_shared_multisample' }, }, 'GL_SGIS_sharpen_texture' : { 'number' :", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 
'extensions/SGIX/SGIX_fog_scale.txt', }, 'GL_SGIX_fog_texture' :", "{ 'number' : 346, 'esnumber' : 198, 'flags' : { 'public' }, 'supporters'", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_object_purgeable.txt', }, 'GL_APPLE_rgb_422' : { 'number' : 373,", ": 69, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'IMG', 'SUN'", "'number' : 255, 'flags' : { 'public' }, 'supporters' : { 'I3D' },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource_tag.txt', }, 'GL_NV_read_buffer' : {", "'alias' : { 'WGL_EXT_create_context_es_profile' }, }, 'WGL_EXT_depth_float' : { 'number' : 177, 'flags'", "}, 'GL_SGIS_multitexture' : { 'number' : 116, 'flags' : { 'obsolete' }, 'supporters'", "'extensions/OES/OES_depth32.txt', }, 'GL_OES_depth_texture' : { 'esnumber' : 43, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB_write_control.txt', }, 'GL_EXT_scene_marker' : { 'number' :", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_image_compression.txt', }, 'GL_SGIX_impact_pixel_texture' : { 'number' :", "}, 'url' : 'extensions/AMD/GLX_AMD_gpu_association.txt', }, 'GL_AMD_gpu_shader_half_float' : { 'number' : 496, 'flags' :", "139, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SUN' }, 'url'", ": { 'number' : 424, 'flags' : { 'public' }, 'supporters' : {", "'arbnumber' : 75, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt', 'comments' :", ": { 'IBM', 'INGR' }, 'url' : 'extensions/EXT/EXT_blend_func_separate.txt', }, 'GL_EXT_blend_logic_op' : { 'number'", "{ 'esnumber' : 256, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_protected_textures.txt', },", ": { 'number' : 255, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/AMD/AMD_draw_buffers_blend.txt', }, 'GL_AMD_framebuffer_sample_positions' :", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_calligraphic_fragment.txt',", "'url' : 
'extensions/SGIX/SGIX_ir_instrument1.txt', }, 'GL_SGIX_line_quality_hint' : { 'flags' : { 'incomplete' }, 'url'", ": 318, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", "463, 'esnumber' : 259, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture2.txt', },", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/EXT/EXT_texture_env_dot3.txt', }, 'GL_EXT_texture_filter_anisotropic'", ": { 'GL_SGIX_vertex_preclip_hint' }, }, 'GLX_SGIX_video_resize' : { 'number' : 83, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_npot.txt', }, 'GL_OES_texture_stencil8' : { 'esnumber'", "{ 'arbnumber' : 24, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "'number' : 59, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'url' : 'extensions/SGI/GLX_SGI_video_sync.txt', }, 'GL_SUNX_constant_data' : { 'number' : 163, 'flags' : {", "168, 'esnumber' : 191, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_context_flush_control.txt', 'alias'", "{ 'public' }, 'url' : 'extensions/OES/OES_sample_shading.txt', }, 'GL_OES_sample_variables' : { 'esnumber' : 170,", ": { 'arbnumber' : 158, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture.txt',", "}, 'url' : 'extensions/SGIS/SGIS_multisample.txt', 'alias' : { 'GLX_SGIS_multisample' }, }, 'GL_SGIS_multitexture' : {", "'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_sync.txt', }, 'GL_APPLE_texture_2D_limited_npot' : { 'esnumber'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_robust_buffer_access_behavior.txt', }, 'GL_ARB_robustness' : { 'arbnumber' : 105,", "{ 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_dxt1.txt', }, 'GL_EXT_texture_compression_latc' : { 'number' :", "}, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUNX/SUNX_constant_data.txt', }, 'GL_SUN_convolution_border_modes' : {", "{ 'number' : 294, 'esnumber' : 6, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/SGIX/SGIX_clipmap.txt', }, 'GL_SGIX_color_matrix_accuracy' 
: { 'flags' : { 'incomplete' }, 'url'", ": 298, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', }, 'GL_EXT_EGL_image_storage' :", "'extensions/EXT/EXT_stencil_two_side.txt', }, 'GL_EXT_stencil_wrap' : { 'number' : 176, 'flags' : { 'public' },", "{ 'esnumber' : 221, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_R8.txt', },", "'public' }, 'url' : 'extensions/EXT/EXT_texture_norm16.txt', }, 'GL_EXT_texture_object' : { 'number' : 20, 'flags'", "'url' : 'extensions/3DFX/3DFX_texture_compression_FXT1.txt', }, 'GL_AMD_blend_minmax_factor' : { 'number' : 404, 'flags' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_buffer.txt', }, 'GL_EXT_texture_buffer_object' : { 'number' : 330,", "'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture.txt', }, 'GL_ARB_sparse_texture2' : { 'arbnumber' : 186, 'flags'", "}, 'url' : 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', }, 'GL_EXT_texture_view' : { 'esnumber' : 185, 'flags' :", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_program_evaluators.txt', }, 'GL_APPLE_ycbcr_422' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_buffer_float.txt', }, 'GL_NV_depth_clamp'", "'GL_EXT_draw_buffers2' : { 'number' : 340, 'flags' : { 'public' }, 'supporters' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_copy_image.txt', 'alias' : { 'GLX_NV_copy_image', 'WGL_NV_copy_image' }, }, 'GL_NV_coverage_sample'", "146, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_enhanced_layouts.txt', }, 'GL_ARB_explicit_attrib_location' : {", ": 'extensions/SGIS/SGIS_multisample.txt', 'alias' : { 'GLX_SGIS_multisample' }, }, 'GL_SGIS_multitexture' : { 'number' :", ": 'Previously shared extension number 15 with SGIS_pixel_texture.', }, 'GL_SGIX_pixel_texture_bits' : { 'number'", "{ 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multisample.txt', 'alias' : { 'GLX_SGIS_multisample' }, }, 'GL_SGIS_multitexture'", "{ 'number' : 38, 'flags' : { 
'public' }, 'supporters' : { 'HP',", ": 120, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_arrays_of_arrays.txt', }, 'GL_ARB_base_instance' :", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_sRGB.txt', }, 'GL_EXT_texture_sRGB_R8' : { 'esnumber' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_swap_control_tear.txt', }, 'GL_EXT_tessellation_shader'", "{ 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_digital_video_control.txt', }, 'WGL_I3D_gamma' : { 'number' : 251,", "'arbnumber' : 104, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_debug_output.txt', }, 'GL_ARB_depth_buffer_float'", "'public' }, 'url' : 'extensions/EXT/EXT_frag_depth.txt', }, 'GL_EXT_fragment_lighting' : { 'number' : 102, 'flags'", "{ 'esnumber' : 5, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', },", "'extensions/ARB/ARB_multisample.txt', 'alias' : { 'GLX_ARB_multisample', 'WGL_ARB_multisample' }, }, 'GL_ARB_multitexture' : { 'arbnumber' :", "'GL_ARB_texture_env_dot3' : { 'arbnumber' : 19, 'flags' : { 'public' }, 'supporters' :", "'SGI' }, 'url' : 'extensions/EXT/EXT_texture.txt', }, 'GL_EXT_texture3D' : { 'number' : 6, 'flags'", ": 'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias' : { 'GL_SGIX_vertex_preclip_hint' }, }, 'GLX_SGIX_video_resize' : { 'number' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow_ambient.txt', },", "}, 'url' : 'extensions/AMD/AMD_compressed_3DC_texture.txt', }, 'GL_AMD_compressed_ATC_texture' : { 'esnumber' : 40, 'flags' :", ": 'extensions/EXT/EXT_texture_integer.txt', }, 'GL_EXT_texture_lod_bias' : { 'number' : 186, 'esnumber' : 60, 'flags'", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_swap_control.txt', }, 'GL_MESA_tile_raster_order' : { 'number'", ": 323, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'extensions/OES/OES_compressed_paletted_texture.txt', }, 'GL_OES_copy_image' : { 'esnumber' : 208, 'flags' : { 'public' 
},", ": { 'arbnumber' : 61, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_depth_clamp.txt',", "'GL_ARB_shader_draw_parameters' : { 'arbnumber' : 156, 'flags' : { 'public' }, 'url' :", "'esnumber' : 122, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_framebuffer_fetch.txt', 'alias' :", ": 'extensions/NV/NV_gpu_multicast.txt', }, 'GL_NV_gpu_program4' : { 'number' : 322, 'flags' : { 'public'", ": 14, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_size_array.txt', }, 'GL_OES_point_sprite' :", "95, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url'", "'../EGL/extensions/NV/EGL_NV_coverage_sample.txt', }, 'GL_NV_deep_texture3D' : { 'number' : 424, 'flags' : { 'public' },", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_viewport_index.txt', }, 'GL_ANDROID_extension_pack_es31a' : { 'esnumber'", "'alias' : { 'GL_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_sample_locations' : { 'arbnumber' : 181, 'flags'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_counters.txt', }, 'GL_NV_shader_atomic_float' : {", ": { 'arbnumber' : 46, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', }, 'GL_SGIX_complex_polar' : { 'flags' : { 'incomplete' }, 'url' :", "{ 'public' }, 'url' : 'extensions/NV/NV_shader_thread_shuffle.txt', }, 'GL_NV_shadow_samplers_array' : { 'esnumber' : 146,", "}, 'GL_NV_pixel_buffer_object' : { 'esnumber' : 134, 'flags' : { 'public' }, 'url'", "}, 'GL_ANGLE_instanced_arrays' : { 'esnumber' : 109, 'flags' : { 'public' }, 'url'", ": 160, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_clip_control.txt', }, 'GL_ARB_color_buffer_float' :", "'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_buffer_age.txt', }, 'GL_EXT_buffer_storage'", "155, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_pvrtc_sRGB.txt', }, 'GL_EXT_raster_multisample' : {", "'number' : 286, 'flags' : { 'public' }, 'supporters' 
: { 'NVIDIA' },", ": { 'esnumber' : 220, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external_essl3.txt',", "220, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' :", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_float.txt', }, 'GL_ARB_texture_gather' : {", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_list_priority.txt', }, 'GL_SGIX_mpeg1' : { 'flags' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb.txt', },", "'IBM', 'SUN' }, 'url' : 'extensions/EXT/EXT_rescale_normal.txt', }, 'GL_EXT_robustness' : { 'esnumber' : 107,", ": 'extensions/AMD/AMD_blend_minmax_factor.txt', }, 'GL_AMD_compressed_3DC_texture' : { 'esnumber' : 39, 'flags' : { 'public'", "'url' : 'extensions/SGIX/SGIX_fog_offset.txt', }, 'GL_SGIX_fog_patchy' : { 'flags' : { 'incomplete' }, 'url'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_program4.txt', }, 'GL_NV_geometry_shader4' : { 'number' : 338,", "'extensions/EXT/EXT_tessellation_shader.txt', 'alias' : { 'GL_EXT_tessellation_point_size' }, }, 'GL_EXT_texenv_op' : { 'flags' : {", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_view.txt', }, 'GL_EXT_timer_query' : { 'number' :", "'ARB' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object.txt', }, 'GL_ARB_texture_buffer_object_rgb32' : { 'arbnumber' : 92, 'flags'", "'arbnumber' : 96, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_program_binary.txt', }, 'GL_ARB_get_texture_sub_image'", "}, 'GL_SGIX_decimation' : { 'number' : 125, 'flags' : { 'incomplete' }, 'supporters'", "49, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_conservative_rasterization.txt', }, 'GL_INTEL_fragment_shader_ordering' : { 'number' :", "'GL_AMD_pinned_memory' : { 'number' : 411, 'flags' : { 'public' }, 'supporters' :", ": 431, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "{ 'arbnumber' : 153, 'flags' : { 'public' }, 'url' : 
'extensions/ARB/ARB_compute_variable_group_size.txt', },", "'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_texture.txt', }, 'GL_EXT_instanced_arrays' : { 'esnumber' : 156,", ": { 'GL_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_sample_locations' : { 'arbnumber' : 181, 'flags' :", "35, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "Similar to ARB_texture_cube_map.', }, 'GL_EXT_texture_cube_map_array' : { 'esnumber' : 184, 'flags' : {", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow_ambient.txt', }, 'GL_SGIX_slim' : { 'flags' : { 'incomplete'", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture.txt', 'comments' : 'Previously shared extension number", "'extensions/ARB/ARB_multitexture.txt', }, 'GL_ARB_occlusion_query' : { 'arbnumber' : 29, 'flags' : { 'public' },", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_blend.txt', }, 'GL_ARB_vertex_buffer_object' : { 'arbnumber' :", "'extensions/EXT/EXT_draw_range_elements.txt', }, 'GL_EXT_draw_transform_feedback' : { 'esnumber' : 272, 'flags' : { 'public' },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_program4.txt', }, 'GL_NV_geometry_shader4' : {", ": 512, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'url' : 'extensions/OES/OES_EGL_image_external.txt', }, 'GL_OES_EGL_image_external_essl3' : { 'esnumber' : 220, 'flags' :", "'extensions/KHR/KHR_context_flush_control.txt', 'alias' : { 'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control' }, }, 'GL_KHR_debug' : { 'arbnumber' :", ": 46, 'flags' : { 'obsolete' }, 'supporters' : { 'SGI' }, 'url'", "{ 'arbnumber' : 118, 'esnumber' : 117, 'flags' : { 'public' }, 'url'", ": { 'number' : 390, 'flags' : { 'public' }, 'supporters' : {", "'extensions/ANGLE/ANGLE_framebuffer_multisample.txt', }, 'GL_ANGLE_instanced_arrays' : { 'esnumber' : 109, 'flags' : { 'public' },", "'extensions/APPLE/APPLE_framebuffer_multisample.txt', }, 'GL_APPLE_object_purgeable' : { 'number' : 371, 'flags' : { 'public' 
},", "'GL_ARB_texture_swizzle' : { 'arbnumber' : 84, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_window_pos.txt',", ": 'extensions/NV/NV_EGL_stream_consumer_external.txt', }, 'GL_NV_alpha_to_coverage_dither_control' : { 'number' : 500, 'flags' : { 'public'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_image_compression.txt', }, 'GL_SGIX_impact_pixel_texture' : {", "'GL_NV_vertex_array_range' : { 'number' : 190, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_SGIX_texture_range' : { 'number' : 181, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 342, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_combine.txt',", "'GL_OES_mapbuffer' : { 'esnumber' : 29, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/NV/NV_gpu_program5_mem_extended.txt', }, 'GL_NV_gpu_shader5' : { 'number' : 389, 'esnumber' :", "'GL_SGIX_texture_phase' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_phase.txt', }, 'GL_SGIX_texture_range'", "'Alias to WGL_ARB_create_context_profile not needed - see arbnumber 74.', }, 'WGL_ARB_create_context_profile' : {", "{ 'number' : 132, 'flags' : { 'incomplete', 'public' }, 'supporters' : {", "'number' : 319, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'extensions/OES/OES_standard_derivatives.txt', }, 'GL_OES_stencil1' : { 'esnumber' : 31, 'flags' : { 'public' },", "'GLX_SGIS_shared_multisample' }, }, 'GL_SGIS_sharpen_texture' : { 'number' : 22, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/OES/OES_depth32.txt', }, 'GL_OES_depth_texture' : { 'esnumber' :", "'url' : 'extensions/AMD/AMD_blend_minmax_factor.txt', }, 'GL_AMD_compressed_3DC_texture' : { 'esnumber' : 39, 'flags' : {", ": { 'esnumber' : 212, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_primitive_bounding_box.txt',", "'supporters' : { 'HP', 'SGI', 'SUN' }, 'url' : 
'extensions/SGI/SGI_color_table.txt', 'comments' : 'Partial", "{ 'public' }, 'supporters' : { 'ES', 'HP', 'SGI' }, 'url' : 'extensions/EXT/EXT_copy_texture.txt',", "{ 'arbnumber' : 133, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_draw_indirect.txt', },", "}, 'url' : 'extensions/MESA/GLX_MESA_swap_control.txt', }, 'GL_MESA_tile_raster_order' : { 'number' : 515, 'esnumber' :", "'extensions/EXT/EXT_gpu_shader4.txt', }, 'GL_EXT_gpu_shader5' : { 'esnumber' : 178, 'flags' : { 'public' },", "'GL_NVX_linked_gpu_multicast' : { 'number' : 493, 'flags' : { 'public' }, 'supporters' :", ": 4, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url'", "'alias' : { 'GLX_ARB_fbconfig_float', 'WGL_ARB_pixel_format_float' }, }, 'GL_ARB_compatibility' : { 'arbnumber' : 58,", "'arbnumber' : 68, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_array_bgra.txt', }, 'GL_ARB_vertex_array_object'", "'IBM' }, 'url' : 'extensions/IBM/IBM_cull_vertex.txt', }, 'GL_IBM_multimode_draw_arrays' : { 'number' : 200, 'flags'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_shader.txt', }, 'GL_ARB_vertex_type_10f_11f_11f_rev'", ": 343, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": 18, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "193, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_clip_distance.txt', }, 'GL_APPLE_color_buffer_packed_float' : {", "'GL_NV_texture_compression_latc' : { 'esnumber' : 130, 'flags' : { 'public' }, 'url' :", "40, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_ATC_texture.txt', }, 'GL_AMD_conservative_depth' : {", "'extensions/NV/NV_shadow_samplers_array.txt', }, 'GL_NV_shadow_samplers_cube' : { 'esnumber' : 147, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/QCOM/QCOM_binning_control.txt', }, 'GL_QCOM_driver_control' : { 'esnumber' : 55,", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_trinary_minmax.txt', }, 
'GL_AMD_sparse_texture'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex.txt', }, 'GL_SGI_complex_type' : { 'number'", ": 211, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_gpu_shader5.txt', }, 'GL_OES_mapbuffer' :", "'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_single_precision.txt', }, 'GL_OES_standard_derivatives' :", ": { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pixel_format.txt', }, 'WGL_ARB_render_texture' : { 'arbnumber' :", "}, 'url' : 'extensions/APPLE/APPLE_vertex_program_evaluators.txt', }, 'GL_APPLE_ycbcr_422' : { 'number' : 275, 'flags' :", "}, 'url' : 'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias' : { 'GL_SGIX_color_type' }, }, 'GLX_SGIX_color_typeXXX' : {", ": 'extensions/EXT/EXT_clip_cull_distance.txt', }, 'GL_EXT_clip_volume_hint' : { 'number' : 79, 'flags' : { 'public'", ": { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_blended_overlay.txt', }, 'GL_SGIS_clip_band_hint' : { 'flags' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage.txt', }, 'GL_EXT_shader_pixel_local_storage2' : { 'esnumber' :", ": 8, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_extended_matrix_palette.txt', }, 'GL_OES_fbo_render_mipmap' :", "83, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_rgb10_a2ui.txt', }, 'GL_ARB_texture_stencil8' : {", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_border_clamp.txt', }, 'GL_ARB_texture_buffer_object' : { 'arbnumber' : 51,", "{ 'number' : 18, 'flags' : { 'public' }, 'supporters' : { 'ES',", "'arbnumber' : 85, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_timer_query.txt', }, 'GL_ARB_transform_feedback2'", "{ 'MESA' }, 'url' : 'extensions/MESA/MESA_ycbcr_texture.txt', }, 'GL_MTK_program_binary' : { 'esnumber' : 245,", ": 283, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'GL_EXT_multisample' }, }, 'WGL_EXT_pbuffer' : { 'number' : 171, 'flags' :", "{ 'number' : 500, 'flags' : { 'public' }, 'url' : 
'extensions/NV/NV_alpha_to_coverage_dither_control.txt', },", "'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_polygon_offset.txt', }, 'GL_EXT_polygon_offset_clamp' : { 'number'", "68, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_shader_binary.txt', }, 'GL_IMG_texture_compression_pvrtc' : {", ": { 'esnumber' : 42, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt',", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_add_env.txt', },", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_parameters.txt', }, 'GL_ARB_point_sprite'", ": { 'public' }, 'url' : 'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias' : { 'GL_KHR_blend_equation_advanced_coherent' }, },", "'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_storage_buffer_object.txt', }, 'GL_NV_shader_thread_group' : { 'number' : 447, 'flags'", "'arbnumber' : 122, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compute_shader.txt', }, 'GL_ARB_compute_variable_group_size'", "{ 'esnumber' : 89, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_alpha_test.txt', },", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_program_evaluators.txt', }, 'GL_APPLE_ycbcr_422' : { 'number'", ": 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', }, 'GL_AMD_shader_image_load_store_lod' : { 'number' : 513, 'flags' : { 'public'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_name_gen_delete.txt', }, 'GL_AMD_occlusion_query_event' : { 'number'", "'url' : 'extensions/EXT/EXT_copy_texture.txt', }, 'GLX_EXT_create_context_es2_profile' : { 'number' : 399, 'flags' : {", "'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_vertex_hints.txt', }, 'GL_QCOM_alpha_test' : { 'esnumber'", "{ 'public' }, 'url' : 'extensions/OES/OES_geometry_shader.txt', }, 'GL_OES_get_program_binary' : { 'esnumber' : 47,", ": 'extensions/OES/OES_fragment_precision_high.txt', }, 'GL_OES_framebuffer_object' : { 
'esnumber' : 10, 'flags' : { 'public'", "'url' : 'extensions/ARB/ARB_draw_indirect.txt', }, 'GL_ARB_draw_instanced' : { 'arbnumber' : 44, 'flags' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_stencil_texturing.txt', }, 'GL_ARB_sync' : { 'arbnumber' :", "'public' }, 'supporters' : { 'ES', 'SGI' }, 'url' : 'extensions/EXT/EXT_cmyka.txt', }, 'GL_EXT_color_buffer_float'", "'extensions/SGI/SGI_complex.txt', }, 'GL_SGI_complex_type' : { 'number' : 88, 'flags' : { 'incomplete' },", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_instanced_arrays.txt', }, 'GL_ARB_internalformat_query' :", "'url' : 'extensions/AMD/AMD_multi_draw_indirect.txt', }, 'GL_AMD_name_gen_delete' : { 'number' : 394, 'flags' : {", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_border_clamp.txt', }, 'GL_ARB_texture_buffer_object' :", "'url' : 'extensions/NV/NV_draw_buffers.txt', }, 'GL_NV_draw_instanced' : { 'esnumber' : 141, 'flags' : {", "'IBM' }, 'url' : 'extensions/EXT/EXT_static_vertex_array.txt', }, 'GL_EXT_stencil_clear_tag' : { 'number' : 314, 'flags'", "'number' : 163, 'flags' : { 'public' }, 'supporters' : { 'SUN' },", "}, 'url' : 'extensions/SUN/SUN_vertex.txt', }, 'GL_VIV_shader_binary' : { 'esnumber' : 85, 'flags' :", "'number' : 96, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI'", "}, 'url' : 'extensions/SGIX/SGIX_texture_lod_bias.txt', }, 'GL_SGIX_texture_mipmap_anisotropic' : { 'flags' : { 'incomplete' },", "'extensions/NV/NV_evaluators.txt', }, 'GL_NV_explicit_attrib_location' : { 'esnumber' : 159, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_performance_monitor.txt', }, 'GL_AMD_pinned_memory' :", ": 473, 'esnumber' : 236, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sample_mask_override_coverage.txt',", "'alias' : { 'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control' }, }, 'GL_KHR_debug' : { 'arbnumber' : 119,", ": 'extensions/OES/OES_element_index_uint.txt', 
}, 'GL_OES_extended_matrix_palette' : { 'esnumber' : 8, 'flags' : { 'public'", ": 322, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_make_current_read.txt', }, 'WGL_ARB_pbuffer' : {", "}, 'GL_EXT_color_buffer_half_float' : { 'esnumber' : 97, 'flags' : { 'public' }, 'url'", "{ 'esnumber' : 255, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_framebuffer_downsample.txt', },", ": 'extensions/NV/NV_vertex_program4.txt', }, 'GL_NV_video_capture' : { 'number' : 374, 'flags' : { 'public'", "'url' : 'extensions/KHR/KHR_context_flush_control.txt', 'alias' : { 'GLX_ARB_context_flush_control', 'WGL_ARB_context_flush_control' }, }, 'GL_KHR_debug' : {", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cube_map.txt', }, 'GL_ARB_seamless_cubemap_per_texture' : { 'arbnumber'", "334, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/ATI/ATI_meminfo.txt', },", "'extensions/NV/NV_texture_compression_s3tc.txt', }, 'GL_NV_texture_compression_s3tc_update' : { 'esnumber' : 95, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object2.txt', }, 'GL_NV_path_rendering' :", ": 'extensions/ARB/ARB_depth_clamp.txt', }, 'GL_ARB_depth_texture' : { 'arbnumber' : 22, 'flags' : { 'public'", "'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_texture.txt', }, 'GLX_SGIX_dm_buffer' : { 'number' : 86,", "'extensions/OES/OES_rgb8_rgba8.txt', }, 'GL_OES_sample_shading' : { 'esnumber' : 169, 'flags' : { 'public' },", "'extensions/ATI/ATI_fragment_shader.txt', }, 'GL_ATI_map_object_buffer' : { 'number' : 288, 'flags' : { 'public' },", ": { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control.txt', }, 'WGL_EXT_swap_control_tear' : { 'number'", "{ 'esnumber' : 7, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_texture.txt', },", 
"'extensions/NV/NV_blend_minmax_factor.txt', }, 'GL_NV_blend_square' : { 'number' : 194, 'flags' : { 'public' },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_float.txt', }, 'GL_NV_shader_atomic_float64' : { 'number' : 488,", "88, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "387, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_explicit_multisample.txt', }, 'GL_NV_fbo_color_attachments' : { 'esnumber' :", "'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_pbuffer.txt', }, 'WGL_EXT_pixel_format' : {", "'GL_NV_explicit_multisample' : { 'number' : 357, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'MESA' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float.txt', }, 'GL_AMD_gpu_shader_half_float_fetch' : { 'number'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_float.txt',", "{ 'number' : 254, 'flags' : { 'public' }, 'supporters' : { 'I3D'", "'public' }, 'url' : 'extensions/OES/OES_stencil1.txt', }, 'GL_OES_stencil4' : { 'esnumber' : 32, 'flags'", ": { 'public' }, 'url' : 'extensions/OES/OES_vertex_type_10_10_10_2.txt', }, 'GL_OES_viewport_array' : { 'esnumber' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program.txt', },", "'esnumber' : 138, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_depth_texture.txt', }, 'GL_ANGLE_framebuffer_blit'", "'GL_ARB_gpu_shader_fp64' : { 'arbnumber' : 89, 'flags' : { 'public' }, 'url' :", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_blend_cmultiply.txt', }, 'GL_SGIX_calligraphic_fragment' : { 'number' : 82, 'flags'", "320, 'flags' : { 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url'", "{ 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_conditional_render.txt', }, 'GL_NVX_gpu_memory_info' : { 'number' : 438,", ": { 'public' }, 'url' : 'extensions/EXT/EXT_misc_attribute.txt', }, 'GL_EXT_multi_draw_arrays' : { 'number' :", "}, 'url' : 
'extensions/EXT/EXT_texture_sRGB_decode.txt', }, 'GL_EXT_texture_shared_exponent' : { 'number' : 333, 'flags' :", ": 294, 'esnumber' : 6, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/EXT/EXT_pvrtc_sRGB.txt', }, 'GL_EXT_raster_multisample' : { 'number' : 462, 'esnumber' :", "'public' }, 'url' : 'extensions/OES/OES_depth24.txt', }, 'GL_OES_depth32' : { 'esnumber' : 25, 'flags'", "'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', }, 'GL_MESA_shader_integer_functions' : { 'number' : 495, 'flags'", "'esnumber' : 199, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_extended_matrix_palette.txt', }, 'GL_OES_fbo_render_mipmap' : { 'esnumber'", "172, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_present_video.txt', 'alias' : { 'GLX_NV_present_video',", "'extensions/OES/OES_query_matrix.txt', }, 'GL_OES_read_format' : { 'number' : 295, 'esnumber' : 17, 'flags' :", ": 173, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'INGR' },", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_mirrored_repeat.txt', }, 'GL_OES_texture_npot' : { 'esnumber' :", "'GLX_SGIX_video_source' : { 'number' : 43, 'flags' : { 'public' }, 'supporters' :", "'number' : 101, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'GL_NV_instanced_arrays' : { 'esnumber' : 145, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_vertex_shader.txt', }, 'GL_EXT_vertex_weighting' : { 'number' : 188,", "'public' }, 'url' : 'extensions/OES/OES_rgb8_rgba8.txt', }, 'GL_OES_sample_shading' : { 'esnumber' : 169, 'flags'", "needed - see arbnumber 74.', }, 'WGL_ARB_create_context_profile' : { 'arbnumber' : 74, 'flags'", "}, 'url' : 'extensions/SGIX/SGIX_fog_layers.txt', }, 'GL_SGIX_fog_offset' : { 'number' : 65, 'flags' :", "'url' : 'extensions/EXT/EXT_422_pixels.txt', }, 'GL_EXT_YUV_target' : { 'esnumber' : 222, 'flags' : 
{", "'url' : 'extensions/EXT/EXT_draw_buffers_indexed.txt', }, 'GL_EXT_draw_elements_base_vertex' : { 'esnumber' : 204, 'flags' : {", ": 41, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_float_pixels.txt', }, 'GL_APPLE_flush_buffer_range' : { 'number' :", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_interleaved_elements.txt',", ": 'extensions/ARB/ARB_depth_texture.txt', }, 'GL_ARB_derivative_control' : { 'arbnumber' : 163, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_shader5.txt',", "'esnumber' : 10, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_framebuffer_object.txt', }, 'GL_OES_geometry_shader'", "{ 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_filter4.txt', }, 'GL_SGIS_texture_lod' : { 'number' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_coordinate_clamp.txt', },", "}, 'url' : 'extensions/NV/NV_blend_equation_advanced.txt', 'alias' : { 'GL_NV_blend_equation_advanced_coherent' }, }, 'GL_NV_blend_minmax_factor' : {", "{ 'number' : 140, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "'SUN' }, 'url' : 'extensions/SUN/SUN_triangle_list.txt', }, 'GL_SUN_vertex' : { 'number' : 166, 'flags'", "141, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage_multisample.txt', }, 'GL_ARB_texture_swizzle' : {", ": 'extensions/EXT/EXT_timer_query.txt', }, 'GL_EXT_transform_feedback' : { 'number' : 352, 'flags' : { 'public'", ": 'extensions/SGIX/SGIX_convolution_accuracy.txt', }, 'GL_SGIX_cube_map' : { 'number' : 130, 'flags' : { 'incomplete'", "'number' : 329, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'GL_HP_convolution_border_modes' : { 'number' : 67, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_cull_distance.txt', }, 'GL_ARB_debug_output' : { 'arbnumber' : 104,", "}, 'url' : 
'extensions/SGIX/GLU_SGIX_icc_compress.txt', }, 'GL_SGIX_icc_texture' : { 'number' : 154, 'flags' :", "}, 'GL_EXT_texture3D' : { 'number' : 6, 'flags' : { 'public' }, 'supporters'", "fully specified. Similar to ARB_texture_cube_map.', }, 'GL_EXT_texture_cube_map_array' : { 'esnumber' : 184, 'flags'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_histogram.txt', }, 'GL_SGIX_async_pixel' :", "}, 'WGL_ARB_create_context_profile' : { 'arbnumber' : 74, 'flags' : { 'public' }, 'url'", "'GL_EXT_tessellation_point_size' }, }, 'GL_EXT_texenv_op' : { 'flags' : { 'obsolete' }, 'url' :", "'extensions/NV/NV_conservative_raster_underestimation.txt', }, 'GLX_NV_copy_buffer' : { 'number' : 457, 'flags' : { 'public' },", ": { 'number' : 435, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_ANGLE_translated_shader_source' : { 'esnumber' : 113, 'flags' : { 'public' }, 'url'", "{ 'number' : 55, 'flags' : { 'public' }, 'supporters' : { 'SGI'", ": { 'number' : 163, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/ARB/ARB_shading_language_include.txt', }, 'GL_ARB_shading_language_packing' : { 'arbnumber' : 116, 'flags' : {", "540 only.', }, 'GL_SGIX_ycrcba' : { 'number' : 203, 'flags' : { 'incomplete'", "'extensions/NV/NV_shader_thread_shuffle.txt', }, 'GL_NV_shadow_samplers_array' : { 'esnumber' : 146, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_win32_keyed_mutex.txt', }, 'GL_EXT_window_rectangles' : { 'number'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_depth_clamp_separate.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback2.txt', }, 'GL_ARB_transform_feedback3' : { 'arbnumber'", "}, 'GL_APPLE_clip_distance' : { 'esnumber' : 193, 'flags' : { 'public' }, 'url'", ": 34, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_3D.txt', }, 'GL_OES_texture_border_clamp' :", ": 493, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", 
"'url' : 'extensions/EXT/EXT_light_texture.txt', }, 'GL_EXT_map_buffer_range' : { 'esnumber' : 121, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external.txt', }, 'GL_OES_EGL_image_external_essl3' : { 'esnumber'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_instanced.txt', }, 'GL_ARB_enhanced_layouts' : {", "77, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_compression_bptc.txt', }, 'GL_ARB_texture_compression_rgtc' : {", "165, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_get_texture_sub_image.txt', }, 'GL_ARB_gl_spirv' : {", "{ 'esnumber' : 63, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get2.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_pixel_data_range.txt', }, 'GL_NV_platform_binary'", "}, 'GL_SGIX_line_quality_hint' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_line_quality_hint.txt', },", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int16.txt', },", "}, 'GL_EXT_memory_object_fd' : { 'number' : 504, 'esnumber' : 281, 'flags' : {", ": 275, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture2.txt', }, 'GL_EXT_multiview_draw_buffers' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_hyperpipe.txt', }, 'GLU_SGIX_icc_compress' : {", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_buffer_float.txt', }, 'GL_ARB_depth_clamp'", "'url' : 'extensions/AMD/AMD_stencil_operation_extended.txt', }, 'GL_AMD_texture_gather_bias_lod' : { 'number' : 502, 'flags' : {", "'GL_OES_compressed_paletted_texture' : { 'number' : 294, 'esnumber' : 6, 'flags' : { 'public'", "}, 'url' : 'extensions/OES/OES_texture_float.txt', 'alias' : { 'GL_OES_texture_half_float' }, }, 'GL_OES_texture_float_linear' : {", "'extensions/OES/OES_texture_storage_multisample_2d_array.txt', }, 'GL_OES_texture_view' : { 'esnumber' : 218, 'flags' : { 'public' },", 
"{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pixel_format.txt', }, 'WGL_ARB_render_texture'", "'GL_ARB_compute_shader' : { 'arbnumber' : 122, 'flags' : { 'public' }, 'url' :", "'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_release_buffers.txt', }, 'GL_MESA_resize_buffers' : { 'number' : 196, 'flags'", "}, 'url' : 'extensions/OML/OML_subsample.txt', }, 'GLX_OML_swap_method' : { 'number' : 237, 'flags' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_buffer_object.txt', 'alias'", ": { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_hdr.txt', 'alias' : { 'GL_KHR_texture_compression_astc_ldr' }, },", ": { 'number' : 115, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias' : { 'GL_SGIX_color_type' }, }, 'GLX_SGIX_color_typeXXX' : { 'number'", "404, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'GL_EXT_transform_feedback2' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/EXT/EXT_transform_feedback2.txt', 'comments'", "'url' : 'extensions/EXT/EXT_tessellation_shader.txt', 'alias' : { 'GL_EXT_tessellation_point_size' }, }, 'GL_EXT_texenv_op' : { 'flags'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_dvc.txt', }, 'GLX_SGIX_fbconfig' : {", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', }, 'GL_NV_bindless_texture' : { 'number' :", "'ARB' }, 'url' : 'extensions/ARB/ARB_point_parameters.txt', }, 'GL_ARB_point_sprite' : { 'arbnumber' : 35, 'flags'", "'GL_SGIX_packed_6bytes' : { 'number' : 162, 'flags' : { 'incomplete' }, 'supporters' :", "'extensions/NV/NV_internalformat_sample_query.txt', }, 'GL_NV_light_max_exponent' : { 'number' : 189, 'flags' : { 'public' },", "395, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_occlusion_query2.txt', }, 'GL_ARB_parallel_shader_compile' : { 'arbnumber'", ": { 'public' 
}, 'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/EXT_422_pixels.txt', },", "'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_color_clamp.txt', }, 'GL_INGR_interlace_read' :", "'url' : 'extensions/APPLE/APPLE_row_bytes.txt', }, 'GL_APPLE_specular_vector' : { 'number' : 159, 'flags' : {", ": { 'public' }, 'supporters' : { 'IdSoftware', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_swizzle.txt',", "'GL_ARB_uniform_buffer_object' : { 'arbnumber' : 57, 'flags' : { 'public' }, 'url' :", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_array_object.txt', }, 'GL_ARB_vertex_attrib_64bit' : { 'arbnumber' : 99,", ": 'extensions/ARB/ARB_texture_buffer_object.txt', }, 'GL_ARB_texture_buffer_object_rgb32' : { 'arbnumber' : 92, 'flags' : { 'public'", ": { 'esnumber' : 102, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shadow_samplers.txt',", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_subsample.txt', }, 'GLX_SGIX_swap_barrier' : { 'number' : 92,", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/WGL_AMD_gpu_association.txt', }, 'WGL_ARB_buffer_region'", ": 386, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_color_matrix.txt', }, 'GL_SGI_color_table' : {", "271, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader2.txt', }, 'GL_NV_texture_shader3'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_query_buffer_object.txt', }, 'GL_AMD_sample_positions' : { 'number' :", "'extensions/ARB/ARB_texture_filter_minmax.txt', }, 'GL_ARB_texture_float' : { 'arbnumber' : 41, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_instanced_arrays.txt', }, 'GLX_EXT_libglvnd' : { 'number'", "'GL_EXT_pixel_transform_color_table' : { 'number' : 139, 'flags' : { 'public' }, 'supporters' :", "61, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", ": { 'public' }, 
'url' : 'extensions/EXT/EXT_robustness.txt', }, 'GL_EXT_sRGB' : { 'esnumber' :", ": 'extensions/EXT/EXT_texture_rg.txt', }, 'GL_EXT_texture_sRGB' : { 'number' : 315, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/ATI/ATI_meminfo.txt', }, 'GL_ATI_pn_triangles'", "'number' : 388, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'url' : 'extensions/NV/NV_3dvision_settings.txt', }, 'GL_NV_EGL_stream_consumer_external' : { 'esnumber' : 104, 'flags' : {", ": 250, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', }, 'GL_PGI_misc_hints' :", "'GL_SGIS_generate_mipmap' : { 'number' : 32, 'flags' : { 'public' }, 'supporters' :", "'AMD' }, 'url' : 'extensions/AMD/AMD_shader_atomic_counter_ops.txt', }, 'GL_AMD_shader_ballot' : { 'number' : 497, 'flags'", "'extensions/APPLE/APPLE_transform_hint.txt', }, 'GL_APPLE_vertex_array_object' : { 'number' : 273, 'flags' : { 'public' },", ": 'extensions/ARB/ARB_get_program_binary.txt', }, 'GL_ARB_get_texture_sub_image' : { 'arbnumber' : 165, 'flags' : { 'public'", ": 145, 'flags' : { 'public' }, 'supporters' : { '3DFX', 'NVIDIA', 'REND'", ": { 'number' : 483, 'esnumber' : 258, 'flags' : { 'public' },", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program.txt', }, 'GL_NV_fragment_program2' : { 'number' :", "{ 'number' : 300, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'GL_SGIX_fog_patchy' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_patchy.txt', }, 'GL_SGIX_fog_scale'", "'arbnumber' : 28, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'url' : 'extensions/ARB/ARB_draw_buffers_blend.txt', }, 'GL_ARB_draw_elements_base_vertex' : { 'arbnumber' : 62, 'flags' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', }, 'GL_ARB_texture_buffer_range' : { 'arbnumber' :", "'url' : 'extensions/KHR/KHR_parallel_shader_compile.txt', }, 'GL_KHR_robust_buffer_access_behavior' : { 'arbnumber' : 
169, 'esnumber' : 189,", ": { 'number' : 220, 'flags' : { 'public' }, 'supporters' : {", ": { 'arbnumber' : 56, 'flags' : { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context.txt',", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_clock.txt', }, 'GL_ARB_shader_draw_parameters' : { 'arbnumber'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_lod.txt', }, 'GL_SGIX_pixel_tiles' : { 'number'", "{ 'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_two_side.txt', },", "'url' : 'extensions/SGIX/SGIX_texture_supersample.txt', }, 'GL_SGIX_vector_ops' : { 'flags' : { 'incomplete' }, 'url'", "'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_array_formats.txt', }, 'GL_EXT_index_func'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_texture.txt', }, 'GL_NV_blend_equation_advanced' : { 'number' : 433,", ": { 'number' : 416, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2.txt', }, 'GL_NV_vertex_program2_option' : { 'number'", "325, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "}, 'url' : 'extensions/NV/NV_texture_compression_s3tc.txt', }, 'GL_NV_texture_compression_s3tc_update' : { 'esnumber' : 95, 'flags' :", ": 191, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_convolution.txt', }, 'GL_EXT_coordinate_frame' : { 'number' : 156,", "'extensions/ARB/ARB_texture_compression_bptc.txt', }, 'GL_ARB_texture_compression_rgtc' : { 'arbnumber' : 52, 'flags' : { 'public' },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program4.txt', }, 'GL_NV_gpu_program5'", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_icc_texture.txt', }, 'GL_SGIX_igloo_interface'", ": { 'arbnumber' : 122, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compute_shader.txt',", "'public' }, 'supporters' : { 
'SGI' }, 'url' : 'extensions/SGIS/SGIS_pixel_texture.txt', }, 'GL_SGIS_point_line_texgen' :", "9, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR', 'KGC',", "'extensions/APPLE/APPLE_texture_packed_float.txt', }, 'GL_APPLE_texture_range' : { 'number' : 367, 'flags' : { 'public' },", "{ 'APPLE', 'CodeWeavers', 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_decode.txt', }, 'GL_EXT_texture_shared_exponent' : {", "}, 'GL_OES_draw_buffers_indexed' : { 'esnumber' : 209, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query.txt', }, 'GL_ARB_internalformat_query2' : { 'arbnumber' :", "{ 'KHR' }, 'url' : 'extensions/OML/OML_interlace.txt', }, 'GL_OML_resample' : { 'number' : 241,", "'public' }, 'url' : 'extensions/OES/OES_viewport_array.txt', }, 'GL_OML_interlace' : { 'number' : 239, 'flags'", "'arbnumber' : 193, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": 'extensions/EXT/EXT_vertex_weighting.txt', }, 'GLX_EXT_visual_info' : { 'number' : 28, 'flags' : { 'public'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_framebuffer_multisample_coverage.txt', }, 'GL_NV_generate_mipmap_sRGB' : { 'esnumber'", "'GL_SGIX_blend_cmultiply' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_blend_cmultiply.txt', }, 'GL_SGIX_calligraphic_fragment'", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', }, 'GL_MESA_pack_invert' :", "}, 'GL_OES_EGL_image_external' : { 'esnumber' : 87, 'flags' : { 'public' }, 'url'", "'ES', 'HP', 'SGI', 'SUN' }, 'url' : 'extensions/SGI/SGI_texture_color_table.txt', }, 'GLX_SGI_transparent_pixel' : { 'number'", "}, 'GL_NV_gpu_multicast' : { 'number' : 494, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_source.txt', },", ": 256, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_protected_textures.txt', }, 'GL_EXT_provoking_vertex' :", 
"'esnumber' : 1, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_equation_separate.txt', }, 'GL_OES_blend_func_separate'", "62, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get.txt', }, 'GL_QCOM_extended_get2' : {", "}, 'GL_SGIX_fragment_specular_lighting' : { 'flags' : { 'incomplete', 'public' }, 'url' : 'extensions/SGIX/SGIX_fragment_specular_lighting.txt',", "311, 'flags' : { 'public' }, 'supporters' : { 'GREMEDY' }, 'url' :", "'extensions/OES/OES_draw_buffers_indexed.txt', }, 'GL_OES_draw_elements_base_vertex' : { 'esnumber' : 219, 'flags' : { 'public' },", ": 174, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_storage_multisample_2d_array.txt', }, 'GL_OES_texture_view' :", "'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl' : 'extensions/EXT/EXT_separate_shader_objects.gles.txt', 'comments' : 'Different that", "'NVIDIA' }, 'url' : 'extensions/NV/WGL_NV_delay_before_swap.txt', }, 'WGL_NV_gpu_affinity' : { 'number' : 355, 'flags'", "126, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "'public' }, 'url' : 'extensions/ARB/GLX_ARB_robustness_application_isolation.txt', 'alias' : { 'GLX_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_robustness_isolation' :", "'url' : 'extensions/OML/OML_subsample.txt', }, 'GLX_OML_swap_method' : { 'number' : 237, 'flags' : {", "'GL_NV_vertex_program1_1' : { 'number' : 266, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/ARB/ARB_post_depth_coverage.txt', }, 'GL_ARB_program_interface_query' : { 'arbnumber' : 134, 'flags' :", ": 'extensions/SGI/SGI_texture_color_table.txt', }, 'GLX_SGI_transparent_pixel' : { 'number' : 153, 'flags' : { 'obsolete'", "}, 'url' : 'extensions/EXT/EXT_stencil_wrap.txt', }, 'GL_EXT_subtexture' : { 'number' : 9, 'flags' :", "332, 'esnumber' : 286, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'extensions/ARB/ARB_vertex_attrib_binding.txt', }, 'GL_ARB_vertex_blend' : { 'arbnumber' 
: 15, 'flags' : { 'public' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_rgtc.txt', }, 'GL_EXT_texture_compression_s3tc' : { 'number'", ": { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_dilate.txt', }, 'GL_NV_conservative_raster_pre_snap' : { 'number' :", "{ '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_secondary_color.txt', }, 'GL_EXT_separate_shader_objects' : { 'number'", "187, 'esnumber' : 41, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'esnumber' : 57, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_user_clip_plane.txt', },", ": { 'arbnumber' : 157, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_group_vote.txt',", ": 184, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_cube_map_array.txt', }, 'GL_EXT_texture_env' :", "'arbnumber' : 81, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sampler_objects.txt', }, 'GL_ARB_seamless_cube_map'", "'extensions/APPLE/APPLE_fence.txt', }, 'GL_APPLE_float_pixels' : { 'number' : 368, 'flags' : { 'public' },", "into EXT_texture_env_combine.', }, 'GL_EXT_texture' : { 'number' : 4, 'flags' : { 'public'", "}, 'url' : 'extensions/NV/NV_vertex_program2_option.txt', }, 'GL_NV_vertex_program3' : { 'number' : 306, 'flags' :", ": 'extensions/OES/OES_texture_env_crossbar.txt', }, 'GL_OES_texture_float' : { 'esnumber' : 36, 'flags' : { 'public'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias' : { 'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB' }, },", "{ 'number' : 221, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "'GL_NV_conservative_raster_pre_snap' : { 'number' : 517, 'esnumber' : 297, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ANGLE/ANGLE_instanced_arrays.txt', }, 'GL_ANGLE_pack_reverse_row_order' : { 'esnumber' : 110, 'flags'", "'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_vertex_array_bgra.txt', }, 'GL_EXT_vertex_array_set' : { 'flags' :", ": 75, 'flags' : { 'public' 
}, 'supporters' : { 'SGI' }, 'url'", "}, 'GL_NV_half_float' : { 'number' : 283, 'flags' : { 'public' }, 'supporters'", "'GL_NV_texture_shader2' : { 'number' : 231, 'flags' : { 'public' }, 'supporters' :", "'extensions/EXT/EXT_blend_func_extended.txt', }, 'GL_EXT_blend_func_separate' : { 'number' : 173, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_direct_state_access.txt', }, 'GL_ARB_draw_buffers' : { 'arbnumber' : 37, 'flags' :", "'extensions/EXT/EXT_copy_image.txt', }, 'GL_EXT_copy_texture' : { 'number' : 10, 'flags' : { 'public' },", ": 289, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", "'number' : 116, 'flags' : { 'obsolete' }, 'supporters' : { 'SGI' },", "'extensions/EXT/GLX_EXT_stereo_tree.txt', }, 'GL_EXT_gpu_program_parameters' : { 'number' : 320, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_texture_cube_map.txt', }, 'GL_ARB_texture_cube_map_array' : { 'arbnumber' : 71, 'flags' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_shader4.txt', }, 'GL_NV_geometry_shader_passthrough' : { 'number' :", "'extensions/NV/WGL_NV_DX_interop.txt', }, 'WGL_NV_DX_interop2' : { 'number' : 412, 'flags' : { 'public' },", "104, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_debug_output.txt', }, 'GL_ARB_depth_buffer_float' : {", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_draw_buffers.txt', }, 'GL_ATI_element_array' : { 'number' :", "'public' }, 'url' : 'extensions/EXT/EXT_map_buffer_range.txt', }, 'GL_EXT_misc_attribute' : { 'number' : 31, 'flags'", ": 'extensions/NV/NV_bindless_texture.txt', }, 'GL_NV_blend_equation_advanced' : { 'number' : 433, 'esnumber' : 163, 'flags'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback.txt', },", ": 'extensions/OES/OES_texture_view.txt', }, 'GL_OES_vertex_array_object' : { 'esnumber' : 71, 'flags' : { 'public'", ": { 'number' : 210, 'flags' : { 'public' }, 'supporters' : {", "{ 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_visual_select_group.txt', }, 
'GLX_SGIX_wait_group' : { 'flags' : {", "}, 'url' : 'extensions/EXT/EXT_blend_func_separate.txt', }, 'GL_EXT_blend_logic_op' : { 'number' : 39, 'flags' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_image_samples.txt', }, 'GL_ARB_shader_texture_lod' : { 'arbnumber' : 60,", "57, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_uniform_buffer_object.txt', }, 'GL_ARB_vertex_array_bgra' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_reflection.txt', }, 'GL_NV_texture_array' : { 'esnumber' : 133, 'flags'", ": { 'GL_SUN_multi_draw_arrays' }, }, 'GL_EXT_multi_draw_indirect' : { 'esnumber' : 205, 'flags' :", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_patchy.txt', }, 'GL_SGIX_fog_scale' : { 'number' : 161, 'flags'", "'GL_OES_get_program_binary' : { 'esnumber' : 47, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/NV/NV_evaluators.txt', }, 'GL_NV_explicit_attrib_location' : { 'esnumber' : 159, 'flags' : {", "'extensions/SGIX/SGIX_fragment_specular_lighting.txt', }, 'GL_SGIX_fragments_instrument' : { 'number' : 180, 'flags' : { 'incomplete' },", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Alias to", "67, 'flags' : { 'public' }, 'supporters' : { 'HP' }, 'url' :", "292, 'esnumber' : 9, 'flags' : { 'public' }, 'supporters' : { 'KHR'", "}, 'GL_EXT_robustness' : { 'esnumber' : 107, 'flags' : { 'public' }, 'url'", "}, 'GL_SGIX_fog_offset' : { 'number' : 65, 'flags' : { 'public' }, 'supporters'", "'incomplete', 'public' }, 'url' : 'extensions/SGIX/SGIX_fragment_specular_lighting.txt', }, 'GL_SGIX_fragments_instrument' : { 'number' : 180,", "{ 'esnumber' : 108, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_storage.txt', },", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_buffer_region.txt', }, 'WGL_ARB_create_context' : { 'arbnumber'", ": { 'number' : 160, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 
'extensions/ARB/ARB_texture_compression.txt', },", "{ 'public' }, 'url' : 'extensions/OES/OES_fragment_precision_high.txt', }, 'GL_OES_framebuffer_object' : { 'esnumber' : 10,", "'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_make_current_read.txt', }, 'WGL_EXT_multisample' : { 'number' : 209, 'flags'", ": 'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', }, 'GL_NV_bindless_texture' : { 'number' : 418, 'esnumber' : 197, 'flags'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_tiles.txt', }, 'GL_SGIX_polynomial_ffd' : { 'number' : 59,", "'public' }, 'url' : 'extensions/EXT/EXT_sRGB.txt', }, 'GL_EXT_sRGB_write_control' : { 'esnumber' : 153, 'flags'", "'extensions/EXT/EXT_secondary_color.txt', }, 'GL_EXT_separate_shader_objects' : { 'number' : 377, 'esnumber' : 101, 'flags' :", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_multi_draw_indirect.txt', }, 'GL_AMD_name_gen_delete' : { 'number' :", ": { 'flags' : { 'obsolete' }, 'url' : 'extensions/EXT/EXT_texenv_op.txt', 'comments' : 'Evolved", ": 'extensions/SGIS/SGIS_texture_edge_clamp.txt', }, 'GL_SGIS_texture_filter4' : { 'number' : 7, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_pn_triangles.txt', },", "'extensions/EXT/EXT_bindable_uniform.txt', }, 'GL_EXT_blend_color' : { 'number' : 2, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object.txt', },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_draw_buffers_indexed.txt', }, 'GL_EXT_draw_elements_base_vertex' : { 'esnumber' : 204,", "{ 'number' : 260, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'Partial HP support.', }, 'GL_SGI_complex' : { 'number' : 87, 'flags' : {", ": { 'number' : 334, 'flags' : { 'public' }, 'supporters' : {", "167, 'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_display_color_table.txt', }, 'WGL_EXT_extensions_string' : {", "'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', }, 'GL_EXT_conservative_depth' : { 
'esnumber' : 268, 'flags' : { 'public' },", ": 'extensions/ARB/ARB_viewport_array.txt', }, 'GL_ARB_window_pos' : { 'arbnumber' : 25, 'flags' : { 'public'", "'esnumber' : 215, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_border_clamp.txt', }, 'GL_OES_texture_buffer'", "182, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_border_clamp.txt', }, 'GL_EXT_texture_buffer' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_draw_transform_feedback.txt', }, 'GL_EXT_external_buffer' : { 'number' : 508,", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_tessellation_shader.txt', }, 'GL_ARB_texture_barrier' : { 'arbnumber'", "'extensions/EXT/EXT_framebuffer_multisample.txt', }, 'GL_EXT_framebuffer_multisample_blit_scaled' : { 'number' : 409, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview2.txt', }, 'GL_OVR_multiview_multisampled_render_to_texture' : { 'esnumber'", "Visual Workstation 320 / 540 only.', }, 'GL_SGIX_ycrcba' : { 'number' : 203,", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shading_language_100.txt', }, 'GL_ARB_shading_language_420pack'", "{ 'arbnumber' : 136, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_size.txt', },", ": 100, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_viewport_array.txt', }, 'GL_ARB_window_pos' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcba.txt', }, 'GL_SGI_color_matrix' : {", "}, 'url' : 'extensions/EXT/EXT_vertex_array_setXXX.txt', }, 'GL_EXT_vertex_attrib_64bit' : { 'number' : 387, 'flags' :", "}, 'url' : 'extensions/APPLE/APPLE_sync.txt', }, 'GL_APPLE_texture_2D_limited_npot' : { 'esnumber' : 59, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_counters.txt',", "{ 'esnumber' : 133, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_array.txt', },", "}, 'url' : 'extensions/3DFX/3DFX_multisample.txt', }, 'GL_3DFX_tbuffer' : { 'number' : 
208, 'flags' :", ": { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_make_current_read.txt', }, 'GLX_SGI_swap_control' : { 'number' :", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float.txt', }, 'GL_AMD_gpu_shader_half_float_fetch' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize_float.txt', }, 'GLX_SGIX_video_source' : { 'number' : 43,", "}, 'GL_NV_texture_rectangle' : { 'number' : 229, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_cube_map_array.txt', }, 'GL_ARB_texture_env_add' : { 'arbnumber' :", "'GL_ATI_texture_env_combine3' : { 'number' : 279, 'flags' : { 'public' }, 'supporters' :", "48, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'esnumber' : 43, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture.txt', },", ": 369, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_compression_bptc.txt', }, 'GL_ARB_texture_compression_rgtc' : { 'arbnumber' :", "}, 'url' : 'extensions/SGIX/SGIX_instrument_error.txt', }, 'GL_SGIX_instruments' : { 'number' : 55, 'flags' :", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_lod.txt', }, 'GL_SGIX_pixel_tiles'", "'url' : 'extensions/OES/OES_shader_multisample_interpolation.txt', }, 'GL_OES_single_precision' : { 'number' : 293, 'esnumber' : 18,", "'public' }, 'url' : 'extensions/NV/NV_bgr.txt', }, 'GL_NV_bindless_multi_draw_indirect' : { 'number' : 432, 'flags'", "}, 'GL_NV_shader_storage_buffer_object' : { 'number' : 422, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource.txt', },", "}, 'url' : 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias' : { 'WGL_EXT_create_context_es_profile' }, }, 'WGL_EXT_depth_float' : {", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_conservative_depth.txt', }, 
'GL_AMD_debug_output' :", "}, 'GL_IMG_read_format' : { 'esnumber' : 53, 'flags' : { 'public' }, 'url'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program_option.txt', }, 'GL_NV_fragment_shader_interlock' : { 'number'", "'extensions/NV/NV_robustness_video_memory_purge.txt', }, 'GL_NV_sRGB_formats' : { 'esnumber' : 148, 'flags' : { 'public' },", "'GL_EXT_blend_subtract' : { 'number' : 38, 'flags' : { 'public' }, 'supporters' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_source.txt', }, 'GLX_SGIX_visual_select_group' : { 'number' : 234,", ": { 'public' }, 'url' : 'extensions/ARB/ARB_base_instance.txt', }, 'GL_ARB_bindless_texture' : { 'arbnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_compatibility.txt', }, 'GL_ARB_compressed_texture_pixel_storage' : { 'arbnumber'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rg.txt', }, 'GL_ARB_texture_rgb10_a2ui' : { 'arbnumber' :", ": { 'esnumber' : 162, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_compression_astc.txt',", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_depth_clamp_separate.txt', }, 'GL_AMD_draw_buffers_blend' : { 'number'", "'extensions/NV/NV_present_video.txt', 'alias' : { 'GLX_NV_present_video', 'WGL_NV_present_video' }, }, 'GL_NV_primitive_restart' : { 'number' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_separate_shader_objects.txt', }, 'GL_ARB_shader_atomic_counter_ops' : { 'arbnumber' : 182,", "}, 'GL_OML_resample' : { 'number' : 241, 'flags' : { 'public' }, 'supporters'", "}, 'GL_APPLE_row_bytes' : { 'number' : 372, 'flags' : { 'public' }, 'supporters'", "'extensions/SGIX/GLU_SGIX_icc_compress.txt', }, 'GL_SGIX_icc_texture' : { 'number' : 154, 'flags' : { 'incomplete' },", "'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_lod.txt', }, 'GL_ARB_shader_viewport_layer_array' : { 'arbnumber' : 185, 'flags'", "}, 'GL_ARB_sync' : { 'arbnumber' : 66, 'flags' : { 'public' }, 'url'", ": { 'number' : 146, 'flags' : { 'public' }, 
'url' : 'extensions/EXT/EXT_texture_env.txt',", "}, 'url' : 'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', }, 'GL_IMG_texture_filter_cubic' : { 'esnumber' : 251, 'flags' :", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_texture_range.txt', }, 'GL_APPLE_transform_hint' : { 'number' : 160,", "'number' : 250, 'flags' : { 'public' }, 'supporters' : { 'I3D' },", ": 36, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR', 'SGI'", "'number' : 13, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_buffer_unified_memory.txt', }, 'GL_NV_vertex_program' : { 'number'", "'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_texture_scissor.txt', }, 'GL_KHR_blend_equation_advanced' :", "'ARB' }, 'url' : 'extensions/ARB/ARB_occlusion_query.txt', }, 'GL_ARB_occlusion_query2' : { 'arbnumber' : 80, 'flags'", "'APPLE', 'CodeWeavers', 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_decode.txt', }, 'GL_EXT_texture_shared_exponent' : { 'number'", ": 'extensions/OES/OES_stencil1.txt', }, 'GL_OES_stencil4' : { 'esnumber' : 32, 'flags' : { 'public'", "}, 'GL_SGIX_depth_pass_instrument' : { 'number' : 205, 'flags' : { 'incomplete' }, 'supporters'", "'GL_SGIX_texture_lod_bias' : { 'number' : 84, 'flags' : { 'public' }, 'supporters' :", "'GL_EXT_buffer_storage' : { 'esnumber' : 239, 'flags' : { 'public' }, 'url' :", ": 92, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', }, 'GL_ARB_texture_buffer_range' :", "{ 'IBM', 'IMG', 'SUN' }, 'url' : 'extensions/EXT/EXT_multi_draw_arrays.txt', 'alias' : { 'GL_SUN_multi_draw_arrays' },", "{ 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_texture_scissor.txt', }, 'GL_KHR_blend_equation_advanced'", "{ 'public' }, 'url' : 'extensions/ANGLE/ANGLE_translated_shader_source.txt', }, 'GL_APPLE_aux_depth_stencil' : { 'number' : 370,", ": 'extensions/NV/NV_blend_square.txt', }, 
'GL_NV_clip_space_w_scaling' : { 'number' : 486, 'esnumber' : 295, 'flags'", ": 43, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "143, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_multisample.txt', }, 'GL_NV_framebuffer_multisample_coverage' : {", "445, 'flags' : { 'public' }, 'url' : 'extensions/NV/GLX_NV_delay_before_swap.txt', }, 'GL_NV_depth_buffer_float' : {", "{ 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_histogram.txt', },", "'extensions/EXT/EXT_clip_volume_hint.txt', }, 'GL_EXT_cmyka' : { 'number' : 18, 'flags' : { 'public' },", ": { 'esnumber' : 240, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture.txt',", "23, 'flags' : { 'public' }, 'supporters' : { 'ES', 'INGR', 'SGI' },", "177, 'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url' :", ": { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_setXXX.txt', }, 'GL_EXT_vertex_attrib_64bit' : { 'number' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program3.txt', }, 'GL_NV_vertex_program4' : { 'number' : 325, 'flags'", "}, 'url' : 'extensions/ARB/ARB_compute_shader.txt', }, 'GL_ARB_compute_variable_group_size' : { 'arbnumber' : 153, 'flags' :", "{ 'arbnumber' : 113, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_map_buffer_alignment.txt', },", ": { 'arbnumber' : 179, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_parallel_shader_compile.txt',", "'number' : 457, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'url' : 'extensions/SGIX/SGIX_pixel_texture_lod.txt', }, 'GL_SGIX_pixel_tiles' : { 'number' : 46, 'flags' : {", ": 234, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'arbnumber' : 174, 'esnumber' : 168, 'flags' : { 'public' }, 'url' :", "'flags' : { 'obsolete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multitexture.txt',", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ir_instrument1.txt', }, 'GL_SGIX_line_quality_hint'", ": 63, 'flags' : 
{ 'public' }, 'url' : 'extensions/ARB/ARB_fragment_coord_conventions.txt', }, 'GL_ARB_fragment_layer_viewport' :", "101, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'GL_ARB_transform_feedback_instanced' : { 'arbnumber' : 109, 'flags' : { 'public' }, 'url'", ": 'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', }, 'GL_ARB_viewport_array' : { 'arbnumber' : 100, 'flags' : { 'public'", "}, 'supporters' : { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_vertex_array_bgra.txt', },", "}, 'url' : 'extensions/EXT/EXT_blend_color.txt', }, 'GL_EXT_blend_equation_separate' : { 'number' : 299, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_swap_group.txt',", ": 373, 'esnumber' : 76, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shadow_ambient.txt', }, 'GL_ARB_sparse_buffer' : { 'arbnumber'", "'url' : 'extensions/OES/OES_surfaceless_context.txt', }, 'GL_OES_tessellation_shader' : { 'esnumber' : 214, 'flags' : {", "'extensions/I3D/WGL_I3D_swap_frame_usage.txt', }, 'GL_WIN_phong_shading' : { 'number' : 113, 'flags' : { 'public' },", "}, 'GLX_SGI_transparent_pixel' : { 'number' : 153, 'flags' : { 'obsolete' }, 'url'", "242, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview2.txt', }, 'GL_OVR_multiview_multisampled_render_to_texture' : {", "'extensions/EXT/EXT_float_blend.txt', }, 'GL_EXT_fog_coord' : { 'number' : 149, 'flags' : { 'public' },", "'url' : 'extensions/ARB/ARB_pixel_buffer_object.txt', }, 'GL_ARB_point_parameters' : { 'arbnumber' : 14, 'flags' : {", "'ARB' }, 'url' : 'extensions/ARB/ARB_shadow_ambient.txt', }, 'GL_ARB_sparse_buffer' : { 'arbnumber' : 172, 'flags'", "'extensions/ARB/ARB_copy_buffer.txt', }, 'GL_ARB_copy_image' : { 'arbnumber' : 123, 'flags' : { 'public' },", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_wrap.txt', }, 'GL_EXT_subtexture' : { 'number' : 9, 'flags'", ": 
'extensions/INTEL/INTEL_fragment_shader_ordering.txt', }, 'GL_INTEL_framebuffer_CMAA' : { 'number' : 481, 'esnumber' : 246, 'flags'", ": { 'arbnumber' : 174, 'esnumber' : 168, 'flags' : { 'public' },", "'esnumber' : 173, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_stencil8.txt', }, 'GL_OES_texture_storage_multisample_2d_array'", ": { 'number' : 181, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_sprite.txt',", "}, 'url' : 'extensions/NV/NV_pack_subimage.txt', }, 'GL_NV_packed_depth_stencil' : { 'number' : 226, 'flags' :", "'GL_ARB_instanced_arrays' : { 'arbnumber' : 49, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments' : 'Supported on Visual Workstation 320 / 540 only.', },", ": { 'esnumber' : 195, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_packed_float.txt',", ": { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture.txt', }, 'GL_EXT_sparse_texture2' : { 'number' :", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_set_3dfx_mode.txt', }, 'GL_MESA_shader_integer_functions' :", "}, 'url' : 'extensions/ANGLE/ANGLE_framebuffer_blit.txt', }, 'GL_ANGLE_framebuffer_multisample' : { 'esnumber' : 84, 'flags' :", ": { 'number' : 332, 'esnumber' : 286, 'flags' : { 'public' },", ": { 'number' : 147, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_perturb_normal.txt',", "}, 'url' : 'extensions/ARB/ARB_create_context_no_error.txt', 'comments' : 'Shares extension spec with WGL_ARB_create_context_no_error.', 'alias' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_conditional_render.txt', }, 'GL_NVX_gpu_memory_info' : {", ": { 'arbnumber' : 31, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/ARB/ARB_texture_compression_bptc.txt', }, 'GL_ARB_texture_compression_rgtc' : { 'arbnumber' : 52, 'flags' :", "190, 'flags' : { 'public' }, 'supporters' : 
{ 'NVIDIA' }, 'url' :", "}, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_convolution_border_modes.txt', }, 'GL_HP_image_transform' : {", "{ 'number' : 40, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_lock.txt', }, 'WGL_I3D_swap_frame_usage' :", ": 'extensions/INGR/INGR_color_clamp.txt', }, 'GL_INGR_interlace_read' : { 'number' : 175, 'flags' : { 'public'", ": 'extensions/EXT/EXT_primitive_bounding_box.txt', }, 'GL_EXT_protected_textures' : { 'esnumber' : 256, 'flags' : { 'public'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_occlusion_instrument.txt', }, 'GL_SGIX_packed_6bytes' : { 'number' : 162,", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_lod.txt', }, 'GL_SGIX_pixel_tiles' : { 'number' : 46, 'flags'", "'GLX_SGI_make_current_read' : { 'number' : 42, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_func.txt', }, 'GL_EXT_index_material'", ": 160, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", "'url' : 'extensions/EXT/EXT_compiled_vertex_array.txt', }, 'GL_EXT_compressed_ETC1_RGB8_sub_texture' : { 'esnumber' : 188, 'flags' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_load_store.txt', }, 'GL_ARB_shader_image_size' : { 'arbnumber' :", "'GL_AMD_shader_stencil_export' : { 'number' : 382, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_packed_depth_stencil.txt', }, 'GL_EXT_packed_float' : { 'number' :", "'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_minmax.txt', }, 'GL_EXT_blend_subtract' :", "{ 'arbnumber' : 72, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_gather.txt', },", "}, 'GL_SGIX_vertex_preclip' : { 'number' : 210, 'flags' : { 'public' }, 'supporters'", "264, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', }, 
'GL_EXT_shader_pixel_local_storage' : {", "511, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'number' : 154, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "307, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'GL_EXT_compiled_vertex_array' : { 'number' : 97, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_NV_texture_npot_2D_mipmap' : { 'esnumber' : 96, 'flags' : { 'public' }, 'url'", "'arbnumber' : 182, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counter_ops.txt', }, 'GL_ARB_shader_atomic_counters'", ": 'extensions/APPLE/APPLE_copy_texture_levels.txt', }, 'GL_APPLE_element_array' : { 'number' : 271, 'flags' : { 'public'", "'url' : 'extensions/QCOM/QCOM_driver_control.txt', }, 'GL_QCOM_extended_get' : { 'esnumber' : 62, 'flags' : {", ": { 'arbnumber' : 137, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_storage_buffer_object.txt',", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_list_priority.txt', }, 'GL_SGIX_mpeg1' : { 'flags' : {", "293, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_texture_foveated.txt', }, 'GL_QCOM_shader_framebuffer_fetch_noncoherent' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_program_interface_query.txt', }, 'GL_ARB_provoking_vertex' : { 'arbnumber' :", "}, 'GL_ARB_vertex_array_bgra' : { 'arbnumber' : 68, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5_mem_extended.txt', }, 'GL_NV_gpu_shader5' : {", ": { 'number' : 196, 'flags' : { 'public' }, 'supporters' : {", "'esnumber' : 254, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_group_vote.txt', }, 'GL_EXT_shader_image_load_formatted'", ": 'extensions/OES/OES_single_precision.txt', }, 'GL_OES_standard_derivatives' : { 'esnumber' : 45, 'flags' : { 'public'", "'arbnumber' : 54, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'number' : 403, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/ARB/ARB_vertex_program.txt', }, 
'GL_ARB_vertex_shader' : { 'arbnumber' : 31, 'flags' : {", "'extensions/EXT/EXT_color_buffer_half_float.txt', }, 'GL_EXT_color_subtable' : { 'number' : 74, 'flags' : { 'public' },", ": 135, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_bgr.txt', }, 'GL_NV_bindless_multi_draw_indirect' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_int64.txt', }, 'GL_NV_shader_buffer_load' :", "'KHR' }, 'url' : 'extensions/OES/OES_compressed_paletted_texture.txt', }, 'GL_OES_copy_image' : { 'esnumber' : 208, 'flags'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_subsample.txt', }, 'GLX_SGIX_swap_barrier' : { 'number'", "}, 'WGL_EXT_pixel_format' : { 'number' : 170, 'flags' : { 'public' }, 'supporters'", "'number' : 404, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", ": { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_global_alpha.txt', },", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_image_samples.txt', }, 'GL_ARB_shader_texture_lod' : { 'arbnumber'", "'GL_EXT_bindable_uniform' : { 'number' : 342, 'flags' : { 'public' }, 'supporters' :", ": { 'INTEL' }, 'url' : 'extensions/EXT/EXT_shader_integer_mix.txt', }, 'GL_EXT_shader_io_blocks' : { 'esnumber' :", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_resize_buffers.txt', }, 'GLX_MESA_set_3dfx_mode' : {", "}, 'url' : 'extensions/NV/GLX_NV_video_out.txt', }, 'GL_NV_viewport_array' : { 'esnumber' : 202, 'flags' :", "28, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": { 'public' }, 'supporters' : { 'ES', 'SGI' }, 'url' : 'extensions/EXT/EXT_cmyka.txt',", "'url' : 'extensions/ARB/ARB_depth_texture.txt', }, 'GL_ARB_derivative_control' : { 'arbnumber' : 163, 'flags' : {", ": 'extensions/OES/OES_query_matrix.txt', }, 'GL_OES_read_format' : { 'number' : 295, 'esnumber' : 17, 'flags'", ": 35, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR', 'SGI'", "'public' }, 'url' : 
'extensions/ARB/ARB_pipeline_statistics_query.txt', }, 'GL_ARB_pixel_buffer_object' : { 'arbnumber' : 42, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_slice_accum.txt',", "1, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners2.txt', }, 'GL_NV_robustness_video_memory_purge' : {", "'extensions/ATI/ATI_draw_buffers.txt', }, 'GL_ATI_element_array' : { 'number' : 256, 'flags' : { 'public' },", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_border_clamp.txt', }, 'GL_SGIS_texture_color_mask' : { 'number' : 214, 'flags'", "}, 'url' : 'extensions/NV/NV_shader_thread_shuffle.txt', }, 'GL_NV_shadow_samplers_array' : { 'esnumber' : 146, 'flags' :", ": { 'number' : 391, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/ARB/ARB_shader_subroutine.txt', }, 'GL_ARB_shader_texture_image_samples' : { 'arbnumber' : 166, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shader_image_load_store.txt', }, 'GL_EXT_shader_implicit_conversions' : { 'esnumber' : 179, 'flags'", "{ 'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_info.txt',", "'url' : 'extensions/NVX/NVX_conditional_render.txt', }, 'GL_NVX_gpu_memory_info' : { 'number' : 438, 'flags' : {", "'INGR', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_edge_clamp.txt', }, 'GL_SGIS_texture_filter4' : { 'number' : 7,", "{ 'number' : 176, 'flags' : { 'public' }, 'supporters' : { 'INGR',", ": { 'esnumber' : 91, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_buffers.txt',", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shader_image_load_store.txt', }, 'GL_EXT_shader_implicit_conversions' : { 'esnumber' :", ": { 'number' : 80, 'flags' : { 'public' }, 'supporters' : {", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_transform_hint.txt', }, 'GL_APPLE_vertex_array_object' : {", ": { 'number' : 247, 'flags' : { 'public' }, 
'supporters' : {", "}, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_static_vertex_array.txt', }, 'GL_EXT_stencil_clear_tag' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_filter_hint.txt', },", "'public' }, 'url' : 'extensions/ARB/ARB_texture_rgb10_a2ui.txt', }, 'GL_ARB_texture_stencil8' : { 'arbnumber' : 150, 'flags'", ": { 'IdSoftware', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_swizzle.txt', }, 'GL_EXT_texture_type_2_10_10_10_REV' : { 'esnumber'", "'url' : 'extensions/NV/NV_fbo_color_attachments.txt', }, 'GL_NV_fence' : { 'number' : 222, 'esnumber' : 52,", "{ 'number' : 362, 'flags' : { 'public' }, 'supporters' : { 'AMD'", "186, 'esnumber' : 60, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'number' : 421, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_timer_query.txt', }, 'GL_EXT_transform_feedback' :", "'GLX_NV_delay_before_swap' : { 'number' : 445, 'flags' : { 'public' }, 'url' :", "282, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects_win32.txt', 'alias' : { 'GL_EXT_semaphore_win32'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_fragment_shader_interlock.txt', }, 'GL_ARB_framebuffer_no_attachments' : { 'arbnumber' : 130,", ": { 'number' : 329, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/OES/OES_depth_texture.txt', }, 'GL_OES_depth_texture_cube_map' : { 'esnumber' : 136, 'flags'", ": 'extensions/EXT/EXT_multi_draw_indirect.txt', }, 'GL_EXT_multiple_textures' : { 'flags' : { 'obsolete' }, 'url' :", "}, 'url' : 'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias' : { 'GL_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_sample_locations' : {", "}, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_subsample.txt', }, 'GLX_OML_swap_method' : {", "'extensions/SGIX/SGIX_reference_plane.txt', }, 'GL_SGIX_resample' : { 'number' : 212, 'flags' : { 'public' },", 
"'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_stencil_export.txt', }, 'GL_AMD_shader_stencil_value_export' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_emboss.txt', }, 'GL_NV_texgen_reflection' : {", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_render_texture.txt', }, 'WGL_ARB_robustness_application_isolation'", "}, 'url' : 'extensions/ARB/ARB_polygon_offset_clamp.txt', }, 'GL_ARB_post_depth_coverage' : { 'arbnumber' : 180, 'flags' :", "{ 'public' }, 'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_string_marker.txt', }, 'GL_HP_convolution_border_modes'", "}, 'GL_KHR_robustness' : { 'arbnumber' : 170, 'esnumber' : 190, 'flags' : {", ": { 'public' }, 'url' : 'extensions/NV/NV_path_rendering_shared_edge.txt', }, 'GL_NV_pixel_buffer_object' : { 'esnumber' :", "'url' : 'extensions/SGIX/SGIX_fog_blend.txt', }, 'GL_SGIX_fog_factor_to_alpha' : { 'flags' : { 'incomplete' }, 'url'", "'number' : 117, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", ": 181, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_locations.txt', }, 'GL_ARB_sample_shading' :", "'public' }, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_convolution_border_modes.txt', }, 'GL_HP_image_transform' :", ": { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_fp16_vector.txt', }, 'GL_NV_shader_atomic_int64' : { 'number' :", "140, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", ": 'extensions/EXT/EXT_multisampled_render_to_texture2.txt', }, 'GL_EXT_multiview_draw_buffers' : { 'esnumber' : 125, 'flags' : { 'public'", "'GLX_MESA_copy_sub_buffer' : { 'number' : 215, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_AMD_shader_atomic_counter_ops' : { 'number' : 435, 'flags' : { 'public' }, 'supporters'", "'extensions/OES/OES_texture_env_crossbar.txt', }, 'GL_OES_texture_float' : { 'esnumber' : 36, 'flags' : { 'public' },", ": 150, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 
'url'", "'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_overflow_query.txt', }, 'GL_ARB_transpose_matrix' : { 'arbnumber' : 3, 'flags'", "}, 'GL_SGI_color_matrix' : { 'number' : 13, 'flags' : { 'public' }, 'supporters'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_buffers2.txt', }, 'GL_EXT_draw_buffers_indexed' : { 'esnumber' : 176, 'flags'", "'WGL_ARB_buffer_region' : { 'arbnumber' : 4, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_deep_texture3D.txt', }, 'GLX_NV_delay_before_swap' : { 'number'", "'arbnumber' : 129, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_fragment_layer_viewport.txt', }, 'GL_ARB_fragment_program'", "'flags' : { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', }, 'GL_NV_draw_buffers' : { 'esnumber'", "{ 'arbnumber' : 71, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_cube_map_array.txt', },", "'url' : 'extensions/ARB/ARB_occlusion_query2.txt', }, 'GL_ARB_parallel_shader_compile' : { 'arbnumber' : 179, 'flags' : {", "'url' : 'extensions/AMD/AMD_shader_trinary_minmax.txt', }, 'GL_AMD_sparse_texture' : { 'number' : 426, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle_compressed.txt', }, 'GL_NV_texture_shader' : { 'number' : 230, 'flags'", "'url' : 'extensions/SUN/SUN_global_alpha.txt', }, 'GL_SUN_mesh_array' : { 'number' : 257, 'flags' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias' : { 'WGL_EXT_create_context_es_profile'", "323, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_compute_variable_group_size.txt', }, 'GL_ARB_conditional_render_inverted' : { 'arbnumber' : 161,", "'arbnumber' : 79, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_explicit_attrib_location.txt', }, 'GL_ARB_explicit_uniform_location'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/EXT/EXT_texture_compression_latc.txt', }, 'GL_EXT_texture_compression_rgtc' :", "'url' : 'extensions/NV/NV_multisample_filter_hint.txt', }, 'GL_NV_non_square_matrices' : { 'esnumber' : 160, 'flags' : {", "'esnumber' : 289, 'flags' : { 'public' }, 'supporters' : { 'ANGLE' },", "{ 'esnumber' : 213, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_io_blocks.txt', },", ": 309, 'esnumber' : 49, 'flags' : { 'public' }, 'supporters' : {", "7, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_texture.txt', }, 'GL_OES_element_index_uint' : {", "}, 'url' : 'extensions/EXT/EXT_texture_buffer.txt', }, 'GL_EXT_texture_buffer_object' : { 'number' : 330, 'flags' :", ": 'extensions/OES/OES_blend_equation_separate.txt', }, 'GL_OES_blend_func_separate' : { 'esnumber' : 2, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/AMD/AMD_shader_ballot.txt', }, 'GL_AMD_shader_explicit_vertex_parameter'", ": 'extensions/ARB/ARB_sparse_texture2.txt', }, 'GL_ARB_sparse_texture_clamp' : { 'arbnumber' : 187, 'flags' : { 'public'", "}, 'GL_NV_path_rendering_shared_edge' : { 'number' : 471, 'esnumber' : 234, 'flags' : {", "262, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', }, 'GL_NV_conservative_raster_underestimation' : {", "'GL_ARB_shader_clock' : { 'arbnumber' : 184, 'flags' : { 'public' }, 'url' :", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_occlusion_query_event.txt', }, 'GL_AMD_performance_monitor' :", ": 211, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "}, 'WGL_I3D_image_buffer' : { 'number' : 253, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 113, 'flags' : { 'public' }, 'supporters' : {", ": { 'SGI' }, 'url' : 'extensions/SGI/SGI_fft.txt', }, 'GLU_SGI_filter4_parameters' : { 'number' :", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_flush_buffer_range.txt',", "with 
WGL_ARB_create_context_no_error.', 'alias' : { 'WGL_ARB_create_context_no_error' }, }, 'GLX_ARB_create_context_profile' : { 'arbnumber' :", "}, 'url' : 'extensions/NV/NV_video_capture.txt', 'alias' : { 'GLX_NV_video_capture', 'WGL_NV_video_capture' }, }, 'GLX_NV_video_out' :", "}, 'GL_EXT_texture_format_BGRA8888' : { 'esnumber' : 51, 'flags' : { 'public' }, 'url'", ": { 'esnumber' : 35, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_float_linear.txt',", "'number' : 310, 'flags' : { 'public' }, 'supporters' : { '3DL', 'ATI',", "'url' : 'extensions/SGIX/GLX_SGIX_wait_group.txt', }, 'GL_SGIX_ycrcb' : { 'number' : 101, 'flags' : {", "{ 'esnumber' : 21, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_env_crossbar.txt', },", "{ 'number' : 388, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'arbnumber' : 184, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_clock.txt', },", "}, 'GL_OES_extended_matrix_palette' : { 'esnumber' : 8, 'flags' : { 'public' }, 'url'", "68, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_array_bgra.txt', }, 'GL_ARB_vertex_array_object' : {", "'number' : 166, 'flags' : { 'public' }, 'supporters' : { 'SUN' },", "{ 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_half_float.txt', }, 'GL_EXT_color_subtable' : { 'number' : 74,", "'incomplete' }, 'url' : 'extensions/SGIX/GLU_SGIX_icc_compress.txt', }, 'GL_SGIX_icc_texture' : { 'number' : 154, 'flags'", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_subdiv_patch.txt', }, 'GL_SGIX_subsample' : { 'number' : 202, 'flags'", "'esnumber' : 102, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shadow_samplers.txt', }, 'GL_EXT_shared_texture_palette'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_bindless_multi_draw_indirect.txt',", "'extensions/EXT/EXT_timer_query.txt', }, 'GL_EXT_transform_feedback' : { 'number' : 352, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/NV/NV_clip_space_w_scaling.txt', }, 
'GL_NV_command_list' : { 'number' : 477,", "392, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_non_power_of_two.txt', }, 'GL_ARB_texture_query_levels' : { 'arbnumber' : 140,", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_texture4.txt',", "'flags' : { 'public' }, 'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_frame_terminator.txt',", "'url' : 'extensions/ARB/ARB_texture_filter_anisotropic.txt', }, 'GL_ARB_texture_filter_minmax' : { 'arbnumber' : 188, 'flags' : {", "}, 'url' : 'extensions/EXT/EXT_framebuffer_object.txt', }, 'GL_EXT_framebuffer_sRGB' : { 'number' : 337, 'flags' :", "'GL_AMD_gpu_shader_int64' : { 'number' : 451, 'flags' : { 'public' }, 'url' :", "'GL_NV_framebuffer_multisample' : { 'esnumber' : 143, 'flags' : { 'public' }, 'url' :", "'supporters' : { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_direct_state_access.txt', }, 'GL_EXT_discard_framebuffer'", "'url' : '../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', }, 'GL_NV_draw_buffers' : { 'esnumber' : 91, 'flags' : {", "'extensions/EXT/EXT_vertex_array_setXXX.txt', }, 'GL_EXT_vertex_attrib_64bit' : { 'number' : 387, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Included with arbnumber 55, WGL_ARB_create_context.', }, 'WGL_ARB_create_context_robustness'", "{ 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_frame_terminator.txt', }, 'GL_GREMEDY_string_marker' : { 'number' : 311,", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_texture_range.txt', }, 'GL_APPLE_transform_hint' :", ": 'extensions/NV/NV_conservative_raster.txt', }, 'GL_NV_conservative_raster_dilate' : { 'number' : 480, 'flags' : { 'public'", "{ 'public' }, 'url' : 'extensions/APPLE/APPLE_color_buffer_packed_float.txt', }, 'GL_APPLE_copy_texture_levels' : { 'esnumber' : 123,", "'url' : 
'extensions/ARB/ARB_query_buffer_object.txt', }, 'GL_ARB_robust_buffer_access_behavior' : { 'arbnumber' : 135, 'flags' : {", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_blended_overlay.txt', }, 'GL_SGIS_clip_band_hint' : {", ": 159, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", "}, 'url' : 'extensions/NVX/NVX_conditional_render.txt', }, 'GL_NVX_gpu_memory_info' : { 'number' : 438, 'flags' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage.txt', }, 'GL_ARB_texture_storage_multisample' : { 'arbnumber' : 141,", ": 'extensions/OES/OES_primitive_bounding_box.txt', }, 'GL_OES_query_matrix' : { 'number' : 296, 'esnumber' : 16, 'flags'", "'number' : 207, 'flags' : { 'public' }, 'supporters' : { '3DFX' },", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_program.txt', }, 'GL_ARB_vertex_shader' : { 'arbnumber' :", "'GL_SGIS_texture_color_mask' : { 'number' : 214, 'flags' : { 'incomplete', 'public' }, 'url'", "'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_attrib_integer_64bit.txt', }, 'GL_NV_vertex_buffer_unified_memory' : { 'number' : 380, 'flags'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_transform_feedback.txt', }, 'GL_EXT_transform_feedback2' : { 'flags' : { 'incomplete',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_load.txt',", "'GL_NVX_conditional_render' : { 'number' : 425, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_sprite.txt', },", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_shading_language_100.txt', }, 'GL_ARB_shading_language_420pack' : { 'arbnumber' : 108,", ": { 'arbnumber' : 88, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader5.txt',", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_420pack.txt', }, 'GL_ARB_shading_language_include' : { 'arbnumber'", "'GL_NV_fill_rectangle' : { 'number' : 466, 'esnumber' : 232, 'flags' : { 'public'", "{ 'number' : 195, 'flags' 
: { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'public' }, 'url' : 'extensions/NV/NV_packed_float.txt', }, 'GL_NV_parameter_buffer_object' : { 'number' : 339,", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_occlusion_query.txt', }, 'GL_ARB_occlusion_query2' : { 'arbnumber'", "35, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR', 'SGI' },", ": { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/WGL_ATI_pixel_format_float.txt', },", "153, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB_write_control.txt', }, 'GL_EXT_scene_marker' : {", "{ 'arbnumber' : 190, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gl_spirv.txt', },", "'GL_EXT_swap_control' : { 'number' : 375, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_multi_draw_indirect.txt', }, 'GL_AMD_name_gen_delete' :", "78, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_blend_func_extended.txt', }, 'GL_ARB_buffer_storage' : {", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_aux_depth_stencil.txt', },", "'arbnumber' : 103, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_cl_event.txt', }, 'GL_ARB_clear_buffer_object'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_range.txt', }, 'GL_ARB_texture_compression' : { 'arbnumber' : 12,", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_crossbar.txt', }, 'GL_ARB_texture_env_dot3' : { 'arbnumber' :", ": { 'HP' }, 'url' : 'extensions/HP/HP_occlusion_test.txt', }, 'GL_HP_texture_lighting' : { 'number' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_reflection.txt', }, 'GL_NV_texture_array'", "248, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_vertex_shader.txt', }, 'GL_EXT_vertex_weighting' : {", "30, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_fog_function.txt', },", ": { 'public' }, 'url' : 
'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', }, 'GL_NV_conservative_raster_underestimation' : { 'number' :", "'WGL_EXT_pbuffer' : { 'number' : 171, 'flags' : { 'public' }, 'supporters' :", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_decimation.txt', }, 'GL_SGIX_depth_pass_instrument' :", "'GL_AMD_texture_gather_bias_lod' : { 'number' : 502, 'flags' : { 'public' }, 'supporters' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_resample.txt', }, 'GL_SGIX_scalebias_hint' : { 'number' : 236,", "'GL_NV_gpu_program5' : { 'number' : 388, 'flags' : { 'public' }, 'supporters' :", "{ 'MESA' }, 'url' : 'extensions/MESA/MESA_tile_raster_order.txt', }, 'GL_MESA_window_pos' : { 'number' : 197,", "{ 'number' : 182, 'flags' : { 'public' }, 'supporters' : { 'SUN'", "}, 'url' : 'extensions/SGI/GLX_SGI_make_current_read.txt', }, 'GLX_SGI_swap_control' : { 'number' : 40, 'flags' :", "}, 'GL_ARB_arrays_of_arrays' : { 'arbnumber' : 120, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_depth_pass_instrument.txt', }, 'GL_SGIX_depth_texture' : { 'number' : 63,", "'url' : 'extensions/ARB/ARB_texture_buffer_range.txt', }, 'GL_ARB_texture_compression' : { 'arbnumber' : 12, 'flags' : {", "}, 'url' : 'extensions/INTEL/INTEL_fragment_shader_ordering.txt', }, 'GL_INTEL_framebuffer_CMAA' : { 'number' : 481, 'esnumber' :", ": { 'number' : 308, 'flags' : { 'public' }, 'supporters' : {", "'extensions/EXT/EXT_transform_feedback2.txt', 'comments' : 'Draft extension which is referred to by some other vendor", "{ 'public' }, 'url' : 'extensions/NV/NV_uniform_buffer_unified_memory.txt', }, 'GL_NV_vdpau_interop' : { 'number' : 396,", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_transform_hint.txt', },", "}, 'GL_IBM_texture_mirrored_repeat' : { 'number' : 224, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'url' : 
'extensions/EXT/EXT_unpack_subimage.txt', }, 'GL_EXT_vertex_array' : { 'number' : 30,", "'public' }, 'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_transform_feedback.txt', }, 'GL_EXT_transform_feedback2'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_expand_normal.txt', }, 'GL_NV_texture_multisample'", "'url' : 'extensions/OES/OES_point_sprite.txt', }, 'GL_OES_primitive_bounding_box' : { 'esnumber' : 212, 'flags' : {", "'GL_SGIX_pixel_texture_lod' : { 'number' : 128, 'flags' : { 'incomplete' }, 'supporters' :", ": { 'esnumber' : 245, 'flags' : { 'incomplete', 'private' }, 'url' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_tiles.txt', }, 'GL_SGIX_polynomial_ffd' : { 'number'", "'number' : 366, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming'", "{ 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_program_parameters.txt', }, 'GL_EXT_gpu_shader4' : { 'number' :", "'public' }, 'url' : 'extensions/OES/OES_texture_env_crossbar.txt', }, 'GL_OES_texture_float' : { 'esnumber' : 36, 'flags'", "'url' : 'extensions/NV/NV_fence.txt', }, 'GL_NV_fill_rectangle' : { 'number' : 466, 'esnumber' : 232,", "'number' : 488, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_float64.txt', }, 'GL_NV_shader_atomic_fp16_vector'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt',", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_pack_invert.txt',", "'public' }, 'url' : 'extensions/SGIS/SGIS_texture_color_mask.txt', }, 'GL_SGIS_texture_edge_clamp' : { 'number' : 35, 'flags'", "{ 'number' : 90, 'flags' : { 'public' }, 'supporters' : { 'HP',", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture4D.txt', }, 'GL_SGIS_texture_border_clamp' : {", "'GL_AMD_vertex_shader_tessellator' : { 'number' : 363, 'flags' : { 'public' }, 'supporters' :", "'extensions/EXT/EXT_vertex_array.txt', }, 
'GL_EXT_vertex_array_bgra' : { 'number' : 354, 'flags' : { 'public' },", "'number' : 284, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'public' }, 'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_vertex_hints.txt', }, 'GL_QCOM_alpha_test'", ": 'extensions/SGI/SGI_complex_type.txt', }, 'GLX_SGI_cushion' : { 'number' : 62, 'flags' : { 'public'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_performance_monitor.txt', }, 'GL_AMD_pinned_memory' : { 'number' :", "'number' : 149, 'flags' : { 'public' }, 'supporters' : { '3DFX', 'NVIDIA',", ": { 'esnumber' : 159, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_explicit_attrib_location.txt',", ": 112, 'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragment_lighting_space.txt', }, 'GL_SGIX_fragment_specular_lighting' : { 'flags'", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_element_array.txt', }, 'GL_APPLE_fence'", "{ 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_array_formats.txt', },", "}, 'url' : 'extensions/PGI/PGI_misc_hints.txt', }, 'GL_PGI_vertex_hints' : { 'number' : 76, 'flags' :", "'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform_color_table.txt', }, 'GL_EXT_point_parameters' : { 'number' : 54,", "'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_framebuffer_downsample.txt', }, 'GL_IMG_multisampled_render_to_texture' : { 'esnumber'", "'url' : 'extensions/APPLE/APPLE_framebuffer_multisample.txt', }, 'GL_APPLE_object_purgeable' : { 'number' : 371, 'flags' : {", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_geometry_shader4.txt', },", "'GL_EXT_frag_depth' : { 'esnumber' : 86, 'flags' : { 'public' }, 'url' :", "'public' }, 'url' : 'extensions/NV/NV_3dvision_settings.txt', }, 'GL_NV_EGL_stream_consumer_external' : { 'esnumber' : 104, 'flags'", "}, 'url' : 'extensions/ARB/ARB_internalformat_query2.txt', }, 'GL_ARB_invalidate_subdata' : { 
'arbnumber' : 132, 'flags' :", "{ 'KHR' }, 'url' : 'extensions/OML/OML_resample.txt', }, 'GL_OML_subsample' : { 'number' : 240,", ": 495, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_view.txt', }, 'GL_ARB_timer_query' : { 'arbnumber' :", "{ 'public' }, 'supporters' : { 'ES', 'SGI' }, 'url' : 'extensions/EXT/EXT_cmyka.txt', },", "}, 'url' : 'extensions/NV/NV_primitive_restart.txt', }, 'GL_NV_query_resource' : { 'number' : 511, 'flags' :", ": { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', }, 'GL_APPLE_texture_format_BGRA8888' : { 'esnumber' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_emboss.txt', },", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragment_lighting_space.txt', }, 'GL_SGIX_fragment_specular_lighting'", ": { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_object_space_tess.txt', }, 'GL_EXT_occlusion_query_boolean' : { 'esnumber' :", "194, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_color_buffer_packed_float.txt', }, 'GL_APPLE_copy_texture_levels' : {", "'number' : 438, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'public' }, 'url' : 'extensions/ARB/ARB_provoking_vertex.txt', }, 'GL_ARB_query_buffer_object' : { 'arbnumber' : 148, 'flags'", "{ 'number' : 343, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex.txt', }, 'GL_SGI_complex_type'", "{ 'GLX_SGIS_multisample' }, }, 'GL_SGIS_multitexture' : { 'number' : 116, 'flags' : {", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_offset.txt', }, 'GL_SGIX_fog_patchy' : { 'flags' : { 'incomplete'", "'extensions/ARB/ARB_framebuffer_object.txt', }, 'GL_ARB_framebuffer_sRGB' : { 'arbnumber' : 46, 'flags' : { 'public' },", "}, 'url' : 'extensions/ARB/ARB_multi_draw_indirect.txt', }, 'GL_ARB_multisample' : { 'arbnumber' : 5, 'flags' :", "}, 'supporters' : { 
'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_func.txt', }, 'GL_EXT_index_material' :", "'extensions/IMG/IMG_texture_compression_pvrtc.txt', }, 'GL_IMG_texture_compression_pvrtc2' : { 'esnumber' : 140, 'flags' : { 'public' },", "}, 'GLX_SGIX_hyperpipe' : { 'number' : 307, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 256, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_fft.txt', }, 'GLU_SGI_filter4_parameters' :", "'GL_ARB_texture_gather' : { 'arbnumber' : 72, 'flags' : { 'public' }, 'url' :", "'MS' }, 'url' : 'extensions/WIN/WIN_phong_shading.txt', }, 'GL_WIN_scene_markerXXX' : { 'flags' : { 'obsolete'", "'arbnumber' : 158, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture.txt', }, 'GL_ARB_sparse_texture2'", "}, 'GL_OVR_multiview2' : { 'number' : 479, 'esnumber' : 242, 'flags' : {", ": { 'arbnumber' : 123, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_image.txt',", "'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', }, 'GL_APPLE_texture_max_level' : { 'esnumber' : 80, 'flags' : { 'public' },", "355, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'arbnumber' : 16, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_fence.txt', }, 'GL_APPLE_float_pixels' : {", "}, 'url' : 'extensions/ARB/ARB_depth_buffer_float.txt', }, 'GL_ARB_depth_clamp' : { 'arbnumber' : 61, 'flags' :", ": 133, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_draw_indirect.txt', }, 'GL_ARB_multisample' :", "'url' : 'extensions/ARB/ARB_vertex_array_bgra.txt', }, 'GL_ARB_vertex_array_object' : { 'arbnumber' : 54, 'flags' : {", "'GL_ARB_texture_border_clamp' : { 'arbnumber' : 13, 'flags' : { 'public' }, 'supporters' :", "'esnumber' : 168, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias' :", "'url' : 'extensions/ARB/ARB_pipeline_statistics_query.txt', }, 
'GL_ARB_pixel_buffer_object' : { 'arbnumber' : 42, 'flags' : {", ": { 'arbnumber' : 47, 'flags' : { 'public' }, 'supporters' : {", "'url' : 'extensions/ARB/ARB_texture_cube_map_array.txt', }, 'GL_ARB_texture_env_add' : { 'arbnumber' : 6, 'flags' : {", ": { 'number' : 503, 'esnumber' : 280, 'flags' : { 'public' },", "{ 'number' : 480, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_dilate.txt', },", "'esnumber' : 290, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/WGL_ATI_pixel_format_float.txt', }, 'WGL_EXT_colorspace' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_point_sprite.txt',", "'url' : 'extensions/AMD/AMD_gpu_shader_half_float_fetch.txt', }, 'GL_AMD_gpu_shader_int16' : { 'number' : 507, 'flags' : {", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_alpha_minmax.txt', }, 'GL_SGIX_blend_cadd'", "'number' : 201, 'flags' : { 'public' }, 'supporters' : { 'IBM' },", "{ 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_cube.txt', }, 'GL_NV_stereo_view_rendering' : { 'number' : 489,", ": 'extensions/NV/NV_video_capture.txt', 'alias' : { 'GLX_NV_video_capture', 'WGL_NV_video_capture' }, }, 'GLX_NV_video_out' : { 'number'", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_cube_map.txt', },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_filter_anisotropic.txt', }, 'GL_EXT_texture_filter_minmax' : {", ": 46, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_vertex_array_lists.txt', },", "233, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_geometry_shader_passthrough.txt', }, 'GL_NV_gpu_multicast' : {", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_offset.txt', }, 'GL_SGIX_fog_patchy' : {", "'url' : 'extensions/ATI/WGL_ATI_pixel_format_float.txt', }, 
'WGL_EXT_colorspace' : { 'number' : 498, 'flags' : {", "'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', }, 'GL_ARB_separate_shader_objects' : { 'arbnumber' : 97, 'flags' : { 'public' },", "'esnumber' : 224, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_float_blend.txt', }, 'GL_EXT_fog_coord'", "'url' : 'extensions/SGIX/SGIX_igloo_interface.txt', }, 'GL_SGIX_image_compression' : { 'flags' : { 'incomplete' }, 'url'", "'SGI' }, 'url' : 'extensions/EXT/EXT_blend_logic_op.txt', }, 'GL_EXT_blend_minmax' : { 'number' : 37, 'esnumber'", "'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_subsample.txt', }, 'GLX_OML_swap_method' : { 'number'", "'Draft spec location unknown.', }, 'GL_OES_point_size_array' : { 'esnumber' : 14, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_blend_func_separate.txt', }, 'GL_OES_blend_subtract' : { 'esnumber'", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_occlusion_query_event.txt', }, 'GL_AMD_performance_monitor' : {", ": { 'arbnumber' : 39, 'flags' : { 'public' }, 'supporters' : {", "{ 'number' : 482, 'flags' : { 'public' }, 'url' : 'extensions/EXT/GLX_EXT_libglvnd.txt', },", "}, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc2.txt', }, 'GL_IMG_texture_env_enhanced_fixed_function' : { 'esnumber' : 58, 'flags' :", "'GL_OES_standard_derivatives' : { 'esnumber' : 45, 'flags' : { 'public' }, 'url' :", "'number' : 292, 'esnumber' : 9, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_ARB_indirect_parameters' : { 'arbnumber' : 154, 'flags' : { 'public' }, 'url'", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', }, 'GL_AMD_shader_atomic_counter_ops' : { 'number' :", "'esnumber' : 62, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get.txt', }, 'GL_QCOM_extended_get2'", "}, 'url' : 'extensions/SGIX/SGIX_decimation.txt', }, 'GL_SGIX_depth_pass_instrument' : { 'number' : 205, 'flags' :", "'number' : 120, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_scene_marker.txt', 'alias' :", "5, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', }, 'GL_OES_compressed_paletted_texture' : {", "'GL_APPLE_rgb_422' : { 'number' : 373, 'esnumber' : 76, 'flags' : { 'public'", ": { 'esnumber' : 128, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_compression_s3tc.txt',", "'extensions/SGIS/SGIS_texture_lod.txt', }, 'GL_SGIS_texture_select' : { 'number' : 51, 'flags' : { 'public' },", "'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_string_marker.txt', }, 'GL_HP_convolution_border_modes' : { 'number' : 67, 'flags'", "14, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'public' }, 'url' : 'extensions/NV/NV_non_square_matrices.txt', }, 'GL_NV_occlusion_query' : { 'number' : 261,", "{ 'arbnumber' : 146, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_enhanced_layouts.txt', },", "'public' }, 'url' : 'extensions/OES/OES_shader_io_blocks.txt', }, 'GL_OES_shader_multisample_interpolation' : { 'esnumber' : 172, 'flags'", "'url' : 'extensions/AMD/AMD_shader_ballot.txt', }, 'GL_AMD_shader_explicit_vertex_parameter' : { 'number' : 485, 'flags' : {", ": { 'number' : 202, 'flags' : { 'incomplete' }, 'supporters' : {", "{ 'number' : 25, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_clear_buffer_object.txt', }, 'GL_ARB_clear_texture' : { 'arbnumber' : 145,", ": { 'esnumber' : 254, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_group_vote.txt',", ": { 'public' }, 'url' : 'extensions/EXT/EXT_conservative_depth.txt', }, 'GL_EXT_convolution' : { 'number' :", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_object.txt', }, 'GL_APPLE_vertex_array_range' : { 'number' :", "}, 'url' : 'extensions/INTEL/INTEL_blackhole_render.txt', }, 'GL_INTEL_parallel_arrays' : { 'number' : 136, 'flags' :", "'GL_ARB_texture_cube_map_array' : { 'arbnumber' : 71, 'flags' : { 'public' }, 'url' :", "'MESA' }, 'url' : 
'extensions/MESA/GLX_MESA_copy_sub_buffer.txt', }, 'GL_MESA_pack_invert' : { 'number' : 300, 'flags'", ": 48, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "170, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url'", "503, 'esnumber' : 280, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects.txt', 'alias'", "}, 'GL_NV_coverage_sample' : { 'esnumber' : 72, 'flags' : { 'public' }, 'url'", "}, 'GLX_MESA_agp_offset' : { 'number' : 308, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/QCOM/QCOM_alpha_test.txt', }, 'GL_QCOM_binning_control' : { 'esnumber' : 119,", "'GL_NV_viewport_array' : { 'esnumber' : 202, 'flags' : { 'public' }, 'url' :", "'GL_ARB_shader_image_load_store' : { 'arbnumber' : 115, 'flags' : { 'public' }, 'url' :", "{ 'arbnumber' : 167, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_barrier.txt', },", "69, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'IMG', 'SUN' },", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_integer.txt', }, 'GL_EXT_texture_lod_bias' : { 'number'", "'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_parallel_arrays.txt', }, 'GL_INTEL_performance_query' :", ": { 'esnumber' : 113, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_translated_shader_source.txt',", "}, 'url' : 'extensions/SGIS/SGIS_texture4D.txt', }, 'GL_SGIS_texture_border_clamp' : { 'number' : 36, 'flags' :", "'extensions/ARB/ARB_debug_output.txt', }, 'GL_ARB_depth_buffer_float' : { 'arbnumber' : 43, 'flags' : { 'public' },", ": 144, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_buffer_storage.txt', }, 'GL_ARB_cl_event' :", "'GL_SUN_mesh_array' : { 'number' : 257, 'flags' : { 'public' }, 'supporters' :", "'number' : 98, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI'", ": { 'number' : 270, 'flags' : { 'public' }, 'supporters' : {", "'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_subtexture.txt', }, 
'GL_EXT_swap_control' : { 'number'", ": { 'number' : 471, 'esnumber' : 234, 'flags' : { 'public' },", "'url' : 'extensions/ATI/ATI_meminfo.txt', }, 'GL_ATI_pn_triangles' : { 'number' : 246, 'flags' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners2.txt', }, 'GL_NV_robustness_video_memory_purge' :", ": { 'esnumber' : 165, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_shader_framebuffer_fetch.txt',", "'extensions/EXT/EXT_draw_transform_feedback.txt', }, 'GL_EXT_external_buffer' : { 'number' : 508, 'esnumber' : 284, 'flags' :", "'extensions/ARB/ARB_shader_texture_image_samples.txt', }, 'GL_ARB_shader_texture_lod' : { 'arbnumber' : 60, 'flags' : { 'public' },", "'GL_EXT_index_array_formats' : { 'number' : 96, 'flags' : { 'public' }, 'supporters' :", "'extensions/NV/NV_texture_compression_vtc.txt', }, 'GL_NV_texture_env_combine4' : { 'number' : 195, 'flags' : { 'public' },", "'number' : 171, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI'", "'esnumber' : 7, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_texture.txt', }, 'GL_OES_element_index_uint'", "{ 'ATI' }, 'url' : 'extensions/ATI/ATI_separate_stencil.txt', }, 'GL_ATI_text_fragment_shader' : { 'number' : 269,", "'url' : 'extensions/NV/NV_texgen_emboss.txt', }, 'GL_NV_texgen_reflection' : { 'number' : 179, 'flags' : {", "'url' : 'extensions/NV/NV_texture_compression_vtc.txt', }, 'GL_NV_texture_env_combine4' : { 'number' : 195, 'flags' : {", "'esnumber' : 175, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_copy_image.txt', }, 'GL_EXT_copy_texture'", "'number' : 192, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_buffer_unified_memory.txt', },", ": 5, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "{ 'esnumber' : 80, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_max_level.txt', },", "'url' : 'extensions/IBM/IBM_cull_vertex.txt', }, 
'GL_IBM_multimode_draw_arrays' : { 'number' : 200, 'flags' : {", ": 467, 'esnumber' : 229, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_coverage_to_color.txt',", "}, 'GL_EXT_EGL_image_array' : { 'esnumber' : 278, 'flags' : { 'public' }, 'url'", "'number' : 417, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "'esnumber' : 123, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_copy_texture_levels.txt', }, 'GL_APPLE_element_array'", "'extensions/EXT/EXT_color_buffer_float.txt', }, 'GL_EXT_color_buffer_half_float' : { 'esnumber' : 97, 'flags' : { 'public' },", ": { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_program_parameters.txt', }, 'GL_EXT_gpu_shader4' : { 'number'", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_non_power_of_two.txt', }, 'GL_ARB_texture_query_levels' : { 'arbnumber'", "'number' : 469, 'esnumber' : 231, 'flags' : { 'public' }, 'url' :", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_fence.txt', }, 'GL_APPLE_float_pixels' : { 'number' : 368, 'flags'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_border_clamp.txt', }, 'GL_ARB_texture_buffer_object'", ": { 'arbnumber' : 59, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_copy_buffer.txt',", "'extensions/OES/OES_texture_mirrored_repeat.txt', }, 'GL_OES_texture_npot' : { 'esnumber' : 37, 'flags' : { 'public' },", "{ 'incomplete', 'public' }, 'url' : 'extensions/SGIX/SGIX_fragment_specular_lighting.txt', }, 'GL_SGIX_fragments_instrument' : { 'number' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_frag_depth.txt', }, 'GL_EXT_fragment_lighting' : { 'number' : 102,", ": { 'number' : 54, 'flags' : { 'public' }, 'supporters' : {", "428, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_int64.txt',", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_compute_program5.txt', }, 'GL_NV_conditional_render' : {", "{ 
'number' : 299, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'url' : 'extensions/INGR/INGR_color_clamp.txt', }, 'GL_INGR_interlace_read' : { 'number' : 175, 'flags' :", "{ 'number' : 83, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'url' : 'extensions/ANGLE/ANGLE_program_binary.txt', }, 'GL_ANGLE_texture_compression_dxt3' : { 'esnumber' : 111, 'flags' : {", "{ 'SGI' }, 'url' : 'extensions/EXT/EXT_point_parameters.txt', }, 'GL_EXT_polygon_offset' : { 'number' : 3,", ": 'extensions/ARB/ARB_texture_swizzle.txt', }, 'GL_ARB_texture_view' : { 'arbnumber' : 124, 'flags' : { 'public'", "157, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_group_vote.txt', }, 'GL_ARB_shader_image_load_store' : {", "}, 'url' : 'extensions/SGIX/SGIX_list_priority.txt', }, 'GL_SGIX_mpeg1' : { 'flags' : { 'incomplete' },", "'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_fp64.txt', }, 'GL_ARB_gpu_shader_int64' : { 'arbnumber' : 178, 'flags'", "'url' : 'extensions/ARB/ARB_stencil_texturing.txt', }, 'GL_ARB_sync' : { 'arbnumber' : 66, 'flags' : {", "'number' : 42, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", ": 'extensions/OES/OES_depth_texture.txt', }, 'GL_OES_depth_texture_cube_map' : { 'esnumber' : 136, 'flags' : { 'public'", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_shared_multisample.txt', 'alias'", "'extensions/SGIS/SGIS_texture4D.txt', }, 'GL_SGIS_texture_border_clamp' : { 'number' : 36, 'flags' : { 'public' },", ": 'extensions/ARB/ARB_cl_event.txt', }, 'GL_ARB_clear_buffer_object' : { 'arbnumber' : 121, 'flags' : { 'public'", "269, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clear_texture.txt', }, 'GL_EXT_clip_cull_distance' : {", ": 225, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_post_depth_coverage.txt', }, 'GL_EXT_primitive_bounding_box' :", "{ 'ANGLE' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', }, 'GL_EXT_texture_cube_map' : { 'flags' : {", ": 
'extensions/EXT/GLX_EXT_visual_info.txt', }, 'GLX_EXT_visual_rating' : { 'number' : 44, 'flags' : { 'public'", "}, 'url' : 'extensions/SGIS/SGIS_clip_band_hint.txt', }, 'GLX_SGIS_color_range' : { 'number' : 115, 'flags' :", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_clipmap.txt', }, 'GL_SGIX_color_matrix_accuracy'", ": 169, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_shading.txt', }, 'GL_OES_sample_variables' :", ": { 'MS' }, 'url' : 'extensions/WIN/WIN_phong_shading.txt', }, 'GL_WIN_scene_markerXXX' : { 'flags' :", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program.txt', }, 'GL_NV_vertex_program1_1' : { 'number' : 266,", "}, 'url' : 'extensions/APPLE/APPLE_color_buffer_packed_float.txt', }, 'GL_APPLE_copy_texture_levels' : { 'esnumber' : 123, 'flags' :", "}, 'url' : 'extensions/NV/NV_float_buffer.txt', 'alias' : { 'WGL_NV_float_buffer' }, }, 'GL_NV_fog_distance' : {", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_geometry_shader4.txt', }, 'GLX_ARB_get_proc_address' : { 'arbnumber'", "'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_rasterpos_clip.txt',", "{ 'number' : 79, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clip_volume_hint.txt', },", "'url' : 'extensions/ARB/ARB_texture_rectangle.txt', }, 'GL_ARB_texture_rg' : { 'arbnumber' : 53, 'flags' : {", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression.txt', }, 'GL_ARB_texture_compression_bptc'", "'url' : 'extensions/OES/OES_tessellation_shader.txt', }, 'GL_OES_texture_3D' : { 'esnumber' : 34, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_occlusion_query.txt',", "'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_float.txt', }, 'GL_EXT_color_buffer_half_float' : { 'esnumber' : 97, 'flags'", "{ 'public' }, 'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_texture.txt', },", 
"'extensions/OES/OES_EGL_image_external_essl3.txt', }, 'GL_OES_EGL_sync' : { 'esnumber' : 75, 'flags' : { 'public' },", ": 'extensions/OES/OES_texture_float.txt', 'alias' : { 'GL_OES_texture_half_float' }, }, 'GL_OES_texture_float_linear' : { 'esnumber' :", "'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_image_buffer.txt', }, 'WGL_I3D_swap_frame_lock' : { 'number' : 254, 'flags'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_clamp.txt', },", "'esnumber' : 50, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rectangle.txt', }, 'GL_ARB_texture_rg' : { 'arbnumber'", ": { 'arbnumber' : 130, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_framebuffer_no_attachments.txt',", "{ 'public' }, 'supporters' : { 'Blizzard', 'NVIDIA', 'S3', 'TransGaming' }, 'url' :", "'GL_IMG_bindless_texture' : { 'esnumber' : 270, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/GLX_OML_sync_control.txt',", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cube_map.txt', },", ": 'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias' : { 'GL_ARB_robustness_share_group_isolation' }, }, 'GL_ARB_sample_locations' : { 'arbnumber' :", "}, 'url' : 'extensions/IBM/IBM_vertex_array_lists.txt', }, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' : { 'flags' : { 'incomplete', 'obsolete'", "'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' }, }, 'GL_EXT_texture_compression_bptc' : { 'esnumber' : 287, 'flags' : { 'public'", ": { 'number' : 287, 'flags' : { 'public' }, 'supporters' : {", "46, 'flags' : { 'obsolete' }, 'supporters' : { 'SGI' }, 'url' :", ": { 'number' : 330, 'flags' : { 'public' }, 'supporters' : {", "'esnumber' : 207, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_norm16.txt', }, 'GL_EXT_texture_object'", "{ 'public' }, 'url' : 'extensions/EXT/EXT_texture_view.txt', }, 
'GL_EXT_timer_query' : { 'number' : 319,", "{ 'arbnumber' : 21, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "}, 'GL_EXT_histogram' : { 'number' : 11, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 130, 'flags' : { 'incomplete' }, 'supporters' : {", "'arbnumber' : 120, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_arrays_of_arrays.txt', }, 'GL_ARB_base_instance'", "'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_single_precision.txt',", "'extensions/NV/NV_fragment_program.txt', }, 'GL_NV_fragment_program2' : { 'number' : 304, 'flags' : { 'public' },", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragments_instrument.txt', }, 'GL_SGIX_framezoom' : { 'number' : 57,", "204, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' :", "}, 'url' : 'extensions/NV/NV_deep_texture3D.txt', }, 'GLX_NV_delay_before_swap' : { 'number' : 445, 'flags' :", "'GL_SGIS_multitexture' : { 'number' : 116, 'flags' : { 'obsolete' }, 'supporters' :", "}, 'GL_APPLE_vertex_array_object' : { 'number' : 273, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 372, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_storage_multisample_2d_array.txt', }, 'GL_OES_texture_view' : { 'esnumber'", ": 55, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_driver_control.txt', }, 'GL_QCOM_extended_get' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_alpha_minmax.txt', }, 'GL_SGIX_blend_cadd' :", ": 130, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_framebuffer_no_attachments.txt', }, 'GL_ARB_framebuffer_object' :", ": { 'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_add.txt',", "}, 'url' : 'extensions/EXT/EXT_vertex_array.txt', }, 'GL_EXT_vertex_array_bgra' : { 'number' : 354, 'flags' :", "{ 'number' : 515, 'esnumber' : 292, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/EXT/EXT_geometry_shader4.txt', }, 'GLX_EXT_stereo_tree' : { 'number'", "}, 'GL_AMD_program_binary_Z400' : { 'esnumber' : 48, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/SGIX/SGIX_async.txt', }, 'GL_SGIX_async_histogram' : { 'number' : 134, 'flags' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_shader5.txt', }, 'GL_NV_half_float' : { 'number'", "256, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_protected_textures.txt', }, 'GL_EXT_provoking_vertex' : {", "'extensions/EXT/EXT_texture_object.txt', }, 'GL_EXT_texture_perturb_normal' : { 'number' : 147, 'flags' : { 'public' },", "'GL_ARB_map_buffer_alignment' : { 'arbnumber' : 113, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_object_purgeable.txt', },", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cube_map.txt',", "}, 'url' : 'extensions/ARB/ARB_indirect_parameters.txt', }, 'GL_ARB_instanced_arrays' : { 'arbnumber' : 49, 'flags' :", "'public' }, 'url' : 'extensions/EXT/EXT_texture_format_BGRA8888.txt', }, 'GL_EXT_texture_format_sRGB_override' : { 'esnumber' : 299, 'flags'", "'url' : 'extensions/EXT/EXT_texture_sRGB_R8.txt', }, 'GL_EXT_texture_sRGB_RG8' : { 'esnumber' : 223, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_texture_storage_multisample.txt', }, 'GL_ARB_texture_swizzle' : { 'arbnumber' : 84, 'flags' :", "'alias' : { 'GLX_SGIS_shared_multisample' }, }, 'GL_SGIS_sharpen_texture' : { 'number' : 22, 'flags'", "'extensions/SGIX/SGIX_vertex_array_object.txt', }, 'GL_SGIX_vertex_preclip' : { 'number' : 210, 'flags' : { 'public' },", "'arbnumber' : 98, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_precision.txt', }, 'GL_ARB_shader_stencil_export'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_minmax_factor.txt', }, 'GL_NV_blend_square' : {", "'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_array.txt', }, 
'GL_EXT_EGL_image_external_wrap_modes' : { 'esnumber' : 298, 'flags'", "'esnumber' : 288, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_parallel_shader_compile.txt', }, 'GL_KHR_robust_buffer_access_behavior'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_buffer_region.txt', }, 'WGL_ARB_create_context' : {", "114, 'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_instanced.txt', }, 'GL_EXT_draw_range_elements' : { 'number'", "'extensions/ARM/ARM_rgba8.txt', }, 'GL_ARM_shader_framebuffer_fetch' : { 'esnumber' : 165, 'flags' : { 'public' },", "'number' : 47, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'SGI'", "'esnumber' : 285, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'url' : 'extensions/SGIX/GLX_SGIX_video_resize.txt', }, 'GLX_SGIX_video_resize_float' : { 'number' : 184, 'flags' : {", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_atomic_counter_ops.txt', }, 'GL_AMD_shader_ballot' :", "{ 'public' }, 'url' : 'extensions/EXT/EXT_multisampled_render_to_texture.txt', }, 'GL_EXT_multisampled_render_to_texture2' : { 'esnumber' : 275,", "GLX_ARB_create_context.', }, 'GLX_ARB_create_context_robustness' : { 'arbnumber' : 101, 'flags' : { 'public' },", ": 'extensions/EXT/EXT_convolution.txt', }, 'GL_EXT_coordinate_frame' : { 'number' : 156, 'flags' : { 'public'", ": 'extensions/EXT/EXT_blend_subtract.txt', }, 'GLX_EXT_buffer_age' : { 'number' : 427, 'flags' : { 'public'", "'GL_MESA_program_binary_formats' : { 'number' : 516, 'esnumber' : 294, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_subtable.txt', }, 'GL_EXT_compiled_vertex_array' : { 'number'", "'extensions/EXT/EXT_texture_snorm.txt', }, 'GL_EXT_texture_storage' : { 'esnumber' : 108, 'flags' : { 'public' },", "{ 'esnumber' : 55, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_driver_control.txt', },", "'alias' : { 'GL_SGIX_color_type' }, }, 
'GLX_SGIX_color_typeXXX' : { 'number' : 72, 'flags'", "'url' : 'extensions/NV/NV_shader_atomic_fp16_vector.txt', }, 'GL_NV_shader_atomic_int64' : { 'number' : 455, 'flags' : {", ": 'extensions/ARB/GLX_ARB_create_context_robustness.txt', }, 'GL_ARB_cull_distance' : { 'arbnumber' : 162, 'flags' : { 'public'", "'url' : 'extensions/SGIX/SGIX_bali_g_instruments.txt', }, 'GL_SGIX_bali_r_instruments' : { 'flags' : { 'incomplete' }, 'url'", "'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_image_samples.txt', }, 'GL_ARB_shader_texture_lod' : { 'arbnumber' : 60, 'flags'", "}, 'url' : 'extensions/MESA/MESA_tile_raster_order.txt', }, 'GL_MESA_window_pos' : { 'number' : 197, 'flags' :", "'extensions/OVR/OVR_multiview.txt', }, 'GL_OVR_multiview2' : { 'number' : 479, 'esnumber' : 242, 'flags' :", "'public' }, 'url' : 'extensions/OES/OES_element_index_uint.txt', }, 'GL_OES_extended_matrix_palette' : { 'esnumber' : 8, 'flags'", "'ARB' }, 'url' : 'extensions/ARB/ARB_polygon_offset_clamp.txt', }, 'GL_ARB_post_depth_coverage' : { 'arbnumber' : 180, 'flags'", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_geometry_shader4.txt', }, 'GLX_EXT_stereo_tree' : { 'number' :", "204, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_draw_elements_base_vertex.txt', }, 'GL_EXT_draw_instanced' : {", "'extensions/3DFX/3DFX_tbuffer.txt', }, 'GL_3DFX_texture_compression_FXT1' : { 'number' : 206, 'flags' : { 'public' },", "'number' : 416, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_point_sprite.txt', }, 'GL_ARB_polygon_offset_clamp' : { 'arbnumber'", "'GL_NV_image_formats' : { 'esnumber' : 200, 'flags' : { 'public' }, 'url' :", ": { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_tbuffer.txt', }, 'GL_3DFX_texture_compression_FXT1' : { 'number' :", ": 423, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_framebuffer_no_attachments.txt', }, 'GL_ARB_framebuffer_object' : { 'arbnumber' : 
45,", "'extensions/SGIX/SGIX_dvc.txt', }, 'GLX_SGIX_fbconfig' : { 'number' : 49, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sRGB_write_control.txt', }, 'GL_EXT_scene_marker' : { 'number'", "'GL_QCOM_performance_monitor_global_mode' : { 'esnumber' : 56, 'flags' : { 'public' }, 'url' :", ": 'extensions/SGIX/SGIX_packed_6bytes.txt', }, 'GLX_SGIX_pbuffer' : { 'number' : 50, 'flags' : { 'public'", "'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_make_current_read.txt', }, 'WGL_EXT_multisample' : { 'number' : 209,", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_mirrored_repeat.txt', }, 'GL_OES_texture_npot' : { 'esnumber' : 37,", ": { 'obsolete' }, 'url' : 'extensions/WIN/WIN_scene_markerXXX.txt', }, 'GL_WIN_specular_fog' : { 'number' :", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_complex_polar.txt', }, 'GL_SGIX_convolution_accuracy' :", "{ 'esnumber' : 23, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image.txt', },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_path_rendering.txt',", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_bindable_uniform.txt', }, 'GL_EXT_blend_color' : { 'number' : 2,", "'extensions/NV/NV_copy_image.txt', 'alias' : { 'GLX_NV_copy_image', 'WGL_NV_copy_image' }, }, 'GL_NV_coverage_sample' : { 'esnumber' :", "}, 'GL_GREMEDY_frame_terminator' : { 'number' : 345, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/ARB/ARB_vertex_attrib_binding.txt', }, 'GL_ARB_vertex_blend' : { 'arbnumber' : 15, 'flags' : {", "'GL_SGIX_fog_texture' : { 'flags' : { 'public' }, 'url' : 'extensions/SGIX/SGIX_fog_texture.txt', }, 'GL_SGIX_fragment_lighting_space'", "'number' : 7, 'flags' : { 'public' }, 'supporters' : { 'KGC', 'SGI'", "{ 'esnumber' : 110, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', },", "'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_float.txt', }, 'GL_ATI_texture_mirror_once' 
:", "}, 'GL_ARB_tessellation_shader' : { 'arbnumber' : 91, 'flags' : { 'public' }, 'url'", ": 264, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', }, 'GL_EXT_shader_pixel_local_storage' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_instanced.txt', }, 'GL_ARB_transform_feedback_overflow_query' : { 'arbnumber' :", "'extensions/EXT/EXT_packed_pixels.txt', }, 'GL_EXT_paletted_texture' : { 'number' : 78, 'flags' : { 'public' },", "}, 'GL_NV_packed_depth_stencil' : { 'number' : 226, 'flags' : { 'public' }, 'supporters'", "'public' }, 'url' : 'extensions/APPLE/APPLE_copy_texture_levels.txt', }, 'GL_APPLE_element_array' : { 'number' : 271, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_float64.txt', }, 'GL_NV_shader_atomic_fp16_vector' : { 'number'", "'public' }, 'url' : 'extensions/EXT/EXT_polygon_offset_clamp.txt', }, 'GL_EXT_post_depth_coverage' : { 'number' : 461, 'esnumber'", "'extensions/EXT/EXT_texture_compression_latc.txt', }, 'GL_EXT_texture_compression_rgtc' : { 'number' : 332, 'esnumber' : 286, 'flags' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_packed_6bytes.txt', }, 'GLX_SGIX_pbuffer' : {", "'GL_MESA_ycbcr_texture' : { 'number' : 301, 'flags' : { 'public' }, 'supporters' :", "237, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array2.txt', }, 'GL_NV_viewport_swizzle' : {", "}, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_global_alpha.txt', }, 'GL_SUN_mesh_array' : {", ": 143, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_geometry_shader4.txt', }, 'GLX_EXT_stereo_tree' : {", ": { 'KHR' }, 'url' : 'extensions/OML/OML_subsample.txt', }, 'GLX_OML_swap_method' : { 'number' :", "'extensions/ARB/ARB_depth_clamp.txt', }, 'GL_ARB_depth_texture' : { 'arbnumber' : 22, 'flags' : { 'public' },", 
"'GL_EXT_texture_view' : { 'esnumber' : 185, 'flags' : { 'public' }, 'url' :", ": 330, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": 'extensions/ARB/ARB_texture_buffer_object_rgb32.txt', }, 'GL_ARB_texture_buffer_range' : { 'arbnumber' : 139, 'flags' : { 'public'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_bgr.txt', }, 'GL_NV_bindless_multi_draw_indirect' : { 'number'", "{ 'arbnumber' : 125, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_binding.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_light_max_exponent.txt', }, 'GL_NV_multisample_coverage'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program4.txt', }, 'GL_NV_video_capture' : { 'number' : 374,", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression.txt', }, 'GL_ARB_texture_compression_bptc' : { 'arbnumber' :", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_transform_hint.txt', }, 'GL_APPLE_vertex_array_object' :", "}, 'GL_SGIX_fragments_instrument' : { 'number' : 180, 'flags' : { 'incomplete' }, 'supporters'", "'GL_NV_draw_buffers' : { 'esnumber' : 91, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_stencil_export.txt', }, 'GL_AMD_shader_stencil_value_export'", ": 'extensions/NV/NV_shadow_samplers_array.txt', }, 'GL_NV_shadow_samplers_cube' : { 'esnumber' : 147, 'flags' : { 'public'", "{ 'number' : 395, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_quad_mesh.txt', }, 'GL_SGIX_reference_plane' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program.txt', }, 'GL_NV_fragment_program2' : { 'number' : 304, 'flags'", "{ 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_swap_control.txt', }, 'GL_SGI_texture_color_table' : { 'number' : 17,", "'url' : 'extensions/EXT/EXT_texture_norm16.txt', }, 'GL_EXT_texture_object' : { 'number' : 20, 'flags' : {", "{ 'public' }, 'url' : 
'extensions/ARB/ARB_vertex_type_10f_11f_11f_rev.txt', }, 'GL_ARB_vertex_type_2_10_10_10_rev' : { 'arbnumber' : 86,", ": 197, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'extensions/ARB/ARB_shader_subroutine.txt', }, 'GL_ARB_shader_texture_image_samples' : { 'arbnumber' : 166, 'flags' : { 'public' },", ": { 'esnumber' : 80, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_max_level.txt',", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_depth_bounds_test.txt', }, 'GL_EXT_direct_state_access' : { 'number' : 353, 'flags'", ": 293, 'esnumber' : 18, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'url' : 'extensions/APPLE/APPLE_color_buffer_packed_float.txt', }, 'GL_APPLE_copy_texture_levels' : { 'esnumber' : 123, 'flags'", "'GL_OES_draw_texture' : { 'esnumber' : 7, 'flags' : { 'public' }, 'url' :", ": 187, 'flags' : { 'public' }, 'url' : 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', }, 'GL_ANGLE_depth_texture' :", ": 'extensions/EXT/EXT_render_snorm.txt', }, 'GL_EXT_rescale_normal' : { 'number' : 27, 'flags' : { 'public'", "}, 'GL_NV_3dvision_settings' : { 'esnumber' : 129, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/QCOM/QCOM_alpha_test.txt', }, 'GL_QCOM_binning_control' : { 'esnumber' : 119, 'flags' :", ": 221, 'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url'", ": 93, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback2.txt', }, 'GL_ARB_transform_feedback3' :", "}, 'GL_OES_query_matrix' : { 'number' : 296, 'esnumber' : 16, 'flags' : {", "}, 'GL_APPLE_framebuffer_multisample' : { 'esnumber' : 78, 'flags' : { 'public' }, 'url'", "'GL_ARB_map_buffer_range' : { 'arbnumber' : 50, 'flags' : { 'public' }, 'supporters' :", "'public' }, 'url' : 'extensions/EXT/EXT_compressed_ETC1_RGB8_sub_texture.txt', }, 'GL_EXT_conservative_depth' : { 'esnumber' : 268, 'flags'", ": { 'number' : 309, 'esnumber' : 49, 'flags' : { 'public' },", "}, 'url' : 'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 
'alias' : { 'GLX_EXT_create_context_es_profile' }, }, 'GL_EXT_cull_vertex' : {", "}, 'GL_INTEL_map_texture' : { 'number' : 429, 'flags' : { 'public' }, 'supporters'", ": 243, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_copy_depth_to_color.txt', }, 'GL_NV_copy_image' :", ": { 'GL_SGIS_color_range' }, }, 'GL_SGIS_detail_texture' : { 'number' : 21, 'flags' :", "'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_draw_range_elements.txt',", "{ 'GLX_ARB_vertex_buffer_object' }, }, 'GL_ARB_vertex_program' : { 'arbnumber' : 26, 'flags' : {", "'public' }, 'url' : 'extensions/ARB/ARB_texture_query_lod.txt', }, 'GL_ARB_texture_rectangle' : { 'arbnumber' : 38, 'flags'", "}, 'url' : 'extensions/AMD/AMD_gcn_shader.txt', }, 'GLX_AMD_gpu_association' : { 'number' : 398, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_ballot.txt', }, 'GL_ARB_shader_bit_encoding' : { 'arbnumber'", "}, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc.txt', }, 'GL_IMG_texture_compression_pvrtc2' : { 'esnumber' : 140, 'flags' :", ": 'extensions/SGIX/SGIX_pixel_texture.txt', 'comments' : 'Previously shared extension number 15 with SGIS_pixel_texture.', }, 'GL_SGIX_pixel_texture_bits'", ": { 'number' : 52, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'supporters' : { 'HP', 'KGC', 'SGI', 'SUN' }, 'url' :", "'supporters' : { 'IBM', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_info.txt', }, 'GLX_EXT_visual_rating' :", "}, 'url' : 'extensions/NV/NV_shader_thread_group.txt', }, 'GL_NV_shader_thread_shuffle' : { 'number' : 448, 'flags' :", "{ 'number' : 232, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_overflow_query.txt', }, 'GL_ARB_transpose_matrix' : { 'arbnumber' : 3,", ": { 'number' : 262, 'flags' : { 'public' }, 'supporters' : {", "'number' : 515, 'esnumber' : 292, 'flags' : { 'public' }, 'supporters' :", "'supporters' : { 'AMD' }, 'url' : 
'extensions/ATI/ATI_meminfo.txt', }, 'GL_ATI_pn_triangles' : { 'number'", ": { 'arbnumber' : 90, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_subroutine.txt',", ": 'extensions/SGI/GLX_SGI_make_current_read.txt', }, 'GLX_SGI_swap_control' : { 'number' : 40, 'flags' : { 'public'", "'public' }, 'supporters' : { 'INGR', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_wrap.txt', }, 'GL_EXT_subtexture'", "'esnumber' : 15, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_point_sprite.txt', }, 'GL_OES_primitive_bounding_box'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_blend_equation_separate.txt', }, 'GL_EXT_blend_func_extended' : { 'esnumber' : 247, 'flags'", ": { 'number' : 111, 'flags' : { 'public' }, 'supporters' : {", "}, 'GL_OES_vertex_array_object' : { 'esnumber' : 71, 'flags' : { 'public' }, 'url'", "}, 'GL_SGIX_fog_layers' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_layers.txt', },", ": 'extensions/NV/NV_sample_locations.txt', }, 'GL_NV_sample_mask_override_coverage' : { 'number' : 473, 'esnumber' : 236, 'flags'", ": 'extensions/SGIX/SGIX_mpeg1.txt', }, 'GL_SGIX_mpeg2' : { 'flags' : { 'incomplete' }, 'url' :", ": 'extensions/EXT/WGL_EXT_extensions_string.txt', }, 'WGL_EXT_make_current_read' : { 'number' : 169, 'flags' : { 'public'", "'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', }, 'GL_AMD_shader_atomic_counter_ops' : { 'number' : 435, 'flags' : { 'public' },", "'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform_color_table.txt', }, 'GL_EXT_point_parameters' : { 'number' : 54, 'flags'", "'extensions/OES/OES_single_precision.txt', }, 'GL_OES_standard_derivatives' : { 'esnumber' : 45, 'flags' : { 'public' },", "}, 'WGL_EXT_extensions_string' : { 'number' : 168, 'flags' : { 'public' }, 'supporters'", "'flags' : { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_cull_vertex.txt',", "'extensions/OES/OES_copy_image.txt', }, 'GL_OES_depth24' : { 'esnumber' : 24, 'flags' : { 'public' },", ": 
'extensions/EXT/EXT_blend_minmax.txt', }, 'GL_EXT_blend_subtract' : { 'number' : 38, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'HP', 'IBM', 'SGI' }, 'url' :", "'url' : 'extensions/ARB/ARB_compute_variable_group_size.txt', }, 'GL_ARB_conditional_render_inverted' : { 'arbnumber' : 161, 'flags' : {", ": { 'esnumber' : 96, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_npot_2D_mipmap.txt',", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_scalebias_hint.txt', }, 'GL_SGIX_shadow' : { 'number' : 34,", ": { 'number' : 250, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_pack_invert.txt', }, 'GLX_MESA_pixmap_colormap' : { 'number'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_draw_buffers2.txt', }, 'GL_EXT_draw_buffers_indexed' : { 'esnumber'", "'GL_NV_shader_storage_buffer_object' : { 'number' : 422, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_fence.txt', },", "'GL_ARB_robustness_isolation' : { 'arbnumber' : 126, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/AMD/AMD_conservative_depth.txt', }, 'GL_AMD_debug_output' : { 'number' : 395, 'flags' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_counters.txt', }, 'GL_NV_shader_atomic_float' : { 'number'", "}, 'GLX_SGIX_fbconfig' : { 'number' : 49, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/SGIX/GLX_SGIX_video_resize_float.txt', }, 'GLX_SGIX_video_source' : { 'number' : 43, 'flags' : {", "}, 'url' : 'extensions/ARB/ARB_point_sprite.txt', }, 'GL_ARB_polygon_offset_clamp' : { 'arbnumber' : 193, 'flags' :", "'url' : 'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias' : { 'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB' }, }, 'GL_ARB_geometry_shader4' : {", ": { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_image_buffer.txt', }, 'WGL_I3D_swap_frame_lock' : { 'number' :", "102, 'flags' : { 'public' }, 'url' : 
'extensions/ARB/WGL_ARB_create_context_robustness.txt', }, 'WGL_ARB_extensions_string' : {", "}, 'url' : 'extensions/HP/HP_texture_lighting.txt', }, 'GL_IBM_cull_vertex' : { 'number' : 199, 'flags' :", "'url' : 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', }, 'GL_PGI_misc_hints' : { 'number' : 77, 'flags' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_blend_equation_separate.txt', }, 'GL_EXT_blend_func_extended' : { 'esnumber'", "}, 'url' : 'extensions/NV/NV_shader_noperspective_interpolation.txt', }, 'GL_NV_shader_storage_buffer_object' : { 'number' : 422, 'flags' :", "}, 'url' : 'extensions/SGIX/SGIX_bali_g_instruments.txt', }, 'GL_SGIX_bali_r_instruments' : { 'flags' : { 'incomplete' },", "'GL_EXT_texture_array' : { 'number' : 329, 'flags' : { 'public' }, 'supporters' :", "'esnumber' : 136, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture_cube_map.txt', }, 'GL_OES_draw_buffers_indexed'", ": { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_conservative_depth.txt', },", "'public' }, 'url' : 'extensions/NV/NV_texture_array.txt', }, 'GL_NV_texture_barrier' : { 'number' : 381, 'esnumber'", "{ 'public' }, 'url' : 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', }, 'GL_OES_compressed_paletted_texture' : { 'number' : 294,", "'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_slice_accum.txt', }, 'GL_SUN_triangle_list' :", "{ 'arbnumber' : 150, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_stencil8.txt', },", ": { 'public' }, 'url' : 'extensions/ARB/ARB_texture_storage.txt', }, 'GL_ARB_texture_storage_multisample' : { 'arbnumber' :", "}, 'GL_NV_texture_border_clamp' : { 'esnumber' : 149, 'flags' : { 'public' }, 'url'", "'number' : 219, 'flags' : { 'incomplete' }, 'supporters' : { 'MESA' },", "'url' : 'extensions/AMD/AMD_shader_atomic_counter_ops.txt', }, 'GL_AMD_shader_ballot' : { 'number' : 497, 'flags' : {", ": { 'number' : 246, 'flags' : { 'public' }, 
'supporters' : {", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_view.txt', }, 'GL_EXT_timer_query' : { 'number'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_shading.txt', }, 'GL_ARB_sampler_objects' : { 'arbnumber'", "'extensions/EXT/EXT_fragment_lighting.txt', }, 'GL_EXT_framebuffer_blit' : { 'number' : 316, 'flags' : { 'public' },", "}, 'url' : 'extensions/NV/NV_shader_buffer_store.txt', }, 'GL_NV_shader_noperspective_interpolation' : { 'esnumber' : 201, 'flags' :", "'GL_NV_query_resource' : { 'number' : 511, 'flags' : { 'public' }, 'supporters' :", "'GL_ANGLE_texture_compression_dxt3' : { 'esnumber' : 111, 'flags' : { 'public' }, 'url' :", "}, }, 'GL_SGIS_sharpen_texture' : { 'number' : 22, 'flags' : { 'public' },", ": 155, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_pvrtc_sRGB.txt', }, 'GL_EXT_raster_multisample' :", ": 'extensions/OES/OES_stencil4.txt', }, 'GL_OES_stencil8' : { 'esnumber' : 33, 'flags' : { 'public'", ": 402, 'esnumber' : 152, 'flags' : { 'public' }, 'supporters' : {", "'WGL_EXT_pixel_format_packed_float' }, }, 'GL_EXT_packed_pixels' : { 'number' : 23, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_create_context_es2_profile.txt',", ": { 'esnumber' : 224, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_float_blend.txt',", ": { 'public' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt', }, 'GL_EXT_texture_norm16' : { 'esnumber' :", "{ 'number' : 153, 'flags' : { 'obsolete' }, 'url' : 'extensions/SGI/GLX_SGI_transparent_pixel.txt', },", "'url' : 'extensions/OES/OES_depth24.txt', }, 'GL_OES_depth32' : { 'esnumber' : 25, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', }, 'GL_PGI_misc_hints' : { 'number'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_point_line_texgen.txt', }, 'GL_SGIS_shared_multisample' : {", "{ 'public' }, 'supporters' : { 'HP', 
'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow_ambient.txt', },", ": { 'esnumber' : 214, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_tessellation_shader.txt',", "'number' : 448, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_shuffle.txt', }, 'GL_NV_shadow_samplers_array'", "236, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sample_mask_override_coverage.txt', }, 'GL_NV_shader_atomic_counters' : {", ": 'extensions/ATI/ATI_texture_float.txt', }, 'GL_ATI_texture_mirror_once' : { 'number' : 221, 'flags' : { 'public'", ": { 'HP', 'IBM', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_subtract.txt', },", "206, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_render_snorm.txt', }, 'GL_EXT_rescale_normal' : {", "{ 'number' : 306, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_swap_control.txt', }, 'GL_SGI_texture_color_table' : { 'number' :", ": 'extensions/ATI/ATI_fragment_shader.txt', }, 'GL_ATI_map_object_buffer' : { 'number' : 288, 'flags' : { 'public'", "}, 'GL_ARB_explicit_attrib_location' : { 'arbnumber' : 79, 'flags' : { 'public' }, 'url'", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_pinned_memory.txt', }, 'GL_AMD_program_binary_Z400' : { 'esnumber'", ": 'extensions/ARB/ARB_gpu_shader_fp64.txt', }, 'GL_ARB_gpu_shader_int64' : { 'arbnumber' : 178, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_direct_state_access.txt', }, 'GL_ARB_draw_buffers' : { 'arbnumber' : 37, 'flags'", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_buffer_object.txt', 'alias' : { 'GLX_ARB_vertex_buffer_object' }, }, 'GL_ARB_vertex_program'", "393, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'GL_EXT_point_parameters' : { 'number' : 54, 'flags' : { 'public' }, 'supporters' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_pixel_data_range.txt', }, 'GL_NV_platform_binary' : { 'esnumber' : 131, 'flags'", "{ 'number' : 36, 'flags' : { 'public' }, 'supporters' : 
{ 'HP',", "'url' : 'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', }, 'GL_APPLE_texture_max_level' : { 'esnumber' : 80, 'flags' : {", ": 'extensions/EXT/WGL_EXT_pixel_format.txt', }, 'WGL_EXT_swap_control' : { 'number' : 172, 'flags' : { 'public'", "string.', }, 'GL_EXT_separate_specular_color' : { 'number' : 144, 'flags' : { 'public' },", "'GL_AMD_compressed_3DC_texture' : { 'esnumber' : 39, 'flags' : { 'public' }, 'url' :", "'arbnumber' : 30, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'arbnumber' : 2, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/WIN/WIN_phong_shading.txt', }, 'GL_WIN_scene_markerXXX' : { 'flags' : { 'obsolete' }, 'url' :", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' : 'Alias", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_mipmap_anisotropic.txt', }, 'GL_SGIX_texture_multi_buffer' : { 'number' : 53,", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pixel_format.txt', }, 'WGL_ARB_render_texture' :", "'GL_EXT_geometry_shader4' : { 'number' : 324, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/OES/OES_EGL_image_external_essl3.txt', }, 'GL_OES_EGL_sync' : { 'esnumber' : 75, 'flags' :", "'extensions/EXT/EXT_texture_cube_map.txt', 'comments' : 'Extension shipped but was not fully specified. 
Similar to ARB_texture_cube_map.',", "}, 'GL_SGIX_interlace' : { 'number' : 45, 'flags' : { 'public' }, 'supporters'", "{ 'arbnumber' : 46, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_compression_vtc.txt', }, 'GL_NV_texture_env_combine4' : {", "171, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_image_atomic.txt', }, 'GL_OES_shader_io_blocks' : {", "{ 'arbnumber' : 88, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader5.txt', },", "170, 'esnumber' : 190, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robustness.txt', },", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_vertex_shader_viewport_index.txt', }, 'GL_ANDROID_extension_pack_es31a' :", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_shader_integer_functions.txt', }, 'GLX_MESA_swap_control' :", "'GL_EXT_external_buffer' : { 'number' : 508, 'esnumber' : 284, 'flags' : { 'public'", ": { 'arbnumber' : 13, 'flags' : { 'public' }, 'supporters' : {", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_pinned_memory.txt', }, 'GL_AMD_program_binary_Z400'", "'arbnumber' : 26, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'url' : 'extensions/EXT/GLU_EXT_object_space_tess.txt', }, 'GL_EXT_occlusion_query_boolean' : { 'esnumber' : 100, 'flags' : {", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_pixmap_colormap.txt', }, 'GL_MESA_program_binary_formats'", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_ycbcr_texture.txt', }, 'GL_MTK_program_binary'", "}, 'GL_NV_conditional_render' : { 'number' : 346, 'esnumber' : 198, 'flags' : {", ": { 'public' }, 'url' : 'extensions/EXT/EXT_framebuffer_multisample.txt', }, 'GL_EXT_framebuffer_multisample_blit_scaled' : { 'number' :", "{ 'number' : 322, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'GL_OES_element_index_uint' : { 'esnumber' : 26, 'flags' : { 
'public' }, 'url' :", "}, 'GL_OES_read_format' : { 'number' : 295, 'esnumber' : 17, 'flags' : {", ": 475, 'esnumber' : 196, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_internalformat_sample_query.txt',", "'url' : 'extensions/OES/OES_matrix_palette.txt', }, 'GL_OES_packed_depth_stencil' : { 'esnumber' : 44, 'flags' : {", "}, 'url' : 'extensions/SGIX/SGIX_packed_6bytes.txt', }, 'GLX_SGIX_pbuffer' : { 'number' : 50, 'flags' :", "'extensions/NV/NV_draw_texture.txt', }, 'GL_NV_draw_vulkan_image' : { 'number' : 501, 'esnumber' : 274, 'flags' :", ": { 'number' : 60, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/EXT/EXT_shader_image_load_store.txt', }, 'GL_EXT_shader_implicit_conversions' : { 'esnumber' : 179, 'flags' : { 'public'", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/ATI/ATI_meminfo.txt', }, 'GL_ATI_pn_triangles' : {", "456, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cube_map.txt', }, 'GL_SGIX_cylinder_texgen' : { 'number' :", "'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/EXT/EXT_vertex_array_set.txt', }, 'GL_EXT_vertex_array_setXXX' :", "111, 'flags' : { 'public' }, 'supporters' : { 'HP' }, 'url' :", "25, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_blend.txt', }, 'GL_ARB_vertex_buffer_object'", "493, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_label.txt', }, 'GL_EXT_debug_marker' :", "374, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'number' : 461, 'esnumber' : 225, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/ARB/ARB_multi_bind.txt', }, 'GL_ARB_multi_draw_indirect' : { 'arbnumber' : 133, 'flags' :", "{ 'GL_EXT_semaphore_fd' }, }, 'GL_EXT_memory_object_win32' : { 'number' : 505, 'esnumber' : 282,", "'url' : 'extensions/SGIX/SGIX_packed_6bytes.txt', 
}, 'GLX_SGIX_pbuffer' : { 'number' : 50, 'flags' : {", ": { 'number' : 216, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/AMD/AMD_vertex_shader_tessellator.txt', }, 'GL_AMD_vertex_shader_viewport_index' : { 'number' : 416, 'flags' : { 'public'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_range.txt', }, 'GL_SGIX_texture_scale_bias' : { 'number' : 56,", "{ 'esnumber' : 35, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_float_linear.txt', 'alias'", ": { 'number' : 176, 'flags' : { 'public' }, 'supporters' : {", ": 'extensions/ANGLE/ANGLE_texture_usage.txt', }, 'GL_ANGLE_translated_shader_source' : { 'esnumber' : 113, 'flags' : { 'public'", "{ 'esnumber' : 115, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_required_internalformat.txt', },", "}, 'WGL_EXT_create_context_es2_profile' : { 'number' : 400, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/NV/NV_sRGB_formats.txt', }, 'GL_NV_sample_locations' : { 'number' : 472, 'esnumber' : 235,", ": 'extensions/IBM/IBM_multimode_draw_arrays.txt', }, 'GL_IBM_rasterpos_clip' : { 'number' : 110, 'flags' : { 'public'", ": 179, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_implicit_conversions.txt', }, 'GL_EXT_shader_integer_mix' :", ": 305, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", ": { 'esnumber' : 61, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_writeonly_rendering.txt',", "'GLX_ARB_get_proc_address' : { 'arbnumber' : 2, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_sRGB.txt',", "}, 'GL_NV_shader_atomic_counters' : { 'number' : 423, 'flags' : { 'public' }, 'supporters'", "{ 'SGI' }, 'url' : 'extensions/SGIS/SGIS_pixel_texture.txt', }, 'GL_SGIS_point_line_texgen' : { 'number' : 213,", "{ 'public' }, 'url' : 'extensions/IMG/IMG_user_clip_plane.txt', }, 'GL_INGR_color_clamp' : { 'number' : 174,", "'AMD' }, 'url' : 'extensions/AMD/AMD_gpu_shader_int16.txt', }, 
'GL_AMD_gpu_shader_int64' : { 'number' : 451, 'flags'", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_draw_indirect.txt', }, 'GL_ARB_multisample' : { 'arbnumber'", ": { 'esnumber' : 111, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_texture_compression_dxt.txt',", "'SGI' }, 'url' : 'extensions/SGI/GLU_SGI_filter4_parameters.txt', }, 'GLX_SGI_make_current_read' : { 'number' : 42, 'flags'", "'extensions/EXT/EXT_sparse_texture.txt', }, 'GL_EXT_sparse_texture2' : { 'number' : 463, 'esnumber' : 259, 'flags' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_bptc.txt', }, 'GL_EXT_texture_compression_dxt1' : { 'number'", ": { 'public' }, 'url' : 'extensions/AMD/AMD_gcn_shader.txt', }, 'GLX_AMD_gpu_association' : { 'number' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_atomic_float.txt', }, 'GL_NV_shader_atomic_float64' :", "{ 'arbnumber' : 83, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_rgb10_a2ui.txt', },", "'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias' : { 'WGL_EXT_create_context_es_profile' }, }, 'WGL_EXT_depth_float' : { 'number' : 177,", ": 'extensions/ATI/ATI_texture_mirror_once.txt', }, 'GL_ATI_vertex_array_object' : { 'number' : 247, 'flags' : { 'public'", "'GL_OES_texture_storage_multisample_2d_array' : { 'esnumber' : 174, 'flags' : { 'public' }, 'url' :", "}, 'GL_EXT_stencil_clear_tag' : { 'number' : 314, 'flags' : { 'public' }, 'supporters'", "'number' : 33, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "112, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_internalformat_query.txt', }, 'GL_ARB_internalformat_query2' : {", ": 'extensions/NV/NV_query_resource.txt', }, 'GL_NV_query_resource_tag' : { 'number' : 512, 'flags' : { 'public'", "{ 'SUN' }, 'url' : 'extensions/SUNX/SUNX_constant_data.txt', }, 'GL_SUN_convolution_border_modes' : { 'number' : 182,", ": 'extensions/ARB/ARB_sparse_texture.txt', }, 'GL_ARB_sparse_texture2' : { 
'arbnumber' : 186, 'flags' : { 'public'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_sync.txt', }, 'GL_ARB_tessellation_shader' : { 'arbnumber' :", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_light_texture.txt', }, 'GL_EXT_map_buffer_range' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_load.txt', },", "}, 'url' : 'extensions/OES/OES_compressed_paletted_texture.txt', }, 'GL_OES_copy_image' : { 'esnumber' : 208, 'flags' :", "'extensions/SGIX/SGIX_decimation.txt', }, 'GL_SGIX_depth_pass_instrument' : { 'number' : 205, 'flags' : { 'incomplete' },", ": 191, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_context_flush_control.txt', 'alias' : {", "}, 'GL_OES_texture_storage_multisample_2d_array' : { 'esnumber' : 174, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/SGIX/SGIX_depth_pass_instrument.txt', }, 'GL_SGIX_depth_texture' : { 'number' : 63, 'flags' :", "'url' : 'extensions/NV/NV_vertex_buffer_unified_memory.txt', }, 'GL_NV_vertex_program' : { 'number' : 233, 'flags' : {", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_scale.txt', }, 'GL_SGIX_fog_texture' : { 'flags'", "}, 'url' : 'extensions/SGI/SGI_color_table.txt', 'comments' : 'Partial HP support.', }, 'GL_SGI_complex' : {", "}, 'url' : 'extensions/AMD/AMD_interleaved_elements.txt', }, 'GL_AMD_multi_draw_indirect' : { 'number' : 408, 'flags' :", "}, 'url' : 'extensions/EXT/WGL_EXT_make_current_read.txt', }, 'WGL_EXT_multisample' : { 'number' : 209, 'flags' :", ": { 'arbnumber' : 178, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_gpu_shader_int64.txt',", "'extensions/EXT/EXT_sparse_texture2.txt', }, 'GL_EXT_static_vertex_array' : { 'flags' : { 'public' }, 'supporters' : {", "'GL_IBM_texture_mirrored_repeat' : { 'number' : 224, 'flags' : { 'public' }, 'url' :", "}, 'url' : 'extensions/AMD/AMD_sample_positions.txt', }, 'GL_AMD_seamless_cubemap_per_texture' : { 'number' : 383, 'flags' :", "'url' : 
'extensions/NV/NV_blend_minmax_factor.txt', }, 'GL_NV_blend_square' : { 'number' : 194, 'flags' : {", "191, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_context_flush_control.txt', 'alias' : { 'GLX_ARB_context_flush_control',", "'public' }, 'url' : 'extensions/OES/OES_texture_buffer.txt', }, 'GL_OES_texture_compression_astc' : { 'esnumber' : 162, 'flags'", "}, 'GL_ARB_vertex_program' : { 'arbnumber' : 26, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_subroutine.txt', }, 'GL_ARB_shader_texture_image_samples' : { 'arbnumber' : 166,", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_stencil_export.txt', }, 'GL_ARB_shader_storage_buffer_object' : { 'arbnumber' : 137,", "'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_subtexture.txt', }, 'GL_EXT_swap_control' : { 'number' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_multisample_filter_hint.txt', }, 'GL_NV_non_square_matrices'", ": { 'number' : 291, 'esnumber' : 4, 'flags' : { 'public' },", "}, 'GL_EXT_direct_state_access' : { 'number' : 353, 'flags' : { 'public' }, 'supporters'", "'number' : 214, 'flags' : { 'incomplete', 'public' }, 'url' : 'extensions/SGIS/SGIS_texture_color_mask.txt', },", ": 128, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "}, 'url' : 'extensions/MESAX/MESAX_texture_stack.txt', }, 'GLX_MESA_agp_offset' : { 'number' : 308, 'flags' :", ": 'extensions/APPLE/APPLE_object_purgeable.txt', }, 'GL_APPLE_rgb_422' : { 'number' : 373, 'esnumber' : 76, 'flags'", "}, 'GL_ARB_fragment_shader' : { 'arbnumber' : 32, 'flags' : { 'public' }, 'supporters'", ": 'extensions/ARB/ARB_vertex_array_bgra.txt', }, 'GL_ARB_vertex_array_object' : { 'arbnumber' : 54, 'flags' : { 'public'", "'MESA' }, 'url' : 'extensions/MESA/MESA_resize_buffers.txt', }, 'GLX_MESA_set_3dfx_mode' : { 'number' : 218, 'flags'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program_option.txt', },", "211, 'flags' : 
{ 'public' }, 'supporters' : { 'SGI' }, 'url' :", ": { 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc.txt', }, 'GL_IMG_texture_compression_pvrtc2' : { 'esnumber' :", "}, 'url' : 'extensions/EXT/WGL_EXT_swap_control_tear.txt', }, 'GL_EXT_clip_control' : { 'esnumber' : 290, 'flags' :", "'url' : 'extensions/EXT/GLX_EXT_visual_info.txt', }, 'GLX_EXT_visual_rating' : { 'number' : 44, 'flags' : {", "'arbnumber' : 42, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_trinary_minmax.txt', }, 'GL_AMD_sparse_texture' :", ": 'extensions/NV/NV_path_rendering_shared_edge.txt', }, 'GL_NV_pixel_buffer_object' : { 'esnumber' : 134, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/EXT/EXT_texture_cube_map_array.txt', }, 'GL_EXT_texture_env' : { 'number' : 146, 'flags'", "{ 'number' : 423, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "}, 'GL_AMD_texture_gather_bias_lod' : { 'number' : 502, 'flags' : { 'public' }, 'supporters'", "'esnumber' : 87, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external.txt', }, 'GL_OES_EGL_image_external_essl3'", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_query_buffer_object.txt', }, 'GL_AMD_sample_positions' : {", "'number' : 27, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'SUN'", "'alias' : { 'GL_EXT_geometry_point_size' }, }, 'GL_EXT_geometry_shader4' : { 'number' : 324, 'flags'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_range.txt', }, 'GL_SGIX_texture_scale_bias' : {", ": 'Alias to WGL_ARB_create_context_profile not needed - see arbnumber 74.', }, 'WGL_ARB_create_context_profile' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback2.txt', },", ": 'extensions/ARB/ARB_shader_viewport_layer_array.txt', }, 'GL_ARB_shading_language_100' : { 'arbnumber' : 33, 'flags' : { 'public'", "'url' : 'extensions/DMP/DMP_program_binary.txt', }, 
'GL_DMP_shader_binary' : { 'esnumber' : 88, 'flags' : {", "}, 'url' : 'extensions/SGIX/GLX_SGIX_video_source.txt', }, 'GLX_SGIX_visual_select_group' : { 'number' : 234, 'flags' :", ": 17, 'flags' : { 'public' }, 'supporters' : { 'ES', 'HP', 'SGI',", ": { 'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context_robustness.txt', }, 'GL_ARB_cull_distance' : { 'arbnumber' :", "'number' : 430, 'esnumber' : 126, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_EXT_transform_feedback' : { 'number' : 352, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 443, 'esnumber' : 164, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/ARB/GLX_ARB_create_context_robustness.txt', }, 'GL_ARB_cull_distance' : { 'arbnumber' : 162, 'flags'", "}, 'GL_ARB_map_buffer_range' : { 'arbnumber' : 50, 'flags' : { 'public' }, 'supporters'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_light_max_exponent.txt', }, 'GL_NV_multisample_coverage' : {", "}, 'GLX_EXT_visual_rating' : { 'number' : 44, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 197, 'flags' : { 'public' }, 'supporters' : { 'MESA'", "'number' : 274, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "'AMD' }, 'url' : 'extensions/AMD/AMD_shader_trinary_minmax.txt', }, 'GL_AMD_sparse_texture' : { 'number' : 426, 'flags'", "'number' : 20, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'INGR',", "{ 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_attrib_array_object.txt', }, 'GL_ATI_vertex_streams' : { 'number' : 249,", "'GL_EXT_unpack_subimage' : { 'esnumber' : 90, 'flags' : { 'public' }, 'url' :", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader.txt', }, 'GL_NV_texture_shader2'", "{ 'public' }, 'url' : 'extensions/NV/NV_conservative_raster.txt', }, 'GL_NV_conservative_raster_dilate' : { 'number' : 480,", "{ 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_scale_bias.txt', }, 'GL_SGIX_texture_supersample' : { 'flags' :", "{ 'esnumber' : 167, 'flags' : { 'public' }, 
'url' : 'extensions/EXT/EXT_shader_pixel_local_storage.txt', },", "'GL_AMD_shader_trinary_minmax' : { 'number' : 428, 'flags' : { 'public' }, 'supporters' :", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_buffers.txt', }, 'GL_ARB_draw_buffers_blend' : { 'arbnumber' :", ": 49, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'esnumber' : 157, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_snorm.txt', }, 'GL_EXT_texture_storage' :", "'GLX_MESA_agp_offset' : { 'number' : 308, 'flags' : { 'public' }, 'supporters' :", "'extensions/NV/NV_packed_depth_stencil.txt', }, 'GL_NV_packed_float' : { 'esnumber' : 127, 'flags' : { 'public' },", "}, 'GL_NV_polygon_mode' : { 'esnumber' : 238, 'flags' : { 'public' }, 'url'", "'esnumber' : 9, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", "{ 'public' }, 'url' : 'extensions/NV/NV_fbo_color_attachments.txt', }, 'GL_NV_fence' : { 'number' : 222,", "55, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "'extensions/ARB/ARB_conservative_depth.txt', }, 'GL_ARB_copy_buffer' : { 'arbnumber' : 59, 'flags' : { 'public' },", "301, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_storage.txt', }, 'GL_EXT_memory_object' : {", "}, 'url' : 'extensions/EXT/EXT_stencil_two_side.txt', }, 'GL_EXT_stencil_wrap' : { 'number' : 176, 'flags' :", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_pixel_texture_bits.txt', }, 'GL_SGIX_pixel_texture_lod' : { 'number' : 128,", "'supporters' : { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_string_marker.txt', }, 'GL_HP_convolution_border_modes' : { 'number'", "175, 'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url' :", ": { 'arbnumber' : 176, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_2_compatibility.txt',", "}, 'GL_ARB_fragment_program' : { 'arbnumber' : 27, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_fence.txt', 
}, 'GL_APPLE_float_pixels' : { 'number'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader.txt', },", "}, 'GL_EXT_texture_border_clamp' : { 'esnumber' : 182, 'flags' : { 'public' }, 'url'", "'extensions/NV/NV_draw_instanced.txt', }, 'GL_NV_draw_texture' : { 'number' : 430, 'esnumber' : 126, 'flags' :", "}, 'GL_ARB_draw_buffers' : { 'arbnumber' : 37, 'flags' : { 'public' }, 'supporters'", "'GL_QCOM_shader_framebuffer_fetch_noncoherent' : { 'esnumber' : 277, 'flags' : { 'public' }, 'url' :", ": { 'arbnumber' : 136, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_size.txt',", "'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_map_texture.txt', }, 'GL_INTEL_blackhole_render' : { 'number'", ": 'extensions/QCOM/QCOM_framebuffer_foveated.txt', }, 'GL_QCOM_texture_foveated' : { 'esnumber' : 293, 'flags' : { 'public'", "{ 'ATI' }, 'url' : 'extensions/ATI/ATI_fragment_shader.txt', }, 'GL_ATI_map_object_buffer' : { 'number' : 288,", "'extensions/NV/NV_texture_barrier.txt', }, 'GL_NV_texture_border_clamp' : { 'esnumber' : 149, 'flags' : { 'public' },", "'esnumber' : 82, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_rgba8.txt', }, 'GL_ARM_shader_framebuffer_fetch'", "'arbnumber' : 35, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_color_clamp.txt', },", "}, 'GL_EXT_texture_sRGB' : { 'number' : 315, 'flags' : { 'public' }, 'supporters'", ": { 'number' : 15, 'flags' : { 'public' }, 'supporters' : {", "135, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robust_buffer_access_behavior.txt', }, 'GL_ARB_robustness' : {", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_mirror_clamp_to_edge.txt', }, 'GL_ARB_texture_mirrored_repeat' : { 'arbnumber'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control_tear.txt', }, 'GL_EXT_clip_control' : { 'esnumber'", "272, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_draw_transform_feedback.txt', }, 'GL_EXT_external_buffer' : {", "{ 'IBM' }, 'url' : 'extensions/IBM/IBM_cull_vertex.txt', }, 'GL_IBM_multimode_draw_arrays' : { 'number' : 200,", "'GL_OVR_multiview' : { 'number' : 478, 'esnumber' : 241, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_multi_buffer.txt', },", ": 'extensions/SGI/SGI_complex.txt', }, 'GL_SGI_complex_type' : { 'number' : 88, 'flags' : { 'incomplete'", ": { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_render_texture.txt', }, 'WGL_ARB_robustness_application_isolation' : { 'arbnumber' :", ": { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_buffer_age.txt', }, 'GL_EXT_buffer_storage' : { 'esnumber'", "}, 'GL_EXT_polygon_offset' : { 'number' : 3, 'flags' : { 'public' }, 'supporters'", "364, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url'", "262, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'public' }, 'url' : 'extensions/IMG/IMG_texture_compression_pvrtc2.txt', }, 'GL_IMG_texture_env_enhanced_fixed_function' : { 'esnumber' : 58,", "'number' : 423, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "}, 'GL_SGIX_tag_sample_buffer' : { 'number' : 58, 'flags' : { 'public' }, 'supporters'", "}, 'GL_NV_read_buffer' : { 'esnumber' : 93, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/EXT/EXT_texture_rg.txt', }, 'GL_EXT_texture_sRGB' : { 'number' : 315, 'flags'", ": { 'number' : 261, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_multitexture.txt', },", ": 'extensions/ARB/ARB_texture_multisample.txt', }, 'GL_ARB_texture_non_power_of_two' : { 'arbnumber' : 34, 'flags' : { 'public'", "'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_float_pixels.txt', }, 'GL_APPLE_flush_buffer_range' : { 'number'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_pixel_buffer_object.txt', }, 
'GL_EXT_pixel_transform' :", "'url' : 'extensions/OES/OES_primitive_bounding_box.txt', }, 'GL_OES_query_matrix' : { 'number' : 296, 'esnumber' : 16,", "'arbnumber' : 19, 'flags' : { 'public' }, 'supporters' : { 'ARB' },", ": { 'esnumber' : 267, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_viewport_array.txt',", "}, 'GL_EXT_x11_sync_object' : { 'number' : 406, 'flags' : { 'public' }, 'supporters'", ": 405, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "}, 'url' : 'extensions/ARB/ARB_fragment_shader.txt', }, 'GL_ARB_fragment_shader_interlock' : { 'arbnumber' : 177, 'flags' :", "}, 'url' : 'extensions/ARB/ARB_stencil_texturing.txt', }, 'GL_ARB_sync' : { 'arbnumber' : 66, 'flags' :", ": 'extensions/SGIX/SGIX_bali_g_instruments.txt', }, 'GL_SGIX_bali_r_instruments' : { 'flags' : { 'incomplete' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_env_crossbar.txt',", "'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_blackhole_render.txt', }, 'GL_INTEL_parallel_arrays' :", ": 126, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness_application_isolation.txt', 'alias' : {", "{ 'public' }, 'url' : 'extensions/ARB/ARB_ES3_2_compatibility.txt', }, 'GL_ARB_ES3_compatibility' : { 'arbnumber' : 127,", ": { 'public' }, 'url' : 'extensions/ARB/ARB_framebuffer_no_attachments.txt', }, 'GL_ARB_framebuffer_object' : { 'arbnumber' :", "259, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "{ 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow.txt', }, 'GL_SGIX_shadow_ambient' : { 'number' :", "{ 'arbnumber' : 168, 'esnumber' : 191, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/ARB/ARB_shader_storage_buffer_object.txt', }, 'GL_ARB_shader_subroutine' : { 'arbnumber' : 90, 'flags'", ": { 'arbnumber' : 25, 'flags' : { 'public' }, 'supporters' : {", "'GL_ARB_texture_buffer_range' : { 'arbnumber' : 139, 'flags' : { 'public' }, 'url' :", "}, 'url' : 
'extensions/ANGLE/ANGLE_depth_texture.txt', }, 'GL_ANGLE_framebuffer_blit' : { 'esnumber' : 83, 'flags' :", "{ 'arbnumber' : 192, 'esnumber' : 288, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/NV/NV_command_list.txt', }, 'GL_NV_compute_program5' : { 'number' : 421, 'flags' :", "}, 'url' : 'extensions/KHR/KHR_no_error.txt', }, 'GL_KHR_parallel_shader_compile' : { 'arbnumber' : 192, 'esnumber' :", "'public' }, 'url' : 'extensions/NV/NV_conservative_raster.txt', }, 'GL_NV_conservative_raster_dilate' : { 'number' : 480, 'flags'", ": 'extensions/SGIX/SGIX_calligraphic_fragment.txt', }, 'GL_SGIX_clipmap' : { 'number' : 33, 'flags' : { 'public'", ": 275, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", ": 218, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url'", ": 144, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_generate_mipmap_sRGB.txt', }, 'GL_NV_geometry_program4' :", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow.txt', }, 'GL_SGIX_shadow_ambient' : { 'number' : 90, 'flags'", "}, 'url' : '../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', }, 'GL_NV_draw_buffers' : { 'esnumber' : 91, 'flags' :", "'esnumber' : 57, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_user_clip_plane.txt', }, 'GL_INGR_color_clamp'", "'url' : 'extensions/EXT/EXT_debug_marker.txt', }, 'GL_EXT_depth_bounds_test' : { 'number' : 297, 'flags' : {", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias' : { 'GLX_EXT_framebuffer_sRGB', 'WGL_EXT_framebuffer_sRGB' },", "}, 'GL_NV_framebuffer_multisample_coverage' : { 'number' : 336, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/NV/NV_parameter_buffer_object2.txt', }, 'GL_NV_path_rendering' : { 'number' : 410, 'esnumber' :", "}, 'url' : 'extensions/SGIX/SGIX_resample.txt', }, 'GL_SGIX_scalebias_hint' : { 'number' : 236, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_texture_shared_exponent.txt', }, 'GL_EXT_texture_snorm' : { 'number' : 365, 'flags' :", "}, 'url' : 
'extensions/OML/OML_resample.txt', }, 'GL_OML_subsample' : { 'number' : 240, 'flags' :", "'ATI' }, 'url' : 'extensions/ATI/WGL_ATI_pixel_format_float.txt', }, 'WGL_EXT_colorspace' : { 'number' : 498, 'flags'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_pixel_buffer_object.txt', }, 'GL_ARB_point_parameters' : { 'arbnumber' :", ": { 'public' }, 'url' : 'extensions/ARB/ARB_conservative_depth.txt', }, 'GL_ARB_copy_buffer' : { 'arbnumber' :", "{ 'esnumber' : 193, 'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_clip_distance.txt', },", ": { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_cushion.txt', }, 'GL_SGI_fft' : { 'number' :", "}, 'GL_S3_s3tc' : { 'number' : 276, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 375, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_scalebias_hint.txt', }, 'GL_SGIX_shadow' : { 'number' : 34, 'flags'", ": 15, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_timer_query.txt',", "{ 'arbnumber' : 74, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments'", "'extensions/EXT/EXT_direct_state_access.txt', }, 'GL_EXT_discard_framebuffer' : { 'esnumber' : 64, 'flags' : { 'public' },", ": 209, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_draw_buffers_indexed.txt', }, 'GL_OES_draw_elements_base_vertex' :", "}, 'url' : 'extensions/NV/NV_texgen_reflection.txt', }, 'GL_NV_texture_array' : { 'esnumber' : 133, 'flags' :", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_slim.txt', }, 'GL_SGIX_spotlight_cutoff' : { 'number'", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_occlusion_instrument.txt', }, 'GL_SGIX_packed_6bytes' :", "{ 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_release_buffers.txt', }, 'GL_MESA_resize_buffers' : { 'number' : 196,", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_scalebias_hint.txt', }, 
'GL_SGIX_shadow' : { 'number'", ": 'extensions/NV/NV_3dvision_settings.txt', }, 'GL_NV_EGL_stream_consumer_external' : { 'esnumber' : 104, 'flags' : { 'public'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_gpu_shader4.txt', }, 'GL_EXT_gpu_shader5' : { 'esnumber'", ": 213, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_shader_io_blocks.txt', }, 'GL_OES_shader_multisample_interpolation' :", "{ 'esnumber' : 91, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_draw_buffers.txt', },", ": { 'arbnumber' : 97, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_separate_shader_objects.txt',", "}, 'GLX_EXT_import_context' : { 'number' : 47, 'flags' : { 'public' }, 'supporters'", ": 135, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url'", "'flags' : { 'incomplete', 'private' }, 'comments' : 'Draft spec location unknown.', },", ": 43, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", ": 179, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_parallel_shader_compile.txt', }, 'GL_ARB_pipeline_statistics_query' :", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2_option.txt', }, 'GL_NV_vertex_program3' :", "}, 'GL_OES_texture_border_clamp' : { 'esnumber' : 215, 'flags' : { 'public' }, 'url'", "}, 'GL_NV_image_formats' : { 'esnumber' : 200, 'flags' : { 'public' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_EGL_image_external_wrap_modes.txt', }, 'GL_EXT_EGL_image_storage' : { 'number'", "'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_float_pixels.txt', }, 'GL_APPLE_flush_buffer_range' :", ": 'extensions/ARB/ARB_texture_stencil8.txt', }, 'GL_ARB_texture_storage' : { 'arbnumber' : 117, 'flags' : { 'public'", "'extensions/NV/NV_framebuffer_multisample_coverage.txt', }, 'GL_NV_generate_mipmap_sRGB' : { 'esnumber' : 144, 'flags' : { 'public' },", ": 13, 'flags' : { 'incomplete', 'private' }, 'comments' : 'Draft spec location", "{ 'number' : 447, 'flags' : { 'public' }, 
'url' : 'extensions/NV/NV_shader_thread_group.txt', },", "{ 'esnumber' : 218, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_view.txt', },", "'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_decode.txt', }, 'GL_EXT_texture_shared_exponent' : { 'number' : 333,", "'GL_SGIS_texture_lod' : { 'number' : 24, 'flags' : { 'public' }, 'supporters' :", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_tag_sample_buffer.txt', },", ": { 'incomplete' }, 'url' : 'extensions/EXT/EXT_texture_cube_map.txt', 'comments' : 'Extension shipped but was", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/GLU_EXT_object_space_tess.txt', }, 'GL_EXT_occlusion_query_boolean' :", "'public' }, 'url' : 'extensions/IBM/IBM_texture_mirrored_repeat.txt', }, 'GL_IBM_vertex_array_lists' : { 'number' : 201, 'flags'", "'url' : 'extensions/SGIX/SGIX_dvc.txt', }, 'GLX_SGIX_fbconfig' : { 'number' : 49, 'flags' : {", ": { 'number' : 491, 'esnumber' : 265, 'flags' : { 'public' },", "'extensions/ARB/ARB_texture_float.txt', }, 'GL_ARB_texture_gather' : { 'arbnumber' : 72, 'flags' : { 'public' },", "453, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gcn_shader.txt', }, 'GLX_AMD_gpu_association' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_copy_image.txt', 'alias' : { 'GLX_NV_copy_image',", ": { 'public' }, 'supporters' : { 'INGR', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_stencil_wrap.txt',", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_offset.txt',", "'public' }, 'url' : 'extensions/ARB/ARB_sample_shading.txt', }, 'GL_ARB_sampler_objects' : { 'arbnumber' : 81, 'flags'", "'supporters' : { 'KHR' }, 'url' : 'extensions/OES/OES_read_format.txt', }, 'GL_OES_required_internalformat' : { 'esnumber'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_depth_buffer_float.txt', }, 'GL_NV_depth_clamp' : { 'number' :", "{ 'SGI' }, 'url' : 'extensions/EXT/EXT_fragment_lighting.txt', }, 
'GL_EXT_framebuffer_blit' : { 'number' : 316,", "474, 'esnumber' : 261, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_atomic_fp16_vector.txt', },", "211, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_gpu_shader5.txt', }, 'GL_OES_mapbuffer' : {", ": 'extensions/AMD/AMD_interleaved_elements.txt', }, 'GL_AMD_multi_draw_indirect' : { 'number' : 408, 'flags' : { 'public'", "400, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'esnumber' : 229, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fragment_coverage_to_color.txt', }, 'GL_NV_fragment_program'", "{ 'esnumber' : 182, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_border_clamp.txt', },", ": 'extensions/NV/GLX_NV_swap_group.txt', }, 'GL_NV_tessellation_program5' : { 'number' : 391, 'flags' : { 'public'", ": 'extensions/ARB/ARB_blend_func_extended.txt', }, 'GL_ARB_buffer_storage' : { 'arbnumber' : 144, 'flags' : { 'public'", "60, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_lod.txt', }, 'GL_ARB_shader_viewport_layer_array' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_attrib_64bit.txt',", "}, 'GL_AMD_gpu_shader_int16' : { 'number' : 507, 'flags' : { 'public' }, 'supporters'", "166, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' :", "{ 'esnumber' : 139, 'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_program_binary.txt', },", "'public' }, 'url' : 'extensions/KHR/KHR_parallel_shader_compile.txt', }, 'GL_KHR_robust_buffer_access_behavior' : { 'arbnumber' : 169, 'esnumber'", ": { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_marker.txt', },", "{ 'arbnumber' : 70, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sample_shading.txt', },", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcb_subsample.txt', 'comments' : 'Supported", ": 377, 'esnumber' : 101, 'flags' : { 'public' }, 'supporters' : {", "'public' }, 'supporters' : { 
'AMD' }, 'url' : 'extensions/AMD/AMD_seamless_cubemap_per_texture.txt', }, 'GL_AMD_shader_atomic_counter_ops' :", "'extensions/NV/NV_platform_binary.txt', }, 'GL_NV_point_sprite' : { 'number' : 262, 'flags' : { 'public' },", ": { 'esnumber' : 211, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_gpu_shader5.txt',", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_lod_bias.txt', }, 'GL_SGIX_texture_mipmap_anisotropic' : {", ": 'extensions/SGIX/SGIX_texture_scale_bias.txt', }, 'GL_SGIX_texture_supersample' : { 'flags' : { 'incomplete' }, 'url' :", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control_tear.txt', }, 'GL_EXT_clip_control' : { 'esnumber' : 290,", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_hyperpipe.txt', }, 'GLU_SGIX_icc_compress' : { 'flags'", "'url' : 'extensions/OES/OES_texture_border_clamp.txt', }, 'GL_OES_texture_buffer' : { 'esnumber' : 216, 'flags' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_invalidate_subdata.txt', }, 'GL_ARB_map_buffer_alignment' : { 'arbnumber' :", "'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc.txt', }, 'GL_EXT_texture_compression_s3tc_srgb'", "{ 'number' : 98, 'flags' : { 'public' }, 'supporters' : { 'INTEL',", "{ 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_mirror_once.txt', }, 'GL_ATI_vertex_array_object'", "{ 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_separate_shader_objects.gl.txt', 'esurl'", "'extensions/GREMEDY/GREMEDY_frame_terminator.txt', }, 'GL_GREMEDY_string_marker' : { 'number' : 311, 'flags' : { 'public' },", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_fog_scale.txt', },", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIS/SGIS_line_texgen.txt', }, 'GL_SGIS_multisample' : {", "'extensions/EXT/EXT_texture_array.txt', }, 'GL_EXT_texture_border_clamp' : { 'esnumber' : 182, 'flags' : { 'public' },", 
"'MESA' }, 'url' : 'extensions/MESA/MESA_window_pos.txt', }, 'GL_MESA_ycbcr_texture' : { 'number' : 301, 'flags'", "167, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_barrier.txt', }, 'GL_ARB_texture_border_clamp' : {", "189, 'esnumber' : 249, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_texture_compression_astc_sliced_3d.txt', },", "284, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_buffer.txt', }, 'GL_EXT_EGL_image_array' : {", "{ 'public' }, 'supporters' : { '3DFX', '3DL', 'SGI' }, 'url' : 'extensions/EXT/EXT_shared_texture_palette.txt',", "'url' : 'extensions/EXT/EXT_base_instance.txt', }, 'GL_EXT_bgra' : { 'number' : 129, 'flags' : {", "'extensions/EXT/GLX_EXT_import_context.txt', }, 'GL_EXT_index_array_formats' : { 'number' : 96, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texgen_reflection.txt', }, 'GL_NV_texture_array' :", ": { 'GLX_NV_video_capture', 'WGL_NV_video_capture' }, }, 'GLX_NV_video_out' : { 'number' : 348, 'flags'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program4.txt', },", "{ 'number' : 377, 'esnumber' : 101, 'flags' : { 'public' }, 'supporters'", "{ 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_histogram.txt', }, 'GL_SGIX_async_pixel' : { 'number' : 133,", "{ 'number' : 30, 'flags' : { 'public' }, 'supporters' : { 'DEC',", "'url' : 'extensions/EXT/EXT_texture_compression_dxt1.txt', }, 'GL_EXT_texture_compression_latc' : { 'number' : 331, 'flags' : {", "}, 'GL_EXT_EGL_image_external_wrap_modes' : { 'esnumber' : 298, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/AMD/AMD_name_gen_delete.txt', }, 'GL_AMD_occlusion_query_event' : { 'number' : 442, 'flags' : {", "'GL_ARB_cull_distance' : { 'arbnumber' : 162, 'flags' : { 'public' }, 'url' :", "'extensions/APPLE/APPLE_ycbcr_422.txt', }, 'GL_ARB_ES2_compatibility' : { 'arbnumber' : 95, 'flags' : { 'public' },", "252, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_polygon_offset_clamp.txt', }, 'GL_EXT_post_depth_coverage' : {", "'flags' : { 'public' }, 'supporters' : { 'DEC', 'HP', 'IBM', 'INGR', 'KGC',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_barrier.txt',", "'extensions/EXT/GLX_EXT_buffer_age.txt', }, 'GL_EXT_buffer_storage' : { 'esnumber' : 239, 'flags' : { 'public' },", "{ 'esnumber' : 250, 'flags' : { 'public' }, 'url' : 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program4.txt',", "}, 'GL_IMG_texture_compression_pvrtc' : { 'esnumber' : 54, 'flags' : { 'public' }, 'url'", "'extensions/ARB/ARB_texture_rectangle.txt', }, 'GL_ARB_texture_rg' : { 'arbnumber' : 53, 'flags' : { 'public' },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_framezoom.txt', }, 'GLX_SGIX_hyperpipe' : {", "}, 'url' : 'extensions/EXT/EXT_raster_multisample.txt', }, 'GL_EXT_read_format_bgra' : { 'esnumber' : 66, 'flags' :", "'number' : 23, 'flags' : { 'public' }, 'supporters' : { 'ES', 'INGR',", "}, 'GL_EXT_frag_depth' : { 'esnumber' : 86, 'flags' : { 'public' }, 'url'", "'number' : 168, 'flags' : { 'public' }, 'supporters' : { 'INGR', 'SGI'", "}, 'GL_SGIX_nonlinear_lighting_pervertex' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_nonlinear_lighting_pervertex.txt', },", "'extensions/NV/NV_bindless_multi_draw_indirect_count.txt', }, 'GL_NV_bindless_texture' : { 'number' : 418, 'esnumber' : 197, 'flags' :", "}, 'GL_EXT_index_array_formats' : { 'number' : 96, 'flags' : { 'public' }, 'supporters'", "'arbnumber' : 184, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_clock.txt', }, 'GL_ARB_shader_draw_parameters'", "'number' : 125, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", ": 'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', }, 'GL_APPLE_texture_format_BGRA8888' : { 'esnumber' : 79, 'flags' : { 'public'", "}, 
'GL_EXT_texture_cube_map_array' : { 'esnumber' : 184, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_shader_precision.txt', }, 'GL_ARB_shader_stencil_export' : { 'arbnumber' : 106,", "'extensions/EXT/EXT_point_parameters.txt', }, 'GL_EXT_polygon_offset' : { 'number' : 3, 'flags' : { 'public' },", ": 446, 'flags' : { 'public' }, 'url' : 'extensions/MESA/GLX_MESA_query_renderer.txt', }, 'GLX_MESA_release_buffers' :", ": 'extensions/SGIX/SGIX_scalebias_hint.txt', }, 'GL_SGIX_shadow' : { 'number' : 34, 'flags' : { 'public'", ": 'extensions/APPLE/APPLE_framebuffer_multisample.txt', }, 'GL_APPLE_object_purgeable' : { 'number' : 371, 'flags' : { 'public'", "'extensions/ATI/WGL_ATI_pixel_format_float.txt', }, 'WGL_EXT_colorspace' : { 'number' : 498, 'flags' : { 'public' },", ": { 'GREMEDY' }, 'url' : 'extensions/GREMEDY/GREMEDY_frame_terminator.txt', }, 'GL_GREMEDY_string_marker' : { 'number' :", "'GL_EXT_pixel_transform' : { 'number' : 138, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_OES_shader_image_atomic' : { 'esnumber' : 171, 'flags' : { 'public' }, 'url'", ": 'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', }, 'GL_PGI_misc_hints' : { 'number' : 77, 'flags' : { 'public'", ": 'extensions/NV/NV_texture_compression_s3tc_update.txt', }, 'GL_NV_texture_compression_vtc' : { 'number' : 228, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control_tear.txt',", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shader_subroutine.txt', }, 'GL_ARB_shader_texture_image_samples' : { 'arbnumber' :", "}, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_occlusion_test.txt', }, 'GL_HP_texture_lighting' : {", "{ 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', }, 'GL_NV_draw_buffers' : { 'esnumber' : 91,", ": { 'esnumber' : 68, 'flags' : { 'public' }, 'url' : 'extensions/IMG/IMG_shader_binary.txt',", ": { 'number' : 228, 'flags' : { 'public' }, 
'supporters' : {", "'extensions/OES/OES_mapbuffer.txt', }, 'GL_OES_matrix_get' : { 'esnumber' : 11, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_map_buffer_range.txt', },", "{ 'flags' : { 'public' }, 'url' : 'extensions/SGIX/SGIX_fog_texture.txt', }, 'GL_SGIX_fragment_lighting_space' : {", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_interleaved_elements.txt', }, 'GL_AMD_multi_draw_indirect' : { 'number' : 408,", ": { 'public' }, 'url' : 'extensions/ARB/ARB_conditional_render_inverted.txt', }, 'GL_ARB_conservative_depth' : { 'arbnumber' :", "'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_interlace.txt', }, 'GL_OML_resample' : { 'number'", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_cadd.txt', }, 'GL_SGIX_blend_cmultiply'", "'supporters' : { 'APPLE', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_transform_feedback.txt', }, 'GL_EXT_transform_feedback2' : {", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_specular_vector.txt', }, 'GL_APPLE_sync' : { 'esnumber' : 124, 'flags'", ": 'extensions/EXT/EXT_blend_logic_op.txt', }, 'GL_EXT_blend_minmax' : { 'number' : 37, 'esnumber' : 65, 'flags'", "}, 'GL_SGIX_subdiv_patch' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_subdiv_patch.txt', },", "}, }, 'GL_ARB_robustness_isolation' : { 'arbnumber' : 126, 'flags' : { 'public' },", "{ 'KGC', 'SGI' }, 'url' : 'extensions/SGIS/SGIS_detail_texture.txt', }, 'GL_SGIS_fog_function' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_coordinate_frame.txt', }, 'GL_EXT_copy_image' : { 'esnumber'", ": { 'public' }, 'url' : 'extensions/ANDROID/ANDROID_extension_pack_es31a.txt', }, 'GL_ANGLE_depth_texture' : { 'esnumber' :", "}, 'url' : 'extensions/SGIX/SGIX_mpeg1.txt', }, 'GL_SGIX_mpeg2' : { 'flags' : { 'incomplete' },", "'number' : 94, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI'", ": { 'number' : 305, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 
'MESA' }, 'url' : 'extensions/AMD/AMD_shader_ballot.txt', }, 'GL_AMD_shader_explicit_vertex_parameter' : { 'number'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias' :", "'GL_SGIX_interlace' : { 'number' : 45, 'flags' : { 'public' }, 'supporters' :", "'flags' : { 'public' }, 'url' : 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', }, 'GL_ANGLE_program_binary' : { 'esnumber'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shader_objects.txt',", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_texture_lod.txt', }, 'GL_ARB_shader_viewport_layer_array' : { 'arbnumber'", ": { 'public' }, 'url' : 'extensions/NV/NV_command_list.txt', }, 'GL_NV_compute_program5' : { 'number' :", "'extensions/NV/NV_vertex_attrib_integer_64bit.txt', }, 'GL_NV_vertex_buffer_unified_memory' : { 'number' : 380, 'flags' : { 'public' },", "}, 'url' : 'extensions/EXT/EXT_render_snorm.txt', }, 'GL_EXT_rescale_normal' : { 'number' : 27, 'flags' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_create_context_es2_profile.txt', 'alias' : { 'GLX_EXT_create_context_es_profile'", "{ 'number' : 319, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'number' : 453, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_gcn_shader.txt', }, 'GLX_AMD_gpu_association'", "'flags' : { 'public' }, 'supporters' : { 'ES', 'SGI' }, 'url' :", "'url' : 'extensions/NV/NV_float_buffer.txt', 'alias' : { 'WGL_NV_float_buffer' }, }, 'GL_NV_fog_distance' : { 'number'", "'public' }, 'url' : 'extensions/ARB/ARB_map_buffer_alignment.txt', }, 'GL_ARB_map_buffer_range' : { 'arbnumber' : 50, 'flags'", ": 476, 'esnumber' : 237, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_array2.txt',", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_color_range.txt', 'alias'", "'extensions/ARB/WGL_ARB_make_current_read.txt', }, 'WGL_ARB_pbuffer' : { 'arbnumber' : 11, 'flags' : { 
'public' },", "}, 'GL_ARB_shader_texture_image_samples' : { 'arbnumber' : 166, 'flags' : { 'public' }, 'url'", "'GL_EXT_separate_shader_objects' : { 'number' : 377, 'esnumber' : 101, 'flags' : { 'public'", "{ 'esnumber' : 205, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multi_draw_indirect.txt', },", "{ 'number' : 119, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "}, 'GLX_EXT_texture_from_pixmap' : { 'number' : 344, 'flags' : { 'public' }, 'supporters'", ": 'extensions/EXT/EXT_shadow_funcs.txt', }, 'GL_EXT_shadow_samplers' : { 'esnumber' : 102, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_cl_event.txt', }, 'GL_ARB_clear_buffer_object' : { 'arbnumber' : 121, 'flags'", "'url' : 'extensions/OES/OES_texture_float_linear.txt', 'alias' : { 'GL_OES_texture_half_float_linear' }, }, 'GL_OES_texture_mirrored_repeat' : { 'esnumber'", "{ 'esnumber' : 224, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_float_blend.txt', },", ": 464, 'esnumber' : 227, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_filter_minmax.txt',", "'public' }, 'supporters' : { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_add.txt', }, 'GL_EXT_texture_env_combine'", "'GL_SGIX_dvc' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_dvc.txt', }, 'GLX_SGIX_fbconfig'", "{ 'IBM' }, 'url' : 'extensions/IBM/IBM_vertex_array_lists.txt', }, 'GL_IGLOO_swap_triangle_strip_vertex_pointerXXX' : { 'flags' : {", ": 206, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_render_snorm.txt', }, 'GL_EXT_rescale_normal' :", "'extensions/ARB/ARB_shader_bit_encoding.txt', }, 'GL_ARB_shader_clock' : { 'arbnumber' : 184, 'flags' : { 'public' },", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_stencil_operation_extended.txt', }, 'GL_AMD_texture_gather_bias_lod' : { 'number'", ": 'extensions/ARB/ARB_texture_mirrored_repeat.txt', }, 'GL_ARB_texture_multisample' : { 'arbnumber' : 67, 'flags' : { 'public'", "{ 'number' : 10, 'flags' : { 'public' }, 
'supporters' : { 'ES',", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_conditional_render.txt', }, 'GL_NV_conservative_raster' : { 'number'", ": 'extensions/ANGLE/ANGLE_instanced_arrays.txt', }, 'GL_ANGLE_pack_reverse_row_order' : { 'esnumber' : 110, 'flags' : { 'public'", "'url' : 'extensions/I3D/WGL_I3D_swap_frame_lock.txt', }, 'WGL_I3D_swap_frame_usage' : { 'number' : 255, 'flags' : {", "'public' }, 'url' : 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', }, 'GL_ARB_separate_shader_objects' : { 'arbnumber' : 97, 'flags'", ": 314, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "}, 'url' : 'extensions/ARB/ARB_fragment_shader_interlock.txt', }, 'GL_ARB_framebuffer_no_attachments' : { 'arbnumber' : 130, 'flags' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_color_subtable.txt', }, 'GL_EXT_compiled_vertex_array' : { 'number' :", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_framebuffer_blit.txt', }, 'GL_NV_framebuffer_mixed_samples' : { 'number'", ": { 'ES', 'INGR', 'SGI' }, 'url' : 'extensions/EXT/EXT_packed_pixels.txt', }, 'GL_EXT_paletted_texture' : {", "}, 'GL_ARB_gpu_shader5' : { 'arbnumber' : 88, 'flags' : { 'public' }, 'url'", "'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_visual_select_group.txt', }, 'GLX_SGIX_wait_group' : { 'flags' : { 'incomplete'", "{ 'esnumber' : 276, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias'", "}, 'url' : 'extensions/OES/OES_texture_float_linear.txt', 'alias' : { 'GL_OES_texture_half_float_linear' }, }, 'GL_OES_texture_mirrored_repeat' : {", "}, 'GL_ARB_texture_cube_map_array' : { 'arbnumber' : 71, 'flags' : { 'public' }, 'url'", "'public' }, 'url' : 'extensions/EXT/EXT_texture_type_2_10_10_10_REV.txt', }, 'GL_EXT_texture_view' : { 'esnumber' : 185, 'flags'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_draw_vulkan_image.txt', },", "78, 'flags' : { 'public' }, 'url' : 
'extensions/APPLE/APPLE_framebuffer_multisample.txt', }, 'GL_APPLE_object_purgeable' : {", ": { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_agp_offset.txt', }, 'GLX_MESA_copy_sub_buffer' : { 'number' :", "}, 'GLX_NV_copy_buffer' : { 'number' : 457, 'flags' : { 'public' }, 'supporters'", "'GL_ANGLE_program_binary' : { 'esnumber' : 139, 'flags' : { 'public' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_object_purgeable.txt',", "{ 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_blackhole_render.txt', }, 'GL_INTEL_parallel_arrays'", ": { 'arbnumber' : 167, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_barrier.txt',", ": 'extensions/EXT/EXT_multiview_draw_buffers.txt', }, 'GLU_EXT_nurbs_tessellator' : { 'number' : 100, 'flags' : { 'public'", "'arbnumber' : 143, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias' :", ": 'extensions/AMD/AMD_gpu_shader_half_float.txt', }, 'GL_AMD_gpu_shader_half_float_fetch' : { 'number' : 519, 'flags' : { 'public'", "'url' : 'extensions/SGIX/SGIX_quad_mesh.txt', }, 'GL_SGIX_reference_plane' : { 'number' : 60, 'flags' : {", "{ 'public' }, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_usage.txt', }, 'GL_WIN_phong_shading'", "{ 'number' : 247, 'flags' : { 'public' }, 'supporters' : { 'ATI'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object.txt',", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_shading_language_100.txt', },", ": { 'GLX_SGIS_multisample' }, }, 'GL_SGIS_multitexture' : { 'number' : 116, 'flags' :", ": { 'public' }, 'url' : 'extensions/EXT/EXT_polygon_offset_clamp.txt', }, 'GL_EXT_post_depth_coverage' : { 'number' :", "'extensions/NV/NV_fragment_shader_interlock.txt', }, 'GL_NV_framebuffer_blit' : { 'esnumber' : 142, 'flags' : { 'public' },", "'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/NV/NV_texture_expand_normal.txt', }, 'GL_NV_texture_multisample' : { 'number'", "{ 'SGI' }, 'url' : 'extensions/SGIS/SGIS_multitexture.txt', }, 'GL_SGIS_pixel_texture' : { 'number' : 15,", ": { 'esnumber' : 23, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image.txt',", "32, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_fence.txt', }, 'GL_APPLE_float_pixels' : { 'number' : 368,", ": 259, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture2.txt', }, 'GL_EXT_static_vertex_array' :", "'extensions/ATI/ATI_texture_env_combine3.txt', }, 'GL_ATI_texture_float' : { 'number' : 280, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', }, 'GL_NV_conservative_raster_underestimation' : { 'number' : 518, 'flags'", "{ 'number' : 307, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'ANGLE' }, 'url' : 'extensions/EXT/EXT_texture_compression_s3tc_srgb.txt', }, 'GL_EXT_texture_cube_map' : { 'flags' : { 'incomplete'", "244, 'flags' : { 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_shader_binary.txt', }, 'GL_NVX_blend_equation_advanced_multi_draw_buffers' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_matrix_palette.txt', },", ": { 'public' }, 'supporters' : { 'KHR' }, 'url' : 'extensions/OML/OML_subsample.txt', },", "{ 'public' }, 'url' : 'extensions/NV/NV_stereo_view_rendering.txt', }, 'GLX_NV_swap_group' : { 'number' : 350,", "}, 'url' : 'extensions/EXT/GLX_EXT_buffer_age.txt', }, 'GL_EXT_buffer_storage' : { 'esnumber' : 239, 'flags' :", "{ 'number' : 504, 'esnumber' : 281, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_shader_io_blocks.txt', }, 'GL_EXT_shader_non_constant_global_initializers' : { 'esnumber' :", ": 38, 'flags' : { 'public' }, 'supporters' : { 'HP', 'IBM', 'INGR',", "'url' : 'extensions/S3/S3_s3tc.txt', }, 'GLX_SGIS_blended_overlay' : { 'number' : 142, 
'flags' : {", "'extensions/ARB/ARB_texture_rg.txt', }, 'GL_ARB_texture_rgb10_a2ui' : { 'arbnumber' : 83, 'flags' : { 'public' },", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias' : { 'GL_SGIX_vertex_preclip_hint' }, }, 'GLX_SGIX_video_resize' :", "}, 'url' : 'extensions/NV/NV_viewport_array2.txt', }, 'GL_NV_viewport_swizzle' : { 'number' : 483, 'esnumber' :", ": { 'esnumber' : 81, 'flags' : { 'public' }, 'url' : 'extensions/ARM/ARM_mali_shader_binary.txt',", "'GL_OES_texture_npot' : { 'esnumber' : 37, 'flags' : { 'public' }, 'url' :", ": { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_source.txt', }, 'GLX_SGIX_visual_select_group' : { 'number' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program2.txt', }, 'GL_NV_fragment_program4' : { 'number' :", "}, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_map_texture.txt', }, 'GL_INTEL_blackhole_render' : {", ": 'extensions/EXT/EXT_debug_marker.txt', }, 'GL_EXT_depth_bounds_test' : { 'number' : 297, 'flags' : { 'public'", ": 429, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/WGL_EXT_multisample.txt', 'alias' : { 'GL_EXT_multisample' },", "'GL_EXT_abgr' : { 'number' : 1, 'flags' : { 'public' }, 'supporters' :", "}, 'GL_SGI_fft' : { 'number' : 99, 'flags' : { 'incomplete' }, 'supporters'", ": { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_dm_buffer.txt', }, 'GL_SGIX_dvc' : { 'flags' :", ": 'extensions/AMD/AMD_shader_ballot.txt', }, 'GL_AMD_shader_explicit_vertex_parameter' : { 'number' : 485, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_texture_scissor.txt', },", ": 'extensions/APPLE/APPLE_vertex_array_range.txt', }, 'GL_APPLE_vertex_program_evaluators' : { 'number' : 369, 'flags' : { 'public'", ": 99, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url'", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_shader.txt',", 
"}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_framebuffer_multisample_coverage.txt', }, 'GL_NV_generate_mipmap_sRGB' : {", "{ 'WGL_EXT_create_context_es_profile' }, }, 'WGL_EXT_depth_float' : { 'number' : 177, 'flags' : {", "'url' : 'extensions/EXT/EXT_framebuffer_sRGB.txt', 'alias' : { 'GLX_EXT_framebuffer_sRGB', 'WGL_EXT_framebuffer_sRGB' }, }, 'GL_EXT_geometry_shader' : {", "}, 'url' : 'extensions/NV/NV_texture_compression_vtc.txt', }, 'GL_NV_texture_env_combine4' : { 'number' : 195, 'flags' :", "'GL_EXT_separate_specular_color' : { 'number' : 144, 'flags' : { 'public' }, 'url' :", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_phase.txt', }, 'GL_SGIX_texture_range' : {", "131, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_platform_binary.txt', }, 'GL_NV_point_sprite' : {", ": { 'AMD' }, 'url' : 'extensions/AMD/AMD_sparse_texture.txt', }, 'GL_AMD_stencil_operation_extended' : { 'number' :", "}, 'GL_ARM_mali_program_binary' : { 'esnumber' : 120, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_draw_indirect.txt', }, 'GL_ARB_draw_instanced' : { 'arbnumber' :", "'extensions/OES/OES_vertex_array_object.txt', }, 'GL_OES_vertex_half_float' : { 'esnumber' : 38, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/OES/OES_texture_storage_multisample_2d_array.txt', }, 'GL_OES_texture_view' : { 'esnumber' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_buffer_unified_memory.txt', }, 'GL_NV_vertex_program' : {", "'url' : 'extensions/ARM/ARM_mali_program_binary.txt', }, 'GL_ARM_mali_shader_binary' : { 'esnumber' : 81, 'flags' : {", "}, 'GL_SGIS_texture_filter4' : { 'number' : 7, 'flags' : { 'public' }, 'supporters'", "}, 'GL_NV_transform_feedback2' : { 'number' : 358, 'flags' : { 'public' }, 'supporters'", "'number' : 159, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_instrument_error.txt', }, 'GL_SGIX_instruments' : { 
'number' : 55,", "}, 'GL_AMD_seamless_cubemap_per_texture' : { 'number' : 383, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/OES/OES_gpu_shader5.txt', }, 'GL_OES_mapbuffer' : { 'esnumber' : 29, 'flags' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_blend_square.txt', }, 'GL_NV_clip_space_w_scaling' : {", "{ 'HP', 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_visual_rating.txt', }, 'GL_EXT_win32_keyed_mutex' : { 'number'", "}, }, 'WGL_EXT_pbuffer' : { 'number' : 171, 'flags' : { 'public' },", "}, 'url' : 'extensions/EXT/GLU_EXT_nurbs_tessellator.txt', }, 'GLU_EXT_object_space_tess' : { 'number' : 75, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_compressed_3DC_texture.txt', }, 'GL_AMD_compressed_ATC_texture' : { 'esnumber'", ": { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rectangle.txt', }, 'GL_ARB_texture_rg' : { 'arbnumber' :", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_multi_draw_indirect.txt', }, 'GL_AMD_name_gen_delete' : {", ": 'extensions/ARB/ARB_spirv_extensions.txt', }, 'GL_ARB_stencil_texturing' : { 'arbnumber' : 138, 'flags' : { 'public'", "'extensions/ARB/ARB_texture_query_lod.txt', }, 'GL_ARB_texture_rectangle' : { 'arbnumber' : 38, 'flags' : { 'public' },", "'url' : 'extensions/OVR/OVR_multiview2.txt', }, 'GL_OVR_multiview_multisampled_render_to_texture' : { 'esnumber' : 250, 'flags' : {", "'extensions/NVX/NVX_gpu_memory_info.txt', }, 'GL_NVX_linked_gpu_multicast' : { 'number' : 493, 'flags' : { 'public' },", ": 'extensions/ARB/ARB_framebuffer_sRGB.txt', 'alias' : { 'GLX_ARB_framebuffer_sRGB', 'WGL_ARB_framebuffer_sRGB' }, }, 'GL_ARB_geometry_shader4' : { 'arbnumber'", "'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_layers.txt', }, 'GL_SGIX_fog_offset' : { 'number'", "'GL_ARB_shader_precision' : { 'arbnumber' : 98, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2.txt', },", ": { 'number' : 89, 
'flags' : { 'incomplete' }, 'supporters' : {", "'extensions/EXT/EXT_draw_buffers2.txt', }, 'GL_EXT_draw_buffers_indexed' : { 'esnumber' : 176, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/NV/NV_explicit_attrib_location.txt', }, 'GL_NV_explicit_multisample' : { 'number' : 357,", "{ 'public' }, 'url' : 'extensions/QCOM/QCOM_extended_get2.txt', }, 'GL_QCOM_performance_monitor_global_mode' : { 'esnumber' : 56,", "}, 'GL_ARB_ES3_1_compatibility' : { 'arbnumber' : 159, 'flags' : { 'public' }, 'url'", ": { 'number' : 434, 'flags' : { 'public' }, 'supporters' : {", ": { 'esnumber' : 179, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_implicit_conversions.txt',", "{ 'public' }, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_float_pixels.txt', }, 'GL_APPLE_flush_buffer_range'", ": { 'number' : 447, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_thread_group.txt',", "'flags' : { 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/WIN/WIN_specular_fog.txt',", "'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map_array.txt', }, 'GL_OES_texture_env_crossbar' : { 'esnumber'", "{ 'obsolete' }, 'url' : 'extensions/EXT/EXT_texenv_op.txt', 'comments' : 'Evolved into EXT_texture_env_combine.', }, 'GL_EXT_texture'", "'number' : 196, 'flags' : { 'public' }, 'supporters' : { 'MESA' },", "}, 'GL_NV_blend_minmax_factor' : { 'number' : 510, 'esnumber' : 285, 'flags' : {", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cylinder_texgen.txt', },", "'extensions/ARB/ARB_transpose_matrix.txt', }, 'GL_ARB_uniform_buffer_object' : { 'arbnumber' : 57, 'flags' : { 'public' },", "'supporters' : { '3DFX', 'NVIDIA', 'REND' }, 'url' : 'extensions/EXT/EXT_fog_coord.txt', }, 'GL_EXT_frag_depth' :", "'arbnumber' : 71, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_cube_map_array.txt', }, 'GL_ARB_texture_env_add'", ": 'extensions/ARB/ARB_half_float_pixel.txt', }, 'GL_ARB_half_float_vertex' : { 
'arbnumber' : 48, 'flags' : { 'public'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader.txt', }, 'GL_NV_texture_shader2' : {", "76, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_include.txt', }, 'GL_ARB_shading_language_packing' : {", ": 127, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "'esnumber' : 299, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_format_sRGB_override.txt', }, 'GLX_EXT_texture_from_pixmap'", "'number' : 483, 'esnumber' : 258, 'flags' : { 'public' }, 'url' :", ": 4, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_range.txt', }, 'GL_SGIX_texture_scale_bias'", "'WGL_EXT_create_context_es2_profile' : { 'number' : 400, 'flags' : { 'public' }, 'supporters' :", "}, 'url' : 'extensions/NV/NV_pixel_data_range.txt', }, 'GL_NV_platform_binary' : { 'esnumber' : 131, 'flags' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_float_buffer.txt', 'alias' : { 'WGL_NV_float_buffer'", "{ 'AMD' }, 'url' : 'extensions/AMD/AMD_pinned_memory.txt', }, 'GL_AMD_program_binary_Z400' : { 'esnumber' : 48,", "'GL_ARB_transform_feedback3' : { 'arbnumber' : 94, 'flags' : { 'public' }, 'url' :", "}, 'GL_ARB_draw_indirect' : { 'arbnumber' : 87, 'flags' : { 'public' }, 'url'", "}, 'url' : 'extensions/APPLE/APPLE_vertex_array_range.txt', }, 'GL_APPLE_vertex_program_evaluators' : { 'number' : 369, 'flags' :", "'public' }, 'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_make_current_read.txt', }, 'WGL_EXT_multisample'", ": { 'public' }, 'url' : 'extensions/EXT/EXT_gpu_shader5.txt', }, 'GL_EXT_histogram' : { 'number' :", "{ 'esnumber' : 177, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_geometry_shader.txt', 'alias'", "}, 'GL_EXT_texture_compression_astc_decode_mode' : { 'esnumber' : 276, 'flags' : { 'public' }, 'url'", "{ 'public' }, 'url' : 
'extensions/NV/NV_EGL_stream_consumer_external.txt', }, 'GL_NV_alpha_to_coverage_dither_control' : { 'number' : 500,", "}, 'supporters' : { 'MS' }, 'url' : 'extensions/WIN/WIN_specular_fog.txt', }, 'WGL_NV_DX_interop' : {", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_timer_query.txt', }, 'GL_EXT_transform_feedback' : { 'number' : 352,", "'arbnumber' : 82, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_bit_encoding.txt', }, 'GL_ARB_shader_clock'", "'TransGaming' }, 'url' : 'extensions/EXT/EXT_provoking_vertex.txt', }, 'GL_EXT_pvrtc_sRGB' : { 'esnumber' : 155, 'flags'", "183, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url' :", "{ 'esnumber' : 217, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map_array.txt', },", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_polynomial_ffd.txt', }, 'GL_SGIX_quad_mesh' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_post_depth_coverage.txt', }, 'GL_ARB_program_interface_query' : { 'arbnumber'", "'extensions/EXT/EXT_protected_textures.txt', }, 'GL_EXT_provoking_vertex' : { 'number' : 364, 'flags' : { 'public' },", "4, 'flags' : { 'public' }, 'supporters' : { 'HP', 'INGR', 'KGC', 'SGI'", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_pixel_buffer_object.txt', }, 'GL_EXT_pixel_transform' : { 'number' : 138, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/GLX_SGIS_color_range.txt',", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_pbuffer.txt', },", "{ 'public' }, 'url' : 'extensions/NV/NV_draw_instanced.txt', }, 'GL_NV_draw_texture' : { 'number' : 430,", "55, WGL_ARB_create_context.', }, 'WGL_ARB_create_context_robustness' : { 'arbnumber' : 102, 'flags' : { 'public'", "71, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_cube_map_array.txt', }, 'GL_ARB_texture_env_add' : {", "}, 'url' : '../EGL/extensions/KHR/EGL_KHR_fence_sync.txt', }, 'GL_OES_blend_equation_separate' : { 'esnumber' : 1, 
'flags' :", "}, 'GL_OES_texture_cube_map' : { 'esnumber' : 20, 'flags' : { 'public' }, 'url'", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_ycbcr_texture.txt', }, 'GL_MTK_program_binary' : {", "'GL_SGIX_mpeg2' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg2.txt', }, 'GL_SGIX_nonlinear_lighting_pervertex'", "}, 'url' : 'extensions/INGR/INGR_interlace_read.txt', }, 'GL_INTEL_conservative_rasterization' : { 'number' : 491, 'esnumber' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_shader2.txt', },", "'flags' : { 'public' }, 'supporters' : { 'IBM', 'IMG', 'SUN' }, 'url'", "90, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_subroutine.txt', }, 'GL_ARB_shader_texture_image_samples' : {", ": 86, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_type_2_10_10_10_rev.txt', }, 'GL_ARB_viewport_array' :", "'url' : 'extensions/ATI/ATI_draw_buffers.txt', }, 'GL_ATI_element_array' : { 'number' : 256, 'flags' : {", "{ 'arbnumber' : 31, 'flags' : { 'public' }, 'supporters' : { 'ARB'", ": 'extensions/EXT/EXT_occlusion_query_boolean.txt', }, 'GL_EXT_packed_depth_stencil' : { 'number' : 312, 'flags' : { 'public'", ": { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_texture_range.txt', }, 'GL_APPLE_transform_hint' : { 'number' :", "'url' : 'extensions/ARB/WGL_ARB_make_current_read.txt', }, 'WGL_ARB_pbuffer' : { 'arbnumber' : 11, 'flags' : {", ": { 'public' }, 'supporters' : { 'ES', 'HP', 'SGI', 'SUN' }, 'url'", ": 'extensions/EXT/EXT_copy_image.txt', }, 'GL_EXT_copy_texture' : { 'number' : 10, 'flags' : { 'public'", "as ARB_transform_feedback2.', }, 'GL_EXT_unpack_subimage' : { 'esnumber' : 90, 'flags' : { 'public'", "'esnumber' : 223, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_sRGB_RG8.txt', }, 'GL_EXT_texture_sRGB_decode'", "270, 'flags' : { 'public' }, 'supporters' : { 'APPLE' }, 'url' :", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 
'extensions/SGIX/SGIX_blend_cadd.txt', },", "'url' : 'extensions/EXT/EXT_frag_depth.txt', }, 'GL_EXT_fragment_lighting' : { 'number' : 102, 'flags' : {", "'url' : 'extensions/NV/NV_copy_buffer.txt', }, 'GL_NV_copy_depth_to_color' : { 'number' : 243, 'flags' : {", ": 'extensions/NV/NV_draw_instanced.txt', }, 'GL_NV_draw_texture' : { 'number' : 430, 'esnumber' : 126, 'flags'", "'arbnumber' : 67, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_texture_multisample.txt', }, 'GL_ARB_texture_non_power_of_two'", "'extensions/NV/WGL_NV_delay_before_swap.txt', }, 'WGL_NV_gpu_affinity' : { 'number' : 355, 'flags' : { 'public' },", ": { 'arbnumber' : 45, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'drafts/MTK/MTK_shader_binary.txt', }, 'GL_NVX_blend_equation_advanced_multi_draw_buffers' : { 'number' : 492, 'esnumber' :", ": { 'number' : 366, 'flags' : { 'public' }, 'supporters' : {", "'supporters' : { 'INTEL', 'SGI' }, 'url' : 'extensions/EXT/EXT_index_func.txt', }, 'GL_EXT_index_material' : {", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_rectangle.txt', }, 'GL_ARB_texture_rg' : {", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_float.txt', },", ": 5, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_compressed_ETC1_RGB8_texture.txt', }, 'GL_OES_compressed_paletted_texture' :", ": 'extensions/ATI/ATI_vertex_array_object.txt', }, 'GL_ATI_vertex_attrib_array_object' : { 'number' : 290, 'flags' : { 'public'", "'extensions/SGI/GLX_SGI_make_current_read.txt', }, 'GLX_SGI_swap_control' : { 'number' : 40, 'flags' : { 'public' },", "'extensions/EXT/EXT_clear_texture.txt', }, 'GL_EXT_clip_cull_distance' : { 'esnumber' : 257, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'HP', 'SUN' }, 'url' : 'extensions/EXT/EXT_pixel_transform_color_table.txt',", "}, 'GL_DMP_shader_binary' : { 'esnumber' : 88, 'flags' : { 'public' }, 'url'", "94, 'flags' : { 'public' }, 'url' : 
'extensions/NV/NV_read_depth_stencil.txt', }, 'GL_NV_register_combiners' : {", "'number' : 363, 'flags' : { 'public' }, 'supporters' : { 'AMD' },", "}, 'GL_ARB_shader_bit_encoding' : { 'arbnumber' : 82, 'flags' : { 'public' }, 'url'", "'number' : 471, 'esnumber' : 234, 'flags' : { 'public' }, 'url' :", "{ 'HP', 'IBM', 'INGR', 'KGC', 'SGI' }, 'url' : 'extensions/EXT/EXT_blend_logic_op.txt', }, 'GL_EXT_blend_minmax' :", ": 183, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url'", "}, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage2.txt', }, 'GL_EXT_shader_texture_lod' : { 'esnumber' : 77, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_cmyka.txt', }, 'GL_EXT_color_buffer_float' : { 'esnumber' : 137, 'flags' :", "{ 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_map_texture.txt', }, 'GL_INTEL_blackhole_render' : { 'number' : 521,", "'GL_NV_path_rendering' : { 'number' : 410, 'esnumber' : 199, 'flags' : { 'public'", "'number' : 158, 'flags' : { 'public' }, 'supporters' : { 'ATI', 'NVIDIA'", "}, 'supporters' : { 'HP' }, 'url' : 'extensions/HP/HP_texture_lighting.txt', }, 'GL_IBM_cull_vertex' : {", "'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_pbuffer.txt',", "'url' : 'extensions/SGIX/SGIX_decimation.txt', }, 'GL_SGIX_depth_pass_instrument' : { 'number' : 205, 'flags' : {", "'GL_EXT_memory_object_fd' : { 'number' : 504, 'esnumber' : 281, 'flags' : { 'public'", "'GL_OES_shader_io_blocks' : { 'esnumber' : 213, 'flags' : { 'public' }, 'url' :", "'url' : 'extensions/AMD/AMD_interleaved_elements.txt', }, 'GL_AMD_multi_draw_indirect' : { 'number' : 408, 'flags' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_rectangle_compressed.txt', }, 'GL_NV_texture_shader' : { 'number' : 230,", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression.txt',", ": 'extensions/APPLE/APPLE_fence.txt', }, 'GL_APPLE_float_pixels' : { 'number' : 368, 'flags' : { 'public'", "'number' : 520, 'esnumber' : 122, 
'flags' : { 'public' }, 'url' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_blend.txt', },", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_swap_control.txt', }, 'GLX_EXT_swap_control_tear' : {", ": 34, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI' },", "'number' : 191, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'public' }, 'supporters' : { 'IBM', 'INGR' }, 'url' : 'extensions/EXT/EXT_blend_func_separate.txt', }, 'GL_EXT_blend_logic_op'", "{ 'esnumber' : 269, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_clear_texture.txt', },", ": 216, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_buffer.txt', }, 'GL_OES_texture_compression_astc' :", "148, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_sRGB_formats.txt', }, 'GL_NV_sample_locations' : {", "{ 'number' : 465, 'esnumber' : 228, 'flags' : { 'public' }, 'url'", ": { 'number' : 141, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/EXT/EXT_draw_buffers.txt', }, 'GL_EXT_draw_buffers2' : { 'number' : 340, 'flags' :", "'public' }, 'url' : 'extensions/NV/NV_fragment_shader_interlock.txt', }, 'GL_NV_framebuffer_blit' : { 'esnumber' : 142, 'flags'", "220, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_EGL_image_external_essl3.txt', }, 'GL_OES_EGL_sync' : {", ": { 'flags' : { 'public' }, 'url' : 'extensions/SGIX/SGIX_fog_texture.txt', }, 'GL_SGIX_fragment_lighting_space' :", ": 154, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url'", "'SGI' }, 'url' : 'extensions/SGIS/SGIS_point_line_texgen.txt', }, 'GL_SGIS_shared_multisample' : { 'number' : 143, 'flags'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_compression_latc.txt',", ": 408, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback.txt', }, 'GL_NV_transform_feedback2'", ": { 'arbnumber' : 128, 'flags' : { 'public' 
}, 'url' : 'extensions/ARB/ARB_explicit_uniform_location.txt',", "'MESA' }, 'url' : 'extensions/MESA/MESA_tile_raster_order.txt', }, 'GL_MESA_window_pos' : { 'number' : 197, 'flags'", "}, 'GL_NV_bgr' : { 'esnumber' : 135, 'flags' : { 'public' }, 'url'", "{ 'number' : 285, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/EXT/EXT_clip_control.txt',", ": 232, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fill_rectangle.txt', }, 'GL_NV_float_buffer' :", "{ 'number' : 312, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_packed_depth_stencil.txt', },", "{ 'number' : 519, 'flags' : { 'public' }, 'supporters' : { 'AMD'", ": 136, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_image_size.txt', }, 'GL_ARB_shader_objects' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ycrcba.txt', }, 'GL_SGI_color_matrix' : { 'number'", ": 28, 'flags' : { 'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI'", "'url' : 'extensions/APPLE/APPLE_rgb_422.txt', }, 'GL_APPLE_row_bytes' : { 'number' : 372, 'flags' : {", "'KHR' }, 'url' : 'extensions/OES/OES_byte_coordinates.txt', }, 'GL_OES_compressed_ETC1_RGB8_texture' : { 'esnumber' : 5, 'flags'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_async_histogram.txt', }, 'GL_SGIX_async_pixel' : {", ": { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ir_instrument1.txt', }, 'GL_SGIX_line_quality_hint' : { 'flags' :", "'url' : 'extensions/SUN/SUN_triangle_list.txt', }, 'GL_SUN_vertex' : { 'number' : 166, 'flags' : {", "'public' }, 'url' : 'extensions/ARB/ARB_draw_indirect.txt', }, 'GL_ARB_draw_instanced' : { 'arbnumber' : 44, 'flags'", ": 157, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "'url' : 'extensions/ARB/ARB_gpu_shader_fp64.txt', }, 'GL_ARB_gpu_shader_int64' : { 'arbnumber' : 178, 'flags' : {", "314, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'url' : 'extensions/ARB/ARB_shader_atomic_counter_ops.txt', 
}, 'GL_ARB_shader_atomic_counters' : { 'arbnumber' : 114, 'flags' : {", "}, 'GL_NV_vertex_program3' : { 'number' : 306, 'flags' : { 'public' }, 'supporters'", "'SUN' }, 'url' : 'extensions/SGI/SGI_texture_color_table.txt', }, 'GLX_SGI_transparent_pixel' : { 'number' : 153, 'flags'", "'number' : 485, 'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', }, 'GL_AMD_shader_image_load_store_lod'", "{ 'number' : 170, 'flags' : { 'public' }, 'supporters' : { 'INGR',", ": { 'public' }, 'supporters' : { 'IBM', 'KGC', 'SGI' }, 'url' :", ": { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_non_power_of_two.txt', },", ": 'extensions/ARB/ARB_transform_feedback3.txt', }, 'GL_ARB_transform_feedback_instanced' : { 'arbnumber' : 109, 'flags' : { 'public'", ": { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', }, 'GL_IGLOO_viewport_offsetXXX' : { 'flags'", "'arbnumber' : 172, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_buffer.txt', }, 'GL_ARB_sparse_texture'", "}, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap.txt', }, 'GL_NV_conservative_raster_pre_snap_triangles' : { 'number' : 487, 'esnumber' :", "}, 'GL_EXT_vertex_array_setXXX' : { 'flags' : { 'public' }, 'supporters' : { 'IBM'", "396, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "483, 'esnumber' : 258, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_swizzle.txt', },", "'url' : 'extensions/ARB/ARB_window_pos.txt', }, 'GL_ARM_mali_program_binary' : { 'esnumber' : 120, 'flags' : {", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_texture_lod.txt', }, 'GL_SGIS_texture_select' : {", "}, 'GL_SGIX_blend_alpha_minmax' : { 'number' : 119, 'flags' : { 'public' }, 'supporters'", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_blend_cadd.txt', }, 'GL_SGIX_blend_cmultiply' : { 'flags'", "'extensions/ARB/ARB_sampler_objects.txt', }, 'GL_ARB_seamless_cube_map' : { 
'arbnumber' : 65, 'flags' : { 'public' },", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_subsample.txt', }, 'GLX_SGIX_swap_barrier'", "'url' : 'extensions/EXT/WGL_EXT_create_context_es2_profile.txt', 'alias' : { 'WGL_EXT_create_context_es_profile' }, }, 'WGL_EXT_depth_float' : { 'number'", "'url' : 'extensions/EXT/EXT_texenv_op.txt', 'comments' : 'Evolved into EXT_texture_env_combine.', }, 'GL_EXT_texture' : { 'number'", "}, 'GL_SGIX_sprite' : { 'number' : 52, 'flags' : { 'public' }, 'supporters'", "{ 'number' : 208, 'flags' : { 'public' }, 'supporters' : { '3DFX'", "88, 'flags' : { 'public' }, 'url' : 'extensions/DMP/DMP_shader_binary.txt', }, 'GL_EXT_422_pixels' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', }, 'GL_ARB_separate_shader_objects' : { 'arbnumber' :", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias' : { 'GL_SGIX_vertex_preclip_hint'", "{ 'arbnumber' : 4, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "133, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_multi_draw_indirect.txt', }, 'GL_ARB_multisample' : {", "'url' : 'extensions/ARB/ARB_sparse_texture_clamp.txt', }, 'GL_ARB_spirv_extensions' : { 'arbnumber' : 194, 'flags' : {", ": 521, 'esnumber' : 300, 'flags' : { 'public' }, 'supporters' : {", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_packed_depth_stencil.txt', }, 'GL_NV_packed_float' : { 'esnumber' : 127,", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_register_combiners.txt', }, 'GL_NV_register_combiners2'", "'url' : 'extensions/NV/NV_shader_atomic_float.txt', }, 'GL_NV_shader_atomic_float64' : { 'number' : 488, 'flags' : {", ": 91, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_evaluators.txt', }, 'GL_NV_explicit_attrib_location' : {", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sample_positions.txt', }, 
'GL_AMD_seamless_cubemap_per_texture' : {", "'url' : 'extensions/AMD/AMD_pinned_memory.txt', }, 'GL_AMD_program_binary_Z400' : { 'esnumber' : 48, 'flags' : {", "299, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'url' : 'extensions/ARB/ARB_internalformat_query.txt', }, 'GL_ARB_internalformat_query2' : { 'arbnumber' : 131, 'flags' : {", "'number' : 14, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI',", "'number' : 93, 'flags' : { 'public' }, 'supporters' : { 'INTEL', 'SGI'", ": { 'public' }, 'url' : 'extensions/NV/NV_read_depth_stencil.txt', }, 'GL_NV_register_combiners' : { 'number' :", ": { 'arbnumber' : 159, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_1_compatibility.txt',", "}, 'url' : 'extensions/EXT/EXT_texture_cube_map_array.txt', }, 'GL_EXT_texture_env' : { 'number' : 146, 'flags' :", "'number' : 297, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "{ 'arbnumber' : 47, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "16, 'flags' : { 'public' }, 'supporters' : { 'KHR' }, 'url' :", "'extensions/OES/OES_sample_shading.txt', }, 'GL_OES_sample_variables' : { 'esnumber' : 170, 'flags' : { 'public' },", ": { 'SUN' }, 'url' : 'extensions/SUN/SUN_mesh_array.txt', }, 'GL_SUN_slice_accum' : { 'number' :", ": { 'esnumber' : 169, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_sample_shading.txt',", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_primitive_restart.txt', }, 'GL_NV_query_resource' : { 'number' : 511,", "'GL_ARB_pipeline_statistics_query' : { 'arbnumber' : 171, 'flags' : { 'public' }, 'url' :", "40, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program2_option.txt', }, 'GL_NV_vertex_program3' : {", ": { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_make_current_read.txt', }, 'WGL_EXT_multisample' : { 'number'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_evaluators.txt', }, 'GL_NV_explicit_attrib_location' : { 'esnumber' :", "222, 
'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_YUV_target.txt', }, 'GL_EXT_abgr' : {", ": { 'esnumber' : 201, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_shader_noperspective_interpolation.txt',", "'esnumber' : 247, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_blend_func_extended.txt', }, 'GL_EXT_blend_func_separate'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shadow_funcs.txt',", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_geometry_program4.txt',", "}, 'url' : 'extensions/NV/NV_vertex_array_range.txt', }, 'GL_NV_vertex_array_range2' : { 'number' : 232, 'flags' :", "'url' : 'extensions/SGIX/SGIX_cube_map.txt', }, 'GL_SGIX_cylinder_texgen' : { 'number' : 140, 'flags' : {", "'number' : 213, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cylinder_texgen.txt', }, 'GL_SGIX_datapipe' :", "}, 'url' : 'extensions/OES/OES_element_index_uint.txt', }, 'GL_OES_extended_matrix_palette' : { 'esnumber' : 8, 'flags' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_explicit_attrib_location.txt', }, 'GL_ARB_explicit_uniform_location' : { 'arbnumber' : 128,", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_depth_buffer_float.txt', }, 'GL_ARB_depth_clamp' : { 'arbnumber'", "'extensions/EXT/EXT_texture_compression_astc_decode_mode.txt', 'alias' : { 'GL_EXT_texture_compression_astc_decode_mode_rgb9e5' }, }, 'GL_EXT_texture_compression_bptc' : { 'esnumber' : 287,", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_create_context.txt', 'comments' :", "'number' : 232, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": 347, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url'", "{ 'public' }, 'supporters' : { 'INTEL' }, 'url' : 'extensions/INTEL/INTEL_parallel_arrays.txt', }, 'GL_INTEL_performance_query'", ": { 'incomplete', 'private' }, 'url' : 'drafts/MTK/MTK_shader_binary.txt', 
}, 'GL_NVX_blend_equation_advanced_multi_draw_buffers' : { 'number'", ": 182, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_atomic_counter_ops.txt', }, 'GL_ARB_shader_atomic_counters' :", "}, 'WGL_ATI_pixel_format_float' : { 'number' : 278, 'flags' : { 'public' }, 'supporters'", ": 'extensions/ARB/ARB_point_parameters.txt', }, 'GL_ARB_point_sprite' : { 'arbnumber' : 35, 'flags' : { 'public'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_swap_control.txt', }, 'GL_SGI_texture_color_table' : {", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_texture_texture4.txt', }, 'GL_AMD_transform_feedback3_lines_triangles' : { 'number'", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_list_priority.txt', }, 'GL_SGIX_mpeg1' : {", ": 'extensions/MESA/GLX_MESA_pixmap_colormap.txt', }, 'GL_MESA_program_binary_formats' : { 'number' : 516, 'esnumber' : 294, 'flags'", "}, 'url' : 'extensions/ARB/ARB_instanced_arrays.txt', }, 'GL_ARB_internalformat_query' : { 'arbnumber' : 112, 'flags' :", "other vendor extensions, but shipped as ARB_transform_feedback2.', }, 'GL_EXT_unpack_subimage' : { 'esnumber' :", "{ 'public' }, 'url' : 'extensions/NV/NV_conservative_raster_pre_snap_triangles.txt', }, 'GL_NV_conservative_raster_underestimation' : { 'number' : 518,", "'extensions/AMD/AMD_texture_gather_bias_lod.txt', }, 'GL_AMD_texture_texture4' : { 'number' : 362, 'flags' : { 'public' },", "'TransGaming' }, 'url' : 'extensions/EXT/EXT_direct_state_access.txt', }, 'GL_EXT_discard_framebuffer' : { 'esnumber' : 64, 'flags'", "'GL_SGIX_texture_multi_buffer' : { 'number' : 53, 'flags' : { 'public' }, 'supporters' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program_option.txt', }, 'GL_NV_fragment_shader_interlock' : { 'number' :", "}, 'url' : 'extensions/ARB/ARB_compute_variable_group_size.txt', }, 'GL_ARB_conditional_render_inverted' : { 'arbnumber' : 161, 'flags' :", "'GL_ARB_draw_buffers_blend' : { 'arbnumber' : 69, 'flags' : { 'public' }, 
'url' :", "{ 'number' : 162, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", "'url' : 'extensions/ARB/ARB_fragment_program_shadow.txt', }, 'GL_ARB_fragment_shader' : { 'arbnumber' : 32, 'flags' : {", "'number' : 372, 'flags' : { 'public' }, 'supporters' : { 'APPLE' },", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_fragment_lighting_space.txt', }, 'GL_SGIX_fragment_specular_lighting' : { 'flags' : { 'incomplete',", "{ 'arbnumber' : 155, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', },", "'GL_EXT_rescale_normal' : { 'number' : 27, 'flags' : { 'public' }, 'supporters' :", "{ 'number' : 118, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_visual_select_group.txt', },", "'public' }, 'url' : 'extensions/ARB/ARB_create_context_no_error.txt', 'comments' : 'Shares extension spec with WGL_ARB_create_context_no_error.', 'alias'", "'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize_float.txt', }, 'GLX_SGIX_video_source' :", "'number' : 189, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", ": { 'public' }, 'url' : 'extensions/OES/OES_point_sprite.txt', }, 'GL_OES_primitive_bounding_box' : { 'esnumber' :", "}, 'GL_OES_vertex_half_float' : { 'esnumber' : 38, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'supporters' : { 'NVIDIA', 'TransGaming' }, 'url' : 'extensions/EXT/EXT_separate_shader_objects.gl.txt',", "'extensions/NV/NV_texture_array.txt', }, 'GL_NV_texture_barrier' : { 'number' : 381, 'esnumber' : 271, 'flags' :", "'url' : 'extensions/ANGLE/ANGLE_framebuffer_blit.txt', }, 'GL_ANGLE_framebuffer_multisample' : { 'esnumber' : 84, 'flags' : {", "'number' : 143, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "{ 'number' : 125, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", ": { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_vector_ops.txt', }, 'GL_SGIX_vertex_array_object' : { 'flags' :", "'url' : 
'extensions/NV/NV_viewport_array.txt', }, 'GL_NV_viewport_array2' : { 'number' : 476, 'esnumber' : 237,", "'GL_IGLOO_toggle_color_and_lightXXX' : { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_toggle_color_and_lightXXX.txt', },", ": { 'number' : 32, 'flags' : { 'public' }, 'supporters' : {", ": { 'public' }, 'url' : 'extensions/EXT/EXT_clip_cull_distance.txt', }, 'GL_EXT_clip_volume_hint' : { 'number' :", ": { 'arbnumber' : 10, 'flags' : { 'public' }, 'supporters' : {", "'flags' : { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_coverage_sample.txt', }, 'GL_NV_deep_texture3D' : { 'number'", "'number' : 151, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI' },", "'arbnumber' : 187, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_sparse_texture_clamp.txt', }, 'GL_ARB_spirv_extensions'", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_depth_clamp_separate.txt',", "}, 'url' : 'extensions/ARB/ARB_sample_locations.txt', }, 'GL_ARB_sample_shading' : { 'arbnumber' : 70, 'flags' :", "'url' : 'extensions/SGIX/SGIX_texture_add_env.txt', }, 'GL_SGIX_texture_coordinate_clamp' : { 'number' : 235, 'flags' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_3dvision_settings.txt', }, 'GL_NV_EGL_stream_consumer_external' : { 'esnumber'", ": { 'public' }, 'url' : 'extensions/NV/NV_shadow_samplers_array.txt', }, 'GL_NV_shadow_samplers_cube' : { 'esnumber' :", "'extensions/EXT/WGL_EXT_extensions_string.txt', }, 'WGL_EXT_make_current_read' : { 'number' : 169, 'flags' : { 'public' },", "{ 'ARB' }, 'url' : 'extensions/ARB/ARB_occlusion_query.txt', }, 'GL_ARB_occlusion_query2' : { 'arbnumber' : 80,", ": { 'esnumber' : 30, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_rgb8_rgba8.txt',", "}, 'supporters' : { 'MESA' }, 'url' : 'extensions/AMD/AMD_gpu_shader_half_float.txt', }, 'GL_AMD_gpu_shader_half_float_fetch' : {", "'extensions/EXT/EXT_packed_depth_stencil.txt', }, 'GL_EXT_packed_float' : { 'number' : 328, 
'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_streams.txt',", "}, 'WGL_ARB_render_texture' : { 'arbnumber' : 20, 'flags' : { 'public' }, 'supporters'", "}, 'GLX_SGI_cushion' : { 'number' : 62, 'flags' : { 'public' }, 'supporters'", "}, 'url' : 'extensions/EXT/EXT_draw_transform_feedback.txt', }, 'GL_EXT_external_buffer' : { 'number' : 508, 'esnumber' :", "'url' : 'extensions/OES/OES_texture_mirrored_repeat.txt', }, 'GL_OES_texture_npot' : { 'esnumber' : 37, 'flags' : {", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_filter_anisotropic.txt', }, 'GL_EXT_texture_filter_minmax' : { 'number'", "'url' : 'extensions/NV/NV_path_rendering.txt', }, 'GL_NV_path_rendering_shared_edge' : { 'number' : 471, 'esnumber' : 234,", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shader_image_load_store.txt', },", ": { 'number' : 59, 'flags' : { 'incomplete' }, 'supporters' : {", "{ 'arbnumber' : 185, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_viewport_layer_array.txt', },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_packed_float.txt', 'alias' : {", "}, 'url' : 'extensions/MESA/GLX_MESA_agp_offset.txt', }, 'GLX_MESA_copy_sub_buffer' : { 'number' : 215, 'flags' :", "'url' : 'extensions/EXT/EXT_histogram.txt', }, 'GLX_EXT_import_context' : { 'number' : 47, 'flags' : {", "}, 'supporters' : { 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_flush_buffer_range.txt', }, 'GL_APPLE_framebuffer_multisample' : {", "'public' }, 'url' : 'extensions/OES/OES_texture_view.txt', }, 'GL_OES_vertex_array_object' : { 'esnumber' : 71, 'flags'", "'url' : 'extensions/ARB/ARB_shadow.txt', }, 'GL_ARB_shadow_ambient' : { 'arbnumber' : 24, 'flags' : {", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range2.txt', }, 'GL_NV_vertex_attrib_integer_64bit' : {", "{ 'public' }, 'url' : 'extensions/EXT/EXT_sparse_texture2.txt', }, 
'GL_EXT_static_vertex_array' : { 'flags' : {", "'esnumber' : 137, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_float.txt', }, 'GL_EXT_color_buffer_half_float'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_linked_gpu_multicast.txt', }, 'GL_NV_3dvision_settings'", "{ 'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_range.txt', }, 'GL_APPLE_vertex_program_evaluators' : { 'number' : 369,", "'url' : 'extensions/EXT/EXT_texture_snorm.txt', }, 'GL_EXT_texture_storage' : { 'esnumber' : 108, 'flags' : {", "'number' : 238, 'flags' : { 'public' }, 'supporters' : { 'KHR' },", ": { 'esnumber' : 149, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_texture_border_clamp.txt',", "'url' : 'extensions/ARB/ARB_fragment_shader.txt', }, 'GL_ARB_fragment_shader_interlock' : { 'arbnumber' : 177, 'flags' : {", "357, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_packed_depth_stencil.txt', }, 'GL_NV_packed_float' : { 'esnumber' :", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_transform_hint.txt', }, 'GL_APPLE_vertex_array_object' : { 'number' : 273, 'flags'", "{ 'public' }, 'url' : 'extensions/IMG/IMG_program_binary.txt', }, 'GL_IMG_read_format' : { 'esnumber' : 53,", ": { 'number' : 487, 'esnumber' : 262, 'flags' : { 'public' },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_multicast.txt',", "'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_binding.txt', }, 'GL_ARB_vertex_blend' : { 'arbnumber' : 15, 'flags'", "'url' : 'extensions/SGIX/SGIX_fragment_lighting_space.txt', }, 'GL_SGIX_fragment_specular_lighting' : { 'flags' : { 'incomplete', 'public' },", "'url' : 'extensions/EXT/EXT_sRGB.txt', }, 'GL_EXT_sRGB_write_control' : { 'esnumber' : 153, 'flags' : {", "'GL_EXT_draw_buffers_indexed' : { 'esnumber' : 176, 'flags' : { 'public' }, 'url' :", "'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_resize_buffers.txt', 
}, 'GLX_MESA_set_3dfx_mode' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_fragment_program4.txt', }, 'GL_NV_fragment_program_option' : { 'number' :", "}, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_slice_accum.txt', }, 'GL_SUN_triangle_list' : {", "{ 'esnumber' : 127, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_packed_float.txt', },", ": { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_agp_offset.txt', },", "'extensions/NV/NV_framebuffer_blit.txt', }, 'GL_NV_framebuffer_mixed_samples' : { 'number' : 469, 'esnumber' : 231, 'flags' :", "- see arbnumber 74.', }, 'WGL_ARB_create_context_profile' : { 'arbnumber' : 74, 'flags' :", ": 'extensions/NV/NV_fill_rectangle.txt', }, 'GL_NV_float_buffer' : { 'number' : 281, 'flags' : { 'public'", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sample_positions.txt', }, 'GL_AMD_seamless_cubemap_per_texture'", "{ 'arbnumber' : 154, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_indirect_parameters.txt', },", "'supporters' : { 'IBM', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_abgr.txt', }, 'GL_EXT_base_instance'", ": 'extensions/EXT/EXT_blend_func_separate.txt', }, 'GL_EXT_blend_logic_op' : { 'number' : 39, 'flags' : { 'public'", "'esnumber' : 108, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_storage.txt', }, 'GL_EXT_texture_swizzle'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', }, 'GL_NVX_conditional_render' : { 'number'", "'GL_OES_fbo_render_mipmap' : { 'esnumber' : 27, 'flags' : { 'public' }, 'url' :", "'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias' : { 'WGL_ARB_robustness_share_group_isolation' }, }, 'WGL_ATI_pixel_format_float' : { 'number' : 278,", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_vertex_array_object.txt', }, 'GL_ARB_vertex_attrib_64bit' :", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 
'extensions/NV/WGL_NV_video_output.txt',", "'number' : 56, 'flags' : { 'public' }, 'supporters' : { 'HP', 'SGI'", "'GL_SGIS_sharpen_texture' : { 'number' : 22, 'flags' : { 'public' }, 'supporters' :", ": { 'esnumber' : 253, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage2.txt',", "{ 'esnumber' : 293, 'flags' : { 'public' }, 'url' : 'extensions/QCOM/QCOM_texture_foveated.txt', },", "{ 'number' : 476, 'esnumber' : 237, 'flags' : { 'public' }, 'url'", ": { 'esnumber' : 43, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_depth_texture.txt',", ": 'extensions/S3/S3_s3tc.txt', }, 'GLX_SGIS_blended_overlay' : { 'number' : 142, 'flags' : { 'public'", ": 252, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_polygon_offset_clamp.txt', }, 'GL_EXT_post_depth_coverage' :", "}, 'GL_SGIX_mpeg2' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_mpeg2.txt', },", "'extensions/SGIX/GLX_SGIX_pbuffer.txt', }, 'GL_SGIX_pixel_texture' : { 'number' : 499, 'flags' : { 'public' },", "{ 'public' }, 'url' : 'extensions/OES/OES_draw_texture.txt', }, 'GL_OES_element_index_uint' : { 'esnumber' : 26,", "'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/GLX_AMD_gpu_association.txt', }, 'GL_AMD_gpu_shader_half_float' :", "217, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_cube_map_array.txt', }, 'GL_OES_texture_env_crossbar' : {", ": 'extensions/EXT/EXT_draw_instanced.txt', }, 'GL_EXT_draw_range_elements' : { 'number' : 112, 'flags' : { 'public'", "'GL_EXT_post_depth_coverage' : { 'number' : 461, 'esnumber' : 225, 'flags' : { 'public'", "{ 'GLX_NV_video_capture', 'WGL_NV_video_capture' }, }, 'GLX_NV_video_out' : { 'number' : 348, 'flags' :", ": 65, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url'", ": { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_slim.txt', }, 'GL_SGIX_spotlight_cutoff' :", "'GLX_OML_swap_method' : { 'number' : 237, 'flags' : { 'public' }, 'supporters' :", 
"'extensions/SGIX/SGIX_fog_factor_to_alpha.txt', }, 'GL_SGIX_fog_layers' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_fog_layers.txt',", ": 'extensions/EXT/EXT_texture_shared_exponent.txt', }, 'GL_EXT_texture_snorm' : { 'number' : 365, 'flags' : { 'public'", "'public' }, 'url' : 'extensions/ARB/ARB_texture_query_levels.txt', }, 'GL_ARB_texture_query_lod' : { 'arbnumber' : 73, 'flags'", ": 'extensions/KHR/KHR_blend_equation_advanced.txt', 'alias' : { 'GL_KHR_blend_equation_advanced_coherent' }, }, 'GL_KHR_context_flush_control' : { 'arbnumber' :", "'url' : 'extensions/IMG/IMG_texture_compression_pvrtc.txt', }, 'GL_IMG_texture_compression_pvrtc2' : { 'esnumber' : 140, 'flags' : {", ": { 'incomplete', 'public' }, 'url' : 'extensions/SGIX/SGIX_fragment_specular_lighting.txt', }, 'GL_SGIX_fragments_instrument' : { 'number'", ": 'extensions/KHR/KHR_robust_buffer_access_behavior.txt', }, 'GL_KHR_robustness' : { 'arbnumber' : 170, 'esnumber' : 190, 'flags'", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_query_resource.txt', }, 'GL_NV_query_resource_tag' : { 'number' : 512,", "'url' : 'extensions/SGIS/SGIS_texture_filter4.txt', }, 'GL_SGIS_texture_lod' : { 'number' : 24, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_ycbcr_texture.txt',", "}, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize_float.txt', }, 'GLX_SGIX_video_source' : { 'number' : 43, 'flags' :", "{ 'arbnumber' : 99, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_vertex_attrib_64bit.txt', },", "'number' : 510, 'esnumber' : 285, 'flags' : { 'public' }, 'supporters' :", ": 8, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "31, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_misc_attribute.txt', }, 'GL_EXT_multi_draw_arrays' : {", ": { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_vertex_weighting.txt', }, 'GLX_EXT_visual_info' : { 'number' :", "'flags' : { 'public' }, 'supporters' : { 'INTEL', 'NVIDIA' }, 'url' :", "'SGI' }, 'url' : 
'extensions/SGIS/SGIS_texture_select.txt', }, 'GL_SGIX_async' : { 'number' : 132, 'flags'", "'public' }, 'url' : 'extensions/OES/OES_matrix_palette.txt', }, 'GL_OES_packed_depth_stencil' : { 'esnumber' : 44, 'flags'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_compute_shader.txt', }, 'GL_ARB_compute_variable_group_size' : { 'arbnumber' : 153,", ": { 'number' : 198, 'esnumber' : 154, 'flags' : { 'public' },", "'url' : 'extensions/NVX/NVX_gpu_memory_info.txt', }, 'GL_NVX_linked_gpu_multicast' : { 'number' : 493, 'flags' : {", ": { 'public' }, 'url' : 'extensions/ARB/ARB_shading_language_420pack.txt', }, 'GL_ARB_shading_language_include' : { 'arbnumber' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_robustness_application_isolation.txt', 'alias' : { 'WGL_ARB_robustness_share_group_isolation' },", "{ 'number' : 89, 'flags' : { 'incomplete' }, 'supporters' : { 'SGI'", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range.txt', }, 'GL_NV_vertex_array_range2' : { 'number' :", "{ 'number' : 53, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_swap_control.txt', }, 'GLX_EXT_swap_control_tear'", ": 141, 'flags' : { 'public' }, 'supporters' : { '3DFX', '3DL', 'SGI'", "{ 'public' }, 'url' : 'extensions/OES/OES_mapbuffer.txt', }, 'GL_OES_matrix_get' : { 'esnumber' : 11,", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_texture_phase.txt', }, 'GL_SGIX_texture_range' : { 'number' : 181, 'flags'", "'extensions/ARB/ARB_point_sprite.txt', }, 'GL_ARB_polygon_offset_clamp' : { 'arbnumber' : 193, 'flags' : { 'public' },", "519, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' :", "'flags' : { 'public' }, 'supporters' : { 'IdSoftware', 'NVIDIA' }, 'url' :", "}, 'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_digital_video_control.txt', }, 'WGL_I3D_gamma' : {", ": 506, 'esnumber' : 283, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_win32_keyed_mutex.txt',", ": 
'extensions/EXT/EXT_vertex_array_setXXX.txt', }, 'GL_EXT_vertex_attrib_64bit' : { 'number' : 387, 'flags' : { 'public'", "'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/GLX_MESA_release_buffers.txt', }, 'GL_MESA_resize_buffers' : { 'number'", "}, 'url' : 'extensions/NV/NV_viewport_array.txt', }, 'GL_NV_viewport_array2' : { 'number' : 476, 'esnumber' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_shader_buffer_store.txt', }, 'GL_NV_shader_noperspective_interpolation' : {", "{ 'arbnumber' : 57, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_uniform_buffer_object.txt', },", "'public' }, 'url' : 'extensions/EXT/EXT_separate_specular_color.txt', }, 'GL_EXT_shader_framebuffer_fetch' : { 'number' : 520, 'esnumber'", "'url' : 'extensions/EXT/EXT_fog_coord.txt', }, 'GL_EXT_frag_depth' : { 'esnumber' : 86, 'flags' : {", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_pixel_buffer_object.txt', }, 'GL_ARB_point_parameters' : {", "'extensions/OES/OES_texture_cube_map_array.txt', }, 'GL_OES_texture_env_crossbar' : { 'esnumber' : 21, 'flags' : { 'public' },", ": { 'public' }, 'supporters' : { 'INGR', 'KGC', 'SGI', 'SUN' }, 'url'", ": { 'esnumber' : 291, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_mirror_clamp_to_edge.txt',", ": 'extensions/SGIS/SGIS_point_line_texgen.txt', }, 'GL_SGIS_shared_multisample' : { 'number' : 143, 'flags' : { 'incomplete'", "'incomplete' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_typeXXX.txt', }, 'GL_SGIX_complex_polar' : { 'flags' : { 'incomplete'", ": 138, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_stencil_texturing.txt', }, 'GL_ARB_sync' :", "134, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_program_interface_query.txt', }, 'GL_ARB_provoking_vertex' : {", "}, 'GL_ARB_clear_texture' : { 'arbnumber' : 145, 'flags' : { 'public' }, 'url'", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_texture_compression_vtc.txt', }, 'GL_NV_texture_env_combine4' : { 'number'", "'GL_SGI_complex' 
: { 'number' : 87, 'flags' : { 'incomplete' }, 'supporters' :", "{ 'esnumber' : 19, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_stencil_wrap.txt', },", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5_mem_extended.txt',", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_shader.txt', }, 'GL_ARB_fragment_shader_interlock' : { 'arbnumber'", "'public' }, 'supporters' : { 'IBM', 'SGI' }, 'url' : 'extensions/EXT/GLX_EXT_import_context.txt', }, 'GL_EXT_index_array_formats'", "'incomplete', 'public' }, 'url' : 'extensions/SGIS/SGIS_texture_color_mask.txt', }, 'GL_SGIS_texture_edge_clamp' : { 'number' : 35,", "{ 'incomplete', 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_video_resize_float.txt', },", ": 113, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_map_buffer_alignment.txt', }, 'GL_ARB_map_buffer_range' :", ": { 'ATI', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_add.txt', }, 'GL_EXT_texture_env_combine' : { 'number'", "'alias' : { 'WGL_ARB_create_context_no_error' }, }, 'GLX_ARB_create_context_profile' : { 'arbnumber' : 75, 'flags'", ": 159, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_explicit_attrib_location.txt', }, 'GL_NV_explicit_multisample' :", "'WGL_ARB_robustness_application_isolation' : { 'arbnumber' : 143, 'flags' : { 'public' }, 'url' :", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_program3.txt', },", "'ARB' }, 'url' : 'extensions/ARB/ARB_transpose_matrix.txt', }, 'GL_ARB_uniform_buffer_object' : { 'arbnumber' : 57, 'flags'", "}, 'url' : 'extensions/SGIX/SGIX_vertex_preclip.txt', 'alias' : { 'GL_SGIX_vertex_preclip_hint' }, }, 'GLX_SGIX_video_resize' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_occlusion_query.txt', },", "'GL_SGIX_image_compression' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_image_compression.txt', }, 'GL_SGIX_impact_pixel_texture'", "{ 
'arbnumber' : 90, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_subroutine.txt', },", "'MESA' }, 'url' : 'extensions/MESA/MESA_pack_invert.txt', }, 'GLX_MESA_pixmap_colormap' : { 'number' : 216, 'flags'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_texture_lod_bias.txt', }, 'GL_SGIX_texture_mipmap_anisotropic' : { 'flags' : { 'incomplete'", ": 'extensions/NV/NV_query_resource_tag.txt', }, 'GL_NV_read_buffer' : { 'esnumber' : 93, 'flags' : { 'public'", "'public' }, 'supporters' : { 'SUN' }, 'url' : 'extensions/SUN/SUN_vertex.txt', }, 'GL_VIV_shader_binary' :", "'extensions/AMD/AMD_shader_explicit_vertex_parameter.txt', }, 'GL_AMD_shader_image_load_store_lod' : { 'number' : 513, 'flags' : { 'public' },", "'url' : 'extensions/NV/NV_instanced_arrays.txt', }, 'GL_NV_internalformat_sample_query' : { 'number' : 475, 'esnumber' : 196,", "}, 'url' : 'extensions/SGI/SGI_fft.txt', }, 'GLU_SGI_filter4_parameters' : { 'number' : 85, 'flags' :", "{ 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cubemap_per_texture.txt', }, 'GL_ARB_separate_shader_objects' : { 'arbnumber' : 97,", "{ 'number' : 156, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_coordinate_frame.txt', },", "'extensions/ARM/ARM_mali_program_binary.txt', }, 'GL_ARM_mali_shader_binary' : { 'esnumber' : 81, 'flags' : { 'public' },", "}, 'GLX_SGIX_video_resize' : { 'number' : 83, 'flags' : { 'public' }, 'supporters'", "'GL_NV_shader_atomic_float64' : { 'number' : 488, 'flags' : { 'public' }, 'url' :", "}, 'GLX_SGI_video_sync' : { 'number' : 41, 'flags' : { 'public' }, 'supporters'", "'extensions/EXT/EXT_gpu_program_parameters.txt', }, 'GL_EXT_gpu_shader4' : { 'number' : 326, 'flags' : { 'public' },", "'esnumber' : 296, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_stereo_view_rendering.txt', }, 'GLX_NV_swap_group'", "'esnumber' : 185, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_view.txt', }, 'GL_EXT_timer_query'", ": { 'public' }, 'supporters' : { 'IBM', 'SUN' }, 'url' : 
'extensions/EXT/EXT_rescale_normal.txt',", "'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_swap_control.txt', }, 'WGL_EXT_swap_control_tear' : {", ": { 'arbnumber' : 95, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES2_compatibility.txt',", "'GL_ARB_sample_locations' : { 'arbnumber' : 181, 'flags' : { 'public' }, 'url' :", "{ 'arbnumber' : 19, 'flags' : { 'public' }, 'supporters' : { 'ARB'", "412, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' :", "'supporters' : { 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_debug_output.txt', }, 'GL_AMD_depth_clamp_separate' : { 'number'", ": 2, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "}, 'url' : 'extensions/EXT/WGL_EXT_colorspace.txt', }, 'WGL_EXT_create_context_es2_profile' : { 'number' : 400, 'flags' :", "}, 'WGL_I3D_gamma' : { 'number' : 251, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/MESA/GLX_MESA_pixmap_colormap.txt', }, 'GL_MESA_program_binary_formats' : { 'number' : 516, 'esnumber' : 294,", "{ 'TGS' }, 'url' : 'extensions/PGI/PGI_misc_hints.txt', }, 'GL_PGI_vertex_hints' : { 'number' : 76,", ": { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_spotlight_cutoff.txt', },", "'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_instanced.txt', }, 'GL_ARB_transform_feedback_overflow_query' : { 'arbnumber' : 173, 'flags'", "{ 'HP' }, 'url' : 'extensions/HP/HP_texture_lighting.txt', }, 'GL_IBM_cull_vertex' : { 'number' : 199,", "'GL_EXT_texture_cube_map' : { 'flags' : { 'incomplete' }, 'url' : 'extensions/EXT/EXT_texture_cube_map.txt', 'comments' :", "{ 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_dvc.txt', }, 'GLX_SGIX_fbconfig' : { 'number' : 49,", "'public' }, 'url' : 'extensions/EXT/WGL_EXT_display_color_table.txt', }, 'WGL_EXT_extensions_string' : { 'number' : 168, 'flags'", "'GL_ATI_separate_stencil' : { 'number' : 289, 'flags' : { 'public' }, 'supporters' :", ": { 'AMD' }, 'url' : 
'extensions/AMD/AMD_shader_atomic_counter_ops.txt', }, 'GL_AMD_shader_ballot' : { 'number' :", "'extensions/SGIS/SGIS_texture_filter4.txt', }, 'GL_SGIS_texture_lod' : { 'number' : 24, 'flags' : { 'public' },", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_program5_mem_extended.txt', }, 'GL_NV_gpu_shader5' :", ": 404, 'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url'", "159, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_1_compatibility.txt', }, 'GL_ARB_ES3_2_compatibility' : {", "'url' : 'extensions/ARB/ARB_shader_image_size.txt', }, 'GL_ARB_shader_objects' : { 'arbnumber' : 30, 'flags' : {", "'number' : 185, 'flags' : { 'public' }, 'supporters' : { 'ATI', 'NVIDIA'", "}, 'url' : 'extensions/OES/OES_depth24.txt', }, 'GL_OES_depth32' : { 'esnumber' : 25, 'flags' :", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIS/SGIS_pixel_texture.txt', }, 'GL_SGIS_point_line_texgen'", "{ 'flags' : { 'incomplete' }, 'url' : 'extensions/SGIX/SGIX_color_matrix_accuracy.txt', }, 'GL_SGIX_color_table_index_mode' : {", "}, 'GL_EXT_window_rectangles' : { 'number' : 490, 'esnumber' : 263, 'flags' : {", "'url' : 'extensions/OES/OES_texture_env_crossbar.txt', }, 'GL_OES_texture_float' : { 'esnumber' : 36, 'flags' : {", "136, 'flags' : { 'public' }, 'supporters' : { 'INTEL' }, 'url' :", "{ 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_name_gen_delete.txt', }, 'GL_AMD_occlusion_query_event' : { 'number' : 442,", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shadow_funcs.txt', }, 'GL_EXT_shadow_samplers'", ": 193, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'GL_INGR_interlace_read' : { 'number' : 175, 'flags' : { 'public' }, 'supporters' :", ": 'extensions/NV/NV_depth_buffer_float.txt', }, 'GL_NV_depth_clamp' : { 'number' : 260, 'flags' : { 'public'", "'extensions/NV/NV_video_capture.txt', 'alias' : { 'GLX_NV_video_capture', 'WGL_NV_video_capture' }, }, 'GLX_NV_video_out' : { 'number' :", "{ 'esnumber' 
: 103, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_texture_rg.txt', },", ": { 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_light_texture.txt', },", ": { 'number' : 433, 'esnumber' : 163, 'flags' : { 'public' },", "'esnumber' : 45, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_standard_derivatives.txt', }, 'GL_OES_stencil1'", "21, 'flags' : { 'public' }, 'supporters' : { 'KGC', 'SGI' }, 'url'", "'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_atomic_counter_ops.txt', }, 'GL_AMD_shader_ballot' : { 'number'", "}, 'GL_APPLE_texture_packed_float' : { 'esnumber' : 195, 'flags' : { 'public' }, 'url'", ": { 'public' }, 'url' : 'extensions/ARB/ARB_seamless_cube_map.txt', }, 'GL_ARB_seamless_cubemap_per_texture' : { 'arbnumber' :", "495, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "}, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shader_image_load_store.txt', }, 'GL_EXT_shader_implicit_conversions' : {", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_path_rendering_shared_edge.txt', }, 'GL_NV_pixel_buffer_object' : { 'esnumber'", "{ 'obsolete' }, 'url' : 'extensions/WIN/WIN_scene_markerXXX.txt', }, 'GL_WIN_specular_fog' : { 'number' : 114,", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_sparse_texture.txt', }, 'GL_AMD_stencil_operation_extended'", "'number' : 102, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "248, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_multisample_compatibility.txt', }, 'GL_EXT_multisampled_render_to_texture' : {", "'url' : 'extensions/ARB/ARB_texture_env_add.txt', }, 'GL_ARB_texture_env_combine' : { 'arbnumber' : 17, 'flags' : {", "212, 'flags' : { 'public' }, 'supporters' : { 'SGI' }, 'url' :", "{ 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_decimation.txt', }, 'GL_SGIX_depth_pass_instrument'", ": { 'number' : 364, 'flags' : { 'public' }, 'supporters' : {", "{ 'esnumber' : 106, 'flags' : { 'public' }, 'url' : 
'extensions/EXT/EXT_multisampled_render_to_texture.txt', },", "{ 'NVIDIA' }, 'url' : 'extensions/NV/NV_occlusion_query.txt', }, 'GL_NV_pack_subimage' : { 'esnumber' : 132,", "}, 'GL_EXT_texture_object' : { 'number' : 20, 'flags' : { 'public' }, 'supporters'", "'url' : 'extensions/NV/NV_shader_buffer_store.txt', }, 'GL_NV_shader_noperspective_interpolation' : { 'esnumber' : 201, 'flags' : {", ": 37, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url'", "'incomplete' }, 'url' : 'extensions/SGIX/SGIX_instrument_error.txt', }, 'GL_SGIX_instruments' : { 'number' : 55, 'flags'", "'extensions/EXT/EXT_coordinate_frame.txt', }, 'GL_EXT_copy_image' : { 'esnumber' : 175, 'flags' : { 'public' },", ": { 'public' }, 'url' : 'extensions/OES/OES_shader_image_atomic.txt', }, 'GL_OES_shader_io_blocks' : { 'esnumber' :", "'public' }, 'supporters' : { 'IdSoftware', 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_swizzle.txt', }, 'GL_EXT_texture_type_2_10_10_10_REV'", "'GL_SGIX_depth_pass_instrument' : { 'number' : 205, 'flags' : { 'incomplete' }, 'supporters' :", "'alias' : { 'GL_SGIX_vertex_preclip_hint' }, }, 'GLX_SGIX_video_resize' : { 'number' : 83, 'flags'", "{ 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', }, 'GL_AMD_transform_feedback4'", "'url' : 'extensions/EXT/EXT_multisampled_render_to_texture.txt', }, 'GL_EXT_multisampled_render_to_texture2' : { 'esnumber' : 275, 'flags' : {", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/GLX_SGIX_color_type.txt', 'alias' : { 'GL_SGIX_color_type'", "}, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_texture_compression.txt', }, 'GL_ARB_texture_compression_bptc' : {", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_vertex_array_range.txt', },", "{ 'arbnumber' : 105, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_robustness.txt', },", "{ 'esnumber' : 58, 'flags' : { 'public' }, 'url' : 
'extensions/IMG/IMG_texture_env_enhanced_fixed_function.txt', },", "}, 'GL_OES_element_index_uint' : { 'esnumber' : 26, 'flags' : { 'public' }, 'url'", "{ 'NVIDIA' }, 'url' : 'extensions/EXT/EXT_shadow_funcs.txt', }, 'GL_EXT_shadow_samplers' : { 'esnumber' : 102,", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_calligraphic_fragment.txt', }, 'GL_SGIX_clipmap' :", "'extensions/NV/NV_shader_atomic_int64.txt', }, 'GL_NV_shader_buffer_load' : { 'number' : 379, 'flags' : { 'public' },", ": { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', }, 'GL_NV_draw_buffers' : { 'esnumber' :", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_atomic_counter_ops.txt', }, 'GL_AMD_shader_ballot' : {", ": 'extensions/EXT/EXT_draw_buffers.txt', }, 'GL_EXT_draw_buffers2' : { 'number' : 340, 'flags' : { 'public'", "'flags' : { 'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/EXT/WGL_EXT_depth_float.txt',", ": { 'number' : 142, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 'extensions/OES/OES_texture_border_clamp.txt', }, 'GL_OES_texture_buffer' : { 'esnumber' : 216, 'flags' :", "'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_ES3_1_compatibility.txt', }, 'GL_ARB_ES3_2_compatibility' : { 'arbnumber'", "174, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_texture_storage_multisample_2d_array.txt', }, 'GL_OES_texture_view' : {", "'APPLE' }, 'url' : 'extensions/APPLE/APPLE_vertex_array_object.txt', }, 'GL_APPLE_vertex_array_range' : { 'number' : 274, 'flags'", "10, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_framebuffer_object.txt', }, 'GL_OES_geometry_shader' : {", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_framebuffer_object.txt',", "}, 'GL_APPLE_rgb_422' : { 'number' : 373, 'esnumber' : 76, 'flags' : {", "'GL_EXT_copy_texture' : { 'number' : 10, 'flags' : { 'public' }, 'supporters' :", ": { 'AMD' }, 'url' : 
'extensions/AMD/AMD_transform_feedback3_lines_triangles.txt', }, 'GL_AMD_transform_feedback4' : { 'number' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_fog_distance.txt', }, 'GL_NV_fragment_coverage_to_color' : { 'number' : 467, 'esnumber'", "'flags' : { 'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_scalebias_hint.txt',", "{ 'number' : 58, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_convolution.txt', }, 'GL_EXT_coordinate_frame' : { 'number' :", "'KHR' }, 'url' : 'extensions/OML/OML_subsample.txt', }, 'GLX_OML_swap_method' : { 'number' : 237, 'flags'", "'NVIDIA' }, 'url' : 'extensions/EXT/GLX_EXT_swap_control_tear.txt', }, 'GL_EXT_tessellation_shader' : { 'esnumber' : 181, 'flags'", "'number' : 461, 'esnumber' : 225, 'flags' : { 'public' }, 'url' :", "'GL_ARB_vertex_blend' : { 'arbnumber' : 15, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/NV/NV_sample_locations.txt', }, 'GL_NV_sample_mask_override_coverage' : { 'number' : 473,", ": { 'public' }, 'url' : 'extensions/OES/OES_packed_depth_stencil.txt', }, 'GL_OES_paletted_texture' : { 'esnumber' :", ": 'extensions/EXT/WGL_EXT_depth_float.txt', }, 'WGL_EXT_display_color_table' : { 'number' : 167, 'flags' : { 'public'", ": 208, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_copy_image.txt', }, 'GL_OES_depth24' :", "'url' : 'extensions/SGIS/SGIS_texture_edge_clamp.txt', }, 'GL_SGIS_texture_filter4' : { 'number' : 7, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_parameter_buffer_object2.txt',", "'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_instanced_arrays.txt',", "'esnumber' : 264, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_non_constant_global_initializers.txt', }, 'GL_EXT_shader_pixel_local_storage'", ": 253, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_shader_pixel_local_storage2.txt', }, 
'GL_EXT_shader_texture_lod' :", "'supporters' : { 'HP', 'INGR', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_blend_color.txt', },", "{ 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', }, 'GL_NVX_conditional_render'", "'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_draw_range_elements.txt', }, 'GL_EXT_draw_transform_feedback' : { 'esnumber'", "'extensions/OVR/OVR_multiview_multisampled_render_to_texture.txt', }, 'GL_PGI_misc_hints' : { 'number' : 77, 'flags' : { 'public' },", "'url' : 'extensions/EXT/EXT_shader_image_load_store.txt', }, 'GL_EXT_shader_implicit_conversions' : { 'esnumber' : 179, 'flags' : {", "{ 'public' }, 'url' : 'extensions/OES/OES_texture_float_linear.txt', 'alias' : { 'GL_OES_texture_half_float_linear' }, }, 'GL_OES_texture_mirrored_repeat'", "'url' : 'extensions/IMG/IMG_texture_filter_cubic.txt', }, 'GL_IMG_user_clip_plane' : { 'esnumber' : 57, 'flags' : {", "{ 'number' : 163, 'flags' : { 'public' }, 'supporters' : { 'SUN'", "}, 'url' : 'extensions/ARB/ARB_shader_stencil_export.txt', }, 'GL_ARB_shader_storage_buffer_object' : { 'arbnumber' : 137, 'flags' :", ": 'extensions/SGIX/SGIX_fragment_specular_lighting.txt', }, 'GL_SGIX_fragments_instrument' : { 'number' : 180, 'flags' : { 'incomplete'", ": 166, 'flags' : { 'public' }, 'supporters' : { 'SUN' }, 'url'", ": { 'GL_EXT_semaphore' }, }, 'GL_EXT_memory_object_fd' : { 'number' : 504, 'esnumber' :", "{ 'public' }, 'supporters' : { 'TGS' }, 'url' : 'extensions/PGI/PGI_misc_hints.txt', }, 'GL_PGI_vertex_hints'", "'GL_ARB_texture_buffer_object_rgb32' : { 'arbnumber' : 92, 'flags' : { 'public' }, 'url' :", "'GL_EXT_float_blend' : { 'esnumber' : 224, 'flags' : { 'public' }, 'url' :", ": { 'number' : 375, 'flags' : { 'public' }, 'supporters' : {", ": { 'number' : 42, 'flags' : { 'public' }, 'supporters' : {", "{ 'number' : 114, 'flags' : { 'public' }, 'supporters' : { 'MS'", "}, 'GL_ARB_shader_atomic_counter_ops' : { 
'arbnumber' : 182, 'flags' : { 'public' }, 'url'", "}, 'GL_ARB_texture_mirror_clamp_to_edge' : { 'arbnumber' : 149, 'flags' : { 'public' }, 'url'", "'supporters' : { 'INGR', 'SGI' }, 'url' : 'extensions/EXT/WGL_EXT_extensions_string.txt', }, 'WGL_EXT_make_current_read' : {", "'GL_NV_internalformat_sample_query' : { 'number' : 475, 'esnumber' : 196, 'flags' : { 'public'", ": { 'GL_EXT_semaphore_win32' }, }, 'GL_EXT_float_blend' : { 'esnumber' : 224, 'flags' :", "'extensions/NV/WGL_NV_video_output.txt', }, 'WGL_OML_sync_control' : { 'number' : 242, 'flags' : { 'public' },", "'url' : 'extensions/APPLE/APPLE_texture_2D_limited_npot.txt', }, 'GL_APPLE_texture_format_BGRA8888' : { 'esnumber' : 79, 'flags' : {", "'GL_NV_register_combiners2' : { 'number' : 227, 'flags' : { 'public' }, 'supporters' :", "'GL_NV_vertex_attrib_integer_64bit' : { 'number' : 392, 'flags' : { 'public' }, 'supporters' :", "'extensions/NV/NV_command_list.txt', }, 'GL_NV_compute_program5' : { 'number' : 421, 'flags' : { 'public' },", ": { 'WGL_EXT_create_context_es_profile' }, }, 'WGL_EXT_depth_float' : { 'number' : 177, 'flags' :", "}, 'url' : 'extensions/NV/NV_packed_float.txt', }, 'GL_NV_parameter_buffer_object' : { 'number' : 339, 'flags' :", "'number' : 394, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'GL_NV_non_square_matrices' : { 'esnumber' : 160, 'flags' : { 'public' }, 'url' :", "{ 'SGI' }, 'url' : 'extensions/SGI/GLX_SGI_cushion.txt', }, 'GL_SGI_fft' : { 'number' : 99,", "{ 'number' : 60, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "'number' : 226, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA' },", "'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/WGL_ARB_pbuffer.txt', }, 'WGL_ARB_pixel_format' :", "}, 'GL_ARB_vertex_shader' : { 'arbnumber' : 31, 'flags' : { 'public' }, 'supporters'", "{ 'public' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/EXT/EXT_point_parameters.txt', }, 'GL_EXT_polygon_offset'", "'extensions/EXT/EXT_index_material.txt', }, 
'GL_EXT_index_texture' : { 'number' : 93, 'flags' : { 'public' },", "'supporters' : { 'APPLE' }, 'url' : 'extensions/EXT/EXT_debug_label.txt', }, 'GL_EXT_debug_marker' : { 'number'", "'GL_EXT_texture' : { 'number' : 4, 'flags' : { 'public' }, 'supporters' :", ": { 'HP', 'KGC', 'SGI', 'SUN' }, 'url' : 'extensions/EXT/EXT_convolution.txt', }, 'GL_EXT_coordinate_frame' :", ": { 'NVIDIA' }, 'url' : 'extensions/NV/NV_transform_feedback2.txt', }, 'GL_NV_uniform_buffer_unified_memory' : { 'number' :", "'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program_shadow.txt', }, 'GL_ARB_fragment_shader' : { 'arbnumber'", "'public' }, 'url' : 'extensions/OES/OES_blend_equation_separate.txt', }, 'GL_OES_blend_func_separate' : { 'esnumber' : 2, 'flags'", "232, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_fill_rectangle.txt', }, 'GL_NV_float_buffer' : {", "{ 'public' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/MESA/MESA_shader_integer_functions.txt', }, 'GLX_MESA_swap_control'", ": { 'public' }, 'url' : 'extensions/OES/OES_element_index_uint.txt', }, 'GL_OES_extended_matrix_palette' : { 'esnumber' :", "}, 'url' : 'extensions/SGI/GLX_SGI_transparent_pixel.txt', }, 'GLX_SGI_video_sync' : { 'number' : 41, 'flags' :", "'SGI' }, 'url' : 'extensions/EXT/EXT_index_texture.txt', }, 'GL_EXT_instanced_arrays' : { 'esnumber' : 156, 'flags'", ": 190, 'flags' : { 'public' }, 'url' : 'extensions/KHR/KHR_robustness.txt', }, 'GL_KHR_texture_compression_astc_hdr' :", "'url' : 'extensions/SGIX/SGIX_blend_alpha_minmax.txt', }, 'GL_SGIX_blend_cadd' : { 'number' : 150, 'flags' : {", "'alias' : { 'GL_KHR_blend_equation_advanced_coherent' }, }, 'GL_KHR_context_flush_control' : { 'arbnumber' : 168, 'esnumber'", "'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_color_buffer_float.txt', }, 'GL_EXT_color_buffer_half_float' : { 'esnumber'", "}, 'WGL_ARB_make_current_read' : { 'arbnumber' : 10, 'flags' : { 'public' }, 'supporters'", "'extensions/APPLE/APPLE_texture_range.txt', }, 
'GL_APPLE_transform_hint' : { 'number' : 160, 'flags' : { 'public' },", "'extensions/ARB/ARB_shader_stencil_export.txt', }, 'GL_ARB_shader_storage_buffer_object' : { 'arbnumber' : 137, 'flags' : { 'public' },", "'number' : 174, 'flags' : { 'public' }, 'supporters' : { 'INGR' },", "}, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_ir_instrument1.txt', }, 'GL_SGIX_line_quality_hint' : {", "'extensions/AMD/AMD_sample_positions.txt', }, 'GL_AMD_seamless_cubemap_per_texture' : { 'number' : 383, 'flags' : { 'public' },", "{ 'incomplete' }, 'supporters' : { 'MESA' }, 'url' : 'extensions/SGIX/SGIX_igloo_interface.txt', }, 'GL_SGIX_image_compression'", "'esnumber' : 160, 'flags' : { 'public' }, 'url' : 'extensions/NV/NV_non_square_matrices.txt', }, 'GL_NV_occlusion_query'", ": { 'esnumber' : 115, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_required_internalformat.txt',", "'flags' : { 'public' }, 'url' : 'extensions/AMD/AMD_program_binary_Z400.txt', }, 'GL_AMD_query_buffer_object' : { 'number'", ": 'extensions/EXT/GLX_EXT_texture_from_pixmap.txt', }, 'GL_EXT_texture_integer' : { 'number' : 343, 'flags' : { 'public'", ": { 'public' }, 'supporters' : { 'INGR' }, 'url' : 'extensions/INGR/INGR_interlace_read.txt', },", ": { 'public' }, 'supporters' : { 'IBM' }, 'url' : 'extensions/IBM/IBM_cull_vertex.txt', },", "'esnumber' : 73, 'flags' : { 'public' }, 'url' : '../EGL/extensions/NV/EGL_NV_depth_nonlinear.txt', }, 'GL_NV_draw_buffers'", "}, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/WGL_AMD_gpu_association.txt', }, 'WGL_ARB_buffer_region' : {", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_fragment_program_shadow.txt', }, 'GL_ARB_fragment_shader'", "}, 'GL_SGIS_detail_texture' : { 'number' : 21, 'flags' : { 'public' }, 'supporters'", ": { 'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/NV_gpu_multicast.txt', },", "{ 'esnumber' : 28, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_fragment_precision_high.txt', },", 
"'GL_ARB_multi_draw_indirect' : { 'arbnumber' : 133, 'flags' : { 'public' }, 'url' :", "87, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_draw_indirect.txt', }, 'GL_ARB_draw_instanced' : {", "34, 'flags' : { 'public' }, 'supporters' : { 'ARB' }, 'url' :", "{ 'number' : 439, 'esnumber' : 98, 'flags' : { 'public' }, 'supporters'", "'SGI' }, 'url' : 'extensions/SGIX/SGIX_sprite.txt', }, 'GL_SGIX_subdiv_patch' : { 'flags' : { 'incomplete'", "496, 'flags' : { 'public' }, 'supporters' : { 'MESA' }, 'url' :", "}, 'GL_ATI_fragment_shader' : { 'number' : 245, 'flags' : { 'public' }, 'supporters'", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NV/GLX_NV_copy_buffer.txt', }, 'GL_NV_copy_buffer' :", ": { 'number' : 14, 'flags' : { 'public' }, 'supporters' : {", "'number' : 16, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'number' : 81, 'flags' : { 'public' }, 'supporters' : { 'SGI' },", "'extensions/SUN/SUN_slice_accum.txt', }, 'GL_SUN_triangle_list' : { 'number' : 165, 'flags' : { 'public' },", "is referred to by some other vendor extensions, but shipped as ARB_transform_feedback2.', },", "'supporters' : { 'I3D' }, 'url' : 'extensions/I3D/WGL_I3D_image_buffer.txt', }, 'WGL_I3D_swap_frame_lock' : { 'number'", "{ 'public' }, 'url' : 'extensions/ARB/ARB_texture_barrier.txt', }, 'GL_ARB_texture_border_clamp' : { 'arbnumber' : 13,", "'extensions/ARB/ARB_transform_feedback_overflow_query.txt', }, 'GL_ARB_transpose_matrix' : { 'arbnumber' : 3, 'flags' : { 'public' },", "'flags' : { 'public' }, 'url' : 'extensions/APPLE/APPLE_texture_format_BGRA8888.txt', }, 'GL_APPLE_texture_max_level' : { 'esnumber'", ": 185, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_shader_viewport_layer_array.txt', }, 'GL_ARB_shading_language_100' :", ": { 'public' }, 'supporters' : { 'MS' }, 'url' : 'extensions/EXT/EXT_bgra.txt', },", "}, 'url' : 'extensions/I3D/WGL_I3D_swap_frame_usage.txt', }, 'GL_WIN_phong_shading' : { 'number' : 113, 'flags' :", ": 
'extensions/NV/NV_fragment_program_option.txt', }, 'GL_NV_fragment_shader_interlock' : { 'number' : 468, 'esnumber' : 230, 'flags'", "{ 'obsolete' }, 'url' : 'extensions/SGIX/SGIX_vertex_array_object.txt', }, 'GL_SGIX_vertex_preclip' : { 'number' : 210,", "'GLX_NV_video_out' : { 'number' : 348, 'flags' : { 'public' }, 'supporters' :", "{ 'public' }, 'url' : 'extensions/ANGLE/ANGLE_pack_reverse_row_order.txt', }, 'GL_ANGLE_program_binary' : { 'esnumber' : 139,", "{ 'ATI' }, 'url' : 'extensions/ATI/ATI_vertex_streams.txt', }, 'GL_DMP_program_binary' : { 'esnumber' : 192,", ": { 'number' : 397, 'flags' : { 'public' }, 'supporters' : {", "280, 'flags' : { 'public' }, 'url' : 'extensions/EXT/EXT_external_objects.txt', 'alias' : { 'GL_EXT_semaphore'", "'flags' : { 'public' }, 'url' : 'extensions/NV/NV_viewport_swizzle.txt', }, 'GL_OES_EGL_image' : { 'esnumber'", "}, 'supporters' : { '3DFX' }, 'url' : 'extensions/3DFX/3DFX_multisample.txt', }, 'GL_3DFX_tbuffer' : {", "'public' }, 'supporters' : { 'NVIDIA' }, 'url' : 'extensions/NVX/NVX_linked_gpu_multicast.txt', }, 'GL_NV_3dvision_settings' :", "'flags' : { 'public' }, 'supporters' : { 'HP', 'KGC', 'SGI', 'SUN' },", "'GL_ARB_gl_spirv' : { 'arbnumber' : 190, 'flags' : { 'public' }, 'url' :", "'supporters' : { 'SGI' }, 'url' : 'extensions/SGIX/SGIX_cube_map.txt', }, 'GL_SGIX_cylinder_texgen' : { 'number'", "}, 'url' : 'extensions/EXT/EXT_geometry_shader4.txt', }, 'GLX_EXT_stereo_tree' : { 'number' : 452, 'flags' :", "}, 'url' : 'extensions/EXT/EXT_external_buffer.txt', }, 'GL_EXT_EGL_image_array' : { 'esnumber' : 278, 'flags' :", "{ 'number' : 340, 'flags' : { 'public' }, 'supporters' : { 'NVIDIA'", ": { 'public' }, 'url' : 'extensions/NV/NV_non_square_matrices.txt', }, 'GL_NV_occlusion_query' : { 'number' :", "{ 'number' : 237, 'flags' : { 'public' }, 'supporters' : { 'KHR'", "{ 'public' }, 'supporters' : { 'ARB' }, 'url' : 'extensions/ARB/ARB_draw_buffers.txt', }, 'GL_ARB_draw_buffers_blend'", "'public' }, 'url' : 
'extensions/NV/NV_pixel_buffer_object.txt', }, 'GL_NV_pixel_data_range' : { 'number' : 284, 'flags'", "'public' }, 'url' : 'extensions/ARB/ARB_texture_buffer_range.txt', }, 'GL_ARB_texture_compression' : { 'arbnumber' : 12, 'flags'", "'extensions/NV/NV_vertex_buffer_unified_memory.txt', }, 'GL_NV_vertex_program' : { 'number' : 233, 'flags' : { 'public' },", "{ 'number' : 252, 'flags' : { 'public' }, 'supporters' : { 'I3D'", "}, 'GL_ANGLE_depth_texture' : { 'esnumber' : 138, 'flags' : { 'public' }, 'url'", "'url' : 'extensions/EXT/EXT_point_parameters.txt', }, 'GL_EXT_polygon_offset' : { 'number' : 3, 'flags' : {", "'flags' : { 'public' }, 'supporters' : { 'AMD' }, 'url' : 'extensions/AMD/AMD_shader_trinary_minmax.txt',", ": { 'ATI' }, 'url' : 'extensions/ATI/ATI_texture_mirror_once.txt', }, 'GL_ATI_vertex_array_object' : { 'number' :", "'url' : 'extensions/ATI/ATI_texture_mirror_once.txt', }, 'GL_ATI_vertex_array_object' : { 'number' : 247, 'flags' : {", "'arbnumber' : 169, 'esnumber' : 189, 'flags' : { 'public' }, 'url' :", "'incomplete' }, 'supporters' : { 'SGI' }, 'url' : 'extensions/SGI/SGI_complex.txt', }, 'GL_SGI_complex_type' :", "'url' : 'extensions/SGIX/GLX_SGIX_fbconfig.txt', }, 'GLX_SGIX_fbconfig_float' : { 'flags' : { 'incomplete' }, 'url'", "'url' : 'extensions/EXT/EXT_stencil_two_side.txt', }, 'GL_EXT_stencil_wrap' : { 'number' : 176, 'flags' : {", "'url' : 'extensions/NVX/NVX_blend_equation_advanced_multi_draw_buffers.txt', }, 'GL_NVX_conditional_render' : { 'number' : 425, 'flags' : {", "'NVIDIA' }, 'url' : 'extensions/EXT/EXT_texture_env_combine.txt', }, 'GL_EXT_texture_env_dot3' : { 'number' : 220, 'flags'", ": { 'flags' : { 'incomplete', 'obsolete' }, 'url' : 'extensions/IGLOO/IGLOO_swap_triangle_strip_vertex_pointerXXX.txt', }, 'GL_IGLOO_toggle_color_and_lightXXX'", ": { 'public' }, 'url' : 'extensions/OES/OES_viewport_array.txt', }, 'GL_OML_interlace' : { 'number' :", ": { 'number' : 307, 'flags' : { 'public' }, 'supporters' : {", "}, 'url' : 
'extensions/NV/NV_texture_env_combine4.txt', }, 'GL_NV_texture_expand_normal' : { 'number' : 286, 'flags' :", ": 'extensions/MESA/MESA_window_pos.txt', }, 'GL_MESA_ycbcr_texture' : { 'number' : 301, 'flags' : { 'public'", "{ 'NVIDIA' }, 'url' : 'extensions/AMD/AMD_depth_clamp_separate.txt', }, 'GL_AMD_draw_buffers_blend' : { 'number' : 366,", "'url' : 'extensions/NV/GLX_NV_delay_before_swap.txt', }, 'GL_NV_depth_buffer_float' : { 'number' : 334, 'flags' : {", "}, 'supporters' : { 'HP', 'SGI' }, 'url' : 'extensions/SGIX/SGIX_shadow_ambient.txt', }, 'GL_SGIX_slim' :", "'GLX_EXT_import_context' : { 'number' : 47, 'flags' : { 'public' }, 'supporters' :", "'extensions/IMG/IMG_texture_filter_cubic.txt', }, 'GL_IMG_user_clip_plane' : { 'esnumber' : 57, 'flags' : { 'public' },", "'public' }, 'url' : 'extensions/EXT/EXT_texture_border_clamp.txt', }, 'GL_EXT_texture_buffer' : { 'esnumber' : 183, 'flags'", "'GL_ARB_timer_query' : { 'arbnumber' : 85, 'flags' : { 'public' }, 'url' :", "'NVIDIA' }, 'url' : 'extensions/NV/NV_float_buffer.txt', 'alias' : { 'WGL_NV_float_buffer' }, }, 'GL_NV_fog_distance' :", ": 116, 'flags' : { 'public' }, 'url' : 'extensions/OES/OES_surfaceless_context.txt', }, 'GL_OES_tessellation_shader' :", "{ 'number' : 41, 'flags' : { 'public' }, 'supporters' : { 'SGI'", "{ 'arbnumber' : 102, 'flags' : { 'public' }, 'url' : 'extensions/ARB/WGL_ARB_create_context_robustness.txt', },", "{ 'arbnumber' : 109, 'flags' : { 'public' }, 'url' : 'extensions/ARB/ARB_transform_feedback_instanced.txt', },", "'url' : 'extensions/AMD/AMD_vertex_shader_viewport_index.txt', }, 'GL_ANDROID_extension_pack_es31a' : { 'esnumber' : 187, 'flags' : {", "'GL_NV_compute_program5' : { 'number' : 421, 'flags' : { 'public' }, 'supporters' :" ]
[ "y_true) def synthetic_sine_heteroscedastic(n_points=10): \"\"\" Return samples from \"synthetic sine\" heteroscedastic noisy function. \"\"\"", "Parameters ---------- x : ndarray 2d numpy ndarray. \"\"\" flat_neg_cos = np.sum(-1 *", "+ 0.1 * np.random.random((num_points,)) return (y_pred, y_std, y_true) def synthetic_sine_heteroscedastic(n_points=10): \"\"\" Return samples", "heteroscedastic noisy function. \"\"\" bounds = [0, 15] # x = np.random.uniform(bounds[0], bounds[1],", "Simple dataset of evenly spaced points and identity function (with some randomization) \"\"\"", "function. Parameters ---------- x : ndarray 2d numpy ndarray. \"\"\" flat_neg_cos = np.sum(-1", "1) / x.shape[1] curvy_cos = flat_neg_cos + 0.2 * np.linalg.norm(x, axis=1) curvy_cos =", "5.0) / 10.0 noise = np.random.normal(scale=std) y = f + noise return f,", "randomization) \"\"\" y_true = np.arange(num_points) y_pred = np.arange(num_points) + np.random.random((num_points,)) y_std = np.abs(y_true", "/ x.shape[1] curvy_cos = flat_neg_cos + 0.2 * np.linalg.norm(x, axis=1) curvy_cos = curvy_cos.reshape(-1,", "np.abs(y_true - y_pred) + 0.1 * np.random.random((num_points,)) return (y_pred, y_std, y_true) def synthetic_sine_heteroscedastic(n_points=10):", "= 0.01 + np.abs(x - 5.0) / 10.0 noise = np.random.normal(scale=std) y =", "importing and generating data. \"\"\" import numpy as np def synthetic_arange_random(num_points=10): \"\"\" Simple", "x = np.random.uniform(bounds[0], bounds[1], n_points) x = np.linspace(bounds[0], bounds[1], n_points) f = np.sin(x)", "and identity function (with some randomization) \"\"\" y_true = np.arange(num_points) y_pred = np.arange(num_points)", "= np.abs(y_true - y_pred) + 0.1 * np.random.random((num_points,)) return (y_pred, y_std, y_true) def", "= np.random.normal(scale=std) y = f + noise return f, std, y, x def", "x def curvy_cosine(x): \"\"\" Curvy cosine function. 
Parameters ---------- x : ndarray 2d", "\"\"\" import numpy as np def synthetic_arange_random(num_points=10): \"\"\" Simple dataset of evenly spaced", "np.linspace(bounds[0], bounds[1], n_points) f = np.sin(x) std = 0.01 + np.abs(x - 5.0)", "f + noise return f, std, y, x def curvy_cosine(x): \"\"\" Curvy cosine", "x.shape[1] curvy_cos = flat_neg_cos + 0.2 * np.linalg.norm(x, axis=1) curvy_cos = curvy_cos.reshape(-1, 1)", "15] # x = np.random.uniform(bounds[0], bounds[1], n_points) x = np.linspace(bounds[0], bounds[1], n_points) f", "return f, std, y, x def curvy_cosine(x): \"\"\" Curvy cosine function. Parameters ----------", "* np.cos(x), 1) / x.shape[1] curvy_cos = flat_neg_cos + 0.2 * np.linalg.norm(x, axis=1)", "y_pred) + 0.1 * np.random.random((num_points,)) return (y_pred, y_std, y_true) def synthetic_sine_heteroscedastic(n_points=10): \"\"\" Return", "samples from \"synthetic sine\" heteroscedastic noisy function. \"\"\" bounds = [0, 15] #", "def synthetic_arange_random(num_points=10): \"\"\" Simple dataset of evenly spaced points and identity function (with", "synthetic_arange_random(num_points=10): \"\"\" Simple dataset of evenly spaced points and identity function (with some", "= flat_neg_cos + 0.2 * np.linalg.norm(x, axis=1) curvy_cos = curvy_cos.reshape(-1, 1) return curvy_cos", "y, x def curvy_cosine(x): \"\"\" Curvy cosine function. Parameters ---------- x : ndarray", "bounds[1], n_points) x = np.linspace(bounds[0], bounds[1], n_points) f = np.sin(x) std = 0.01", "Code for importing and generating data. \"\"\" import numpy as np def synthetic_arange_random(num_points=10):", "bounds[1], n_points) f = np.sin(x) std = 0.01 + np.abs(x - 5.0) /", "10.0 noise = np.random.normal(scale=std) y = f + noise return f, std, y,", "curvy_cos = flat_neg_cos + 0.2 * np.linalg.norm(x, axis=1) curvy_cos = curvy_cos.reshape(-1, 1) return", "y_pred = np.arange(num_points) + np.random.random((num_points,)) y_std = np.abs(y_true - y_pred) + 0.1 *", "noisy function. 
\"\"\" bounds = [0, 15] # x = np.random.uniform(bounds[0], bounds[1], n_points)", "0.1 * np.random.random((num_points,)) return (y_pred, y_std, y_true) def synthetic_sine_heteroscedastic(n_points=10): \"\"\" Return samples from", "\"\"\" Simple dataset of evenly spaced points and identity function (with some randomization)", "Return samples from \"synthetic sine\" heteroscedastic noisy function. \"\"\" bounds = [0, 15]", "synthetic_sine_heteroscedastic(n_points=10): \"\"\" Return samples from \"synthetic sine\" heteroscedastic noisy function. \"\"\" bounds =", "of evenly spaced points and identity function (with some randomization) \"\"\" y_true =", "<filename>uncertainty_toolbox/data.py \"\"\" Code for importing and generating data. \"\"\" import numpy as np", "y_std = np.abs(y_true - y_pred) + 0.1 * np.random.random((num_points,)) return (y_pred, y_std, y_true)", "and generating data. \"\"\" import numpy as np def synthetic_arange_random(num_points=10): \"\"\" Simple dataset", "np.abs(x - 5.0) / 10.0 noise = np.random.normal(scale=std) y = f + noise", "dataset of evenly spaced points and identity function (with some randomization) \"\"\" y_true", ": ndarray 2d numpy ndarray. \"\"\" flat_neg_cos = np.sum(-1 * np.cos(x), 1) /", "= np.arange(num_points) y_pred = np.arange(num_points) + np.random.random((num_points,)) y_std = np.abs(y_true - y_pred) +", "+ np.abs(x - 5.0) / 10.0 noise = np.random.normal(scale=std) y = f +", "std = 0.01 + np.abs(x - 5.0) / 10.0 noise = np.random.normal(scale=std) y", "curvy_cosine(x): \"\"\" Curvy cosine function. 
Parameters ---------- x : ndarray 2d numpy ndarray.", "numpy as np def synthetic_arange_random(num_points=10): \"\"\" Simple dataset of evenly spaced points and", "+ noise return f, std, y, x def curvy_cosine(x): \"\"\" Curvy cosine function.", "= f + noise return f, std, y, x def curvy_cosine(x): \"\"\" Curvy", "= [0, 15] # x = np.random.uniform(bounds[0], bounds[1], n_points) x = np.linspace(bounds[0], bounds[1],", "np.sin(x) std = 0.01 + np.abs(x - 5.0) / 10.0 noise = np.random.normal(scale=std)", "= np.linspace(bounds[0], bounds[1], n_points) f = np.sin(x) std = 0.01 + np.abs(x -", "identity function (with some randomization) \"\"\" y_true = np.arange(num_points) y_pred = np.arange(num_points) +", "0.01 + np.abs(x - 5.0) / 10.0 noise = np.random.normal(scale=std) y = f", "+ np.random.random((num_points,)) y_std = np.abs(y_true - y_pred) + 0.1 * np.random.random((num_points,)) return (y_pred,", "numpy ndarray. \"\"\" flat_neg_cos = np.sum(-1 * np.cos(x), 1) / x.shape[1] curvy_cos =", "x = np.linspace(bounds[0], bounds[1], n_points) f = np.sin(x) std = 0.01 + np.abs(x", "f = np.sin(x) std = 0.01 + np.abs(x - 5.0) / 10.0 noise", "sine\" heteroscedastic noisy function. \"\"\" bounds = [0, 15] # x = np.random.uniform(bounds[0],", "np def synthetic_arange_random(num_points=10): \"\"\" Simple dataset of evenly spaced points and identity function", "for importing and generating data. \"\"\" import numpy as np def synthetic_arange_random(num_points=10): \"\"\"", "* np.random.random((num_points,)) return (y_pred, y_std, y_true) def synthetic_sine_heteroscedastic(n_points=10): \"\"\" Return samples from \"synthetic", "x : ndarray 2d numpy ndarray. \"\"\" flat_neg_cos = np.sum(-1 * np.cos(x), 1)", "ndarray 2d numpy ndarray. 
\"\"\" flat_neg_cos = np.sum(-1 * np.cos(x), 1) / x.shape[1]", "function (with some randomization) \"\"\" y_true = np.arange(num_points) y_pred = np.arange(num_points) + np.random.random((num_points,))", "y_true = np.arange(num_points) y_pred = np.arange(num_points) + np.random.random((num_points,)) y_std = np.abs(y_true - y_pred)", "spaced points and identity function (with some randomization) \"\"\" y_true = np.arange(num_points) y_pred", "np.arange(num_points) y_pred = np.arange(num_points) + np.random.random((num_points,)) y_std = np.abs(y_true - y_pred) + 0.1", "= np.sum(-1 * np.cos(x), 1) / x.shape[1] curvy_cos = flat_neg_cos + 0.2 *", "noise = np.random.normal(scale=std) y = f + noise return f, std, y, x", "y = f + noise return f, std, y, x def curvy_cosine(x): \"\"\"", "std, y, x def curvy_cosine(x): \"\"\" Curvy cosine function. Parameters ---------- x :", "from \"synthetic sine\" heteroscedastic noisy function. \"\"\" bounds = [0, 15] # x", "- y_pred) + 0.1 * np.random.random((num_points,)) return (y_pred, y_std, y_true) def synthetic_sine_heteroscedastic(n_points=10): \"\"\"", "def synthetic_sine_heteroscedastic(n_points=10): \"\"\" Return samples from \"synthetic sine\" heteroscedastic noisy function. \"\"\" bounds", "np.sum(-1 * np.cos(x), 1) / x.shape[1] curvy_cos = flat_neg_cos + 0.2 * np.linalg.norm(x,", "\"\"\" Code for importing and generating data. \"\"\" import numpy as np def", "\"\"\" bounds = [0, 15] # x = np.random.uniform(bounds[0], bounds[1], n_points) x =", "bounds = [0, 15] # x = np.random.uniform(bounds[0], bounds[1], n_points) x = np.linspace(bounds[0],", "data. \"\"\" import numpy as np def synthetic_arange_random(num_points=10): \"\"\" Simple dataset of evenly", "= np.sin(x) std = 0.01 + np.abs(x - 5.0) / 10.0 noise =", "cosine function. Parameters ---------- x : ndarray 2d numpy ndarray. 
\"\"\" flat_neg_cos =", "flat_neg_cos = np.sum(-1 * np.cos(x), 1) / x.shape[1] curvy_cos = flat_neg_cos + 0.2", "\"\"\" Return samples from \"synthetic sine\" heteroscedastic noisy function. \"\"\" bounds = [0,", "import numpy as np def synthetic_arange_random(num_points=10): \"\"\" Simple dataset of evenly spaced points", "np.cos(x), 1) / x.shape[1] curvy_cos = flat_neg_cos + 0.2 * np.linalg.norm(x, axis=1) curvy_cos", "\"\"\" flat_neg_cos = np.sum(-1 * np.cos(x), 1) / x.shape[1] curvy_cos = flat_neg_cos +", "n_points) x = np.linspace(bounds[0], bounds[1], n_points) f = np.sin(x) std = 0.01 +", "- 5.0) / 10.0 noise = np.random.normal(scale=std) y = f + noise return", "/ 10.0 noise = np.random.normal(scale=std) y = f + noise return f, std,", "[0, 15] # x = np.random.uniform(bounds[0], bounds[1], n_points) x = np.linspace(bounds[0], bounds[1], n_points)", "function. \"\"\" bounds = [0, 15] # x = np.random.uniform(bounds[0], bounds[1], n_points) x", "as np def synthetic_arange_random(num_points=10): \"\"\" Simple dataset of evenly spaced points and identity", "(with some randomization) \"\"\" y_true = np.arange(num_points) y_pred = np.arange(num_points) + np.random.random((num_points,)) y_std", "= np.arange(num_points) + np.random.random((num_points,)) y_std = np.abs(y_true - y_pred) + 0.1 * np.random.random((num_points,))", "np.random.random((num_points,)) return (y_pred, y_std, y_true) def synthetic_sine_heteroscedastic(n_points=10): \"\"\" Return samples from \"synthetic sine\"", "= np.random.uniform(bounds[0], bounds[1], n_points) x = np.linspace(bounds[0], bounds[1], n_points) f = np.sin(x) std", "(y_pred, y_std, y_true) def synthetic_sine_heteroscedastic(n_points=10): \"\"\" Return samples from \"synthetic sine\" heteroscedastic noisy", "generating data. \"\"\" import numpy as np def synthetic_arange_random(num_points=10): \"\"\" Simple dataset of", "def curvy_cosine(x): \"\"\" Curvy cosine function. 
Parameters ---------- x : ndarray 2d numpy", "np.random.uniform(bounds[0], bounds[1], n_points) x = np.linspace(bounds[0], bounds[1], n_points) f = np.sin(x) std =", "y_std, y_true) def synthetic_sine_heteroscedastic(n_points=10): \"\"\" Return samples from \"synthetic sine\" heteroscedastic noisy function.", "np.random.random((num_points,)) y_std = np.abs(y_true - y_pred) + 0.1 * np.random.random((num_points,)) return (y_pred, y_std,", "\"synthetic sine\" heteroscedastic noisy function. \"\"\" bounds = [0, 15] # x =", "np.arange(num_points) + np.random.random((num_points,)) y_std = np.abs(y_true - y_pred) + 0.1 * np.random.random((num_points,)) return", "ndarray. \"\"\" flat_neg_cos = np.sum(-1 * np.cos(x), 1) / x.shape[1] curvy_cos = flat_neg_cos", "2d numpy ndarray. \"\"\" flat_neg_cos = np.sum(-1 * np.cos(x), 1) / x.shape[1] curvy_cos", "return (y_pred, y_std, y_true) def synthetic_sine_heteroscedastic(n_points=10): \"\"\" Return samples from \"synthetic sine\" heteroscedastic", "f, std, y, x def curvy_cosine(x): \"\"\" Curvy cosine function. Parameters ---------- x", "np.random.normal(scale=std) y = f + noise return f, std, y, x def curvy_cosine(x):", "\"\"\" Curvy cosine function. Parameters ---------- x : ndarray 2d numpy ndarray. \"\"\"", "Curvy cosine function. Parameters ---------- x : ndarray 2d numpy ndarray. \"\"\" flat_neg_cos", "---------- x : ndarray 2d numpy ndarray. 
\"\"\" flat_neg_cos = np.sum(-1 * np.cos(x),", "points and identity function (with some randomization) \"\"\" y_true = np.arange(num_points) y_pred =", "evenly spaced points and identity function (with some randomization) \"\"\" y_true = np.arange(num_points)", "# x = np.random.uniform(bounds[0], bounds[1], n_points) x = np.linspace(bounds[0], bounds[1], n_points) f =", "n_points) f = np.sin(x) std = 0.01 + np.abs(x - 5.0) / 10.0", "\"\"\" y_true = np.arange(num_points) y_pred = np.arange(num_points) + np.random.random((num_points,)) y_std = np.abs(y_true -", "some randomization) \"\"\" y_true = np.arange(num_points) y_pred = np.arange(num_points) + np.random.random((num_points,)) y_std =", "noise return f, std, y, x def curvy_cosine(x): \"\"\" Curvy cosine function. Parameters" ]
[ "= cv2.THRESH_BINARY bilateralKernelSize = 9 bilateralSigmaSpace = 9 countourRetrivalMode = cv2.RETR_LIST contourApproximationMethod =", "w y1 = y y2 = y + h image = self.image[y1:y2, x1:x2]", "def getBilateralSubRegion(self, x, y, w, h): x1 = x x2 = x +", "y y2 = y + h image = self.image[y1:y2, x1:x2] return cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize,", "image self.cols = image.shape[0] self.rows = image.shape[1] def getSubRegion(self, x, y, w, h):", "self.contourApproximationMethod) def getContoursOverlayImage(self): contours = self.getContours() if(contours == None): return self.image else: return", "thresholdType = cv2.THRESH_BINARY bilateralKernelSize = 9 bilateralSigmaSpace = 9 countourRetrivalMode = cv2.RETR_LIST contourApproximationMethod", "as np class Frame: path = \"\" frameNumber = 1 image cols =", "self.countourRetrivalMode, self.contourApproximationMethod) def getContoursOverlayImage(self): contours = self.getContours() if(contours == None): return self.image else:", "0) def getBilateral(self): return cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getBilateralSubRegion(self, x, y, w, h):", "cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY) def getGrayscaleSubRegion(self, x, y, w, h): x1 = x x2 =", "= image.shape[0] self.rows = image.shape[1] def getSubRegion(self, x, y, w, h): x1 =", "low, high, self.thresholdType) def getThresholdSubRegion(self, low, high, x, y, w, h): x1 =", "cv2.filter2D(image, -1, kernel) def getThreshold(self, low, high): return cv2.threshold(self.image, low, high, self.thresholdType) def", "y + h image = self.image[y1:y2, x1:x2] return cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getContours(self):", "getContours(self): gray = self.getGrayscale() return cv2.findContours(gray, self.countourRetrivalMode, self.contourApproximationMethod) def getContoursOverlayImage(self): contours = self.getContours()", "= self.image[y1:y2, 
x1:x2] kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(image, -1,", "path, image): self.frameNumber = frameNumber self.path = path self.image = image self.cols =", "= frameNumber self.path = path self.image = image self.cols = image.shape[0] self.rows =", "y, w, h): x1 = x x2 = x + w y1 =", "= y + h return self.image[y1:y2, x1:x2] def getGrayscale(self): return cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY) def", "y2 = y + h image = self.image[y1:y2, x1:x2] kernel = np.ones((5, 5),", "return cv2.filter2D(image, -1, kernel) def getThreshold(self, low, high): return cv2.threshold(self.image, low, high, self.thresholdType)", "0, 255) contourBorderSize = 1 def __init__(self, frameNumber, path, image): self.frameNumber = frameNumber", "= y y2 = y + h return self.image[y1:y2, x1:x2] def getGrayscale(self): return", "cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) def getContoursOverlayImage(self, x, y, w, h): contours = self.getContoursSubRegion(x, y,", "getSubRegion(self, x, y, w, h): x1 = x x2 = x + w", "+ w y1 = y y2 = y + h image = self.image[y1:y2,", "1 def __init__(self, frameNumber, path, image): self.frameNumber = frameNumber self.path = path self.image", "image = self.image[y1:y2, x1:x2] kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(image,", "bilateralKernelSize = 9 bilateralSigmaSpace = 9 countourRetrivalMode = cv2.RETR_LIST contourApproximationMethod = cv2.CHAIN_APPROX_SIMPLE contourLayers", "self.image[y1:y2, x1:x2] return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) def getSimpleBlur(self): kernel = np.ones((5, 5), np.float32) /", "return cv2.threshold(self.image, low, high, self.thresholdType) def getThresholdSubRegion(self, low, high, x, y, w, h):", "= y + h image = self.image[y1:y2, x1:x2] kernel = np.ones((5, 5), np.float32)", "def getBilateral(self): return cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getBilateralSubRegion(self, 
x, y, w, h): x1", "return cv2.drawContours(self.image, contours, self.contourLayers, self.contourColor, self.contourBorderSize) def getContoursSubRegion(self, x, y, w, h): gray", "cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getContours(self): gray = self.getGrayscale() return cv2.findContours(gray, self.countourRetrivalMode, self.contourApproximationMethod) def", "def getContours(self): gray = self.getGrayscale() return cv2.findContours(gray, self.countourRetrivalMode, self.contourApproximationMethod) def getContoursOverlayImage(self): contours =", "os import numpy as np class Frame: path = \"\" frameNumber = 1", "np.float32) / self.simpleBlurAmount return cv2.filter2D(self.image, -1, kernel) def getSimpleBlurSubRegion(self, x, y, w, h):", "h image = self.image[y1:y2, x1:x2] return cv2.GaussianBlur(image, (5,5), 0) def getBilateral(self): return cv2.adaptiveBilateralFilter(self.image,", "return self.image else: return cv2.drawContours(self.image, contours, self.contourLayers, self.contourColor, self.contourBorderSize) def getContoursSubRegion(self, x, y,", "cv2.RETR_LIST contourApproximationMethod = cv2.CHAIN_APPROX_SIMPLE contourLayers = -1 contourColor = (0, 0, 255) contourBorderSize", "+ h image = self.image[y1:y2, x1:x2] return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) def getSimpleBlur(self): kernel =", "self.getContoursSubRegion(x, y, w, h) if(contours == None): return self.getSubRegion(x, y, w, h) else:", "None): return self.getSubRegion(x, y, w, h) else: return cv2.drawContours(self.getSubRegion(x, y, w, h), contours,", "getThresholdSubRegion(self, low, high, x, y, w, h): x1 = x x2 = x", "Frame: path = \"\" frameNumber = 1 image cols = 0 rows =", "image): self.frameNumber = frameNumber self.path = path self.image = image self.cols = image.shape[0]", "countourRetrivalMode = cv2.RETR_LIST contourApproximationMethod = cv2.CHAIN_APPROX_SIMPLE contourLayers = -1 contourColor = (0, 0,", "self.simpleBlurAmount return 
cv2.filter2D(self.image, -1, kernel) def getSimpleBlurSubRegion(self, x, y, w, h): x1 =", "h) return cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) def getContoursOverlayImage(self, x, y, w, h): contours =", "cv2.findContours(gray, self.countourRetrivalMode, self.contourApproximationMethod) def getContoursOverlayImage(self): contours = self.getContours() if(contours == None): return self.image", "x1:x2] kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(image, -1, kernel) def", "high, x, y, w, h): x1 = x x2 = x + w", "x, y, w, h): contours = self.getContoursSubRegion(x, y, w, h) if(contours == None):", "cv2.CHAIN_APPROX_SIMPLE) def getContoursOverlayImage(self, x, y, w, h): contours = self.getContoursSubRegion(x, y, w, h)", "y y2 = y + h image = self.image[y1:y2, x1:x2] return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY)", "def getSubRegion(self, x, y, w, h): x1 = x x2 = x +", "low, high, x, y, w, h): x1 = x x2 = x +", "= x + w y1 = y y2 = y + h image", "getContoursSubRegion(self, x, y, w, h): gray = self.getGrayscaleSubRegion(x, y, w, h) return cv2.findContours(gray,", "h) if(contours == None): return self.getSubRegion(x, y, w, h) else: return cv2.drawContours(self.getSubRegion(x, y,", "return cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getContours(self): gray = self.getGrayscale() return cv2.findContours(gray, self.countourRetrivalMode, self.contourApproximationMethod)", "def getThreshold(self, low, high): return cv2.threshold(self.image, low, high, self.thresholdType) def getThresholdSubRegion(self, low, high,", "= image.shape[1] def getSubRegion(self, x, y, w, h): x1 = x x2 =", "+ w y1 = y y2 = y + h return self.image[y1:y2, x1:x2]", "+ h image = self.image[y1:y2, x1:x2] return cv2.GaussianBlur(image, (5,5), 0) def getBilateral(self): return", "contours = self.getContours() if(contours == None): return self.image else: return cv2.drawContours(self.image, contours, 
self.contourLayers,", "self.image else: return cv2.drawContours(self.image, contours, self.contourLayers, self.contourColor, self.contourBorderSize) def getContoursSubRegion(self, x, y, w,", "self.contourLayers, self.contourColor, self.contourBorderSize) def getContoursSubRegion(self, x, y, w, h): gray = self.getGrayscaleSubRegion(x, y,", "w, h): contours = self.getContoursSubRegion(x, y, w, h) if(contours == None): return self.getSubRegion(x,", "h return self.image[y1:y2, x1:x2] def getGrayscale(self): return cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY) def getGrayscaleSubRegion(self, x, y,", "h): gray = self.getGrayscaleSubRegion(x, y, w, h) return cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) def getContoursOverlayImage(self,", "configured opencv settings simpleBlurAmount = 25 thresholdType = cv2.THRESH_BINARY bilateralKernelSize = 9 bilateralSigmaSpace", "y1 = y y2 = y + h image = self.image[y1:y2, x1:x2] kernel", "y2 = y + h image = self.image[y1:y2, x1:x2] return cv2.GaussianBlur(image, (5,5), 0)", "np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(image, -1, kernel) def getThreshold(self, low, high):", "return self.getSubRegion(x, y, w, h) else: return cv2.drawContours(self.getSubRegion(x, y, w, h), contours, self.contourLayers,", "self.image = image self.cols = image.shape[0] self.rows = image.shape[1] def getSubRegion(self, x, y,", "self.contourBorderSize) def getContoursSubRegion(self, x, y, w, h): gray = self.getGrayscaleSubRegion(x, y, w, h)", "= cv2.RETR_LIST contourApproximationMethod = cv2.CHAIN_APPROX_SIMPLE contourLayers = -1 contourColor = (0, 0, 255)", "path = \"\" frameNumber = 1 image cols = 0 rows = 0", "25 thresholdType = cv2.THRESH_BINARY bilateralKernelSize = 9 bilateralSigmaSpace = 9 countourRetrivalMode = cv2.RETR_LIST", "= (0, 0, 255) contourBorderSize = 1 def __init__(self, frameNumber, path, image): self.frameNumber", "= y + h image = self.image[y1:y2, x1:x2] return cv2.GaussianBlur(image, (5,5), 
0) def", "y + h image = self.image[y1:y2, x1:x2] return cv2.GaussianBlur(image, (5,5), 0) def getBilateral(self):", "import numpy as np class Frame: path = \"\" frameNumber = 1 image", "contourLayers = -1 contourColor = (0, 0, 255) contourBorderSize = 1 def __init__(self,", "= self.getGrayscaleSubRegion(x, y, w, h) return cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) def getContoursOverlayImage(self, x, y,", "return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) def getSimpleBlur(self): kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return", "bilateralSigmaSpace = 9 countourRetrivalMode = cv2.RETR_LIST contourApproximationMethod = cv2.CHAIN_APPROX_SIMPLE contourLayers = -1 contourColor", "y y2 = y + h image = self.image[y1:y2, x1:x2] kernel = np.ones((5,", "w, h): x1 = x x2 = x + w y1 = y", "== None): return self.getSubRegion(x, y, w, h) else: return cv2.drawContours(self.getSubRegion(x, y, w, h),", "= -1 contourColor = (0, 0, 255) contourBorderSize = 1 def __init__(self, frameNumber,", "simpleBlurAmount = 25 thresholdType = cv2.THRESH_BINARY bilateralKernelSize = 9 bilateralSigmaSpace = 9 countourRetrivalMode", "x1:x2] def getGrayscale(self): return cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY) def getGrayscaleSubRegion(self, x, y, w, h): x1", "self.bilateralSigmaSpace) def getContours(self): gray = self.getGrayscale() return cv2.findContours(gray, self.countourRetrivalMode, self.contourApproximationMethod) def getContoursOverlayImage(self): contours", "= \"\" frameNumber = 1 image cols = 0 rows = 0 #", "return cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) def getContoursOverlayImage(self, x, y, w, h): contours = self.getContoursSubRegion(x,", "self.cols = image.shape[0] self.rows = image.shape[1] def getSubRegion(self, x, y, w, h): x1", "getGrayscaleSubRegion(self, x, y, w, h): x1 = x x2 = x + w", "kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(self.image, -1, kernel) def 
getSimpleBlurSubRegion(self,", "x1:x2] return cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getContours(self): gray = self.getGrayscale() return cv2.findContours(gray, self.countourRetrivalMode,", "return cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY) def getGrayscaleSubRegion(self, x, y, w, h): x1 = x x2", "self.rows = image.shape[1] def getSubRegion(self, x, y, w, h): x1 = x x2", "+ h image = self.image[y1:y2, x1:x2] return cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getContours(self): gray", "image.shape[0] self.rows = image.shape[1] def getSubRegion(self, x, y, w, h): x1 = x", "getContoursOverlayImage(self): contours = self.getContours() if(contours == None): return self.image else: return cv2.drawContours(self.image, contours,", "y1 = y y2 = y + h image = self.image[y1:y2, x1:x2] return", "np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(self.image, -1, kernel) def getSimpleBlurSubRegion(self, x, y,", "= y + h image = self.image[y1:y2, x1:x2] return cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize, self.bilateralSigmaSpace) def", "None): return self.image else: return cv2.drawContours(self.image, contours, self.contourLayers, self.contourColor, self.contourBorderSize) def getContoursSubRegion(self, x,", "y + h image = self.image[y1:y2, x1:x2] kernel = np.ones((5, 5), np.float32) /", "= self.getGrayscale() return cv2.findContours(gray, self.countourRetrivalMode, self.contourApproximationMethod) def getContoursOverlayImage(self): contours = self.getContours() if(contours ==", "= self.image[y1:y2, x1:x2] return cv2.GaussianBlur(image, (5,5), 0) def getBilateral(self): return cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize, self.bilateralSigmaSpace)", "= 1 def __init__(self, frameNumber, path, image): self.frameNumber = frameNumber self.path = path", "high): return cv2.threshold(self.image, low, high, self.thresholdType) def 
getThresholdSubRegion(self, low, high, x, y, w,", "y, w, h): contours = self.getContoursSubRegion(x, y, w, h) if(contours == None): return", "w, h) if(contours == None): return self.getSubRegion(x, y, w, h) else: return cv2.drawContours(self.getSubRegion(x,", "return self.image[y1:y2, x1:x2] def getGrayscale(self): return cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY) def getGrayscaleSubRegion(self, x, y, w,", "np class Frame: path = \"\" frameNumber = 1 image cols = 0", "= image self.cols = image.shape[0] self.rows = image.shape[1] def getSubRegion(self, x, y, w,", "\"\" frameNumber = 1 image cols = 0 rows = 0 # default", "contourColor = (0, 0, 255) contourBorderSize = 1 def __init__(self, frameNumber, path, image):", "def getGrayscale(self): return cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY) def getGrayscaleSubRegion(self, x, y, w, h): x1 =", "def getThresholdSubRegion(self, low, high, x, y, w, h): x1 = x x2 =", "settings simpleBlurAmount = 25 thresholdType = cv2.THRESH_BINARY bilateralKernelSize = 9 bilateralSigmaSpace = 9", "= 0 rows = 0 # default configured opencv settings simpleBlurAmount = 25", "self.image[y1:y2, x1:x2] def getGrayscale(self): return cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY) def getGrayscaleSubRegion(self, x, y, w, h):", "h image = self.image[y1:y2, x1:x2] kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return", "y + h return self.image[y1:y2, x1:x2] def getGrayscale(self): return cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY) def getGrayscaleSubRegion(self,", "y2 = y + h image = self.image[y1:y2, x1:x2] return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) def", "return cv2.filter2D(self.image, -1, kernel) def getSimpleBlurSubRegion(self, x, y, w, h): x1 = x", "0 # default configured opencv settings simpleBlurAmount = 25 thresholdType = cv2.THRESH_BINARY bilateralKernelSize", "= y y2 = y + h image = self.image[y1:y2, x1:x2] return cv2.adaptiveBilateralFilter(image,", "image = self.image[y1:y2, x1:x2] return 
cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getContours(self): gray = self.getGrayscale()", "h): contours = self.getContoursSubRegion(x, y, w, h) if(contours == None): return self.getSubRegion(x, y,", "high, self.thresholdType) def getThresholdSubRegion(self, low, high, x, y, w, h): x1 = x", "y y2 = y + h return self.image[y1:y2, x1:x2] def getGrayscale(self): return cv2.cvtColor(self.image,", "x1:x2] return cv2.GaussianBlur(image, (5,5), 0) def getBilateral(self): return cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getBilateralSubRegion(self,", "self.simpleBlurAmount return cv2.filter2D(image, -1, kernel) def getThreshold(self, low, high): return cv2.threshold(self.image, low, high,", "y, w, h): gray = self.getGrayscaleSubRegion(x, y, w, h) return cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)", "= np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(image, -1, kernel) def getThreshold(self, low,", "-1 contourColor = (0, 0, 255) contourBorderSize = 1 def __init__(self, frameNumber, path,", "getBilateralSubRegion(self, x, y, w, h): x1 = x x2 = x + w", "cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) def getContoursOverlayImage(self, x, y, w, h): contours = self.getContoursSubRegion(x, y, w,", "y, w, h) return cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) def getContoursOverlayImage(self, x, y, w, h):", "contourBorderSize = 1 def __init__(self, frameNumber, path, image): self.frameNumber = frameNumber self.path =", "cv2.THRESH_BINARY bilateralKernelSize = 9 bilateralSigmaSpace = 9 countourRetrivalMode = cv2.RETR_LIST contourApproximationMethod = cv2.CHAIN_APPROX_SIMPLE", "5), np.float32) / self.simpleBlurAmount return cv2.filter2D(image, -1, kernel) def getThreshold(self, low, high): return", "gray = self.getGrayscale() return cv2.findContours(gray, self.countourRetrivalMode, self.contourApproximationMethod) def 
getContoursOverlayImage(self): contours = self.getContours() if(contours", "else: return cv2.drawContours(self.image, contours, self.contourLayers, self.contourColor, self.contourBorderSize) def getContoursSubRegion(self, x, y, w, h):", "if(contours == None): return self.getSubRegion(x, y, w, h) else: return cv2.drawContours(self.getSubRegion(x, y, w,", "h image = self.image[y1:y2, x1:x2] return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) def getSimpleBlur(self): kernel = np.ones((5,", "contours, self.contourLayers, self.contourColor, self.contourBorderSize) def getContoursSubRegion(self, x, y, w, h): gray = self.getGrayscaleSubRegion(x,", "y2 = y + h image = self.image[y1:y2, x1:x2] return cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize, self.bilateralSigmaSpace)", "y, w, h) else: return cv2.drawContours(self.getSubRegion(x, y, w, h), contours, self.contourLayers, self.contourColor, self.contourBorderSize)", "9 countourRetrivalMode = cv2.RETR_LIST contourApproximationMethod = cv2.CHAIN_APPROX_SIMPLE contourLayers = -1 contourColor = (0,", "= self.image[y1:y2, x1:x2] return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) def getSimpleBlur(self): kernel = np.ones((5, 5), np.float32)", "/ self.simpleBlurAmount return cv2.filter2D(self.image, -1, kernel) def getSimpleBlurSubRegion(self, x, y, w, h): x1", "if(contours == None): return self.image else: return cv2.drawContours(self.image, contours, self.contourLayers, self.contourColor, self.contourBorderSize) def", "h image = self.image[y1:y2, x1:x2] return cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getContours(self): gray =", "x1 = x x2 = x + w y1 = y y2 =", "= y y2 = y + h image = self.image[y1:y2, x1:x2] kernel =", "+ h return self.image[y1:y2, x1:x2] def getGrayscale(self): return cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY) def getGrayscaleSubRegion(self, x,", "def getContoursSubRegion(self, x, y, w, h): gray = self.getGrayscaleSubRegion(x, y, w, h) return", "-1, 
kernel) def getThreshold(self, low, high): return cv2.threshold(self.image, low, high, self.thresholdType) def getThresholdSubRegion(self,", "gray = self.getGrayscaleSubRegion(x, y, w, h) return cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) def getContoursOverlayImage(self, x,", "frameNumber self.path = path self.image = image self.cols = image.shape[0] self.rows = image.shape[1]", "y y2 = y + h image = self.image[y1:y2, x1:x2] return cv2.GaussianBlur(image, (5,5),", "frameNumber = 1 image cols = 0 rows = 0 # default configured", "image = self.image[y1:y2, x1:x2] return cv2.GaussianBlur(image, (5,5), 0) def getBilateral(self): return cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize,", "cv2.COLOR_RGB2GRAY) def getGrayscaleSubRegion(self, x, y, w, h): x1 = x x2 = x", "self.getGrayscale() return cv2.findContours(gray, self.countourRetrivalMode, self.contourApproximationMethod) def getContoursOverlayImage(self): contours = self.getContours() if(contours == None):", "np.float32) / self.simpleBlurAmount return cv2.filter2D(image, -1, kernel) def getThreshold(self, low, high): return cv2.threshold(self.image,", "self.thresholdType) def getThresholdSubRegion(self, low, high, x, y, w, h): x1 = x x2", "def getSimpleBlur(self): kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(self.image, -1, kernel)", "self.bilateralSigmaSpace) def getBilateralSubRegion(self, x, y, w, h): x1 = x x2 = x", "getBilateral(self): return cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getBilateralSubRegion(self, x, y, w, h): x1 =", "0 rows = 0 # default configured opencv settings simpleBlurAmount = 25 thresholdType", "cv2.CHAIN_APPROX_SIMPLE contourLayers = -1 contourColor = (0, 0, 255) contourBorderSize = 1 def", "getGrayscale(self): return cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY) def getGrayscaleSubRegion(self, x, y, w, h): x1 = x", "cv2.COLOR_RGB2GRAY) def getSimpleBlur(self): 
kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(self.image, -1,", "= cv2.CHAIN_APPROX_SIMPLE contourLayers = -1 contourColor = (0, 0, 255) contourBorderSize = 1", "<reponame>ramity/apexcv import os import numpy as np class Frame: path = \"\" frameNumber", "self.bilateralKernelSize, self.bilateralSigmaSpace) def getBilateralSubRegion(self, x, y, w, h): x1 = x x2 =", "self.contourColor, self.contourBorderSize) def getContoursSubRegion(self, x, y, w, h): gray = self.getGrayscaleSubRegion(x, y, w,", "= 0 # default configured opencv settings simpleBlurAmount = 25 thresholdType = cv2.THRESH_BINARY", "cv2.threshold(self.image, low, high, self.thresholdType) def getThresholdSubRegion(self, low, high, x, y, w, h): x1", "getSimpleBlur(self): kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(self.image, -1, kernel) def", "image.shape[1] def getSubRegion(self, x, y, w, h): x1 = x x2 = x", "h): x1 = x x2 = x + w y1 = y y2", "/ self.simpleBlurAmount return cv2.filter2D(image, -1, kernel) def getThreshold(self, low, high): return cv2.threshold(self.image, low,", "9 bilateralSigmaSpace = 9 countourRetrivalMode = cv2.RETR_LIST contourApproximationMethod = cv2.CHAIN_APPROX_SIMPLE contourLayers = -1", "self.getContours() if(contours == None): return self.image else: return cv2.drawContours(self.image, contours, self.contourLayers, self.contourColor, self.contourBorderSize)", "w, h): gray = self.getGrayscaleSubRegion(x, y, w, h) return cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) def", "x + w y1 = y y2 = y + h image =", "self.getGrayscaleSubRegion(x, y, w, h) return cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) def getContoursOverlayImage(self, x, y, w,", "def getContoursOverlayImage(self, x, y, w, h): contours = self.getContoursSubRegion(x, y, w, h) if(contours", "def __init__(self, frameNumber, path, image): self.frameNumber = frameNumber self.path = path self.image =", "y, w, h) 
if(contours == None): return self.getSubRegion(x, y, w, h) else: return", "= y y2 = y + h image = self.image[y1:y2, x1:x2] return cv2.cvtColor(image,", "def getGrayscaleSubRegion(self, x, y, w, h): x1 = x x2 = x +", "= y + h image = self.image[y1:y2, x1:x2] return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) def getSimpleBlur(self):", "path self.image = image self.cols = image.shape[0] self.rows = image.shape[1] def getSubRegion(self, x,", "= 25 thresholdType = cv2.THRESH_BINARY bilateralKernelSize = 9 bilateralSigmaSpace = 9 countourRetrivalMode =", "self.image[y1:y2, x1:x2] return cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getContours(self): gray = self.getGrayscale() return cv2.findContours(gray,", "== None): return self.image else: return cv2.drawContours(self.image, contours, self.contourLayers, self.contourColor, self.contourBorderSize) def getContoursSubRegion(self,", "self.frameNumber = frameNumber self.path = path self.image = image self.cols = image.shape[0] self.rows", "kernel) def getSimpleBlurSubRegion(self, x, y, w, h): x1 = x x2 = x", "self.image[y1:y2, x1:x2] kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(image, -1, kernel)", "kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(image, -1, kernel) def getThreshold(self,", "default configured opencv settings simpleBlurAmount = 25 thresholdType = cv2.THRESH_BINARY bilateralKernelSize = 9", "getThreshold(self, low, high): return cv2.threshold(self.image, low, high, self.thresholdType) def getThresholdSubRegion(self, low, high, x,", "low, high): return cv2.threshold(self.image, low, high, self.thresholdType) def getThresholdSubRegion(self, low, high, x, y,", "def getSimpleBlurSubRegion(self, x, y, w, h): x1 = x x2 = x +", "return cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getBilateralSubRegion(self, x, y, w, h): x1 = x", "return 
cv2.findContours(gray, self.countourRetrivalMode, self.contourApproximationMethod) def getContoursOverlayImage(self): contours = self.getContours() if(contours == None): return", "cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) def getSimpleBlur(self): kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(self.image,", "frameNumber, path, image): self.frameNumber = frameNumber self.path = path self.image = image self.cols", "self.getSubRegion(x, y, w, h) else: return cv2.drawContours(self.getSubRegion(x, y, w, h), contours, self.contourLayers, self.contourColor,", "= np.ones((5, 5), np.float32) / self.simpleBlurAmount return cv2.filter2D(self.image, -1, kernel) def getSimpleBlurSubRegion(self, x,", "# default configured opencv settings simpleBlurAmount = 25 thresholdType = cv2.THRESH_BINARY bilateralKernelSize =", "self.bilateralKernelSize, self.bilateralSigmaSpace) def getContours(self): gray = self.getGrayscale() return cv2.findContours(gray, self.countourRetrivalMode, self.contourApproximationMethod) def getContoursOverlayImage(self):", "def getContoursOverlayImage(self): contours = self.getContours() if(contours == None): return self.image else: return cv2.drawContours(self.image,", "__init__(self, frameNumber, path, image): self.frameNumber = frameNumber self.path = path self.image = image", "w y1 = y y2 = y + h return self.image[y1:y2, x1:x2] def", "cv2.filter2D(self.image, -1, kernel) def getSimpleBlurSubRegion(self, x, y, w, h): x1 = x x2", "getSimpleBlurSubRegion(self, x, y, w, h): x1 = x x2 = x + w", "x, y, w, h): x1 = x x2 = x + w y1", "y1 = y y2 = y + h return self.image[y1:y2, x1:x2] def getGrayscale(self):", "cv2.GaussianBlur(image, (5,5), 0) def getBilateral(self): return cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getBilateralSubRegion(self, x, y,", "kernel) def getThreshold(self, low, high): return cv2.threshold(self.image, low, high, self.thresholdType) def 
getThresholdSubRegion(self, low,", "getContoursOverlayImage(self, x, y, w, h): contours = self.getContoursSubRegion(x, y, w, h) if(contours ==", "x, y, w, h): gray = self.getGrayscaleSubRegion(x, y, w, h) return cv2.findContours(gray, cv2.RETR_TREE,", "(0, 0, 255) contourBorderSize = 1 def __init__(self, frameNumber, path, image): self.frameNumber =", "+ h image = self.image[y1:y2, x1:x2] kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount", "opencv settings simpleBlurAmount = 25 thresholdType = cv2.THRESH_BINARY bilateralKernelSize = 9 bilateralSigmaSpace =", "x1:x2] return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) def getSimpleBlur(self): kernel = np.ones((5, 5), np.float32) / self.simpleBlurAmount", "255) contourBorderSize = 1 def __init__(self, frameNumber, path, image): self.frameNumber = frameNumber self.path", "x + w y1 = y y2 = y + h return self.image[y1:y2,", "= path self.image = image self.cols = image.shape[0] self.rows = image.shape[1] def getSubRegion(self,", "y2 = y + h return self.image[y1:y2, x1:x2] def getGrayscale(self): return cv2.cvtColor(self.image, cv2.COLOR_RGB2GRAY)", "image = self.image[y1:y2, x1:x2] return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) def getSimpleBlur(self): kernel = np.ones((5, 5),", "class Frame: path = \"\" frameNumber = 1 image cols = 0 rows", "return cv2.GaussianBlur(image, (5,5), 0) def getBilateral(self): return cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getBilateralSubRegion(self, x,", "(5,5), 0) def getBilateral(self): return cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getBilateralSubRegion(self, x, y, w,", "cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getBilateralSubRegion(self, x, y, w, h): x1 = x x2", "= 9 bilateralSigmaSpace = 9 countourRetrivalMode = cv2.RETR_LIST contourApproximationMethod = cv2.CHAIN_APPROX_SIMPLE contourLayers =", "= 
self.getContours() if(contours == None): return self.image else: return cv2.drawContours(self.image, contours, self.contourLayers, self.contourColor,", "cv2.drawContours(self.image, contours, self.contourLayers, self.contourColor, self.contourBorderSize) def getContoursSubRegion(self, x, y, w, h): gray =", "numpy as np class Frame: path = \"\" frameNumber = 1 image cols", "y + h image = self.image[y1:y2, x1:x2] return cv2.cvtColor(image, cv2.COLOR_RGB2GRAY) def getSimpleBlur(self): kernel", "rows = 0 # default configured opencv settings simpleBlurAmount = 25 thresholdType =", "contours = self.getContoursSubRegion(x, y, w, h) if(contours == None): return self.getSubRegion(x, y, w,", "= x + w y1 = y y2 = y + h return", "= x x2 = x + w y1 = y y2 = y", "= 1 image cols = 0 rows = 0 # default configured opencv", "cols = 0 rows = 0 # default configured opencv settings simpleBlurAmount =", "contourApproximationMethod = cv2.CHAIN_APPROX_SIMPLE contourLayers = -1 contourColor = (0, 0, 255) contourBorderSize =", "= y y2 = y + h image = self.image[y1:y2, x1:x2] return cv2.GaussianBlur(image,", "self.image[y1:y2, x1:x2] return cv2.GaussianBlur(image, (5,5), 0) def getBilateral(self): return cv2.adaptiveBilateralFilter(self.image, self.bilateralKernelSize, self.bilateralSigmaSpace) def", "image cols = 0 rows = 0 # default configured opencv settings simpleBlurAmount", "w, h) return cv2.findContours(gray, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) def getContoursOverlayImage(self, x, y, w, h): contours", "1 image cols = 0 rows = 0 # default configured opencv settings", "= self.image[y1:y2, x1:x2] return cv2.adaptiveBilateralFilter(image, self.bilateralKernelSize, self.bilateralSigmaSpace) def getContours(self): gray = self.getGrayscale() return", "5), np.float32) / self.simpleBlurAmount return cv2.filter2D(self.image, -1, kernel) def getSimpleBlurSubRegion(self, x, y, w,", "= self.getContoursSubRegion(x, y, w, h) if(contours == None): return self.getSubRegion(x, y, w, h)", "x2 = x + 
w y1 = y y2 = y + h", "x x2 = x + w y1 = y y2 = y +", "import os import numpy as np class Frame: path = \"\" frameNumber =", "self.path = path self.image = image self.cols = image.shape[0] self.rows = image.shape[1] def", "-1, kernel) def getSimpleBlurSubRegion(self, x, y, w, h): x1 = x x2 =", "= 9 countourRetrivalMode = cv2.RETR_LIST contourApproximationMethod = cv2.CHAIN_APPROX_SIMPLE contourLayers = -1 contourColor =" ]
[ "newlines into url which doesn't work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running python image copy script", "procedure,parameter, downloadFilePath) #print \"directory \"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print \"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print \"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm", "= args.komp2Port if args.komp2Port<>None else omeroProps['komp2port'] komp2db = args.komp2Db if args.komp2Db<>None else omeroProps['komp2db']", "parameter_stable_id=doc['parameter_stable_id'] processFile(cnx, observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id, parameter_stable_id, download_file_path) print 'number found in solr='+str(numFoundInSolr)+' number", "runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir): \"\"\" need to get these passed in as arguments - the", "is \"', rootDestinationDir solrUrl=rootSolrUrl+solrQuery; print 'solrUrl', solrUrl cnx=getDbConnection(komp2Host, komp2Port, komp2db, komp2User, komp2Pass) runWithSolrAsDataSource(solrUrl,", "downloadFilePath) #print \"directory \"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print 
\"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print \"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm #", "for root of destination to store images' ) parser.add_argument('-s', '--rootSolrUrl', dest='rootSolrUrl', help='URL to", "of failed responses='+str(responseFailed)+' number of requests='+str(numberOfImageDownloadAttemps)+' total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit() cnx.close() def createDestinationFilePath(rootDestinationDir, phenotyping_center,", "OmeroPropertiesParser(args.profile) omeroProps = pp.getOmeroProps() except: omeroProps = {} rootSolrUrl = args.rootSolrUrl if args.rootSolrUrl", "omeroProps['rootdestinationdir'] #note cant split this url over a few lines as puts in", "print \"file does not exist \"+str(dstfilename)+\" continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if totalNumberOfImagesWeHave%1000==0 : print \"totalNumber", "responseFailed=0 numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0 numFoundInSolr=0 uniqueUris=set() def main(argv): parser = argparse.ArgumentParser( description='Get the download_file_paths", "#experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id'] parameter_stable_id=doc['parameter_stable_id'] processFile(cnx, observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id, parameter_stable_id, download_file_path) print 'number", "args.komp2Port if args.komp2Port<>None else omeroProps['komp2port'] komp2db = args.komp2Db if args.komp2Db<>None else omeroProps['komp2db'] komp2User", "get the list of download urls and the data source, experiment, procdure and", "the data source, experiment, procdure and parameter and observation id for the images", "description='Get the download_file_paths (http 
mousephenotype uris) from the experiment core and then downloads", ") parser.add_argument('-db', '--database', dest='komp2Db', help='Database to connect to for komp2db' ) parser.add_argument('--pass', dest='komp2Pass',", "continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if totalNumberOfImagesWeHave%1000==0 : print \"totalNumber of images we have=\"+str(totalNumberOfImagesWeHave) if __name__", "destination to store images' ) parser.add_argument('-s', '--rootSolrUrl', dest='rootSolrUrl', help='URL to root of solr", "\"destination directory for copy is \"+destDirectory if not os.path.exists(destDirectory): os.makedirs(destDirectory) #print 'saving file", "getDbConnection from OmeroPropertiesParser import OmeroPropertiesParser responseFailed=0 numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0 numFoundInSolr=0 uniqueUris=set() def main(argv): parser", "{} rootSolrUrl = args.rootSolrUrl if args.rootSolrUrl <> None else omeroProps['solrurl'] komp2Host = args.komp2Host", "downloads the images\"\"\" import os import requests import json import sys import os.path", "python image copy script for impc images\") print 'rootDestinationDir is \"', rootDestinationDir solrUrl=rootSolrUrl+solrQuery;", "which doesn't work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running python image copy script for impc images\")", "not unique and has been specified 
before:'+dstfilename uniqueUris.add(dstfilename) destDirectory=os.path.dirname(destPath) #print \"destination directory for", "import os import requests import json import sys import os.path import sys import", "runWithSolrAsDataSource(solrUrl, cnx, rootDestinationDir) def runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir): \"\"\" need to get these passed in", "help='URL to root of solr index' ) parser.add_argument('-H', '--host', dest='komp2Host', help='Hostname for server", "for <PASSWORD>' ) parser.add_argument('--profile', dest='profile', default='dev', help='profile from which to read config: dev,", "import sys import os.path import sys import argparse import mysql.connector import shutil from", "dest='komp2Db', help='Database to connect to for komp2db' ) parser.add_argument('--pass', dest='komp2Pass', help='Password for <PASSWORD>'", "mousephenotype uris) from the experiment core and then downloads the images\"\"\" import os", "if not os.path.isfile(destPath): try: shutil.copyfile(dstfilename,destPath) except IOError: print \"file does not exist \"+str(dstfilename)+\"", "args.komp2Host if args.komp2Host<>None else omeroProps['komp2host'] komp2Port = args.komp2Port if args.komp2Port<>None else omeroProps['komp2port'] komp2db", "these passed in as arguments - the host and db name etc for", "cnx.close() def createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure, parameter, download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter]) return directory def", "#print 'saving file to '+destPath if not os.path.isfile(destPath): try: shutil.copyfile(dstfilename,destPath) except IOError: print", "\"\"\" need to get these passed in as arguments - the host and", "dest='komp2Pass', help='Password for <PASSWORD>' ) parser.add_argument('--profile', dest='profile', default='dev', help='profile from which to read", "prod, live, ...') args = parser.parse_args() # Get 
values from property file and", "etc for jenkins to run first get the list of download urls and", "createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure, parameter, download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter]) return directory def processFile(cnx, observation_id,", "not os.path.isfile(destPath): try: shutil.copyfile(dstfilename,destPath) except IOError: print \"file does not exist \"+str(dstfilename)+\" continuing\"", "to read config: dev, prod, live, ...') args = parser.parse_args() # Get values", "args.rootSolrUrl <> None else omeroProps['solrurl'] komp2Host = args.komp2Host if args.komp2Host<>None else omeroProps['komp2host'] komp2Port", "import shutil from common import splitString from database import getDbConnection from OmeroPropertiesParser import", "property file and use as defaults that can be overridden # by command", "= args.komp2Pass if args.komp2Pass<>None else omeroProps['komp2pass'] rootDestinationDir = args.rootDestinationDir if args.rootDestinationDir<>None else omeroProps['rootdestinationdir']", "help='profile from which to read config: dev, prod, live, ...') args = parser.parse_args()", "dest='komp2Port', help='Port by which to connect to komp2 db' ) parser.add_argument('-u', '--user', dest='komp2User',", "komp2Port, komp2db, komp2User, komp2Pass) runWithSolrAsDataSource(solrUrl, cnx, rootDestinationDir) def runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir): \"\"\" need to", "json.loads(requests.get(solrUrl).text) docs=v['response']['docs'] numFoundInSolr=v['response']['numFound'] for doc in docs: download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id']", "if args.komp2Db<>None else omeroProps['komp2db'] komp2User = args.komp2User if args.komp2User<>None 
else omeroProps['komp2user'] komp2Pass =", "images' ) parser.add_argument('-d', '--rootDestinationDir', dest='rootDestinationDir', help='Directory for root of destination to store images'", "if args.rootDestinationDir<>None else omeroProps['rootdestinationdir'] #note cant split this url over a few lines", "then downloads the images' ) parser.add_argument('-d', '--rootDestinationDir', dest='rootDestinationDir', help='Directory for root of destination", "few lines as puts in newlines into url which doesn't work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\"", "specified before:'+dstfilename uniqueUris.add(dstfilename) destDirectory=os.path.dirname(destPath) #print \"destination directory for copy is \"+destDirectory if not", "parser = argparse.ArgumentParser( description='Get the download_file_paths (http mousephenotype uris) from the experiment core", "url which doesn't work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" 
solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running python image copy script for impc", "= argparse.ArgumentParser( description='Get the download_file_paths (http mousephenotype uris) from the experiment core and", "parser.add_argument('-u', '--user', dest='komp2User', help='Username for connecting to komp2 db' ) parser.add_argument('-db', '--database', dest='komp2Db',", "createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure,parameter, downloadFilePath) #print \"directory \"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print \"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print", "dstfilename in uniqueUris: print '---------------------!!!!!!!!!!error the filePath is not unique and has been", "rootDestinationDir): \"\"\" need to get these passed in as arguments - the host", "program gets the download_file_paths (http mousephenotype uris) from the experiment core and then", "print 'rootDestinationDir is \"', rootDestinationDir solrUrl=rootSolrUrl+solrQuery; print 'solrUrl', solrUrl cnx=getDbConnection(komp2Host, komp2Port, komp2db, komp2User,", "downloads the images' ) parser.add_argument('-d', '--rootDestinationDir', dest='rootDestinationDir', help='Directory for root of destination to", "'--rootDestinationDir', dest='rootDestinationDir', help='Directory for root of destination to store images' ) parser.add_argument('-s', '--rootSolrUrl',", "rootDestinationDir solrUrl=rootSolrUrl+solrQuery; print 'solrUrl', solrUrl cnx=getDbConnection(komp2Host, komp2Port, komp2db, komp2User, komp2Pass) runWithSolrAsDataSource(solrUrl, cnx, 
rootDestinationDir)", "splitString from database import getDbConnection from OmeroPropertiesParser import OmeroPropertiesParser responseFailed=0 numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0 numFoundInSolr=0", "the host and db name etc for jenkins to run first get the", "to connect to komp2 db' ) parser.add_argument('-u', '--user', dest='komp2User', help='Username for connecting to", "for copy is \"+destDirectory if not os.path.exists(destDirectory): os.makedirs(destDirectory) #print 'saving file to '+destPath", "- the host and db name etc for jenkins to run first get", "defaults that can be overridden # by command line parameters try: pp =", "except: omeroProps = {} rootSolrUrl = args.rootSolrUrl if args.rootSolrUrl <> None else omeroProps['solrurl']", "#/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm # new file paths are /nfs/public/ro/pheno-archive-images/images/impc if dstfilename in uniqueUris: print", "dev, prod, live, ...') args = parser.parse_args() # Get values from property file", "args = parser.parse_args() # Get values from property file and use as defaults", "puts in newlines into url which doesn't work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running python image", "is not unique and has been specified before:'+dstfilename uniqueUris.add(dstfilename) destDirectory=os.path.dirname(destPath) #print 
\"destination directory", "the filePath is not unique and has been specified before:'+dstfilename uniqueUris.add(dstfilename) destDirectory=os.path.dirname(destPath) #print", "of solr index' ) parser.add_argument('-H', '--host', dest='komp2Host', help='Hostname for server hosting komp2 db'", "import getDbConnection from OmeroPropertiesParser import OmeroPropertiesParser responseFailed=0 numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0 numFoundInSolr=0 uniqueUris=set() def main(argv):", "this url over a few lines as puts in newlines into url which", "parameter, downloadFilePath): global totalNumberOfImagesWeHave global responseFailed global numberOfImageDownloadAttemps directory = createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id,", "args.komp2Port<>None else omeroProps['komp2port'] komp2db = args.komp2Db if args.komp2Db<>None else omeroProps['komp2db'] komp2User = args.komp2User", "print 'solrUrl', solrUrl cnx=getDbConnection(komp2Host, komp2Port, komp2db, komp2User, komp2Pass) runWithSolrAsDataSource(solrUrl, cnx, rootDestinationDir) def runWithSolrAsDataSource(solrUrl,cnx,", "and observation id for the images \"\"\" v = json.loads(requests.get(solrUrl).text) docs=v['response']['docs'] numFoundInSolr=v['response']['numFound'] for", "split this url over a few lines as puts in newlines into url", "observation id for the images \"\"\" v = json.loads(requests.get(solrUrl).text) docs=v['response']['docs'] numFoundInSolr=v['response']['numFound'] for doc", "else omeroProps['solrurl'] komp2Host = args.komp2Host if args.komp2Host<>None else omeroProps['komp2host'] komp2Port = args.komp2Port if", "and has been specified before:'+dstfilename uniqueUris.add(dstfilename) destDirectory=os.path.dirname(destPath) #print \"destination directory for copy is", "parser.parse_args() # Get values from property file and use as defaults that can", "responseFailed global numberOfImageDownloadAttemps directory = 
createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure,parameter, downloadFilePath) #print \"directory \"+str(directory)", "over a few lines as puts in newlines into url which doesn't work", "from database import getDbConnection from OmeroPropertiesParser import OmeroPropertiesParser responseFailed=0 numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0 numFoundInSolr=0 uniqueUris=set()", "to connect to for komp2db' ) parser.add_argument('--pass', dest='komp2Pass', help='Password for <PASSWORD>' ) parser.add_argument('--profile',", "= args.rootDestinationDir if args.rootDestinationDir<>None else omeroProps['rootdestinationdir'] #note cant split this url over a", "return directory def processFile(cnx, observation_id, rootDestinationDir, phenotyping_center,pipeline_stable_id, procedure, parameter, downloadFilePath): global totalNumberOfImagesWeHave global", "def processFile(cnx, observation_id, rootDestinationDir, phenotyping_center,pipeline_stable_id, procedure, parameter, downloadFilePath): global totalNumberOfImagesWeHave global responseFailed global", "the experiment core and then downloads the images\"\"\" import os import requests import", "the download_file_paths (http mousephenotype uris) from the experiment core and then downloads the", "overridden # by command line parameters try: pp = OmeroPropertiesParser(args.profile) omeroProps = pp.getOmeroProps()", "totalNumberOfImagesWeHave%1000==0 : print \"totalNumber of images we have=\"+str(totalNumberOfImagesWeHave) if __name__ == \"__main__\": main(sys.argv[1:])", "'solrUrl', solrUrl cnx=getDbConnection(komp2Host, komp2Port, komp2db, komp2User, komp2Pass) runWithSolrAsDataSource(solrUrl, cnx, rootDestinationDir) def runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir):", "name etc for jenkins to run first get the list of download urls", "'number found in solr='+str(numFoundInSolr)+' number of failed responses='+str(responseFailed)+' number of 
requests='+str(numberOfImageDownloadAttemps)+' total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave)", "docs: download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id'] parameter_stable_id=doc['parameter_stable_id'] processFile(cnx, observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id,", "= args.komp2User if args.komp2User<>None else omeroProps['komp2user'] komp2Pass = args.komp2Pass if args.komp2Pass<>None else omeroProps['komp2pass']", "requests='+str(numberOfImageDownloadAttemps)+' total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit() cnx.close() def createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure, parameter, download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center,", "procedure, parameter, download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter]) return directory def processFile(cnx, observation_id, rootDestinationDir, phenotyping_center,pipeline_stable_id, procedure,", "totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if totalNumberOfImagesWeHave%1000==0 : print \"totalNumber of images we have=\"+str(totalNumberOfImagesWeHave) if __name__ ==", "work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" 
solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running python image copy script for impc images\") print 'rootDestinationDir", "to get these passed in as arguments - the host and db name", "uniqueUris.add(dstfilename) destDirectory=os.path.dirname(destPath) #print \"destination directory for copy is \"+destDirectory if not os.path.exists(destDirectory): os.makedirs(destDirectory)", "the images' ) parser.add_argument('-d', '--rootDestinationDir', dest='rootDestinationDir', help='Directory for root of destination to store", "dest='rootSolrUrl', help='URL to root of solr index' ) parser.add_argument('-H', '--host', dest='komp2Host', help='Hostname for", "Get values from property file and use as defaults that can be overridden", "processFile(cnx, observation_id, rootDestinationDir, phenotyping_center,pipeline_stable_id, procedure, parameter, downloadFilePath): global totalNumberOfImagesWeHave global responseFailed global numberOfImageDownloadAttemps", "for impc images\") print 'rootDestinationDir is \"', rootDestinationDir solrUrl=rootSolrUrl+solrQuery; print 'solrUrl', solrUrl cnx=getDbConnection(komp2Host,", "docs=v['response']['docs'] numFoundInSolr=v['response']['numFound'] for doc in docs: download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id']", "args.rootDestinationDir if args.rootDestinationDir<>None else omeroProps['rootdestinationdir'] #note cant split this url over a few", "then downloads the images\"\"\" import os import requests import json import sys import", 
"datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id'] parameter_stable_id=doc['parameter_stable_id'] processFile(cnx, observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id, parameter_stable_id, download_file_path)", "in solr='+str(numFoundInSolr)+' number of failed responses='+str(responseFailed)+' number of requests='+str(numberOfImageDownloadAttemps)+' total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit() cnx.close()", "and db name etc for jenkins to run first get the list of", "phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id'] parameter_stable_id=doc['parameter_stable_id'] processFile(cnx, observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id, parameter_stable_id, download_file_path) print", "global responseFailed global numberOfImageDownloadAttemps directory = createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure,parameter, downloadFilePath) #print \"directory", "server hosting komp2 db' ) parser.add_argument('-p', '--port', dest='komp2Port', help='Port by which to connect", "if not os.path.exists(destDirectory): os.makedirs(destDirectory) #print 'saving file to '+destPath if not os.path.isfile(destPath): try:", "'---------------------!!!!!!!!!!error the filePath is not unique and has been specified before:'+dstfilename uniqueUris.add(dstfilename) destDirectory=os.path.dirname(destPath)", "destDirectory=os.path.dirname(destPath) #print \"destination directory for copy is \"+destDirectory if not os.path.exists(destDirectory): os.makedirs(destDirectory) #print", "(http mousephenotype uris) from the 
experiment core and then downloads the images\"\"\" import", "store images' ) parser.add_argument('-s', '--rootSolrUrl', dest='rootSolrUrl', help='URL to root of solr index' )", "numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0 numFoundInSolr=0 uniqueUris=set() def main(argv): parser = argparse.ArgumentParser( description='Get the download_file_paths (http", "#print \"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print \"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm # new file paths are /nfs/public/ro/pheno-archive-images/images/impc", "def runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir): \"\"\" need to get these passed in as arguments -", "= parser.parse_args() # Get values from property file and use as defaults that", "can be overridden # by command line parameters try: pp = OmeroPropertiesParser(args.profile) omeroProps", "omeroProps['komp2port'] komp2db = args.komp2Db if args.komp2Db<>None else omeroProps['komp2db'] komp2User = args.komp2User if args.komp2User<>None", "experiment, procdure and parameter and observation id for the images \"\"\" v =", "script for impc images\") print 'rootDestinationDir is \"', rootDestinationDir solrUrl=rootSolrUrl+solrQuery; print 'solrUrl', solrUrl", "args.komp2Pass if args.komp2Pass<>None else omeroProps['komp2pass'] rootDestinationDir = args.rootDestinationDir if args.rootDestinationDir<>None else omeroProps['rootdestinationdir'] #note", "argparse.ArgumentParser( description='Get the download_file_paths (http mousephenotype uris) from the experiment core and then", "does not exist \"+str(dstfilename)+\" continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if totalNumberOfImagesWeHave%1000==0 : print \"totalNumber of images", "komp2Host = args.komp2Host if args.komp2Host<>None else omeroProps['komp2host'] komp2Port = args.komp2Port if 
args.komp2Port<>None else", "'--database', dest='komp2Db', help='Database to connect to for komp2db' ) parser.add_argument('--pass', dest='komp2Pass', help='Password for", "args.komp2Db if args.komp2Db<>None else omeroProps['komp2db'] komp2User = args.komp2User if args.komp2User<>None else omeroProps['komp2user'] komp2Pass", "None else omeroProps['solrurl'] komp2Host = args.komp2Host if args.komp2Host<>None else omeroProps['komp2host'] komp2Port = args.komp2Port", "rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id, parameter_stable_id, download_file_path) print 'number found in solr='+str(numFoundInSolr)+' number of failed responses='+str(responseFailed)+'", "directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter]) return directory def processFile(cnx, observation_id, rootDestinationDir, phenotyping_center,pipeline_stable_id, procedure, parameter, downloadFilePath): global", "import OmeroPropertiesParser responseFailed=0 numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0 numFoundInSolr=0 uniqueUris=set() def main(argv): parser = argparse.ArgumentParser( description='Get", "to store images' ) parser.add_argument('-s', '--rootSolrUrl', dest='rootSolrUrl', help='URL to root of solr index'", "from the experiment core and then downloads the images\"\"\" import os import requests", "as puts in newlines into url which doesn't work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" 
solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running python", "live, ...') args = parser.parse_args() # Get values from property file and use", "and then downloads the images' ) parser.add_argument('-d', '--rootDestinationDir', dest='rootDestinationDir', help='Directory for root of", "if args.komp2Port<>None else omeroProps['komp2port'] komp2db = args.komp2Db if args.komp2Db<>None else omeroProps['komp2db'] komp2User =", "for server hosting komp2 db' ) parser.add_argument('-p', '--port', dest='komp2Port', help='Port by which to", "\"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm # new file paths are /nfs/public/ro/pheno-archive-images/images/impc if dstfilename in uniqueUris:", "in as arguments - the host and db name etc for jenkins to", "copy is \"+destDirectory if not os.path.exists(destDirectory): os.makedirs(destDirectory) #print 'saving file to '+destPath if", ") parser.add_argument('--profile', dest='profile', default='dev', help='profile from which to read config: dev, prod, live,", "os.path.exists(destDirectory): os.makedirs(destDirectory) #print 'saving file to '+destPath if not os.path.isfile(destPath): try: shutil.copyfile(dstfilename,destPath) except", "total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit() cnx.close() def createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure, parameter, download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter])", ") parser.add_argument('-d', '--rootDestinationDir', dest='rootDestinationDir', help='Directory for root of destination to store images' )", "komp2Port = 
args.komp2Port if args.komp2Port<>None else omeroProps['komp2port'] komp2db = args.komp2Db if args.komp2Db<>None else", "of requests='+str(numberOfImageDownloadAttemps)+' total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit() cnx.close() def createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure, parameter, download_file_path):", "json import sys import os.path import sys import argparse import mysql.connector import shutil", "parser.add_argument('--profile', dest='profile', default='dev', help='profile from which to read config: dev, prod, live, ...')", "omeroProps['komp2user'] komp2Pass = args.komp2Pass if args.komp2Pass<>None else omeroProps['komp2pass'] rootDestinationDir = args.rootDestinationDir if args.rootDestinationDir<>None", "directory = createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure,parameter, downloadFilePath) #print \"directory \"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print \"dstfilename=\"+str(dstfilename)", "from property file and use as defaults that can be overridden # by", "to for komp2db' ) parser.add_argument('--pass', dest='komp2Pass', help='Password for <PASSWORD>' ) parser.add_argument('--profile', dest='profile', default='dev',", "number of requests='+str(numberOfImageDownloadAttemps)+' total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit() cnx.close() def createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure, parameter,", "command line parameters try: pp = OmeroPropertiesParser(args.profile) omeroProps = pp.getOmeroProps() except: omeroProps =", "omeroProps['komp2host'] komp2Port = args.komp2Port if args.komp2Port<>None else omeroProps['komp2port'] komp2db = args.komp2Db if args.komp2Db<>None", "rootSolrUrl = args.rootSolrUrl if args.rootSolrUrl <> None else omeroProps['solrurl'] komp2Host = args.komp2Host if", "if 
args.komp2Host<>None else omeroProps['komp2host'] komp2Port = args.komp2Port if args.komp2Port<>None else omeroProps['komp2port'] komp2db =", "# by command line parameters try: pp = OmeroPropertiesParser(args.profile) omeroProps = pp.getOmeroProps() except:", "directory def processFile(cnx, observation_id, rootDestinationDir, phenotyping_center,pipeline_stable_id, procedure, parameter, downloadFilePath): global totalNumberOfImagesWeHave global responseFailed", "read config: dev, prod, live, ...') args = parser.parse_args() # Get values from", "...') args = parser.parse_args() # Get values from property file and use as", "omeroProps['komp2pass'] rootDestinationDir = args.rootDestinationDir if args.rootDestinationDir<>None else omeroProps['rootdestinationdir'] #note cant split this url", "of download urls and the data source, experiment, procdure and parameter and observation", "procedure, parameter, downloadFilePath): global totalNumberOfImagesWeHave global responseFailed global numberOfImageDownloadAttemps directory = createDestinationFilePath(rootDestinationDir, phenotyping_center,", "jenkins to run first get the list of download urls and the data", "failed responses='+str(responseFailed)+' number of requests='+str(numberOfImageDownloadAttemps)+' total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit() cnx.close() def createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id,", "use as defaults that can be overridden # by command line parameters try:", "default='dev', help='profile from which to read config: dev, prod, live, ...') args =", "if args.komp2Pass<>None else omeroProps['komp2pass'] rootDestinationDir = args.rootDestinationDir if args.rootDestinationDir<>None else omeroProps['rootdestinationdir'] #note cant", "\"\"\"this program gets the download_file_paths (http mousephenotype uris) from the experiment core and", "'saving file to '+destPath if not os.path.isfile(destPath): try: 
shutil.copyfile(dstfilename,destPath) except IOError: print \"file", "cnx, rootDestinationDir) def runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir): \"\"\" need to get these passed in as", ") parser.add_argument('-u', '--user', dest='komp2User', help='Username for connecting to komp2 db' ) parser.add_argument('-db', '--database',", "connect to for komp2db' ) parser.add_argument('--pass', dest='komp2Pass', help='Password for <PASSWORD>' ) parser.add_argument('--profile', dest='profile',", "(http mousephenotype uris) from the experiment core and then downloads the images' )", "procedure_stable_id, parameter_stable_id, download_file_path) print 'number found in solr='+str(numFoundInSolr)+' number of failed responses='+str(responseFailed)+' number", "if args.komp2User<>None else omeroProps['komp2user'] komp2Pass = args.komp2Pass if args.komp2Pass<>None else omeroProps['komp2pass'] rootDestinationDir =", "help='Password for <PASSWORD>' ) parser.add_argument('--profile', dest='profile', default='dev', help='profile from which to read config:", "import requests import json import sys import os.path import sys import argparse import", "v = json.loads(requests.get(solrUrl).text) docs=v['response']['docs'] numFoundInSolr=v['response']['numFound'] for doc in docs: download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment']", "id for the images \"\"\" v = json.loads(requests.get(solrUrl).text) docs=v['response']['docs'] numFoundInSolr=v['response']['numFound'] for doc in", "experiment core and then downloads the images' ) parser.add_argument('-d', '--rootDestinationDir', dest='rootDestinationDir', help='Directory for", "observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id'] parameter_stable_id=doc['parameter_stable_id'] processFile(cnx, observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id, parameter_stable_id, 
download_file_path) print 'number found in", "def main(argv): parser = argparse.ArgumentParser( description='Get the download_file_paths (http mousephenotype uris) from the", "from common import splitString from database import getDbConnection from OmeroPropertiesParser import OmeroPropertiesParser responseFailed=0", "phenotyping_center,pipeline_stable_id, procedure, parameter, downloadFilePath): global totalNumberOfImagesWeHave global responseFailed global numberOfImageDownloadAttemps directory = createDestinationFilePath(rootDestinationDir,", "parser.add_argument('--pass', dest='komp2Pass', help='Password for <PASSWORD>' ) parser.add_argument('--profile', dest='profile', default='dev', help='profile from which to", "if dstfilename in uniqueUris: print '---------------------!!!!!!!!!!error the filePath is not unique and has", "run first get the list of download urls and the data source, experiment,", "in uniqueUris: print '---------------------!!!!!!!!!!error the filePath is not unique and has been specified", "arguments - the host and db name etc for jenkins to run first", "#!/usr/bin/python \"\"\"this program gets the download_file_paths (http mousephenotype uris) from the experiment core", "core and then downloads the images\"\"\" import os import requests import json import", "images\") print 'rootDestinationDir is \"', rootDestinationDir solrUrl=rootSolrUrl+solrQuery; print 'solrUrl', solrUrl cnx=getDbConnection(komp2Host, komp2Port, komp2db,", "args.komp2User<>None else omeroProps['komp2user'] komp2Pass = args.komp2Pass if args.komp2Pass<>None else omeroProps['komp2pass'] rootDestinationDir = args.rootDestinationDir", "connect to komp2 db' ) parser.add_argument('-u', '--user', dest='komp2User', help='Username for connecting to komp2", "parameters try: pp = OmeroPropertiesParser(args.profile) omeroProps = pp.getOmeroProps() except: omeroProps = {} rootSolrUrl", "cnx=getDbConnection(komp2Host, komp2Port, komp2db, komp2User, komp2Pass) 
runWithSolrAsDataSource(solrUrl, cnx, rootDestinationDir) def runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir): \"\"\" need", "= json.loads(requests.get(solrUrl).text) docs=v['response']['docs'] numFoundInSolr=v['response']['numFound'] for doc in docs: download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id']", "exist \"+str(dstfilename)+\" continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if totalNumberOfImagesWeHave%1000==0 : print \"totalNumber of images we have=\"+str(totalNumberOfImagesWeHave)", "database import getDbConnection from OmeroPropertiesParser import OmeroPropertiesParser responseFailed=0 numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0 numFoundInSolr=0 uniqueUris=set() def", "lines as puts in newlines into url which doesn't work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running", "of destination to store images' ) parser.add_argument('-s', '--rootSolrUrl', dest='rootSolrUrl', help='URL to root of", "komp2Pass = args.komp2Pass if args.komp2Pass<>None else omeroProps['komp2pass'] rootDestinationDir = args.rootDestinationDir if args.rootDestinationDir<>None else", "pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id'] 
parameter_stable_id=doc['parameter_stable_id'] processFile(cnx, observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id, parameter_stable_id, download_file_path) print 'number found", "pipeline_stable_id,procedure,parameter]) return directory def processFile(cnx, observation_id, rootDestinationDir, phenotyping_center,pipeline_stable_id, procedure, parameter, downloadFilePath): global totalNumberOfImagesWeHave", "shutil from common import splitString from database import getDbConnection from OmeroPropertiesParser import OmeroPropertiesParser", "values from property file and use as defaults that can be overridden #", "numFoundInSolr=0 uniqueUris=set() def main(argv): parser = argparse.ArgumentParser( description='Get the download_file_paths (http mousephenotype uris)", "'--user', dest='komp2User', help='Username for connecting to komp2 db' ) parser.add_argument('-db', '--database', dest='komp2Db', help='Database", "download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter]) return directory def processFile(cnx, observation_id, rootDestinationDir, phenotyping_center,pipeline_stable_id, procedure, parameter, downloadFilePath):", "try: shutil.copyfile(dstfilename,destPath) except IOError: print \"file does not exist \"+str(dstfilename)+\" continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if", "args.komp2Pass<>None else omeroProps['komp2pass'] rootDestinationDir = args.rootDestinationDir if args.rootDestinationDir<>None else omeroProps['rootdestinationdir'] #note cant split", "= args.komp2Host if args.komp2Host<>None else omeroProps['komp2host'] komp2Port = args.komp2Port if args.komp2Port<>None else omeroProps['komp2port']", "mysql.connector import shutil from common import splitString from database import getDbConnection from OmeroPropertiesParser", "from the experiment core and then downloads the images' ) parser.add_argument('-d', 
'--rootDestinationDir', dest='rootDestinationDir',", "import mysql.connector import shutil from common import splitString from database import getDbConnection from", "else omeroProps['rootdestinationdir'] #note cant split this url over a few lines as puts", "download_file_paths (http mousephenotype uris) from the experiment core and then downloads the images\"\"\"", "\"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print \"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print \"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm # new file paths", "omeroProps['komp2db'] komp2User = args.komp2User if args.komp2User<>None else omeroProps['komp2user'] komp2Pass = args.komp2Pass if args.komp2Pass<>None", "config: dev, prod, live, ...') args = parser.parse_args() # Get values from property", "dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print \"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print \"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm # new file paths are", "the images \"\"\" v = json.loads(requests.get(solrUrl).text) docs=v['response']['docs'] numFoundInSolr=v['response']['numFound'] for doc in docs: download_file_path=doc['download_file_path']", "into url which doesn't work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" 
solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running python image copy script for", "else omeroProps['komp2host'] komp2Port = args.komp2Port if args.komp2Port<>None else omeroProps['komp2port'] komp2db = args.komp2Db if", ") parser.add_argument('-s', '--rootSolrUrl', dest='rootSolrUrl', help='URL to root of solr index' ) parser.add_argument('-H', '--host',", "import splitString from database import getDbConnection from OmeroPropertiesParser import OmeroPropertiesParser responseFailed=0 numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0", "else omeroProps['komp2port'] komp2db = args.komp2Db if args.komp2Db<>None else omeroProps['komp2db'] komp2User = args.komp2User if", "'+destPath if not os.path.isfile(destPath): try: shutil.copyfile(dstfilename,destPath) except IOError: print \"file does not exist", "help='Directory for root of destination to store images' ) parser.add_argument('-s', '--rootSolrUrl', dest='rootSolrUrl', help='URL", "parser.add_argument('-d', '--rootDestinationDir', dest='rootDestinationDir', help='Directory for root of destination to store images' ) parser.add_argument('-s',", "images\"\"\" import os import requests import json import sys import os.path import sys", "dest='komp2Host', help='Hostname for server hosting komp2 db' ) parser.add_argument('-p', '--port', dest='komp2Port', help='Port by", "parameter_stable_id, download_file_path) print 'number found in solr='+str(numFoundInSolr)+' number of failed responses='+str(responseFailed)+' number of", "image copy script for impc images\") print 'rootDestinationDir is \"', rootDestinationDir solrUrl=rootSolrUrl+solrQuery; print", "help='Database to connect to for komp2db' ) parser.add_argument('--pass', dest='komp2Pass', 
help='Password for <PASSWORD>' )", "\"', rootDestinationDir solrUrl=rootSolrUrl+solrQuery; print 'solrUrl', solrUrl cnx=getDbConnection(komp2Host, komp2Port, komp2db, komp2User, komp2Pass) runWithSolrAsDataSource(solrUrl, cnx,", "destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print \"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm # new file paths are /nfs/public/ro/pheno-archive-images/images/impc if dstfilename", "phenotyping_center, pipeline_stable_id, procedure, parameter, download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter]) return directory def processFile(cnx, observation_id, rootDestinationDir,", "= pp.getOmeroProps() except: omeroProps = {} rootSolrUrl = args.rootSolrUrl if args.rootSolrUrl <> None", "hosting komp2 db' ) parser.add_argument('-p', '--port', dest='komp2Port', help='Port by which to connect to", "argparse import mysql.connector import shutil from common import splitString from database import getDbConnection", "images' ) parser.add_argument('-s', '--rootSolrUrl', dest='rootSolrUrl', help='URL to root of solr index' ) parser.add_argument('-H',", "line parameters try: pp = OmeroPropertiesParser(args.profile) omeroProps = pp.getOmeroProps() except: omeroProps = {}", "main(argv): parser = argparse.ArgumentParser( description='Get the download_file_paths (http mousephenotype uris) from the experiment", "komp2 db' ) parser.add_argument('-p', '--port', dest='komp2Port', help='Port by which to connect to komp2", "source, experiment, procdure and parameter and observation id for the images \"\"\" v", "rootDestinationDir, phenotyping_center,pipeline_stable_id, procedure, parameter, downloadFilePath): global totalNumberOfImagesWeHave global responseFailed global numberOfImageDownloadAttemps directory =", "dest='rootDestinationDir', help='Directory for root of destination 
to store images' ) parser.add_argument('-s', '--rootSolrUrl', dest='rootSolrUrl',", "has been specified before:'+dstfilename uniqueUris.add(dstfilename) destDirectory=os.path.dirname(destPath) #print \"destination directory for copy is \"+destDirectory", "try: pp = OmeroPropertiesParser(args.profile) omeroProps = pp.getOmeroProps() except: omeroProps = {} rootSolrUrl =", "passed in as arguments - the host and db name etc for jenkins", "number of failed responses='+str(responseFailed)+' number of requests='+str(numberOfImageDownloadAttemps)+' total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit() cnx.close() def createDestinationFilePath(rootDestinationDir,", "'--port', dest='komp2Port', help='Port by which to connect to komp2 db' ) parser.add_argument('-u', '--user',", "print '---------------------!!!!!!!!!!error the filePath is not unique and has been specified before:'+dstfilename uniqueUris.add(dstfilename)", "else omeroProps['komp2user'] komp2Pass = args.komp2Pass if args.komp2Pass<>None else omeroProps['komp2pass'] rootDestinationDir = args.rootDestinationDir if", "for komp2db' ) parser.add_argument('--pass', dest='komp2Pass', help='Password for <PASSWORD>' ) parser.add_argument('--profile', dest='profile', default='dev', help='profile", "not exist \"+str(dstfilename)+\" continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if totalNumberOfImagesWeHave%1000==0 : print \"totalNumber of images we", "db' ) parser.add_argument('-p', '--port', dest='komp2Port', help='Port by which to connect to komp2 db'", "shutil.copyfile(dstfilename,destPath) except IOError: print \"file does not exist \"+str(dstfilename)+\" continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if totalNumberOfImagesWeHave%1000==0", "else omeroProps['komp2db'] komp2User = args.komp2User if args.komp2User<>None else omeroProps['komp2user'] komp2Pass = args.komp2Pass if", "global numberOfImageDownloadAttemps directory = 
createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure,parameter, downloadFilePath) #print \"directory \"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1])", "directory for copy is \"+destDirectory if not os.path.exists(destDirectory): os.makedirs(destDirectory) #print 'saving file to", "to '+destPath if not os.path.isfile(destPath): try: shutil.copyfile(dstfilename,destPath) except IOError: print \"file does not", "dest='komp2User', help='Username for connecting to komp2 db' ) parser.add_argument('-db', '--database', dest='komp2Db', help='Database to", ") parser.add_argument('-H', '--host', dest='komp2Host', help='Hostname for server hosting komp2 db' ) parser.add_argument('-p', '--port',", "komp2Pass) runWithSolrAsDataSource(solrUrl, cnx, rootDestinationDir) def runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir): \"\"\" need to get these passed", "= args.komp2Db if args.komp2Db<>None else omeroProps['komp2db'] komp2User = args.komp2User if args.komp2User<>None else omeroProps['komp2user']", "gets the download_file_paths (http mousephenotype uris) from the experiment core and then downloads", "omeroProps = {} rootSolrUrl = args.rootSolrUrl if args.rootSolrUrl <> None else omeroProps['solrurl'] komp2Host", "first get the list of download urls and the data source, experiment, procdure", "procdure and parameter and observation id for the images \"\"\" v = json.loads(requests.get(solrUrl).text)", "procedure_stable_id=doc['procedure_stable_id'] parameter_stable_id=doc['parameter_stable_id'] processFile(cnx, observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id, parameter_stable_id, download_file_path) print 'number found in solr='+str(numFoundInSolr)+'", "\"+str(dstfilename)+\" continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if totalNumberOfImagesWeHave%1000==0 : print \"totalNumber of images we have=\"+str(totalNumberOfImagesWeHave) 
if", "if totalNumberOfImagesWeHave%1000==0 : print \"totalNumber of images we have=\"+str(totalNumberOfImagesWeHave) if __name__ == \"__main__\":", "by which to connect to komp2 db' ) parser.add_argument('-u', '--user', dest='komp2User', help='Username for", "and parameter and observation id for the images \"\"\" v = json.loads(requests.get(solrUrl).text) docs=v['response']['docs']", "download urls and the data source, experiment, procdure and parameter and observation id", "core and then downloads the images' ) parser.add_argument('-d', '--rootDestinationDir', dest='rootDestinationDir', help='Directory for root", "= createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure,parameter, downloadFilePath) #print \"directory \"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print \"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\")", "totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit() cnx.close() def createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure, parameter, download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter]) return", "images \"\"\" v = json.loads(requests.get(solrUrl).text) docs=v['response']['docs'] numFoundInSolr=v['response']['numFound'] for doc in docs: download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name']", "experiment core and then downloads the images\"\"\" import os import requests import json", "parser.add_argument('-p', '--port', dest='komp2Port', help='Port by which to connect to komp2 db' ) parser.add_argument('-u',", "db' ) parser.add_argument('-u', '--user', dest='komp2User', help='Username for connecting to komp2 db' ) parser.add_argument('-db',", "downloadFilePath): global totalNumberOfImagesWeHave global responseFailed global 
numberOfImageDownloadAttemps directory = createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure,parameter,", "url over a few lines as puts in newlines into url which doesn't", "OmeroPropertiesParser import OmeroPropertiesParser responseFailed=0 numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0 numFoundInSolr=0 uniqueUris=set() def main(argv): parser = argparse.ArgumentParser(", "\"\"\" v = json.loads(requests.get(solrUrl).text) docs=v['response']['docs'] numFoundInSolr=v['response']['numFound'] for doc in docs: download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center']", "as defaults that can be overridden # by command line parameters try: pp", "to komp2 db' ) parser.add_argument('-u', '--user', dest='komp2User', help='Username for connecting to komp2 db'", "for connecting to komp2 db' ) parser.add_argument('-db', '--database', dest='komp2Db', help='Database to connect to", "connecting to komp2 db' ) parser.add_argument('-db', '--database', dest='komp2Db', help='Database to connect to for", "from which to read config: dev, prod, live, ...') args = parser.parse_args() #", "args.komp2Db<>None else omeroProps['komp2db'] komp2User = args.komp2User if args.komp2User<>None else omeroProps['komp2user'] komp2Pass = args.komp2Pass", "for doc in docs: download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id'] parameter_stable_id=doc['parameter_stable_id'] processFile(cnx,", "= {} rootSolrUrl = args.rootSolrUrl if args.rootSolrUrl <> None else omeroProps['solrurl'] komp2Host =", "args.komp2Host<>None else omeroProps['komp2host'] komp2Port = args.komp2Port if args.komp2Port<>None else omeroProps['komp2port'] komp2db = args.komp2Db", "mousephenotype 
uris) from the experiment core and then downloads the images' ) parser.add_argument('-d',", "pp.getOmeroProps() except: omeroProps = {} rootSolrUrl = args.rootSolrUrl if args.rootSolrUrl <> None else", "pipeline_stable_id, procedure, parameter, download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter]) return directory def processFile(cnx, observation_id, rootDestinationDir, phenotyping_center,pipeline_stable_id,", "host and db name etc for jenkins to run first get the list", "\"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print \"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm # new file paths are /nfs/public/ro/pheno-archive-images/images/impc if", "IOError: print \"file does not exist \"+str(dstfilename)+\" continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if totalNumberOfImagesWeHave%1000==0 : print", "\"file does not exist \"+str(dstfilename)+\" continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if totalNumberOfImagesWeHave%1000==0 : print \"totalNumber of", "file paths are /nfs/public/ro/pheno-archive-images/images/impc if dstfilename in uniqueUris: print '---------------------!!!!!!!!!!error the filePath is", "phenotyping_center, pipeline_stable_id, procedure,parameter, downloadFilePath) #print \"directory \"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print \"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print \"replaced=\"+destPath", "komp2db, komp2User, komp2Pass) runWithSolrAsDataSource(solrUrl, cnx, rootDestinationDir) def runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir): \"\"\" need to get", "unique and has been specified before:'+dstfilename uniqueUris.add(dstfilename) 
destDirectory=os.path.dirname(destPath) #print \"destination directory for copy", "omeroProps['solrurl'] komp2Host = args.komp2Host if args.komp2Host<>None else omeroProps['komp2host'] komp2Port = args.komp2Port if args.komp2Port<>None", "from OmeroPropertiesParser import OmeroPropertiesParser responseFailed=0 numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0 numFoundInSolr=0 uniqueUris=set() def main(argv): parser =", "import sys import argparse import mysql.connector import shutil from common import splitString from", "help='Username for connecting to komp2 db' ) parser.add_argument('-db', '--database', dest='komp2Db', help='Database to connect", "db' ) parser.add_argument('-db', '--database', dest='komp2Db', help='Database to connect to for komp2db' ) parser.add_argument('--pass',", "komp2db' ) parser.add_argument('--pass', dest='komp2Pass', help='Password for <PASSWORD>' ) parser.add_argument('--profile', dest='profile', default='dev', help='profile from", "urls and the data source, experiment, procdure and parameter and observation id for", "#solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running python image copy script for impc images\") print 'rootDestinationDir is", "file to '+destPath if not os.path.isfile(destPath): try: shutil.copyfile(dstfilename,destPath) except IOError: print \"file does", "print(\"running python image copy script for impc images\") 
print 'rootDestinationDir is \"', rootDestinationDir", "need to get these passed in as arguments - the host and db", "totalNumberOfImagesWeHave=0 numFoundInSolr=0 uniqueUris=set() def main(argv): parser = argparse.ArgumentParser( description='Get the download_file_paths (http mousephenotype", "to run first get the list of download urls and the data source,", "numberOfImageDownloadAttemps directory = createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure,parameter, downloadFilePath) #print \"directory \"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print", "which to read config: dev, prod, live, ...') args = parser.parse_args() # Get", "args.rootSolrUrl if args.rootSolrUrl <> None else omeroProps['solrurl'] komp2Host = args.komp2Host if args.komp2Host<>None else", "cnx.commit() cnx.close() def createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure, parameter, download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter]) return directory", "import argparse import mysql.connector import shutil from common import splitString from database import", "<> None else omeroProps['solrurl'] komp2Host = args.komp2Host if args.komp2Host<>None else omeroProps['komp2host'] komp2Port =", "'--rootSolrUrl', dest='rootSolrUrl', help='URL to root of solr index' ) parser.add_argument('-H', '--host', dest='komp2Host', help='Hostname", "in newlines into url which doesn't work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" 
solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running python image copy", "download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id'] parameter_stable_id=doc['parameter_stable_id'] processFile(cnx, observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id, parameter_stable_id,", "#print \"directory \"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print \"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print \"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm # new", "been specified before:'+dstfilename uniqueUris.add(dstfilename) destDirectory=os.path.dirname(destPath) #print \"destination directory for copy is \"+destDirectory if", "as arguments - the host and db name etc for jenkins to run", "index' ) parser.add_argument('-H', '--host', dest='komp2Host', help='Hostname for server hosting komp2 db' ) parser.add_argument('-p',", "doesn't work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" 
solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running python image copy script for impc images\") print", "download_file_paths (http mousephenotype uris) from the experiment core and then downloads the images'", "args.komp2User if args.komp2User<>None else omeroProps['komp2user'] komp2Pass = args.komp2Pass if args.komp2Pass<>None else omeroProps['komp2pass'] rootDestinationDir", "import json import sys import os.path import sys import argparse import mysql.connector import", "get these passed in as arguments - the host and db name etc", "root of destination to store images' ) parser.add_argument('-s', '--rootSolrUrl', dest='rootSolrUrl', help='URL to root", "doc in docs: download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id'] parameter_stable_id=doc['parameter_stable_id'] processFile(cnx, observation_id,", "'--host', dest='komp2Host', help='Hostname for server hosting komp2 db' ) parser.add_argument('-p', '--port', dest='komp2Port', help='Port", "common import splitString from database import getDbConnection from OmeroPropertiesParser import OmeroPropertiesParser responseFailed=0 numberOfImageDownloadAttemps=0", "parser.add_argument('-db', '--database', dest='komp2Db', help='Database to connect to for komp2db' ) parser.add_argument('--pass', dest='komp2Pass', help='Password", "filePath is not unique and has been specified before:'+dstfilename uniqueUris.add(dstfilename) destDirectory=os.path.dirname(destPath) #print \"destination", "<PASSWORD>' ) parser.add_argument('--profile', 
dest='profile', default='dev', help='profile from which to read config: dev, prod,", "root of solr index' ) parser.add_argument('-H', '--host', dest='komp2Host', help='Hostname for server hosting komp2", "for jenkins to run first get the list of download urls and the", "rootDestinationDir) def runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir): \"\"\" need to get these passed in as arguments", "new file paths are /nfs/public/ro/pheno-archive-images/images/impc if dstfilename in uniqueUris: print '---------------------!!!!!!!!!!error the filePath", "print 'number found in solr='+str(numFoundInSolr)+' number of failed responses='+str(responseFailed)+' number of requests='+str(numberOfImageDownloadAttemps)+' total", "komp2User = args.komp2User if args.komp2User<>None else omeroProps['komp2user'] komp2Pass = args.komp2Pass if args.komp2Pass<>None else", ") parser.add_argument('--pass', dest='komp2Pass', help='Password for <PASSWORD>' ) parser.add_argument('--profile', dest='profile', default='dev', help='profile from which", "/nfs/public/ro/pheno-archive-images/images/impc if dstfilename in uniqueUris: print '---------------------!!!!!!!!!!error the filePath is not unique and", "#print \"destination directory for copy is \"+destDirectory if not os.path.exists(destDirectory): os.makedirs(destDirectory) #print 'saving", "#print \"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm # new file paths are /nfs/public/ro/pheno-archive-images/images/impc if dstfilename in", "cant split this url over a few lines as puts in newlines into", "= OmeroPropertiesParser(args.profile) omeroProps = pp.getOmeroProps() except: omeroProps = {} rootSolrUrl = args.rootSolrUrl if", "parameter, download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter]) return directory def processFile(cnx, observation_id, rootDestinationDir, phenotyping_center,pipeline_stable_id, 
procedure, parameter,", "solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=(download_file_path:*mousephenotype.org*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\" print(\"running python image copy script for impc images\") print 'rootDestinationDir is \"',", "komp2 db' ) parser.add_argument('-db', '--database', dest='komp2Db', help='Database to connect to for komp2db' )", "and use as defaults that can be overridden # by command line parameters", "os.path.isfile(destPath): try: shutil.copyfile(dstfilename,destPath) except IOError: print \"file does not exist \"+str(dstfilename)+\" continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1", "be overridden # by command line parameters try: pp = OmeroPropertiesParser(args.profile) omeroProps =", "the experiment core and then downloads the images' ) parser.add_argument('-d', '--rootDestinationDir', dest='rootDestinationDir', help='Directory", "which to connect to komp2 db' ) parser.add_argument('-u', '--user', dest='komp2User', help='Username for connecting", "impc images\") print 'rootDestinationDir is \"', rootDestinationDir solrUrl=rootSolrUrl+solrQuery; print 'solrUrl', solrUrl cnx=getDbConnection(komp2Host, komp2Port,", "db name etc for jenkins to run first get the list of download", "data source, experiment, procdure and parameter and observation id for the images \"\"\"", "parameter and observation id for the images \"\"\" v = json.loads(requests.get(solrUrl).text) docs=v['response']['docs'] numFoundInSolr=v['response']['numFound']", "uris) from the experiment core and then downloads the images\"\"\" import os import", "sys import argparse import mysql.connector import shutil from common import splitString from database", "parser.add_argument('-s', '--rootSolrUrl', dest='rootSolrUrl', help='URL to root of solr index' ) 
parser.add_argument('-H', '--host', dest='komp2Host',", "before:'+dstfilename uniqueUris.add(dstfilename) destDirectory=os.path.dirname(destPath) #print \"destination directory for copy is \"+destDirectory if not os.path.exists(destDirectory):", "is \"+destDirectory if not os.path.exists(destDirectory): os.makedirs(destDirectory) #print 'saving file to '+destPath if not", "<filename>external_tools/src/main/python/images/CopyOnlyFilesSpecifiedInSolr.py #!/usr/bin/python \"\"\"this program gets the download_file_paths (http mousephenotype uris) from the experiment", "def createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure, parameter, download_file_path): directory=\"/\".join([rootDestinationDir,phenotyping_center, pipeline_stable_id,procedure,parameter]) return directory def processFile(cnx,", "and the data source, experiment, procdure and parameter and observation id for the", "os.makedirs(destDirectory) #print 'saving file to '+destPath if not os.path.isfile(destPath): try: shutil.copyfile(dstfilename,destPath) except IOError:", "found in solr='+str(numFoundInSolr)+' number of failed responses='+str(responseFailed)+' number of requests='+str(numberOfImageDownloadAttemps)+' total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit()", "in docs: download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id'] parameter_stable_id=doc['parameter_stable_id'] processFile(cnx, observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id,", "solrUrl=rootSolrUrl+solrQuery; print 'solrUrl', solrUrl cnx=getDbConnection(komp2Host, komp2Port, komp2db, komp2User, komp2Pass) runWithSolrAsDataSource(solrUrl, cnx, rootDestinationDir) def", "that can be overridden # by command line parameters try: pp = 
OmeroPropertiesParser(args.profile)", "komp2User, komp2Pass) runWithSolrAsDataSource(solrUrl, cnx, rootDestinationDir) def runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir): \"\"\" need to get these", "omeroProps = pp.getOmeroProps() except: omeroProps = {} rootSolrUrl = args.rootSolrUrl if args.rootSolrUrl <>", "totalNumberOfImagesWeHave global responseFailed global numberOfImageDownloadAttemps directory = createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure,parameter, downloadFilePath) #print", "and then downloads the images\"\"\" import os import requests import json import sys", "# Get values from property file and use as defaults that can be", "responses='+str(responseFailed)+' number of requests='+str(numberOfImageDownloadAttemps)+' total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit() cnx.close() def createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure,", "rootDestinationDir = args.rootDestinationDir if args.rootDestinationDir<>None else omeroProps['rootdestinationdir'] #note cant split this url over", "observation_id, rootDestinationDir, phenotyping_center,pipeline_stable_id, procedure, parameter, downloadFilePath): global totalNumberOfImagesWeHave global responseFailed global numberOfImageDownloadAttemps directory", "sys import os.path import sys import argparse import mysql.connector import shutil from common", "download_file_path) print 'number found in solr='+str(numFoundInSolr)+' number of failed responses='+str(responseFailed)+' number of requests='+str(numberOfImageDownloadAttemps)+'", "uniqueUris: print '---------------------!!!!!!!!!!error the filePath is not unique and has been specified before:'+dstfilename", "except IOError: print \"file does not exist \"+str(dstfilename)+\" continuing\" totalNumberOfImagesWeHave=totalNumberOfImagesWeHave+1 if totalNumberOfImagesWeHave%1000==0 :", "to root of solr index' ) 
parser.add_argument('-H', '--host', dest='komp2Host', help='Hostname for server hosting", "uniqueUris=set() def main(argv): parser = argparse.ArgumentParser( description='Get the download_file_paths (http mousephenotype uris) from", "the images\"\"\" import os import requests import json import sys import os.path import", "processFile(cnx, observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id, parameter_stable_id, download_file_path) print 'number found in solr='+str(numFoundInSolr)+' number of", "import os.path import sys import argparse import mysql.connector import shutil from common import", "paths are /nfs/public/ro/pheno-archive-images/images/impc if dstfilename in uniqueUris: print '---------------------!!!!!!!!!!error the filePath is not", "\"+destDirectory if not os.path.exists(destDirectory): os.makedirs(destDirectory) #print 'saving file to '+destPath if not os.path.isfile(destPath):", "dest='profile', default='dev', help='profile from which to read config: dev, prod, live, ...') args", "# new file paths are /nfs/public/ro/pheno-archive-images/images/impc if dstfilename in uniqueUris: print '---------------------!!!!!!!!!!error the", "args.rootDestinationDir<>None else omeroProps['rootdestinationdir'] #note cant split this url over a few lines as", "OmeroPropertiesParser responseFailed=0 numberOfImageDownloadAttemps=0 totalNumberOfImagesWeHave=0 numFoundInSolr=0 uniqueUris=set() def main(argv): parser = argparse.ArgumentParser( description='Get the", "komp2db = args.komp2Db if args.komp2Db<>None else omeroProps['komp2db'] komp2User = args.komp2User if args.komp2User<>None else", "to komp2 db' ) parser.add_argument('-db', '--database', dest='komp2Db', help='Database to connect to for komp2db'", "else omeroProps['komp2pass'] rootDestinationDir = args.rootDestinationDir if args.rootDestinationDir<>None else omeroProps['rootdestinationdir'] #note cant split this", "a few lines as puts in newlines into url which doesn't 
work #solrQuery=\"\"\"experiment/select?q=observation_type:image_record&fq=download_file_path:(download_file_path:*bhjlk01.jax.org/images/IMPC_ALZ_001/*%20AND%20!download_file_path:*.mov)&fl=id,download_file_path,phenotyping_center,pipeline_stable_id,procedure_stable_id,datasource_name,parameter_stable_id&wt=json&indent=on&rows=10000000\"\"\"", "copy script for impc images\") print 'rootDestinationDir is \"', rootDestinationDir solrUrl=rootSolrUrl+solrQuery; print 'solrUrl',", "Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm # new file paths are /nfs/public/ro/pheno-archive-images/images/impc if dstfilename in uniqueUris: print '---------------------!!!!!!!!!!error", "requests import json import sys import os.path import sys import argparse import mysql.connector", "the list of download urls and the data source, experiment, procdure and parameter", "parser.add_argument('-H', '--host', dest='komp2Host', help='Hostname for server hosting komp2 db' ) parser.add_argument('-p', '--port', dest='komp2Port',", "list of download urls and the data source, experiment, procdure and parameter and", "numFoundInSolr=v['response']['numFound'] for doc in docs: download_file_path=doc['download_file_path'] datasource_id=doc['datasource_name'] phenotyping_center=doc['phenotyping_center'] #experiment=doc['experiment'] pipeline_stable_id=doc['pipeline_stable_id'] observation_id=doc['id'] procedure_stable_id=doc['procedure_stable_id'] parameter_stable_id=doc['parameter_stable_id']", "global totalNumberOfImagesWeHave global responseFailed global numberOfImageDownloadAttemps directory = createDestinationFilePath(rootDestinationDir, phenotyping_center, pipeline_stable_id, procedure,parameter, downloadFilePath)", "file and use as defaults that can be overridden # by command line", "pp = OmeroPropertiesParser(args.profile) omeroProps = pp.getOmeroProps() except: omeroProps = {} rootSolrUrl = args.rootSolrUrl", "'rootDestinationDir is \"', rootDestinationDir 
solrUrl=rootSolrUrl+solrQuery; print 'solrUrl', solrUrl cnx=getDbConnection(komp2Host, komp2Port, komp2db, komp2User, komp2Pass)", "are /nfs/public/ro/pheno-archive-images/images/impc if dstfilename in uniqueUris: print '---------------------!!!!!!!!!!error the filePath is not unique", "os.path import sys import argparse import mysql.connector import shutil from common import splitString", ") parser.add_argument('-p', '--port', dest='komp2Port', help='Port by which to connect to komp2 db' )", "os import requests import json import sys import os.path import sys import argparse", "by command line parameters try: pp = OmeroPropertiesParser(args.profile) omeroProps = pp.getOmeroProps() except: omeroProps", "#note cant split this url over a few lines as puts in newlines", "= args.rootSolrUrl if args.rootSolrUrl <> None else omeroProps['solrurl'] komp2Host = args.komp2Host if args.komp2Host<>None", "solrUrl cnx=getDbConnection(komp2Host, komp2Port, komp2db, komp2User, komp2Pass) runWithSolrAsDataSource(solrUrl, cnx, rootDestinationDir) def runWithSolrAsDataSource(solrUrl,cnx, rootDestinationDir): \"\"\"", "solr='+str(numFoundInSolr)+' number of failed responses='+str(responseFailed)+' number of requests='+str(numberOfImageDownloadAttemps)+' total totalNumberOfImagesWeHave='+str(totalNumberOfImagesWeHave) cnx.commit() cnx.close() def", "not os.path.exists(destDirectory): os.makedirs(destDirectory) #print 'saving file to '+destPath if not os.path.isfile(destPath): try: shutil.copyfile(dstfilename,destPath)", "help='Hostname for server hosting komp2 db' ) parser.add_argument('-p', '--port', dest='komp2Port', help='Port by which", "solr index' ) parser.add_argument('-H', '--host', dest='komp2Host', help='Hostname for server hosting komp2 db' )", "help='Port by which to connect to komp2 db' ) parser.add_argument('-u', '--user', dest='komp2User', help='Username", "komp2 db' ) parser.add_argument('-u', '--user', dest='komp2User', help='Username for connecting to komp2 db' )", 
"if args.rootSolrUrl <> None else omeroProps['solrurl'] komp2Host = args.komp2Host if args.komp2Host<>None else omeroProps['komp2host']", "pipeline_stable_id, procedure,parameter, downloadFilePath) #print \"directory \"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print \"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print \"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC", "for the images \"\"\" v = json.loads(requests.get(solrUrl).text) docs=v['response']['docs'] numFoundInSolr=v['response']['numFound'] for doc in docs:", "uris) from the experiment core and then downloads the images' ) parser.add_argument('-d', '--rootDestinationDir',", "observation_id, rootDestinationDir,phenotyping_center,pipeline_stable_id, procedure_stable_id, parameter_stable_id, download_file_path) print 'number found in solr='+str(numFoundInSolr)+' number of failed", "\"directory \"+str(directory) dstfilename=directory+\"/\"+str(downloadFilePath.split('/')[-1]) #print \"dstfilename=\"+str(dstfilename) destPath=dstfilename.replace(\"/nfs/komp2/web/images/impc/\",\"/nfs/komp2/web/images/clean/impc/\") #print \"replaced=\"+destPath #/nfs/komp2/web/images/impc/MRC Harwell/HRWL_001/IMPC_XRY_001/IMPC_XRY_034_001/114182.dcm # new file" ]
[ "def test_event(status): assert status.event == 0 def test_speed(status): assert status.speed == 38.076 def", "test_rpm(status): assert status.rpm == 2300 def test_fuel(status): assert status.fuel == 49 def test_steer_angle(status):", "b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" ) @pytest.fixture def status(): return MsgRaceStatus(raw).car_data[-1] def test_car_id(status):", "b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" ) @pytest.fixture def status(): return MsgRaceStatus(raw).car_data[-1] def test_car_id(status): assert", "def test_rpm(status): assert status.rpm == 2300 def test_fuel(status): assert status.fuel == 49 def", "0 def test_speed(status): assert status.speed == 38.076 def test_throttle(status): assert status.throttle == 0", "b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' 
b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\"", "MsgRaceStatus(raw).car_data[-1] def test_car_id(status): assert status.car_id == 30 def test_status(status): assert status.status == 0", "def test_time_off_leader(status): assert status.time_off_leader == 37.0 def test_event(status): assert status.event == 0 def", "pyraceview.messages import MsgRaceStatus raw = ( b\"\\xab\\xcd*T\\x02Cs\\x04\\x90\\x90[3\\x10\\xcc\\x89\\xb8V\\x00\\x00\" b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\"", "def test_throttle(status): assert status.throttle == 0 def test_brake(status): assert status.brake == 0 def", "b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" 
b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" ) @pytest.fixture def status():", "b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\"", "<filename>tests/test_PerCarRaceStatusData.py import pytest from pyraceview.messages import MsgRaceStatus raw = ( b\"\\xab\\xcd*T\\x02Cs\\x04\\x90\\x90[3\\x10\\xcc\\x89\\xb8V\\x00\\x00\" b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\"", "from pyraceview.messages import MsgRaceStatus raw = ( b\"\\xab\\xcd*T\\x02Cs\\x04\\x90\\x90[3\\x10\\xcc\\x89\\xb8V\\x00\\x00\" b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\"", 
"status.tol_type == 1 def test_time_off_leader(status): assert status.time_off_leader == 37.0 def test_event(status): assert status.event", "49 def test_steer_angle(status): assert status.steer_angle == 0 def test_lap_fraction(status): assert status.lap_fraction == 0.5147", "status.event == 0 def test_speed(status): assert status.speed == 38.076 def test_throttle(status): assert status.throttle", "def status(): return MsgRaceStatus(raw).car_data[-1] def test_car_id(status): assert status.car_id == 30 def test_status(status): assert", "assert status.event == 0 def test_speed(status): assert status.speed == 38.076 def test_throttle(status): assert", "== 0 def test_brake(status): assert status.brake == 0 def test_rpm(status): assert status.rpm ==", "b\"\\xab\\xcd*T\\x02Cs\\x04\\x90\\x90[3\\x10\\xcc\\x89\\xb8V\\x00\\x00\" b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\"", "assert status.rpm == 2300 def test_fuel(status): assert status.fuel == 49 def test_steer_angle(status): assert", "status.rpm == 2300 def test_fuel(status): assert status.fuel == 49 def test_steer_angle(status): assert status.steer_angle", "== 49 def test_steer_angle(status): assert 
status.steer_angle == 0 def test_lap_fraction(status): assert status.lap_fraction ==", "test_fuel(status): assert status.fuel == 49 def test_steer_angle(status): assert status.steer_angle == 0 def test_lap_fraction(status):", "b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\"", "status.throttle == 0 def test_brake(status): assert status.brake == 0 def test_rpm(status): assert status.rpm", "1 def test_time_off_leader(status): assert status.time_off_leader == 37.0 def test_event(status): assert status.event == 0", "def test_brake(status): assert status.brake == 0 def test_rpm(status): assert status.rpm == 2300 def", "@pytest.fixture def status(): return MsgRaceStatus(raw).car_data[-1] def test_car_id(status): assert status.car_id == 30 def test_status(status):", "test_car_id(status): assert status.car_id == 30 def test_status(status): assert status.status == 0 def test_tol_type(status):", "0 def test_brake(status): assert status.brake == 0 def test_rpm(status): assert status.rpm == 2300", "def 
test_fuel(status): assert status.fuel == 49 def test_steer_angle(status): assert status.steer_angle == 0 def", "status(): return MsgRaceStatus(raw).car_data[-1] def test_car_id(status): assert status.car_id == 30 def test_status(status): assert status.status", "assert status.speed == 38.076 def test_throttle(status): assert status.throttle == 0 def test_brake(status): assert", "import pytest from pyraceview.messages import MsgRaceStatus raw = ( b\"\\xab\\xcd*T\\x02Cs\\x04\\x90\\x90[3\\x10\\xcc\\x89\\xb8V\\x00\\x00\" b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\"", "raw = ( b\"\\xab\\xcd*T\\x02Cs\\x04\\x90\\x90[3\\x10\\xcc\\x89\\xb8V\\x00\\x00\" b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\"", "b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" 
b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\"", "b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" ) @pytest.fixture def status(): return MsgRaceStatus(raw).car_data[-1] def test_car_id(status): assert status.car_id ==", "assert status.car_id == 30 def test_status(status): assert status.status == 0 def test_tol_type(status): assert", "30 def test_status(status): assert status.status == 0 def test_tol_type(status): assert status.tol_type == 1", "test_tol_type(status): assert status.tol_type == 1 def test_time_off_leader(status): assert status.time_off_leader == 37.0 def test_event(status):", "def test_status(status): assert status.status == 0 def test_tol_type(status): assert status.tol_type == 1 def", "status.brake == 0 def test_rpm(status): assert status.rpm == 2300 def test_fuel(status): assert status.fuel", "status.car_id == 30 def test_status(status): assert status.status == 0 def test_tol_type(status): assert status.tol_type", "status.speed == 38.076 def test_throttle(status): assert status.throttle == 0 def test_brake(status): assert status.brake", "test_throttle(status): assert status.throttle == 0 def test_brake(status): assert status.brake == 0 def test_rpm(status):", "test_speed(status): assert status.speed == 38.076 def test_throttle(status): assert status.throttle == 0 def test_brake(status):", "pytest from pyraceview.messages import MsgRaceStatus raw = ( 
b\"\\xab\\xcd*T\\x02Cs\\x04\\x90\\x90[3\\x10\\xcc\\x89\\xb8V\\x00\\x00\" b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\"", ") @pytest.fixture def status(): return MsgRaceStatus(raw).car_data[-1] def test_car_id(status): assert status.car_id == 30 def", "b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\"", "test_event(status): assert status.event == 0 def test_speed(status): assert status.speed == 38.076 def test_throttle(status):", "b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" 
b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\"", "== 37.0 def test_event(status): assert status.event == 0 def test_speed(status): assert status.speed ==", "b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\"", "assert status.fuel == 49 def test_steer_angle(status): assert 
status.steer_angle == 0 def test_lap_fraction(status): assert", "def test_speed(status): assert status.speed == 38.076 def test_throttle(status): assert status.throttle == 0 def", "0 def test_tol_type(status): assert status.tol_type == 1 def test_time_off_leader(status): assert status.time_off_leader == 37.0", "status.fuel == 49 def test_steer_angle(status): assert status.steer_angle == 0 def test_lap_fraction(status): assert status.lap_fraction", "assert status.status == 0 def test_tol_type(status): assert status.tol_type == 1 def test_time_off_leader(status): assert", "\\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\"", "( b\"\\xab\\xcd*T\\x02Cs\\x04\\x90\\x90[3\\x10\\xcc\\x89\\xb8V\\x00\\x00\" b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" 
b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta'", "b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" ) @pytest.fixture def", "== 30 def test_status(status): assert status.status == 0 def test_tol_type(status): assert status.tol_type ==", "assert status.brake == 0 def test_rpm(status): assert status.rpm == 2300 def test_fuel(status): assert", "38.076 def test_throttle(status): assert status.throttle == 0 def test_brake(status): assert status.brake == 0", "b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" 
b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" ) @pytest.fixture def status(): return MsgRaceStatus(raw).car_data[-1] def", "b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" ) @pytest.fixture", "status.time_off_leader == 37.0 def test_event(status): assert status.event == 0 def test_speed(status): assert status.speed", "b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" ) @pytest.fixture def status(): return MsgRaceStatus(raw).car_data[-1] def test_car_id(status): assert status.car_id == 30", "== 0 def test_speed(status): assert status.speed == 38.076 def test_throttle(status): assert status.throttle ==", "assert status.throttle == 0 def test_brake(status): assert status.brake == 0 def test_rpm(status): assert", "b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" 
b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\"", "b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\"", "test_brake(status): assert status.brake == 0 def 
test_rpm(status): assert status.rpm == 2300 def test_fuel(status):", "= ( b\"\\xab\\xcd*T\\x02Cs\\x04\\x90\\x90[3\\x10\\xcc\\x89\\xb8V\\x00\\x00\" b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\"", "2300 def test_fuel(status): assert status.fuel == 49 def test_steer_angle(status): assert status.steer_angle == 0", "b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\"", "0 def test_rpm(status): assert status.rpm == 2300 def test_fuel(status): 
assert status.fuel == 49", "b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\"", "37.0 def test_event(status): assert status.event == 0 def test_speed(status): assert status.speed == 38.076", "b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" ) @pytest.fixture def status(): return MsgRaceStatus(raw).car_data[-1]", "== 1 def test_time_off_leader(status): assert status.time_off_leader == 37.0 def test_event(status): assert status.event 
==", "b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\"", "test_time_off_leader(status): assert status.time_off_leader == 37.0 def test_event(status): assert status.event == 0 def test_speed(status):", "== 2300 def test_fuel(status): assert status.fuel == 49 def test_steer_angle(status): assert status.steer_angle ==", "import MsgRaceStatus raw = ( b\"\\xab\\xcd*T\\x02Cs\\x04\\x90\\x90[3\\x10\\xcc\\x89\\xb8V\\x00\\x00\" b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\"", 
"b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" )", "return MsgRaceStatus(raw).car_data[-1] def test_car_id(status): assert status.car_id == 30 def test_status(status): assert status.status ==", "b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" ) @pytest.fixture def status(): return", "status.status == 0 def test_tol_type(status): assert status.tol_type == 1 def test_time_off_leader(status): assert status.time_off_leader", 
"test_status(status): assert status.status == 0 def test_tol_type(status): assert status.tol_type == 1 def test_time_off_leader(status):", "== 38.076 def test_throttle(status): assert status.throttle == 0 def test_brake(status): assert status.brake ==", "b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\"", "b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" 
b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\"", "def test_tol_type(status): assert status.tol_type == 1 def test_time_off_leader(status): assert status.time_off_leader == 37.0 def", "b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\"", "b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" 
b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\"", "def test_car_id(status): assert status.car_id == 30 def test_status(status): assert status.status == 0 def", "assert status.tol_type == 1 def test_time_off_leader(status): assert status.time_off_leader == 37.0 def test_event(status): assert", "assert status.time_off_leader == 37.0 def test_event(status): assert status.event == 0 def test_speed(status): assert", "b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" 
b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\"", "b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\" b\"\\x00\\xb5\\x00\\x00T\\x006\\xaa\\x05\\x00\\xa8\\x00\\x07\\xd1\\x90\\xfaC\" b\"\\x00\\x00`\\x00<\\x1c\\x05\\xa9q\\x90\\t/\\x8c\\xe8g\\x00\\x00\\x1c\\x00=>\" b'\\x053\\xf1\\x90\\x08\\x99\\x90\\xe2\\xd3\\x00\\x00\"\\x00?(\\x05jh\\x00\\ta' b\"\\x8c\\xdb\\xcf\\x00\\x00\\x18\\x00@\\xa6\\xe0\\x00\\x01\\x90\\x08g\\x90\\xd6\" b\"\\xe7\\x00\\x00\\x02\\x00A\\xec\\xe0\\x00\\x00\\x00\\x08\\x03\\x8c\\xd3\\xc9\" b\"\\x00\\x00R\\x00E\\xde\\x00\\x00\\x01\\x90\\ta\\x90\\xc3\\xa3\\x00\\x00\\xbe\" b\"\\x00If\\x00\\x00\\x00\\x00\\x08\\x99\\x8c\\xb9\\x9f\\x00\\x00\\x14\\x00p$\" b\"\\x00\\x00\\x01\\x90\\x08\\x99\\x8c\\x16\\xdb\\x00\\x00D\\x00\\xbeD\\x05\\x95!\" b\"\\x90\\x07\\x9e~\\x04Q\\x00\\x00,\\x00\\xc1\\xde\\x05\\xaf\\xd9\\x90\\x084}\" b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\"", "b\"\\xf7\\xd9\\x00\\x00L\\x00\\xdc\\x88\\x04\\x8e`\\x00\\x06@i\\x97\\x9b\\x00\" b\"\\x000\\x10\\x00\\x02\\x05\\x8d\\xa8\\x00\\x08\\x98~\\x15\\x0b\\x00\\x00\\xb0\" b\"\\x10\\x00\\x02\\x05\\xda\\x91\\x90\\x08\\x98}\\xf1\\xc7\\x00\\x00\\x06\\x10\" b\"\\x00\\x02\\x05\\xc3\\x10\\x00\\x05\\xaa}\\xeb\\x9b\\x00\\x00\\x1a\\x10\\x00\" b\"\\x02\\x85\\xb4!\\x90\\t\\xc4}\\xe0\\xa1\\x00\\x00H\\x10\\x00\\x02\\x05\\xb9y\" b\"\\x90\\n(}\\xd9\\xa3\\x00\\x00@\\x10\\x00\\x02\\x05\\xb0\\xe9\\x90\\x08\\x02m\" b\"\\xca\\xe7\\x00\\x00*\\x10\\x00\\x02\\x06\\x1b\\xb9\\x90\\t\\x92}\\xc0\\xc7\\x00\" b\"\\x00\\x10\\x10\\x00\\x02\\x04\\x8b1\\x90\\x07:}\\xaaE\\x00\\x00\\x01\\x10\\x00\" 
b\"\\x02\\x04\\xa5\\xe0\\x00\\x06@m\\x9es\\x00\\x00^\\x10\\x00\\x04\\x06\\x02\\x18\" b\"\\x00\\t`n\\x0e{\\x00\\x00J\\x10\\x00\\x04\\x05\\xb8a\\x90\\t.q\\xc6\\x9f\\x00\" b\"\\x00f\\x10\\x00\\x06\\x04\\xfbY\\x90\\x07:}\\xb2\\xc3\\x00\\x00h\\x10\\x00\\x08\" b\"\\x04\\xaeq\\x93&\\xa4q\\xa9\\xb9\\x00\\x00j\\x10\\x00\\n\\x05\\xb9x\\x00\\x07\" b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\"", "== 0 def test_tol_type(status): assert status.tol_type == 1 def test_time_off_leader(status): assert status.time_off_leader ==", "== 0 def test_rpm(status): assert status.rpm == 2300 def test_fuel(status): assert status.fuel ==", "b\"\\xd0u\\xfcm\\x00\\x006\\x10\\x00\\x0c\\x05\\x7f\\xc8\\x00\\t\\xc4u\\xd0\\xb7\" b\"\\x00\\x00\\x9a\\x10\\x00\\x0c\\x04\\x98\\x00\\x00\\x06@u\\x8b\\xdd\\x00\\x00\" b\"\\x1e\\x10\\x00J\\x04\\xa5\\xe0\\x00\\x04~\\xc5\\x92\\x1d\\x00\\x00\" ) @pytest.fixture def status(): return MsgRaceStatus(raw).car_data[-1] def test_car_id(status): assert status.car_id", "MsgRaceStatus raw = ( b\"\\xab\\xcd*T\\x02Cs\\x04\\x90\\x90[3\\x10\\xcc\\x89\\xb8V\\x00\\x00\" b\"\\x00\\x05q\\xe0\\x03 \\x00y\\x86\\xb9\\x00\\x00$\\x00\\x10\\x10\\x06\" b\"\\x0e\\xe8\\x00\\x06\\x0f\\x91k\\xe5\\x00\\x00&\\x00\\x15\\xce\\x05\\xb9x\" b\"\\x00\\x07\\t\\x8d^\\x8d\\x00\\x00\\x16\\x00\\x1c\\xc2\\x05\\xcb\\xa1\\x90\" b\"\\x05\\xdd\\x91L\\xb3\\x00\\x00\\x04\\x00!\\xece\\xb1\\xf9\\x90\\x07\\x9f\" b\"\\x91@\\xe5\\x00\\x00(\\x00%R\\x05P\\xc8\\x00\\x05G\\x8d2a\\x00\\x00\\x0c\" b\"\\x00'\\xe8\\x05d\\x01\\x90\\x07;\\x911i\\x00\\x00\\x08\\x00+z\\x05'!\" b\"\\x90\\x07m\\x91&S\\x00\\x00\\x12\\x005\\x08\\x05\\x1ba\\x90\\x07m\\x91\"" ]
[ "LOG_HELPER_SETTINGS # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) ###- logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG) colorama.deinit() settings", "newLine=False) : LogHelperHelper.hardLog(origin, message, exception, ERROR, muteStackTrace=muteStackTrace, newLine=newLine) def test(origin, message, exception=None, muteStackTrace=False,", "muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printInfo(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(INFO,", "= StringHelper.prettyPython( dictionaryInstance, quote = quote, tabCount = tabCount, nullValue = nullValue, trueValue", "level=INFO, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, INFO, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def success(origin,", "printError(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(ERROR, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin,", "LogHelperHelper.softLog(origin, message, SUCCESS, muteStackTrace=muteStackTrace, newLine=newLine) def setting(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message,", "OUTPUT_PRINT_LIST # try : # if ObjectHelper.isNone(OUTPUT_PRINT_LIST) : # OUTPUT_PRINT_LIST = [] #", "message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, FAILURE, muteStackTrace=muteStackTrace, newLine=newLine) def error(origin,", "logging.error(it, **kwargs) # logging.log(msg=args[0], level=9) # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) # logger.info(it)", "def loadSettings() : global LOG_HELPER_SETTINGS # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) ###- 
logging.basicConfig(filename='example.log',", "quote = c.DOUBLE_QUOTE, tabCount = 0, nullValue = c.NULL_VALUE, trueValue = c.TRUE_VALUE, falseValue", "wraper(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, WRAPPER, muteStackTrace=muteStackTrace, newLine=newLine) def", "global OUTPUT_PRINT_LIST # try : # if ObjectHelper.isNone(OUTPUT_PRINT_LIST) : # OUTPUT_PRINT_LIST = []", "message, LOG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def info(origin, message, level=INFO, exception=None, muteStackTrace=False, newLine=False) :", "True if 0 == len(OUTPUT_PRINT_LIST) else False # OUTPUT_PRINT_LIST.append([it, kwargs]) # if shouldPrint", "logging.log(msg=args[0], level=9) # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) # logger.info(it) print(it, **kwargs) def", "falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyPythonValue]), logLevel)", "{} settings[SettingHelper.ACTIVE_ENVIRONMENT] = SettingHelper.getActiveEnvironment() for level in LogHelperHelper.LEVEL_DICTIONARY : status = EnvironmentHelper.get(level) settings[level]", "# LOG_HELPER_SETTINGS[PRINTING] = False if SettingHelper.activeEnvironmentIsLocal() : colorama.init() # logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS, end=c.NOTHING) loadSettings()", "def printSetting(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SETTING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin)", "message, exception, WRAPPER, muteStackTrace=muteStackTrace, newLine=newLine) def failure(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin,", "message, level=LOG, exception=None, muteStackTrace=False, newLine=False) : 
LogHelperHelper.softLog(origin, message, LOG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def", "c.COLON_SPACE, *prettyPythonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def prettyJson( origin, message, dictionaryInstance, quote = c.DOUBLE_QUOTE,", "[] # except Exception as exception : # OUTPUT_PRINT_LIST = [] # #", "muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def info(origin, message, level=INFO, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message,", "newLine=False) : LogHelperHelper.softLog(origin, message, INFO, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def success(origin, message, muteStackTrace=False, newLine=False)", "newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(ERROR, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printTest(message,", "LogHelperHelper.hardLog(origin, message, exception, FAILURE, muteStackTrace=muteStackTrace, newLine=newLine) def error(origin, message, exception, muteStackTrace=False, newLine=False) :", "# logger.info(it) print(it, **kwargs) def loadSettings() : global LOG_HELPER_SETTINGS # logger = loadLogger(LOGGER_INSTANCE)", ": LogHelperHelper.printMessageLog(INFO, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printSuccess(message, condition=False, muteStackTrace=False, newLine=True,", "FAILURE = 'FAILURE' ERROR = 'ERROR' TEST = 'TEST' RESET_ALL_COLORS = colorama.Style.RESET_ALL from", "printSetting(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SETTING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def", "exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, INFO, 
muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def success(origin, message,", "LOG_HELPER_SETTINGS = settings # if PRINTING not in LOG_HELPER_SETTINGS : # LOG_HELPER_SETTINGS[PRINTING] =", ": if ObjectHelper.isEmpty(exception) : return c.UNKNOWN exceptionAsString = str(exception) if c.NOTHING == exceptionAsString", "if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue = StringHelper.prettyPython( dictionaryInstance, quote =", "as c from python_helper.api.src.service import SettingHelper, StringHelper, EnvironmentHelper, ObjectHelper, ReflectionHelper LOG = 'LOG'", "settings[level] = status if not status is None else c.TRUE LOG_HELPER_SETTINGS = settings", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('------------------------------------------------------------------------ awaiting ------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, DEBUG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def warning(origin,", "EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue = 
StringHelper.prettyPython( dictionaryInstance, quote = quote, tabCount = tabCount, nullValue =", "import asyncio # global OUTPUT_PRINT_LIST # PRINTING = 'PRINTING' # def loadLogger() :", "# shouldPrint = True if 0 == len(OUTPUT_PRINT_LIST) else False # OUTPUT_PRINT_LIST.append([it, kwargs])", "newLine=False) : LogHelperHelper.hardLog(origin, message, exception, WRAPPER, muteStackTrace=muteStackTrace, newLine=newLine) def failure(origin, message, exception, muteStackTrace=False,", "stderr) def prettyJson( origin, message, dictionaryInstance, quote = c.DOUBLE_QUOTE, tabCount = 0, nullValue", ": LogHelperHelper.printMessageLog(WARNING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarper(message, condition=False, muteStackTrace=False, newLine=True,", "message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SETTING, muteStackTrace=muteStackTrace, newLine=newLine) def debug(origin, message, exception=None,", "condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printTest(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) :", "EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue = StringHelper.prettyJson( dictionaryInstance, quote = quote, tabCount = tabCount, nullValue =", "logger.setLevel(logging.DEBUG) # logger.info(it) print(it, **kwargs) def loadSettings() : global LOG_HELPER_SETTINGS # logger =", "margin=True, exception=None) : LogHelperHelper.printMessageLog(WRAPPER, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printFailure(message, condition=False,", "condition = True ) : if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue", "c.BLANK try : tracebackMessage = traceback.format_exc() except : tracebackMessage = f'{c.NEW_LINE}' if muteStackTrace", "# 
logger.setLevel(logging.DEBUG) ###- logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG) colorama.deinit() settings = {} settings[SettingHelper.ACTIVE_ENVIRONMENT] = SettingHelper.getActiveEnvironment()", "= 'INFO' SUCCESS = 'SUCCESS' SETTING = 'SETTING' DEBUG = 'DEBUG' WARNING =", "= 'SETTING' DEBUG = 'DEBUG' WARNING = 'WARNING' WRAPPER = 'WRAPPER' FAILURE =", "ObjectHelper.isNotNone(logger) else logging.getLogger(__name__) def logIt(it, **kwargs) : # logging.error(it, **kwargs) # logging.log(msg=args[0], level=9)", "ObjectHelper.isEmpty(exception) : return c.UNKNOWN exceptionAsString = str(exception) if c.NOTHING == exceptionAsString : return", ": # return logger if ObjectHelper.isNotNone(logger) else logging.getLogger(__name__) def logIt(it, **kwargs) : #", "printInfo(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(INFO, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin,", "def loadLogger() : # global OUTPUT_PRINT_LIST # try : # if ObjectHelper.isNone(OUTPUT_PRINT_LIST) :", "= 0, nullValue = c.NULL_VALUE, trueValue = c.TRUE_VALUE, falseValue = c.FALSE_VALUE, logLevel =", "exceptionAsString def getTracebackMessage(muteStackTrace) : tracebackMessage = c.BLANK try : tracebackMessage = traceback.format_exc() except", "status = EnvironmentHelper.get(level) settings[level] = status if not status is None else c.TRUE", "from python_helper.api.src.helper import LogHelperHelper global LOG_HELPER_SETTINGS # import asyncio # global OUTPUT_PRINT_LIST #", "= SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyPythonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr)", "nullValue, trueValue = trueValue, falseValue = falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False", "= 
EnvironmentHelper.get(level) settings[level] = status if not status is None else c.TRUE LOG_HELPER_SETTINGS", "def warning(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, WARNING, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception)", "newLine=newLine, margin=margin, exception=exception) def printInfo(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(INFO, message,", "loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) # logger.info(it) print(it, **kwargs) def loadSettings() : global LOG_HELPER_SETTINGS #", "tracebackMessage = traceback.format_exc() except : tracebackMessage = f'{c.NEW_LINE}' if muteStackTrace : return StringHelper.join(tracebackMessage.split(c.NEW_LINE)[-2:],", ": # global LOG_HELPER_SETTINGS # await asyncAsyncPrintIt(itArgsAndKwargs) # LOG_HELPER_SETTINGS[PRINTING] = False # #", "newLine=newLine, exception=exception) def printLog(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(LOG, message, condition=condition,", "margin=margin, exception=exception) def printSuccess(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SUCCESS, message, condition=condition, muteStackTrace=muteStackTrace,", "kwargs]) # if shouldPrint : # printOutput() # import logging # LOGGER_INSTANCE =", "if SettingHelper.activeEnvironmentIsLocal() : colorama.init() # logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS, end=c.NOTHING) loadSettings() def log(origin, message, level=LOG,", "newLine=False) : LogHelperHelper.softLog(origin, message, LOG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def info(origin, message, level=INFO, exception=None,", "trueValue = c.TRUE, falseValue = c.FALSE, logLevel = LOG, condition = True )", "def prettyJson( origin, 
message, dictionaryInstance, quote = c.DOUBLE_QUOTE, tabCount = 0, nullValue =", "'PRINTING' # def loadLogger() : # global OUTPUT_PRINT_LIST # try : # if", "LOGGER_INSTANCE = None # def loadLogger(logger) : # return logger if ObjectHelper.isNotNone(logger) else", "exception=None) : LogHelperHelper.printMessageLog(TEST, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def prettyPython( origin, message,", "c from python_helper.api.src.service import SettingHelper, StringHelper, EnvironmentHelper, ObjectHelper, ReflectionHelper LOG = 'LOG' INFO", ": # OUTPUT_PRINT_LIST = [] # except Exception as exception : # OUTPUT_PRINT_LIST", "exception=exception) def warning(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, WARNING, muteStackTrace=muteStackTrace, newLine=newLine,", "def printFailure(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(FAILURE, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine,", ": # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # 
print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "# asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) # # def logIt(it, **kwargs) : # global OUTPUT_PRINT_LIST # shouldPrint", ": LogHelperHelper.hardLog(origin, message, exception, ERROR, muteStackTrace=muteStackTrace, newLine=newLine) def test(origin, message, exception=None, muteStackTrace=False, newLine=False)", "# except Exception as exception : # OUTPUT_PRINT_LIST = [] # # async", "exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, WRAPPER, muteStackTrace=muteStackTrace, newLine=newLine) def failure(origin, message,", "= [] # except Exception as exception : # OUTPUT_PRINT_LIST = [] #", ": # global LOG_HELPER_SETTINGS # while LOG_HELPER_SETTINGS[PRINTING] : # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') #", "= nullValue, trueValue = trueValue, falseValue = falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn =", "exception=exception) def printSuccess(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SUCCESS, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine,", "logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) # logger.info(it) print(it, **kwargs) def loadSettings() : global", "# global OUTPUT_PRINT_LIST # try : # if ObjectHelper.isNone(OUTPUT_PRINT_LIST) : # OUTPUT_PRINT_LIST =", 
"print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('------------------------------------------------------------------------ awaiting ------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "message, exception, FAILURE, muteStackTrace=muteStackTrace, newLine=newLine) def error(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin,", "# OUTPUT_PRINT_LIST = [] # except Exception as exception : # OUTPUT_PRINT_LIST =", "python_helper.api.src.helper import LogHelperHelper global LOG_HELPER_SETTINGS # import asyncio # global OUTPUT_PRINT_LIST # PRINTING", "= c.NONE, trueValue = c.TRUE, falseValue = c.FALSE, logLevel = LOG, condition =", "c.FALSE, logLevel = LOG, condition = True ) : if condition : stdout,", "LogHelperHelper.softLog(origin, message, INFO, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def success(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin,", "condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(ERROR, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, 
exception=exception)", "exceptionAsString : return ReflectionHelper.getName(exception.__class__) else : return exceptionAsString def getTracebackMessage(muteStackTrace) : tracebackMessage =", "ERROR = 'ERROR' TEST = 'TEST' RESET_ALL_COLORS = colorama.Style.RESET_ALL from python_helper.api.src.helper import LogHelperHelper", "margin=True, exception=None) : LogHelperHelper.printMessageLog(WARNING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarper(message, condition=False,", "FAILURE, muteStackTrace=muteStackTrace, newLine=newLine) def error(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception,", "message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def prettyPython( origin, message, dictionaryInstance, quote =", "async def asyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # await asyncAsyncPrintIt(itArgsAndKwargs) # LOG_HELPER_SETTINGS[PRINTING] =", "message, WARNING, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def wraper(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin,", "async def printOutput() : # global OUTPUT_PRINT_LIST # while 0 < len(OUTPUT_PRINT_LIST) :", ": LogHelperHelper.softLog(origin, message, SUCCESS, muteStackTrace=muteStackTrace, newLine=newLine) def setting(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin,", "# print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # 
print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # LOG_HELPER_SETTINGS[PRINTING] = True #", "for level in LogHelperHelper.LEVEL_DICTIONARY : status = EnvironmentHelper.get(level) settings[level] = status if not", "newLine=newLine, margin=margin, exception=exception) def printSuccess(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SUCCESS, message, condition=condition,", "logIt(it, **kwargs) : # global OUTPUT_PRINT_LIST # shouldPrint = True if 0 ==", ": stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue = StringHelper.prettyJson( dictionaryInstance, quote = quote, tabCount", "# while LOG_HELPER_SETTINGS[PRINTING] : # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') #", "# if ObjectHelper.isNone(OUTPUT_PRINT_LIST) : # OUTPUT_PRINT_LIST = [] # except Exception as exception", "# print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # LOG_HELPER_SETTINGS[PRINTING] = True # print(itArgsAndKwargs[0], 
**itArgsAndKwargs[1]) # # async def", "if muteStackTrace : return StringHelper.join(tracebackMessage.split(c.NEW_LINE)[-2:], character=c.NEW_LINE) return LogHelperHelper.NO_TRACEBACK_PRESENT_MESSAGE if LogHelperHelper.NO_TRACEBACK_PRESENT == str(tracebackMessage) else", "logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def prettyJson( origin, message, dictionaryInstance, quote = c.DOUBLE_QUOTE, tabCount =", "c.DOUBLE_QUOTE, tabCount = 0, nullValue = c.NULL_VALUE, trueValue = c.TRUE_VALUE, falseValue = c.FALSE_VALUE,", "SettingHelper.activeEnvironmentIsLocal() : colorama.init() # logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS, end=c.NOTHING) loadSettings() def log(origin, message, level=LOG, exception=None,", "if ObjectHelper.isNotNone(logger) else logging.getLogger(__name__) def logIt(it, **kwargs) : # logging.error(it, **kwargs) # logging.log(msg=args[0],", "prettyPythonValue = StringHelper.prettyPython( dictionaryInstance, quote = quote, tabCount = tabCount, nullValue = nullValue,", ": # logging.error(it, **kwargs) # logging.log(msg=args[0], level=9) # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG)", "tabCount = 0, nullValue = c.NONE, trueValue = c.TRUE, falseValue = c.FALSE, logLevel", "def printError(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(ERROR, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine,", "success(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SUCCESS, muteStackTrace=muteStackTrace, newLine=newLine) def setting(origin, message,", "status if not status is None else c.TRUE LOG_HELPER_SETTINGS = settings # if", "logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) ###- logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG) colorama.deinit() settings = {}", "muteStackTrace=muteStackTrace, 
newLine=newLine, margin=margin, exception=exception) def printTest(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(TEST,", "condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(LOG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception)", "return c.UNKNOWN exceptionAsString = str(exception) if c.NOTHING == exceptionAsString : return ReflectionHelper.getName(exception.__class__) else", "TEST = 'TEST' RESET_ALL_COLORS = colorama.Style.RESET_ALL from python_helper.api.src.helper import LogHelperHelper global LOG_HELPER_SETTINGS #", "= status if not status is None else c.TRUE LOG_HELPER_SETTINGS = settings #", "c.NOTHING == exceptionAsString : return ReflectionHelper.getName(exception.__class__) else : return exceptionAsString def getTracebackMessage(muteStackTrace) :", "exception, WRAPPER, muteStackTrace=muteStackTrace, newLine=newLine) def failure(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message,", "tabCount = 0, nullValue = c.NULL_VALUE, trueValue = c.TRUE_VALUE, falseValue = c.FALSE_VALUE, logLevel", "= falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyJsonValue]),", ": return c.UNKNOWN exceptionAsString = str(exception) if c.NOTHING == exceptionAsString : return ReflectionHelper.getName(exception.__class__)", "= c.NULL_VALUE, trueValue = c.TRUE_VALUE, falseValue = c.FALSE_VALUE, logLevel = LOG, condition =", "muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def wraper(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception,", "LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, 
*prettyJsonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def getExceptionMessage(exception) : if ObjectHelper.isEmpty(exception) :", "message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printTest(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None)", "exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, FAILURE, muteStackTrace=muteStackTrace, newLine=newLine) def error(origin, message,", "prettyPython( origin, message, dictionaryInstance, quote = c.SINGLE_QUOTE, tabCount = 0, nullValue = c.NONE,", "= 'SUCCESS' SETTING = 'SETTING' DEBUG = 'DEBUG' WARNING = 'WARNING' WRAPPER =", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('------------------------------------------------------------------------ awaiting ------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "logger if ObjectHelper.isNotNone(logger) else logging.getLogger(__name__) def logIt(it, **kwargs) : # logging.error(it, **kwargs) #", "else : return exceptionAsString def getTracebackMessage(muteStackTrace) : tracebackMessage = c.BLANK 
try : tracebackMessage", "condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printInfo(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) :", "newLine=newLine, margin=margin) def printSetting(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SETTING, message, condition=condition, muteStackTrace=muteStackTrace,", "'TEST' RESET_ALL_COLORS = colorama.Style.RESET_ALL from python_helper.api.src.helper import LogHelperHelper global LOG_HELPER_SETTINGS # import asyncio", "muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, WRAPPER, muteStackTrace=muteStackTrace, newLine=newLine) def failure(origin, message, exception,", "condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SUCCESS, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printSetting(message,", "= False # # async def printOutput() : # global OUTPUT_PRINT_LIST # while", "tracebackMessage = c.BLANK try : tracebackMessage = traceback.format_exc() except : tracebackMessage = f'{c.NEW_LINE}'", "WRAPPER, muteStackTrace=muteStackTrace, newLine=newLine) def failure(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception,", "margin=True, exception=None) : LogHelperHelper.printMessageLog(INFO, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printSuccess(message, condition=False,", "= True ) : if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue =", "prettyJsonValue = StringHelper.prettyJson( dictionaryInstance, quote = quote, tabCount = tabCount, nullValue = nullValue,", "False if SettingHelper.activeEnvironmentIsLocal() : colorama.init() # 
logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS, end=c.NOTHING) loadSettings() def log(origin, message,", "'FAILURE' ERROR = 'ERROR' TEST = 'TEST' RESET_ALL_COLORS = colorama.Style.RESET_ALL from python_helper.api.src.helper import", "printOutput() # import logging # LOGGER_INSTANCE = None # def loadLogger(logger) : #", "False # # async def printOutput() : # global OUTPUT_PRINT_LIST # while 0", "= c.DOUBLE_QUOTE, tabCount = 0, nullValue = c.NULL_VALUE, trueValue = c.TRUE_VALUE, falseValue =", "= 'PRINTING' # def loadLogger() : # global OUTPUT_PRINT_LIST # try : #", "LOG_HELPER_SETTINGS : # LOG_HELPER_SETTINGS[PRINTING] = False if SettingHelper.activeEnvironmentIsLocal() : colorama.init() # logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS,", "margin=margin) def printDebug(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(DEBUG, message, condition=condition, muteStackTrace=muteStackTrace,", "muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(INFO, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def", "= EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue = StringHelper.prettyPython( dictionaryInstance, quote = quote, tabCount = tabCount, nullValue", "muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printFailure(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(FAILURE,", "muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SUCCESS, muteStackTrace=muteStackTrace, newLine=newLine) def setting(origin, message, muteStackTrace=False, newLine=False)", "exception=exception) def printTest(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : 
LogHelperHelper.printMessageLog(TEST, message, condition=condition, muteStackTrace=muteStackTrace,", "muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, TEST, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def printLog(message, condition=False, muteStackTrace=False,", "stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue = StringHelper.prettyPython( dictionaryInstance, quote = quote, tabCount =", "message, SUCCESS, muteStackTrace=muteStackTrace, newLine=newLine) def setting(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SETTING,", "condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printFailure(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) :", "falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyJsonValue]), logLevel)", "= False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyJsonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def getExceptionMessage(exception) :", "margin=True) : LogHelperHelper.printMessageLog(SETTING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printDebug(message, condition=False, muteStackTrace=False, newLine=True,", "StringHelper.join([message, c.COLON_SPACE, *prettyJsonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def getExceptionMessage(exception) : if ObjectHelper.isEmpty(exception) : return", "tracebackMessage = f'{c.NEW_LINE}' if muteStackTrace : return StringHelper.join(tracebackMessage.split(c.NEW_LINE)[-2:], character=c.NEW_LINE) return LogHelperHelper.NO_TRACEBACK_PRESENT_MESSAGE if LogHelperHelper.NO_TRACEBACK_PRESENT", "LOG_HELPER_SETTINGS[PRINTING] = 
False if SettingHelper.activeEnvironmentIsLocal() : colorama.init() # logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS, end=c.NOTHING) loadSettings() def", "withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyJsonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout,", "margin=True, exception=None) : LogHelperHelper.printMessageLog(ERROR, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printTest(message, condition=False,", "loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) ###- logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG) colorama.deinit() settings = {} settings[SettingHelper.ACTIVE_ENVIRONMENT] =", "from python_helper.api.src.domain import Constant as c from python_helper.api.src.service import SettingHelper, StringHelper, EnvironmentHelper, ObjectHelper,", "condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def prettyPython( origin, message, dictionaryInstance, quote = c.SINGLE_QUOTE,", "falseValue = c.FALSE_VALUE, logLevel = LOG, condition = True ) : if condition", "# print(itArgsAndKwargs[0], **itArgsAndKwargs[1]) # # async def asyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS #", "quote = quote, tabCount = tabCount, nullValue = nullValue, trueValue = trueValue, falseValue", "printOutput() : # global OUTPUT_PRINT_LIST # while 0 < len(OUTPUT_PRINT_LIST) : # asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0)))", "newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(DEBUG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarning(message,", "condition = True ) : if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue", 
"message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printDebug(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) :", "= colorama.Style.RESET_ALL from python_helper.api.src.helper import LogHelperHelper global LOG_HELPER_SETTINGS # import asyncio # global", "# # async def asyncAsyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # while LOG_HELPER_SETTINGS[PRINTING] :", ": tracebackMessage = traceback.format_exc() except : tracebackMessage = f'{c.NEW_LINE}' if muteStackTrace : return", "# print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') #", "# logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) ###- logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG) colorama.deinit() settings =", "message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarper(message, condition=False, muteStackTrace=False, newLine=True, margin=True, 
exception=None)", "return logger if ObjectHelper.isNotNone(logger) else logging.getLogger(__name__) def logIt(it, **kwargs) : # logging.error(it, **kwargs)", "= loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) # logger.info(it) print(it, **kwargs) def loadSettings() : global LOG_HELPER_SETTINGS", "LOG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def info(origin, message, level=INFO, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin,", "colorama.deinit() settings = {} settings[SettingHelper.ACTIVE_ENVIRONMENT] = SettingHelper.getActiveEnvironment() for level in LogHelperHelper.LEVEL_DICTIONARY : status", "*prettyJsonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def getExceptionMessage(exception) : if ObjectHelper.isEmpty(exception) : return c.UNKNOWN exceptionAsString", "condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue = StringHelper.prettyPython( dictionaryInstance, quote = quote,", "tabCount, nullValue = nullValue, trueValue = trueValue, falseValue = falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(),", "LogHelperHelper.printMessageLog(WRAPPER, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printFailure(message, condition=False, muteStackTrace=False, newLine=True, margin=True,", "c.NONE, trueValue = c.TRUE, falseValue = c.FALSE, logLevel = LOG, condition = True", "margin=margin) def printSetting(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SETTING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine,", "'WARNING' WRAPPER = 'WRAPPER' FAILURE = 'FAILURE' ERROR = 'ERROR' TEST = 'TEST'", "logLevel = LOG, condition = True ) : if condition : stdout, stderr", "= trueValue, falseValue = falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn 
= False ) LogHelperHelper.softLog(origin,", "= {} settings[SettingHelper.ACTIVE_ENVIRONMENT] = SettingHelper.getActiveEnvironment() for level in LogHelperHelper.LEVEL_DICTIONARY : status = EnvironmentHelper.get(level)", "INFO = 'INFO' SUCCESS = 'SUCCESS' SETTING = 'SETTING' DEBUG = 'DEBUG' WARNING", "python_helper.api.src.service import SettingHelper, StringHelper, EnvironmentHelper, ObjectHelper, ReflectionHelper LOG = 'LOG' INFO = 'INFO'", "[] # # async def asyncAsyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # while LOG_HELPER_SETTINGS[PRINTING]", "LogHelperHelper.printMessageLog(TEST, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def prettyPython( origin, message, dictionaryInstance, quote", "exception=None) : LogHelperHelper.printMessageLog(LOG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printInfo(message, condition=False, muteStackTrace=False,", "def success(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SUCCESS, muteStackTrace=muteStackTrace, newLine=newLine) def setting(origin,", "= c.BLANK try : tracebackMessage = traceback.format_exc() except : tracebackMessage = f'{c.NEW_LINE}' if", "import logging # LOGGER_INSTANCE = None # def loadLogger(logger) : # return logger", "message, TEST, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def printLog(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) :", "# def logIt(it, **kwargs) : # global OUTPUT_PRINT_LIST # shouldPrint = True if", "loadLogger(logger) : # return logger if ObjectHelper.isNotNone(logger) else logging.getLogger(__name__) def logIt(it, **kwargs) :", "LogHelperHelper.softLog(origin, message, LOG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def info(origin, message, level=INFO, exception=None, 
muteStackTrace=False, newLine=False)", "asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) # # def logIt(it, **kwargs) : # global OUTPUT_PRINT_LIST # shouldPrint =", "message, DEBUG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def warning(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin,", "= SettingHelper.getActiveEnvironment() for level in LogHelperHelper.LEVEL_DICTIONARY : status = EnvironmentHelper.get(level) settings[level] = status", "if ObjectHelper.isEmpty(exception) : return c.UNKNOWN exceptionAsString = str(exception) if c.NOTHING == exceptionAsString :", "0 == len(OUTPUT_PRINT_LIST) else False # OUTPUT_PRINT_LIST.append([it, kwargs]) # if shouldPrint : #", "# def loadLogger(logger) : # return logger if ObjectHelper.isNotNone(logger) else logging.getLogger(__name__) def logIt(it,", "# logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS, end=c.NOTHING) loadSettings() def log(origin, message, level=LOG, exception=None, muteStackTrace=False, newLine=False) :", "def printLog(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(LOG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine,", "'SUCCESS' SETTING = 'SETTING' DEBUG = 'DEBUG' WARNING = 'WARNING' WRAPPER = 'WRAPPER'", "exception=exception) def info(origin, message, level=INFO, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, INFO, muteStackTrace=muteStackTrace,", "message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, TEST, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def printLog(message,", "awaiting ------------------------------------------------------------------------') # 
print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') #", "muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(FAILURE, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def", "def loadLogger(logger) : # return logger if ObjectHelper.isNotNone(logger) else logging.getLogger(__name__) def logIt(it, **kwargs)", ": # global OUTPUT_PRINT_LIST # shouldPrint = True if 0 == len(OUTPUT_PRINT_LIST) else", "exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, TEST, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def printLog(message, condition=False,", "def printWarning(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WARNING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine,", "margin=margin, exception=exception) def printError(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(ERROR, message, condition=condition,", "info(origin, message, level=INFO, exception=None, 
muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, INFO, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception)", ": return exceptionAsString def getTracebackMessage(muteStackTrace) : tracebackMessage = c.BLANK try : tracebackMessage =", "error(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, ERROR, muteStackTrace=muteStackTrace, newLine=newLine) def", "message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarning(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None)", ": # global OUTPUT_PRINT_LIST # while 0 < len(OUTPUT_PRINT_LIST) : # asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) #", "message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, WARNING, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def wraper(origin,", "message, INFO, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def success(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message,", ": # asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) # # def logIt(it, **kwargs) : # global OUTPUT_PRINT_LIST #", ": LogHelperHelper.hardLog(origin, message, exception, WRAPPER, muteStackTrace=muteStackTrace, newLine=newLine) def failure(origin, message, exception, muteStackTrace=False, newLine=False)", "def printInfo(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(INFO, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine,", "SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyPythonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def", "# 
logging.error(it, **kwargs) # logging.log(msg=args[0], level=9) # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) #", ": stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue = StringHelper.prettyPython( dictionaryInstance, quote = quote, tabCount", "in LogHelperHelper.LEVEL_DICTIONARY : status = EnvironmentHelper.get(level) settings[level] = status if not status is", "def failure(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, FAILURE, muteStackTrace=muteStackTrace, newLine=newLine)", "= falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyPythonValue]),", "ObjectHelper.isNone(OUTPUT_PRINT_LIST) : # OUTPUT_PRINT_LIST = [] # except Exception as exception : #", "'DEBUG' WARNING = 'WARNING' WRAPPER = 'WRAPPER' FAILURE = 'FAILURE' ERROR = 'ERROR'", "# PRINTING = 'PRINTING' # def loadLogger() : # global OUTPUT_PRINT_LIST # try", "SettingHelper.getActiveEnvironment() for level in LogHelperHelper.LEVEL_DICTIONARY : status = EnvironmentHelper.get(level) settings[level] = status if", "newLine=newLine, margin=margin) def printDebug(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(DEBUG, message, condition=condition,", "logging # LOGGER_INSTANCE = None # def loadLogger(logger) : # return logger if", "exception=exception) def printInfo(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(INFO, message, condition=condition, muteStackTrace=muteStackTrace,", "getTracebackMessage(muteStackTrace) : tracebackMessage = c.BLANK try : tracebackMessage = traceback.format_exc() except : tracebackMessage", "None else c.TRUE LOG_HELPER_SETTINGS = settings # if PRINTING not in LOG_HELPER_SETTINGS :", "global 
LOG_HELPER_SETTINGS # import asyncio # global OUTPUT_PRINT_LIST # PRINTING = 'PRINTING' #", "asyncAsyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # while LOG_HELPER_SETTINGS[PRINTING] : # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "EnvironmentHelper.get(level) settings[level] = status if not status is None else c.TRUE LOG_HELPER_SETTINGS =", "def setting(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SETTING, muteStackTrace=muteStackTrace, newLine=newLine) def debug(origin,", "muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WARNING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def", "newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WRAPPER, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printFailure(message,", "nullValue = c.NULL_VALUE, trueValue = c.TRUE_VALUE, falseValue = c.FALSE_VALUE, logLevel = LOG, condition", "quote = c.SINGLE_QUOTE, tabCount = 0, nullValue = c.NONE, trueValue = c.TRUE, falseValue", "margin=True, exception=None) : LogHelperHelper.printMessageLog(FAILURE, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printError(message, condition=False,", "global LOG_HELPER_SETTINGS # while LOG_HELPER_SETTINGS[PRINTING] : # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # 
print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') #", "condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarper(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) :", "# print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # LOG_HELPER_SETTINGS[PRINTING] = True # print(itArgsAndKwargs[0], **itArgsAndKwargs[1]) # #", "logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG) colorama.deinit() settings = {} settings[SettingHelper.ACTIVE_ENVIRONMENT] = SettingHelper.getActiveEnvironment() for level in", "# global OUTPUT_PRINT_LIST # shouldPrint = True if 0 == len(OUTPUT_PRINT_LIST) else False", "def printOutput() : # global OUTPUT_PRINT_LIST # while 0 < len(OUTPUT_PRINT_LIST) : #", "newLine=newLine, margin=margin, exception=exception) def printTest(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(TEST, message,", "muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def prettyPython( origin, message, dictionaryInstance, quote = c.SINGLE_QUOTE, tabCount", "= tabCount, nullValue = nullValue, trueValue = trueValue, falseValue = falseValue, withColors =", "logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS, end=c.NOTHING) loadSettings() def log(origin, message, level=LOG, 
exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin,", "falseValue = c.FALSE, logLevel = LOG, condition = True ) : if condition", "margin=True, exception=None) : LogHelperHelper.printMessageLog(LOG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printInfo(message, condition=False,", "False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyPythonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def prettyJson( origin, message,", "message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, WRAPPER, muteStackTrace=muteStackTrace, newLine=newLine) def failure(origin,", "print(itArgsAndKwargs[0], **itArgsAndKwargs[1]) # # async def asyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # await", "RESET_ALL_COLORS = colorama.Style.RESET_ALL from python_helper.api.src.helper import LogHelperHelper global LOG_HELPER_SETTINGS # import asyncio #", "OUTPUT_PRINT_LIST # shouldPrint = True if 0 == len(OUTPUT_PRINT_LIST) else False # OUTPUT_PRINT_LIST.append([it,", "newLine=newLine, exception=exception) def warning(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, WARNING, muteStackTrace=muteStackTrace,", "LogHelperHelper.printMessageLog(INFO, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printSuccess(message, condition=False, muteStackTrace=False, newLine=True, margin=True)", "False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyJsonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def getExceptionMessage(exception) : if", "# LOG_HELPER_SETTINGS[PRINTING] = False # # async def printOutput() : # global OUTPUT_PRINT_LIST", "muteStackTrace=False, newLine=True, margin=True, 
exception=None) : LogHelperHelper.printMessageLog(WRAPPER, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def", "0, nullValue = c.NULL_VALUE, trueValue = c.TRUE_VALUE, falseValue = c.FALSE_VALUE, logLevel = LOG,", "message, SETTING, muteStackTrace=muteStackTrace, newLine=newLine) def debug(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message,", "newLine=newLine, margin=margin, exception=exception) def printError(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(ERROR, message,", "print('------------------------------------------------------------------------ awaiting ------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "LogHelperHelper.printMessageLog(WARNING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarper(message, condition=False, muteStackTrace=False, newLine=True, margin=True,", "newLine=True, margin=True, exception=None) : 
LogHelperHelper.printMessageLog(FAILURE, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printError(message,", "LOG_HELPER_SETTINGS # import asyncio # global OUTPUT_PRINT_LIST # PRINTING = 'PRINTING' # def", "while LOG_HELPER_SETTINGS[PRINTING] : # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "asyncio # global OUTPUT_PRINT_LIST # PRINTING = 'PRINTING' # def loadLogger() : #", "condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(TEST, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception)", "= False if SettingHelper.activeEnvironmentIsLocal() : colorama.init() # logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS, end=c.NOTHING) loadSettings() def log(origin,", "getExceptionMessage(exception) : if ObjectHelper.isEmpty(exception) : return c.UNKNOWN exceptionAsString = str(exception) if c.NOTHING ==", "# # async def printOutput() : # global OUTPUT_PRINT_LIST # while 0 <", "muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SETTING, muteStackTrace=muteStackTrace, 
newLine=newLine) def debug(origin, message, exception=None, muteStackTrace=False,", "Constant as c from python_helper.api.src.service import SettingHelper, StringHelper, EnvironmentHelper, ObjectHelper, ReflectionHelper LOG =", "WARNING, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def wraper(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message,", "traceback from python_helper.api.src.domain import Constant as c from python_helper.api.src.service import SettingHelper, StringHelper, EnvironmentHelper,", "muteStackTrace=muteStackTrace, newLine=newLine) def error(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, ERROR,", "condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(FAILURE, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception)", "if shouldPrint : # printOutput() # import logging # LOGGER_INSTANCE = None #", "# while 0 < len(OUTPUT_PRINT_LIST) : # asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) # # def logIt(it, **kwargs)", "SettingHelper, StringHelper, EnvironmentHelper, ObjectHelper, ReflectionHelper LOG = 'LOG' INFO = 'INFO' SUCCESS =", "def getTracebackMessage(muteStackTrace) : tracebackMessage = c.BLANK try : tracebackMessage = traceback.format_exc() except :", "return ReflectionHelper.getName(exception.__class__) else : return exceptionAsString def getTracebackMessage(muteStackTrace) : tracebackMessage = c.BLANK try", "exception=None) : LogHelperHelper.printMessageLog(ERROR, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printTest(message, condition=False, muteStackTrace=False,", ": tracebackMessage = c.BLANK try : tracebackMessage = traceback.format_exc() except : tracebackMessage =", "# logging.log(msg=args[0], 
level=9) # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) # logger.info(it) print(it, **kwargs)", "LogHelperHelper.printMessageLog(DEBUG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarning(message, condition=False, muteStackTrace=False, newLine=True, margin=True,", "withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyPythonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout,", "# try : # if ObjectHelper.isNone(OUTPUT_PRINT_LIST) : # OUTPUT_PRINT_LIST = [] # except", "prettyJson( origin, message, dictionaryInstance, quote = c.DOUBLE_QUOTE, tabCount = 0, nullValue = c.NULL_VALUE,", "debug(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, DEBUG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def", ": if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue = StringHelper.prettyPython( dictionaryInstance, quote", "newLine=newLine) def error(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, ERROR, muteStackTrace=muteStackTrace,", "muteStackTrace : return StringHelper.join(tracebackMessage.split(c.NEW_LINE)[-2:], character=c.NEW_LINE) return LogHelperHelper.NO_TRACEBACK_PRESENT_MESSAGE if LogHelperHelper.NO_TRACEBACK_PRESENT == str(tracebackMessage) else tracebackMessage", "= 'ERROR' TEST = 'TEST' RESET_ALL_COLORS = colorama.Style.RESET_ALL from python_helper.api.src.helper import LogHelperHelper global", "EnvironmentHelper.overrideSoutStatus(stdout, stderr) def prettyJson( origin, message, dictionaryInstance, quote = c.DOUBLE_QUOTE, tabCount = 0,", "margin=margin, exception=exception) def printWarper(message, condition=False, muteStackTrace=False, newLine=True, 
margin=True, exception=None) : LogHelperHelper.printMessageLog(WRAPPER, message, condition=condition,", "logger.setLevel(logging.DEBUG) ###- logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG) colorama.deinit() settings = {} settings[SettingHelper.ACTIVE_ENVIRONMENT] = SettingHelper.getActiveEnvironment() for", "= c.TRUE_VALUE, falseValue = c.FALSE_VALUE, logLevel = LOG, condition = True ) :", "muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, DEBUG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def warning(origin, message, exception=None,", ": LogHelperHelper.printMessageLog(DEBUG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarning(message, condition=False, muteStackTrace=False, newLine=True,", "if PRINTING not in LOG_HELPER_SETTINGS : # LOG_HELPER_SETTINGS[PRINTING] = False if SettingHelper.activeEnvironmentIsLocal() :", "condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printError(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) :", "python_helper.api.src.domain import Constant as c from python_helper.api.src.service import SettingHelper, StringHelper, EnvironmentHelper, ObjectHelper, ReflectionHelper", "trueValue = trueValue, falseValue = falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False )", "margin=True, exception=None) : LogHelperHelper.printMessageLog(TEST, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def prettyPython( origin,", "ERROR, muteStackTrace=muteStackTrace, newLine=newLine) def test(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, TEST,", "'ERROR' TEST = 'TEST' RESET_ALL_COLORS = colorama.Style.RESET_ALL from 
python_helper.api.src.helper import LogHelperHelper global LOG_HELPER_SETTINGS", "exception=None) : LogHelperHelper.printMessageLog(WARNING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarper(message, condition=False, muteStackTrace=False,", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('------------------------------------------------------------------------", "muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SETTING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printDebug(message, condition=False,", "def test(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, TEST, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception)", "0 < len(OUTPUT_PRINT_LIST) : # asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) # # def logIt(it, **kwargs) : #", "encoding='utf-8', level=logging.DEBUG) 
colorama.deinit() settings = {} settings[SettingHelper.ACTIVE_ENVIRONMENT] = SettingHelper.getActiveEnvironment() for level in LogHelperHelper.LEVEL_DICTIONARY", "== len(OUTPUT_PRINT_LIST) else False # OUTPUT_PRINT_LIST.append([it, kwargs]) # if shouldPrint : # printOutput()", "printDebug(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(DEBUG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin,", "logIt(RESET_ALL_COLORS, end=c.NOTHING) loadSettings() def log(origin, message, level=LOG, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message,", "setting(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SETTING, muteStackTrace=muteStackTrace, newLine=newLine) def debug(origin, message,", "except Exception as exception : # OUTPUT_PRINT_LIST = [] # # async def", "LOG = 'LOG' INFO = 'INFO' SUCCESS = 'SUCCESS' SETTING = 'SETTING' DEBUG", "colorama, traceback from python_helper.api.src.domain import Constant as c from python_helper.api.src.service import SettingHelper, StringHelper,", "False # OUTPUT_PRINT_LIST.append([it, kwargs]) # if shouldPrint : # printOutput() # import logging", "muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(LOG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def", "newLine=newLine, margin=margin, exception=exception) def prettyPython( origin, message, dictionaryInstance, quote = c.SINGLE_QUOTE, tabCount =", "EnvironmentHelper.overrideSoutStatus(stdout, stderr) def getExceptionMessage(exception) : if ObjectHelper.isEmpty(exception) : return c.UNKNOWN exceptionAsString = str(exception)", "import colorama, traceback from python_helper.api.src.domain import Constant as c from python_helper.api.src.service import SettingHelper,", 
"margin=True) : LogHelperHelper.printMessageLog(SUCCESS, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printSetting(message, condition=False, muteStackTrace=False, newLine=True,", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WARNING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarper(message,", "try : # if ObjectHelper.isNone(OUTPUT_PRINT_LIST) : # OUTPUT_PRINT_LIST = [] # except Exception", ": # if ObjectHelper.isNone(OUTPUT_PRINT_LIST) : # OUTPUT_PRINT_LIST = [] # except Exception as", "import LogHelperHelper global LOG_HELPER_SETTINGS # import asyncio # global OUTPUT_PRINT_LIST # PRINTING =", "muteStackTrace=False, newLine=True, margin=True) : 
LogHelperHelper.printMessageLog(SUCCESS, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printSetting(message, condition=False,", "muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def warning(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, WARNING,", "newLine=newLine, margin=margin, exception=exception) def printWarning(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WARNING, message,", "message, dictionaryInstance, quote = c.SINGLE_QUOTE, tabCount = 0, nullValue = c.NONE, trueValue =", "condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printSuccess(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SUCCESS,", "trueValue, falseValue = falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message,", "not status is None else c.TRUE LOG_HELPER_SETTINGS = settings # if PRINTING not", "muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printSetting(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SETTING, message, condition=condition,", ": # global OUTPUT_PRINT_LIST # try : # if ObjectHelper.isNone(OUTPUT_PRINT_LIST) : # OUTPUT_PRINT_LIST", "message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printSuccess(message, condition=False, muteStackTrace=False, newLine=True, margin=True) :", "**kwargs) : # logging.error(it, **kwargs) # logging.log(msg=args[0], level=9) # logger = loadLogger(LOGGER_INSTANCE) #", "LogHelperHelper.hardLog(origin, message, exception, ERROR, muteStackTrace=muteStackTrace, newLine=newLine) def 
test(origin, message, exception=None, muteStackTrace=False, newLine=False) :", "# print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('------------------------------------------------------------------------ awaiting ------------------------------------------------------------------------') #", "await asyncAsyncPrintIt(itArgsAndKwargs) # LOG_HELPER_SETTINGS[PRINTING] = False # # async def printOutput() : #", "else False # OUTPUT_PRINT_LIST.append([it, kwargs]) # if shouldPrint : # printOutput() # import", "condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WRAPPER, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception)", "muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, WARNING, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def wraper(origin, message, exception,", "printWarper(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WRAPPER, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin,", "muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, INFO, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def success(origin, message, 
muteStackTrace=False,", "= c.FALSE_VALUE, logLevel = LOG, condition = True ) : if condition :", "newLine=newLine, exception=exception) def wraper(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, WRAPPER,", "status is None else c.TRUE LOG_HELPER_SETTINGS = settings # if PRINTING not in", "c.COLON_SPACE, *prettyJsonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def getExceptionMessage(exception) : if ObjectHelper.isEmpty(exception) : return c.UNKNOWN", "# import asyncio # global OUTPUT_PRINT_LIST # PRINTING = 'PRINTING' # def loadLogger()", "c.FALSE_VALUE, logLevel = LOG, condition = True ) : if condition : stdout,", "LogHelperHelper.hardLog(origin, message, exception, WRAPPER, muteStackTrace=muteStackTrace, newLine=newLine) def failure(origin, message, exception, muteStackTrace=False, newLine=False) :", "if not status is None else c.TRUE LOG_HELPER_SETTINGS = settings # if PRINTING", ": global LOG_HELPER_SETTINGS # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) ###- logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG)", ": LogHelperHelper.printMessageLog(FAILURE, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printError(message, condition=False, muteStackTrace=False, newLine=True,", "c.SINGLE_QUOTE, tabCount = 0, nullValue = c.NONE, trueValue = c.TRUE, falseValue = c.FALSE,", "if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue = StringHelper.prettyJson( dictionaryInstance, quote =", "StringHelper, EnvironmentHelper, ObjectHelper, ReflectionHelper LOG = 'LOG' INFO = 'INFO' SUCCESS = 'SUCCESS'", "logging.getLogger(__name__) def logIt(it, **kwargs) : # logging.error(it, **kwargs) # logging.log(msg=args[0], level=9) # logger", "EnvironmentHelper, ObjectHelper, ReflectionHelper LOG = 'LOG' INFO = 'INFO' SUCCESS = 
'SUCCESS' SETTING", "LOG_HELPER_SETTINGS[PRINTING] = False # # async def printOutput() : # global OUTPUT_PRINT_LIST #", "global LOG_HELPER_SETTINGS # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) ###- logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG) colorama.deinit()", "warning(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, WARNING, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def", "c.NULL_VALUE, trueValue = c.TRUE_VALUE, falseValue = c.FALSE_VALUE, logLevel = LOG, condition = True", "newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(LOG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printInfo(message,", "True ) : if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue = StringHelper.prettyPython(", "exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, ERROR, muteStackTrace=muteStackTrace, newLine=newLine) def test(origin, message,", "= None # def loadLogger(logger) : # return logger if ObjectHelper.isNotNone(logger) else logging.getLogger(__name__)", ": LogHelperHelper.softLog(origin, message, TEST, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def printLog(message, condition=False, muteStackTrace=False, newLine=True, margin=True,", "tabCount = tabCount, nullValue = nullValue, trueValue = trueValue, falseValue = falseValue, withColors", "LogHelperHelper.printMessageLog(SUCCESS, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printSetting(message, condition=False, muteStackTrace=False, newLine=True, margin=True) :", "= settings # if PRINTING not in LOG_HELPER_SETTINGS : # LOG_HELPER_SETTINGS[PRINTING] = False", "SETTING, muteStackTrace=muteStackTrace, newLine=newLine) 
def debug(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, DEBUG,", "exception=exception) def printError(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(ERROR, message, condition=condition, muteStackTrace=muteStackTrace,", "loadLogger() : # global OUTPUT_PRINT_LIST # try : # if ObjectHelper.isNone(OUTPUT_PRINT_LIST) : #", "margin=margin, exception=exception) def printTest(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(TEST, message, condition=condition,", "TEST, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def printLog(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(LOG,", ": LogHelperHelper.softLog(origin, message, INFO, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def success(origin, message, muteStackTrace=False, newLine=False) :", "'LOG' INFO = 'INFO' SUCCESS = 'SUCCESS' SETTING = 'SETTING' DEBUG = 'DEBUG'", "# OUTPUT_PRINT_LIST = [] # # async def asyncAsyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS", "origin, message, dictionaryInstance, quote = c.DOUBLE_QUOTE, tabCount = 0, nullValue = c.NULL_VALUE, trueValue", "= LOG, condition = True ) : if condition : stdout, stderr =", "OUTPUT_PRINT_LIST = [] # # async def asyncAsyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS #", "# print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('------------------------------------------------------------------------ awaiting 
------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') #", "muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, ERROR, muteStackTrace=muteStackTrace, newLine=newLine) def test(origin, message, exception=None,", "if c.NOTHING == exceptionAsString : return ReflectionHelper.getName(exception.__class__) else : return exceptionAsString def getTracebackMessage(muteStackTrace)", "message, level=INFO, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, INFO, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def", "# async def asyncAsyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # while LOG_HELPER_SETTINGS[PRINTING] : #", "print(it, **kwargs) def loadSettings() : global LOG_HELPER_SETTINGS # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG)", "printWarning(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WARNING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin,", "= str(exception) if c.NOTHING == exceptionAsString : return ReflectionHelper.getName(exception.__class__) else : return exceptionAsString", "exception : # OUTPUT_PRINT_LIST = [] # # async def asyncAsyncPrintIt(itArgsAndKwargs) : #", "exception=None) : LogHelperHelper.printMessageLog(WRAPPER, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printFailure(message, condition=False, muteStackTrace=False,", ": if condition : stdout, stderr = 
EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue = StringHelper.prettyJson( dictionaryInstance, quote", "dictionaryInstance, quote = c.SINGLE_QUOTE, tabCount = 0, nullValue = c.NONE, trueValue = c.TRUE,", "stderr) def getExceptionMessage(exception) : if ObjectHelper.isEmpty(exception) : return c.UNKNOWN exceptionAsString = str(exception) if", "def printSuccess(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SUCCESS, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin)", "dictionaryInstance, quote = quote, tabCount = tabCount, nullValue = nullValue, trueValue = trueValue,", "= True # print(itArgsAndKwargs[0], **itArgsAndKwargs[1]) # # async def asyncPrintIt(itArgsAndKwargs) : # global", "exceptionAsString = str(exception) if c.NOTHING == exceptionAsString : return ReflectionHelper.getName(exception.__class__) else : return", "muteStackTrace=muteStackTrace, newLine=newLine) def test(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, TEST, muteStackTrace=muteStackTrace,", "ReflectionHelper LOG = 'LOG' INFO = 'INFO' SUCCESS = 'SUCCESS' SETTING = 'SETTING'", "printFailure(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(FAILURE, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin,", "not in LOG_HELPER_SETTINGS : # LOG_HELPER_SETTINGS[PRINTING] = False if SettingHelper.activeEnvironmentIsLocal() : colorama.init() #", "import SettingHelper, StringHelper, EnvironmentHelper, ObjectHelper, ReflectionHelper LOG = 'LOG' INFO = 'INFO' SUCCESS", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # 
print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # LOG_HELPER_SETTINGS[PRINTING]", "'SETTING' DEBUG = 'DEBUG' WARNING = 'WARNING' WRAPPER = 'WRAPPER' FAILURE = 'FAILURE'", "# global OUTPUT_PRINT_LIST # PRINTING = 'PRINTING' # def loadLogger() : # global", "try : tracebackMessage = traceback.format_exc() except : tracebackMessage = f'{c.NEW_LINE}' if muteStackTrace :", "**kwargs) def loadSettings() : global LOG_HELPER_SETTINGS # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) ###-", "settings = {} settings[SettingHelper.ACTIVE_ENVIRONMENT] = SettingHelper.getActiveEnvironment() for level in LogHelperHelper.LEVEL_DICTIONARY : status =", "condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(DEBUG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception)", "from python_helper.api.src.service import SettingHelper, StringHelper, EnvironmentHelper, ObjectHelper, ReflectionHelper LOG = 'LOG' INFO =", ") : if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue = StringHelper.prettyPython( dictionaryInstance,", "# global 
LOG_HELPER_SETTINGS # while LOG_HELPER_SETTINGS[PRINTING] : # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", ": LogHelperHelper.softLog(origin, message, SETTING, muteStackTrace=muteStackTrace, newLine=newLine) def debug(origin, message, exception=None, muteStackTrace=False, newLine=False) :", "OUTPUT_PRINT_LIST.append([it, kwargs]) # if shouldPrint : # printOutput() # import logging # LOGGER_INSTANCE", "margin=margin, exception=exception) def printWarning(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WARNING, message, condition=condition,", "newLine=True, margin=True) : LogHelperHelper.printMessageLog(SETTING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printDebug(message, condition=False, muteStackTrace=False,", "exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, DEBUG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def warning(origin, message,", "PRINTING not in LOG_HELPER_SETTINGS : # LOG_HELPER_SETTINGS[PRINTING] = False if SettingHelper.activeEnvironmentIsLocal() : colorama.init()", "LOG, condition = True ) : if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus()", "# import logging # LOGGER_INSTANCE = None # def loadLogger(logger) : # return", "'INFO' SUCCESS = 'SUCCESS' SETTING = 'SETTING' DEBUG = 'DEBUG' WARNING = 'WARNING'", "dictionaryInstance, quote = c.DOUBLE_QUOTE, tabCount = 0, nullValue = 
c.NULL_VALUE, trueValue = c.TRUE_VALUE,", "condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue = StringHelper.prettyJson( dictionaryInstance, quote = quote,", "Exception as exception : # OUTPUT_PRINT_LIST = [] # # async def asyncAsyncPrintIt(itArgsAndKwargs)", "except : tracebackMessage = f'{c.NEW_LINE}' if muteStackTrace : return StringHelper.join(tracebackMessage.split(c.NEW_LINE)[-2:], character=c.NEW_LINE) return LogHelperHelper.NO_TRACEBACK_PRESENT_MESSAGE", "len(OUTPUT_PRINT_LIST) else False # OUTPUT_PRINT_LIST.append([it, kwargs]) # if shouldPrint : # printOutput() #", "= False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyPythonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def prettyJson( origin,", "< len(OUTPUT_PRINT_LIST) : # asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) # # def logIt(it, **kwargs) : # global", "exception=None) : LogHelperHelper.printMessageLog(INFO, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printSuccess(message, condition=False, muteStackTrace=False,", "LOG_HELPER_SETTINGS # await asyncAsyncPrintIt(itArgsAndKwargs) # LOG_HELPER_SETTINGS[PRINTING] = False # # async def printOutput()", "message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SUCCESS, muteStackTrace=muteStackTrace, newLine=newLine) def setting(origin, message, muteStackTrace=False,", "def wraper(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, WRAPPER, muteStackTrace=muteStackTrace, newLine=newLine)", "muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarning(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WARNING,", "printLog(message, condition=False, muteStackTrace=False, 
newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(LOG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin,", "muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(DEBUG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def", "muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, LOG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def info(origin, message, level=INFO,", ": status = EnvironmentHelper.get(level) settings[level] = status if not status is None else", "def error(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, ERROR, muteStackTrace=muteStackTrace, newLine=newLine)", "exception=exception) def prettyPython( origin, message, dictionaryInstance, quote = c.SINGLE_QUOTE, tabCount = 0, nullValue", "= c.SINGLE_QUOTE, tabCount = 0, nullValue = c.NONE, trueValue = c.TRUE, falseValue =", "newLine=False) : LogHelperHelper.softLog(origin, message, SUCCESS, muteStackTrace=muteStackTrace, newLine=newLine) def setting(origin, message, muteStackTrace=False, newLine=False) :", "in LOG_HELPER_SETTINGS : # LOG_HELPER_SETTINGS[PRINTING] = False if SettingHelper.activeEnvironmentIsLocal() : colorama.init() # logging.basicConfig(level=logging.DEBUG)", "message, exception, ERROR, muteStackTrace=muteStackTrace, newLine=newLine) def test(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin,", "= 'WRAPPER' FAILURE = 'FAILURE' ERROR = 'ERROR' TEST = 'TEST' RESET_ALL_COLORS =", ") LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyJsonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def getExceptionMessage(exception) : if ObjectHelper.isEmpty(exception)", "= [] # # async def 
asyncAsyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # while", "------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "def logIt(it, **kwargs) : # logging.error(it, **kwargs) # logging.log(msg=args[0], level=9) # logger =", ": LogHelperHelper.printMessageLog(SUCCESS, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printSetting(message, condition=False, muteStackTrace=False, newLine=True, margin=True)", "###- logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG) colorama.deinit() settings = {} settings[SettingHelper.ACTIVE_ENVIRONMENT] = SettingHelper.getActiveEnvironment() for level", "falseValue = falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE,", "# LOGGER_INSTANCE = None # def loadLogger(logger) : # return logger if ObjectHelper.isNotNone(logger)", 
"StringHelper.prettyJson( dictionaryInstance, quote = quote, tabCount = tabCount, nullValue = nullValue, trueValue =", ": # LOG_HELPER_SETTINGS[PRINTING] = False if SettingHelper.activeEnvironmentIsLocal() : colorama.init() # logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS, end=c.NOTHING)", "= True if 0 == len(OUTPUT_PRINT_LIST) else False # OUTPUT_PRINT_LIST.append([it, kwargs]) # if", ": LogHelperHelper.printMessageLog(LOG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printInfo(message, condition=False, muteStackTrace=False, newLine=True,", "True # print(itArgsAndKwargs[0], **itArgsAndKwargs[1]) # # async def asyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS", ") : if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue = StringHelper.prettyJson( dictionaryInstance,", "= SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyJsonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr)", "if ObjectHelper.isNone(OUTPUT_PRINT_LIST) : # OUTPUT_PRINT_LIST = [] # except Exception as exception :", "# if PRINTING not in LOG_HELPER_SETTINGS : # LOG_HELPER_SETTINGS[PRINTING] = False if SettingHelper.activeEnvironmentIsLocal()", "LOG_HELPER_SETTINGS # while LOG_HELPER_SETTINGS[PRINTING] : # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # 
print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "= quote, tabCount = tabCount, nullValue = nullValue, trueValue = trueValue, falseValue =", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # LOG_HELPER_SETTINGS[PRINTING] = True # print(itArgsAndKwargs[0], **itArgsAndKwargs[1]) # # async def asyncPrintIt(itArgsAndKwargs)", "exception=exception) def printWarning(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WARNING, message, condition=condition, muteStackTrace=muteStackTrace,", "exception=None) : LogHelperHelper.printMessageLog(FAILURE, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printError(message, condition=False, muteStackTrace=False,", "# OUTPUT_PRINT_LIST.append([it, kwargs]) # if shouldPrint : # printOutput() # import logging #", "= StringHelper.prettyJson( dictionaryInstance, quote = quote, tabCount = tabCount, nullValue = nullValue, trueValue", "# return logger if ObjectHelper.isNotNone(logger) else logging.getLogger(__name__) def logIt(it, **kwargs) : # logging.error(it,", ": LogHelperHelper.printMessageLog(TEST, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def prettyPython( origin, message, dictionaryInstance,", ": colorama.init() # logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS, end=c.NOTHING) loadSettings() def log(origin, message, level=LOG, exception=None, muteStackTrace=False,", "message, dictionaryInstance, quote = c.DOUBLE_QUOTE, tabCount = 0, nullValue = c.NULL_VALUE, trueValue =", "LogHelperHelper.softLog(origin, message, WARNING, muteStackTrace=muteStackTrace, newLine=newLine, 
exception=exception) def wraper(origin, message, exception, muteStackTrace=False, newLine=False) :", "= 'WARNING' WRAPPER = 'WRAPPER' FAILURE = 'FAILURE' ERROR = 'ERROR' TEST =", "message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printInfo(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None)", ": # printOutput() # import logging # LOGGER_INSTANCE = None # def loadLogger(logger)", "global OUTPUT_PRINT_LIST # shouldPrint = True if 0 == len(OUTPUT_PRINT_LIST) else False #", "**kwargs) # logging.log(msg=args[0], level=9) # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) # logger.info(it) print(it,", "StringHelper.join([message, c.COLON_SPACE, *prettyPythonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def prettyJson( origin, message, dictionaryInstance, quote =", "LOG_HELPER_SETTINGS[PRINTING] : # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') #", "# async def printOutput() : # global OUTPUT_PRINT_LIST # while 0 < len(OUTPUT_PRINT_LIST)", "shouldPrint : # printOutput() # import logging # LOGGER_INSTANCE = None # def", "else logging.getLogger(__name__) 
def logIt(it, **kwargs) : # logging.error(it, **kwargs) # logging.log(msg=args[0], level=9) #", "None # def loadLogger(logger) : # return logger if ObjectHelper.isNotNone(logger) else logging.getLogger(__name__) def", "newLine=newLine) def debug(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, DEBUG, muteStackTrace=muteStackTrace, newLine=newLine,", "exception=exception) def printLog(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(LOG, message, condition=condition, muteStackTrace=muteStackTrace,", "= f'{c.NEW_LINE}' if muteStackTrace : return StringHelper.join(tracebackMessage.split(c.NEW_LINE)[-2:], character=c.NEW_LINE) return LogHelperHelper.NO_TRACEBACK_PRESENT_MESSAGE if LogHelperHelper.NO_TRACEBACK_PRESENT ==", "len(OUTPUT_PRINT_LIST) : # asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) # # def logIt(it, **kwargs) : # global OUTPUT_PRINT_LIST", "newLine=newLine) def setting(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SETTING, muteStackTrace=muteStackTrace, newLine=newLine) def", "nullValue = nullValue, trueValue = trueValue, falseValue = falseValue, withColors = SettingHelper.activeEnvironmentIsLocal(), joinAtReturn", "SUCCESS = 'SUCCESS' SETTING = 'SETTING' DEBUG = 'DEBUG' WARNING = 'WARNING' WRAPPER", "muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, FAILURE, muteStackTrace=muteStackTrace, newLine=newLine) def error(origin, message, exception,", "f'{c.NEW_LINE}' if muteStackTrace : return StringHelper.join(tracebackMessage.split(c.NEW_LINE)[-2:], character=c.NEW_LINE) return LogHelperHelper.NO_TRACEBACK_PRESENT_MESSAGE if LogHelperHelper.NO_TRACEBACK_PRESENT == str(tracebackMessage)", "= traceback.format_exc() except : tracebackMessage = f'{c.NEW_LINE}' if muteStackTrace : return 
StringHelper.join(tracebackMessage.split(c.NEW_LINE)[-2:], character=c.NEW_LINE)", "ObjectHelper, ReflectionHelper LOG = 'LOG' INFO = 'INFO' SUCCESS = 'SUCCESS' SETTING =", "OUTPUT_PRINT_LIST # PRINTING = 'PRINTING' # def loadLogger() : # global OUTPUT_PRINT_LIST #", "if 0 == len(OUTPUT_PRINT_LIST) else False # OUTPUT_PRINT_LIST.append([it, kwargs]) # if shouldPrint :", "return exceptionAsString def getTracebackMessage(muteStackTrace) : tracebackMessage = c.BLANK try : tracebackMessage = traceback.format_exc()", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('------------------------------------------------------------------------ awaiting ------------------------------------------------------------------------')", "newLine=False) : LogHelperHelper.softLog(origin, message, WARNING, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def wraper(origin, message, exception, muteStackTrace=False,", "newLine=False) : LogHelperHelper.softLog(origin, message, SETTING, muteStackTrace=muteStackTrace, newLine=newLine) def debug(origin, message, exception=None, muteStackTrace=False, newLine=False)", "exception=None) : LogHelperHelper.printMessageLog(DEBUG, message, 
condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarning(message, condition=False, muteStackTrace=False,", "origin, message, dictionaryInstance, quote = c.SINGLE_QUOTE, tabCount = 0, nullValue = c.NONE, trueValue", "quote, tabCount = tabCount, nullValue = nullValue, trueValue = trueValue, falseValue = falseValue,", "exception=exception) def success(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SUCCESS, muteStackTrace=muteStackTrace, newLine=newLine) def", "'WRAPPER' FAILURE = 'FAILURE' ERROR = 'ERROR' TEST = 'TEST' RESET_ALL_COLORS = colorama.Style.RESET_ALL", "newLine=newLine) def test(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, TEST, muteStackTrace=muteStackTrace, newLine=newLine,", "newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(INFO, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printSuccess(message,", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # LOG_HELPER_SETTINGS[PRINTING] = True # print(itArgsAndKwargs[0], **itArgsAndKwargs[1]) #", "= 'DEBUG' WARNING = 'WARNING' WRAPPER = 'WRAPPER' FAILURE = 'FAILURE' ERROR =", "WARNING = 'WARNING' WRAPPER = 'WRAPPER' FAILURE = 'FAILURE' ERROR = 'ERROR' TEST", ": # OUTPUT_PRINT_LIST = [] # # async def asyncAsyncPrintIt(itArgsAndKwargs) : # global", ": LogHelperHelper.hardLog(origin, message, 
exception, FAILURE, muteStackTrace=muteStackTrace, newLine=newLine) def error(origin, message, exception, muteStackTrace=False, newLine=False)", "c.UNKNOWN exceptionAsString = str(exception) if c.NOTHING == exceptionAsString : return ReflectionHelper.getName(exception.__class__) else :", "def printTest(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(TEST, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine,", "global LOG_HELPER_SETTINGS # await asyncAsyncPrintIt(itArgsAndKwargs) # LOG_HELPER_SETTINGS[PRINTING] = False # # async def", "loadSettings() : global LOG_HELPER_SETTINGS # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) ###- logging.basicConfig(filename='example.log', encoding='utf-8',", "SettingHelper.activeEnvironmentIsLocal(), joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyJsonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # LOG_HELPER_SETTINGS[PRINTING] = True", "def log(origin, message, level=LOG, exception=None, 
muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, LOG, muteStackTrace=muteStackTrace, newLine=newLine,", "DEBUG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def warning(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message,", "LogHelperHelper.printMessageLog(ERROR, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printTest(message, condition=False, muteStackTrace=False, newLine=True, margin=True,", "# printOutput() # import logging # LOGGER_INSTANCE = None # def loadLogger(logger) :", "colorama.Style.RESET_ALL from python_helper.api.src.helper import LogHelperHelper global LOG_HELPER_SETTINGS # import asyncio # global OUTPUT_PRINT_LIST", "**kwargs) : # global OUTPUT_PRINT_LIST # shouldPrint = True if 0 == len(OUTPUT_PRINT_LIST)", "def info(origin, message, level=INFO, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, INFO, muteStackTrace=muteStackTrace, newLine=newLine,", "colorama.init() # logging.basicConfig(level=logging.DEBUG) logIt(RESET_ALL_COLORS, end=c.NOTHING) loadSettings() def log(origin, message, level=LOG, exception=None, muteStackTrace=False, newLine=False)", "exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, LOG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def info(origin, message,", ": return ReflectionHelper.getName(exception.__class__) else : return exceptionAsString def getTracebackMessage(muteStackTrace) : tracebackMessage = c.BLANK", "settings # if PRINTING not in LOG_HELPER_SETTINGS : # LOG_HELPER_SETTINGS[PRINTING] = False if", "muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printDebug(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(DEBUG, message,", 
"import Constant as c from python_helper.api.src.service import SettingHelper, StringHelper, EnvironmentHelper, ObjectHelper, ReflectionHelper LOG", "LogHelperHelper.printMessageLog(SETTING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printDebug(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None)", "end=c.NOTHING) loadSettings() def log(origin, message, level=LOG, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, LOG,", "# print('------------------------------------------------------------------------ awaiting ------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') #", "message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printFailure(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None)", "condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WARNING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception)", "# global LOG_HELPER_SETTINGS # await asyncAsyncPrintIt(itArgsAndKwargs) # LOG_HELPER_SETTINGS[PRINTING] = False # # async", 
"asyncAsyncPrintIt(itArgsAndKwargs) # LOG_HELPER_SETTINGS[PRINTING] = False # # async def printOutput() : # global", "level=LOG, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, LOG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def info(origin,", "*prettyPythonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def prettyJson( origin, message, dictionaryInstance, quote = c.DOUBLE_QUOTE, tabCount", ": LogHelperHelper.printMessageLog(ERROR, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printTest(message, condition=False, muteStackTrace=False, newLine=True,", "newLine=newLine) def failure(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, FAILURE, muteStackTrace=muteStackTrace,", "= c.FALSE, logLevel = LOG, condition = True ) : if condition :", "# print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # LOG_HELPER_SETTINGS[PRINTING] = True # print(itArgsAndKwargs[0], **itArgsAndKwargs[1])", "newLine=True, margin=True) : LogHelperHelper.printMessageLog(SUCCESS, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printSetting(message, condition=False, muteStackTrace=False,", "message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printError(message, condition=False, muteStackTrace=False, newLine=True, 
margin=True, exception=None)", "LogHelperHelper.softLog(origin, message, TEST, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def printLog(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None)", "printTest(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(TEST, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin,", "else c.TRUE LOG_HELPER_SETTINGS = settings # if PRINTING not in LOG_HELPER_SETTINGS : #", "# print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('------------------------------------------------------------------------ awaiting ------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') #", "newLine=newLine, margin=margin, exception=exception) def printFailure(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(FAILURE, message,", "condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarning(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) :", "INFO, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def success(origin, message, muteStackTrace=False, newLine=False) : 
LogHelperHelper.softLog(origin, message, SUCCESS,", "PRINTING = 'PRINTING' # def loadLogger() : # global OUTPUT_PRINT_LIST # try :", "def debug(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, DEBUG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception)", "# # async def asyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # await asyncAsyncPrintIt(itArgsAndKwargs) #", "margin=margin, exception=exception) def printInfo(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(INFO, message, condition=condition,", "muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printSuccess(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SUCCESS, message,", "exception=exception) def printWarper(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WRAPPER, message, condition=condition, muteStackTrace=muteStackTrace,", "= 'FAILURE' ERROR = 'ERROR' TEST = 'TEST' RESET_ALL_COLORS = colorama.Style.RESET_ALL from python_helper.api.src.helper", "muteStackTrace=muteStackTrace, newLine=newLine) def setting(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SETTING, muteStackTrace=muteStackTrace, newLine=newLine)", "muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printError(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(ERROR,", "LogHelperHelper.LEVEL_DICTIONARY : status = EnvironmentHelper.get(level) settings[level] = status if not status is None", "stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue = StringHelper.prettyJson( dictionaryInstance, quote = quote, tabCount =", "c.TRUE 
LOG_HELPER_SETTINGS = settings # if PRINTING not in LOG_HELPER_SETTINGS : # LOG_HELPER_SETTINGS[PRINTING]", "newLine=False) : LogHelperHelper.softLog(origin, message, TEST, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def printLog(message, condition=False, muteStackTrace=False, newLine=True,", "= 0, nullValue = c.NONE, trueValue = c.TRUE, falseValue = c.FALSE, logLevel =", "asyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # await asyncAsyncPrintIt(itArgsAndKwargs) # LOG_HELPER_SETTINGS[PRINTING] = False #", "# if shouldPrint : # printOutput() # import logging # LOGGER_INSTANCE = None", "test(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, TEST, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def", "LogHelperHelper.printMessageLog(FAILURE, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printError(message, condition=False, muteStackTrace=False, newLine=True, margin=True,", "global OUTPUT_PRINT_LIST # PRINTING = 'PRINTING' # def loadLogger() : # global OUTPUT_PRINT_LIST", "shouldPrint = True if 0 == len(OUTPUT_PRINT_LIST) else False # OUTPUT_PRINT_LIST.append([it, kwargs]) #", "# LOG_HELPER_SETTINGS[PRINTING] = True # print(itArgsAndKwargs[0], **itArgsAndKwargs[1]) # # async def asyncPrintIt(itArgsAndKwargs) :", "LogHelperHelper.softLog(origin, message, DEBUG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def warning(origin, message, exception=None, muteStackTrace=False, newLine=False) :", ") LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyPythonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def prettyJson( origin, message, dictionaryInstance,", "as exception : # OUTPUT_PRINT_LIST = [] # # async def asyncAsyncPrintIt(itArgsAndKwargs) :", "= c.TRUE, falseValue = c.FALSE, logLevel = LOG, condition = 
True ) :", "True ) : if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue = StringHelper.prettyJson(", "newLine=False) : LogHelperHelper.softLog(origin, message, DEBUG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def warning(origin, message, exception=None, muteStackTrace=False,", ": LogHelperHelper.softLog(origin, message, LOG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def info(origin, message, level=INFO, exception=None, muteStackTrace=False,", "LOG_HELPER_SETTINGS[PRINTING] = True # print(itArgsAndKwargs[0], **itArgsAndKwargs[1]) # # async def asyncPrintIt(itArgsAndKwargs) : #", "# global OUTPUT_PRINT_LIST # while 0 < len(OUTPUT_PRINT_LIST) : # asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) # #", "loadSettings() def log(origin, message, level=LOG, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, LOG, muteStackTrace=muteStackTrace,", "# print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('------------------------------------------------------------------------ awaiting ------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') #", "def printWarper(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WRAPPER, message, 
condition=condition, muteStackTrace=muteStackTrace, newLine=newLine,", "message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printSetting(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SETTING,", ": LogHelperHelper.printMessageLog(WRAPPER, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printFailure(message, condition=False, muteStackTrace=False, newLine=True,", ": LogHelperHelper.softLog(origin, message, DEBUG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def warning(origin, message, exception=None, muteStackTrace=False, newLine=False)", "logger.info(it) print(it, **kwargs) def loadSettings() : global LOG_HELPER_SETTINGS # logger = loadLogger(LOGGER_INSTANCE) #", "# print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('------------------------------------------------------------------------ awaiting", "LogHelperHelper.softLog(origin, message, SETTING, muteStackTrace=muteStackTrace, newLine=newLine) def debug(origin, message, exception=None, muteStackTrace=False, newLine=False) : 
LogHelperHelper.softLog(origin,", "traceback.format_exc() except : tracebackMessage = f'{c.NEW_LINE}' if muteStackTrace : return StringHelper.join(tracebackMessage.split(c.NEW_LINE)[-2:], character=c.NEW_LINE) return", "newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(TEST, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def prettyPython(", "global OUTPUT_PRINT_LIST # while 0 < len(OUTPUT_PRINT_LIST) : # asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) # # def", "0, nullValue = c.NONE, trueValue = c.TRUE, falseValue = c.FALSE, logLevel = LOG,", "logIt(it, **kwargs) : # logging.error(it, **kwargs) # logging.log(msg=args[0], level=9) # logger = loadLogger(LOGGER_INSTANCE)", "c.TRUE_VALUE, falseValue = c.FALSE_VALUE, logLevel = LOG, condition = True ) : if", "margin=margin, exception=exception) def prettyPython( origin, message, dictionaryInstance, quote = c.SINGLE_QUOTE, tabCount = 0,", "# logger.setLevel(logging.DEBUG) # logger.info(it) print(it, **kwargs) def loadSettings() : global LOG_HELPER_SETTINGS # logger", "def asyncAsyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # while LOG_HELPER_SETTINGS[PRINTING] : # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') #", "joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyPythonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def prettyJson(", "StringHelper.prettyPython( dictionaryInstance, quote = quote, tabCount = tabCount, nullValue = nullValue, trueValue =", "is None else c.TRUE LOG_HELPER_SETTINGS = settings # if PRINTING not in LOG_HELPER_SETTINGS", "stderr = EnvironmentHelper.getCurrentSoutStatus() prettyPythonValue = StringHelper.prettyPython( dictionaryInstance, quote = quote, tabCount = tabCount,", "trueValue = 
c.TRUE_VALUE, falseValue = c.FALSE_VALUE, logLevel = LOG, condition = True )", "level=9) # logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) # logger.info(it) print(it, **kwargs) def loadSettings()", "muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(TEST, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def", "failure(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, FAILURE, muteStackTrace=muteStackTrace, newLine=newLine) def", "level=logging.DEBUG) colorama.deinit() settings = {} settings[SettingHelper.ACTIVE_ENVIRONMENT] = SettingHelper.getActiveEnvironment() for level in LogHelperHelper.LEVEL_DICTIONARY :", "WRAPPER = 'WRAPPER' FAILURE = 'FAILURE' ERROR = 'ERROR' TEST = 'TEST' RESET_ALL_COLORS", "margin=True, exception=None) : LogHelperHelper.printMessageLog(DEBUG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarning(message, condition=False,", "condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printSetting(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SETTING, message,", "# async def asyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # await asyncAsyncPrintIt(itArgsAndKwargs) # LOG_HELPER_SETTINGS[PRINTING]", "muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printWarper(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WRAPPER,", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # 
print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # LOG_HELPER_SETTINGS[PRINTING] = True # print(itArgsAndKwargs[0], **itArgsAndKwargs[1]) # # async", "muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def printLog(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(LOG, message,", "muteStackTrace=muteStackTrace, newLine=newLine) def failure(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, FAILURE,", "exception=exception) def printFailure(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(FAILURE, message, condition=condition, muteStackTrace=muteStackTrace,", "= 'LOG' INFO = 'INFO' SUCCESS = 'SUCCESS' SETTING = 'SETTING' DEBUG =", "DEBUG = 'DEBUG' WARNING = 'WARNING' WRAPPER = 'WRAPPER' FAILURE = 'FAILURE' ERROR", "c.TRUE, falseValue = c.FALSE, logLevel = LOG, condition = True ) : if", "SETTING = 'SETTING' DEBUG = 'DEBUG' WARNING = 'WARNING' WRAPPER = 'WRAPPER' FAILURE", "nullValue = c.NONE, trueValue = c.TRUE, falseValue = c.FALSE, logLevel = LOG, condition", "def printDebug(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(DEBUG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine,", "# logger = loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) # logger.info(it) print(it, **kwargs) def loadSettings() :", "# print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # 
print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # LOG_HELPER_SETTINGS[PRINTING] =", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # LOG_HELPER_SETTINGS[PRINTING] = True # print(itArgsAndKwargs[0],", "= EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue = StringHelper.prettyJson( dictionaryInstance, quote = quote, tabCount = tabCount, nullValue", ": LogHelperHelper.softLog(origin, message, WARNING, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def wraper(origin, message, exception, muteStackTrace=False, newLine=False)", "SUCCESS, muteStackTrace=muteStackTrace, newLine=newLine) def setting(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SETTING, muteStackTrace=muteStackTrace,", "def 
logIt(it, **kwargs) : # global OUTPUT_PRINT_LIST # shouldPrint = True if 0", "joinAtReturn = False ) LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyJsonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def getExceptionMessage(exception)", "**itArgsAndKwargs[1]) # # async def asyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # await asyncAsyncPrintIt(itArgsAndKwargs)", "str(exception) if c.NOTHING == exceptionAsString : return ReflectionHelper.getName(exception.__class__) else : return exceptionAsString def", "LogHelperHelper.softLog(origin, StringHelper.join([message, c.COLON_SPACE, *prettyPythonValue]), logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def prettyJson( origin, message, dictionaryInstance, quote", "def getExceptionMessage(exception) : if ObjectHelper.isEmpty(exception) : return c.UNKNOWN exceptionAsString = str(exception) if c.NOTHING", "print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------') # print('------------------------------------------------------------------------ awaiting ------------------------------------------------------------------------') # print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "def asyncPrintIt(itArgsAndKwargs) : # global 
LOG_HELPER_SETTINGS # await asyncAsyncPrintIt(itArgsAndKwargs) # LOG_HELPER_SETTINGS[PRINTING] = False", "settings[SettingHelper.ACTIVE_ENVIRONMENT] = SettingHelper.getActiveEnvironment() for level in LogHelperHelper.LEVEL_DICTIONARY : status = EnvironmentHelper.get(level) settings[level] =", "exception, FAILURE, muteStackTrace=muteStackTrace, newLine=newLine) def error(origin, message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message,", "condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(INFO, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception)", "newLine=newLine, margin=margin, exception=exception) def printWarper(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(WRAPPER, message,", "printSuccess(message, condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SUCCESS, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def", "margin=margin, exception=exception) def printFailure(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(FAILURE, message, condition=condition,", "newLine=newLine, exception=exception) def success(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SUCCESS, muteStackTrace=muteStackTrace, newLine=newLine)", "newLine=False) : LogHelperHelper.hardLog(origin, message, exception, FAILURE, muteStackTrace=muteStackTrace, newLine=newLine) def error(origin, message, exception, muteStackTrace=False,", "def prettyPython( origin, message, dictionaryInstance, quote = c.SINGLE_QUOTE, tabCount = 0, nullValue =", "exception=exception) def wraper(origin, message, exception, muteStackTrace=False, newLine=False) : 
LogHelperHelper.hardLog(origin, message, exception, WRAPPER, muteStackTrace=muteStackTrace,", "message, exception, muteStackTrace=False, newLine=False) : LogHelperHelper.hardLog(origin, message, exception, ERROR, muteStackTrace=muteStackTrace, newLine=newLine) def test(origin,", "# # def logIt(it, **kwargs) : # global OUTPUT_PRINT_LIST # shouldPrint = True", "LogHelperHelper global LOG_HELPER_SETTINGS # import asyncio # global OUTPUT_PRINT_LIST # PRINTING = 'PRINTING'", "muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(ERROR, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def", "while 0 < len(OUTPUT_PRINT_LIST) : # asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) # # def logIt(it, **kwargs) :", "= 'TEST' RESET_ALL_COLORS = colorama.Style.RESET_ALL from python_helper.api.src.helper import LogHelperHelper global LOG_HELPER_SETTINGS # import", "OUTPUT_PRINT_LIST # while 0 < len(OUTPUT_PRINT_LIST) : # asyncio.run(asyncPrintIt(OUTPUT_PRINT_LIST.pop(0))) # # def logIt(it,", ": LogHelperHelper.printMessageLog(SETTING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printDebug(message, condition=False, muteStackTrace=False, newLine=True, margin=True,", "# def loadLogger() : # global OUTPUT_PRINT_LIST # try : # if ObjectHelper.isNone(OUTPUT_PRINT_LIST)", "= True ) : if condition : stdout, stderr = EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue =", "log(origin, message, level=LOG, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, LOG, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception)", "# await asyncAsyncPrintIt(itArgsAndKwargs) # LOG_HELPER_SETTINGS[PRINTING] = False # # async def printOutput() :", "async def asyncAsyncPrintIt(itArgsAndKwargs) : # global LOG_HELPER_SETTINGS # while LOG_HELPER_SETTINGS[PRINTING] : # 
print('----------------------------------------------------------------------------------------------------------------------------------------------------------')", "== exceptionAsString : return ReflectionHelper.getName(exception.__class__) else : return exceptionAsString def getTracebackMessage(muteStackTrace) : tracebackMessage", ": tracebackMessage = f'{c.NEW_LINE}' if muteStackTrace : return StringHelper.join(tracebackMessage.split(c.NEW_LINE)[-2:], character=c.NEW_LINE) return LogHelperHelper.NO_TRACEBACK_PRESENT_MESSAGE if", "stderr = EnvironmentHelper.getCurrentSoutStatus() prettyJsonValue = StringHelper.prettyJson( dictionaryInstance, quote = quote, tabCount = tabCount,", "muteStackTrace=muteStackTrace, newLine=newLine) def debug(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, DEBUG, muteStackTrace=muteStackTrace,", "OUTPUT_PRINT_LIST = [] # except Exception as exception : # OUTPUT_PRINT_LIST = []", "logLevel) EnvironmentHelper.overrideSoutStatus(stdout, stderr) def getExceptionMessage(exception) : if ObjectHelper.isEmpty(exception) : return c.UNKNOWN exceptionAsString =", "exception, ERROR, muteStackTrace=muteStackTrace, newLine=newLine) def test(origin, message, exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message,", "level in LogHelperHelper.LEVEL_DICTIONARY : status = EnvironmentHelper.get(level) settings[level] = status if not status", "muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def success(origin, message, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, SUCCESS, muteStackTrace=muteStackTrace,", "exception=None, muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, WARNING, muteStackTrace=muteStackTrace, newLine=newLine, exception=exception) def wraper(origin, message,", "newLine=newLine, exception=exception) def info(origin, message, level=INFO, exception=None, 
muteStackTrace=False, newLine=False) : LogHelperHelper.softLog(origin, message, INFO,", "condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printDebug(message, condition=False, muteStackTrace=False, newLine=True, margin=True, exception=None) : LogHelperHelper.printMessageLog(DEBUG,", "= loadLogger(LOGGER_INSTANCE) # logger.setLevel(logging.DEBUG) ###- logging.basicConfig(filename='example.log', encoding='utf-8', level=logging.DEBUG) colorama.deinit() settings = {} settings[SettingHelper.ACTIVE_ENVIRONMENT]", "LogHelperHelper.printMessageLog(LOG, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin, exception=exception) def printInfo(message, condition=False, muteStackTrace=False, newLine=True, margin=True,", "condition=False, muteStackTrace=False, newLine=True, margin=True) : LogHelperHelper.printMessageLog(SETTING, message, condition=condition, muteStackTrace=muteStackTrace, newLine=newLine, margin=margin) def printDebug(message,", "ReflectionHelper.getName(exception.__class__) else : return exceptionAsString def getTracebackMessage(muteStackTrace) : tracebackMessage = c.BLANK try :" ]
[ "full parameters log list EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:] # grab first ten rows EDXfiles=EDXfiles.iloc[[0]] #", "including corrected counts EDXplot.reportsubdatamajor(EDXfiles, Integquantlog, PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles, Fitregionsdf, plotrange, plotelems, SEMquantparams) # Now", "EDXquantparams, Interferences=EDXimport.loadprocessfiles() #%% Elements=EDXimport.pickelemsGUI(EDXquantparams) # interactive element selection Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # meteorites Elements=['S','C','Ca','O','Cr',", "etc. are bad) EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd()) # Redo integlog, peakfits if any backfits were", "Integlog.to_csv('Integquantlog.csv', index=False) #%% PLOTTING to check quality of background fits, peaks, etc. EDXfiles=EDXlog[0:5]", "default false for redo, redo of integration but not of background fits; no", "so use to_csv save below after checks) kwargs={} Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf,", "run with functions import pandas as pd import numpy as np if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX'", "EDXfiles=EDXlog[0:5] # Selecting subsets of all SEM files # Plot counts and background", "... just always strip header when opening Output into single log file for", "command kwargs.update({'redo_backfit':True}) # default false for redo, redo of integration but not of", "Run main quant loop (not autosaved so use to_csv save below after checks)", "do not refit or overwrite backgrounds... use ones made with interactive refitter Backfitlog,", "gauss peak fits # if quant rerun w/o changing backfits (i.e. 
after custom", "background fit pkwargs.update({'backfitpts':False}) # skip background pts but include fits pkwargs.update({'yrange':[-500,3000]}) # optional", "# Various ways of slicing up above full parameters log list EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:]", "parameter log, designed to read out pertinent header information from all emsa files", "# can drop or exclude files here if desired (filter of EDXlog) #", "from working directory EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() #%% Elements=EDXimport.pickelemsGUI(EDXquantparams) # interactive", "countsback_report.pdf) pkwargs.update({'savgol':True}) # include savgol differentiated plot (default False) EDXplot.reportcounts(EDXfiles, EDXquantparams, **pkwargs) EDXplot.reportcounts(EDXlog,", "encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8') # Version for pristine grains on graphene Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8') #", "version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8') # local version # If any modifications were", "processing loop for emsa or psmsa parameter extraction # Create parameters log for", "Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() #%% Elements=EDXimport.pickelemsGUI(EDXquantparams) # interactive element selection Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # meteorites", "are bad) EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd()) # Redo integlog, peakfits if any backfits were changed", "# Manual save of peakfitlog and integlog are needed Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', 
index=False)", "#%% Automated background fitting of SEM-EDX spectra # can drop or exclude files", "create background fit pkwargs.update({'backfitpts':False}) # skip background pts but include fits pkwargs.update({'yrange':[-500,3000]}) #", "# Combine files with same basename/point name (autosaves altered EDXlog with backup) EDXlog=EDXimport.combineEDX(EDXlog)", "drop or exclude files here if desired (filter of EDXlog) # Various ways", "pkwargs.update({'yrange':[-500,3000]}) # optional y range for plot.. defaults to data range pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca',", "information from all emsa files within a folder. No need to convert psmsa", "with small timeconst #%% Reload of existing files (if reprocessing data) from working", "redo of integration but not of background fits; no effect on new spectra", "after finishing) Backfitlog, Peakfitlog, Integlog = EDXimport.replacelogentries(EDXlog, Backfitlog, Peakfitlog, Integlog) #%% Run interactive", "peak fits (if they exist) EDXplot.reportSEMpeaks(EDXfiles, plotelems, SEMquantparams, addgauss=True, PDFname='peak_report.pdf') # TODO Place", "# TODO Place center of integration on plot for significant peaks # plot", "Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # meteorites Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # pristine SiC Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] # meteorites", "# interactive element selection Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # meteorites Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # pristine SiC", "SEM_batch_conversion script Extracts important header info into parameter log, designed to read out", "EDXplot import EDX_refit_tk_gui as EDXrf import EDX_quantplotter_tk_gui as EDXqpl #%% # datapath =", "SEM-EDX spectra # can drop or exclude files here if desired (filter of", "can also create custom version) 
Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8') # Version for pristine", "PLOTTING to check quality of background fits, peaks, etc. EDXfiles=EDXlog[0:5] # Selecting subsets", "to convert psmsa into csv ... just always strip header when opening Output", "entries in original logbooks (saves after finishing) Backfitlog, Peakfitlog, Integlog = EDXimport.replacelogentries(EDXlog, Backfitlog,", "Integquantlog, PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles, Fitregionsdf, plotrange, plotelems, SEMquantparams) # Now proceed to EDX_quant_main for", "selection Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # meteorites Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # pristine SiC Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] #", "with data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8') #%% # Run main quant loop (not", "only summed files EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)] # choose only summed files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] # backfits", "Manual save of peakfitlog and integlog are needed Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) #%%", "optional x range for plot (default is 0-10? 
) pkwargs.update({'backfitdf':Backfitlog}) # optional plotting", "Backfitlog.to_csv('Backfitparamslog.csv', index=False) Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) # After successful refit of subset of", "parameters log list EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:] # grab first ten rows EDXfiles=EDXfiles.iloc[[0]] # select", "EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8') #%% # Run main quant loop (not autosaved so use to_csv", "EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False} # do not refit or overwrite", "rerun w/o changing backfits (i.e. after custom mods) skip clear of backfits kwargs.update({'clear_old_backfits':True})", "Excel or elsewhere \"\"\" #%% Load modules import glob, sys, os # already", "psmsa files (i.e. containing blanks) psfiles=glob.glob('*.p_s') badpsfiles=[i for i in psfiles if '\\xa0'", "spectrum's csv file; default true # Find/ Replace subset of files (processed in", "spectra # can drop or exclude files here if desired (filter of EDXlog)", "# plot subtracted data around major elements including corrected counts EDXplot.reportsubdatamajor(EDXfiles, Integquantlog, PDFname='Subcounts_major_report.pdf')", "csv files (defaults True) kwargs.update({'savegauss':False}) # optional save of gaussian fit column into", "functions import pandas as pd import numpy as np if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not in", "in alternate manner) from above log files.. 
refit of failed fits Backfitlog.to_csv('Backfitparamslog.csv', index=False)", "when opening Output into single log file for import into Excel or elsewhere", "EDXlog with backup) EDXlog=EDXimport.combineEDX(EDXlog) #%% Automated background fitting of SEM-EDX spectra # can", "modules import glob, sys, os # already run with functions import pandas as", "options and can also create custom version) Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8') # Version", "if any backfits were changed (first reload saved changes from file) EDXlog, Backfitlog,", "for plot.. defaults to data range pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca', 'Fe', 'FeL']}) # list of", "overwrite all backgrounds in csv files (defaults True) kwargs.update({'savegauss':False}) # optional save of", "# Plot counts and background over specified energy range pkwargs={} pkwargs.update({'xrange':'0.3-10'}) # optional", "fits # if quant rerun w/o changing backfits (i.e. after custom mods) skip", "addgauss=True, PDFname='peak_report.pdf') # TODO Place center of integration on plot for significant peaks", "loop for emsa or psmsa parameter extraction # Create parameters log for all", "spatial areas (automatically saved w/ backup) SpatialAreasLog=EDXimport.processpointshoot() #%% # Combine files with same", "pristine SiC Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] # meteorites +FIB artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] # refractory analogs Elements=np.ndarray.tolist(Integlog.Element.unique())#", "(defaults to countsback_report.pdf) pkwargs.update({'savgol':True}) # include savgol differentiated plot (default False) EDXplot.reportcounts(EDXfiles, EDXquantparams,", "for interference adjustments, \\\\osition calcs, etc. 
# Renaming of troublesome p_s and psmsa", "folder. No need to convert psmsa into csv ... just always strip header", "# defaults true (false allows skip of existing integrations and gauss peak fits", "name (defaults to countsback_report.pdf) pkwargs.update({'savgol':True}) # include savgol differentiated plot (default False) EDXplot.reportcounts(EDXfiles,", "Place center of integration on plot for significant peaks # plot subtracted data", "Create parameters log for all SEM-EDX files (autosaved with prior backup) using parameter", "on plot for significant peaks # plot subtracted data around major elements including", "from existing files EDXlog.to_csv('EDXparamlog.csv',index=False) # Creation of jpg images with points/areas superimposed (from", "w/ backup) SpatialAreasLog=EDXimport.processpointshoot() #%% # Combine files with same basename/point name (autosaves altered", "save name (defaults to countsback_report.pdf) pkwargs.update({'savgol':True}) # include savgol differentiated plot (default False)", "as EDXqpl #%% # datapath = filedialog.askdirectorypwd # initialdir=\"H:\\\\Research_data\", title = \"choose data", "EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] # choose only summed files EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)] # choose only summed files", "made during quant of this data, load local version stored with data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv',", "# if quant rerun w/o changing backfits (i.e. 
after custom mods) skip clear", "rows EDXfiles=EDXfiles.iloc[[0]] # select single row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] # choose only summed files EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False,", "# meteorites Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # pristine SiC Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] # meteorites +FIB artifact", "data directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') # psmsa option #%% Main file processing loop for emsa", "# choose only summed files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] # backfits fail with small timeconst #%%", "#%% PLOTTING to check quality of background fits, peaks, etc. EDXfiles=EDXlog[0:5] # Selecting", "load local version stored with data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8') #%% # Run", "skip background pts but include fits pkwargs.update({'yrange':[-500,3000]}) # optional y range for plot..", "of background fits; no effect on new spectra kwargs.update({'redo_integration':False}) # defaults true (false", "EDXquantparams, Elements, **kwargs) # Manual save of peakfitlog and integlog are needed Peakfitlog.to_csv('Peakfitlog.csv',", "Run interactive EDXrefitter (if any plots, backfit points, etc. 
are bad) EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd())", "of failed fits Backfitlog.to_csv('Backfitparamslog.csv', index=False) Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) # After successful refit", "#%% Reload of existing files (if reprocessing data) from working directory EDXlog, Backfitlog,", "version stored with data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8') #%% # Run main quant", "index=False) Integlog.to_csv('Integquantlog.csv', index=False) # After successful refit of subset of files, find/replace entries", "Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) #%% PLOTTING to check quality of background fits, peaks,", "can drop or exclude files here if desired (filter of EDXlog) # Various", "Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8') # local version # If any modifications were made during quant", "Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, **kwargs) # Manual save of peakfitlog", "of this data, load local version stored with data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8')", "refit of subset of files, find/replace entries in original logbooks (saves after finishing)", "interactive refitter Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, **kwargs) # Manual save", "fits; no effect on new spectra kwargs.update({'redo_integration':False}) # defaults true (false allows skip", "files 
(autosaved with prior backup) using parameter template # Checks for existing EDXlogbook", "PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles, Fitregionsdf, plotrange, plotelems, SEMquantparams) # Now proceed to EDX_quant_main for interference", "file; default true # Find/ Replace subset of files (processed in alternate manner)", "parameter extraction # Create parameters log for all SEM-EDX files (autosaved with prior", "no effect on new spectra kwargs.update({'redo_integration':False}) # defaults true (false allows skip of", "is 0-10? ) pkwargs.update({'backfitdf':Backfitlog}) # optional plotting of points used to create background", "all SEM files # Plot counts and background over specified energy range pkwargs={}", "refitter Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, **kwargs) # Manual save of", "of integration but not of background fits; no effect on new spectra kwargs.update({'redo_integration':False})", "peak fits # if quant rerun w/o changing backfits (i.e. after custom mods)", "to data range pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca', 'Fe', 'FeL']}) # list of elements to label", "'Ca', 'Fe', 'FeL']}) # list of elements to label on plots pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']})", "convert psmsa into csv ... just always strip header when opening Output into", "PDFname='peak_report.pdf') # TODO Place center of integration on plot for significant peaks #", "in csv files (defaults True) kwargs.update({'savegauss':False}) # optional save of gaussian fit column", "EDXimport.getparams(filelist) EDXlog= EDXimport.getparams(filelist, reprocess=True) # alt version that reacquires params from existing files", "to read out pertinent header information from all emsa files within a folder.", "EDXquantparams, **pkwargs) # plot report with subtracted counts and optionally gaussian peak fits", "(processed in alternate manner) from above log files.. 
refit of failed fits Backfitlog.to_csv('Backfitparamslog.csv',", "integration on plot for significant peaks # plot subtracted data around major elements", "Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] # meteorites +FIB artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] # refractory analogs Elements=np.ndarray.tolist(Integlog.Element.unique())# gets prior", "single row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] # choose only summed files EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)] # choose only", "# alt version that reacquires params from existing files EDXlog.to_csv('EDXparamlog.csv',index=False) # Creation of", "not in sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import EDX_import_functions as EDXimport import EDX_quant_functions as EDXquant import", "No need to convert psmsa into csv ... just always strip header when", "of files (processed in alternate manner) from above log files.. refit of failed", "range pkwargs={} pkwargs.update({'xrange':'0.3-10'}) # optional x range for plot (default is 0-10? )", "sys, os # already run with functions import pandas as pd import numpy", "plot (default False) EDXplot.reportcounts(EDXfiles, EDXquantparams, **pkwargs) EDXplot.reportcounts(EDXlog, EDXquantparams, **pkwargs) # plot report with", "used element set Elements.append('PtL2') Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2']) # Load energy ranges without peaks for", "# already run with functions import pandas as pd import numpy as np", "filenames w/ sample EDXlog= EDXimport.getparams(filelist) EDXlog= EDXimport.getparams(filelist, reprocess=True) # alt version that reacquires", "check quality of background fits, peaks, etc. 
EDXfiles=EDXlog[0:5] # Selecting subsets of all", "as EDXimport import EDX_quant_functions as EDXquant import EDX_plot_functions as EDXplot import EDX_refit_tk_gui as", "with backup) EDXlog=EDXimport.combineEDX(EDXlog) #%% Automated background fitting of SEM-EDX spectra # can drop", "title = \"choose data directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') # psmsa option #%% Main file processing", "script Extracts important header info into parameter log, designed to read out pertinent", "strip header when opening Output into single log file for import into Excel", "plot report with subtracted counts and optionally gaussian peak fits (if they exist)", "badpsfiles=[i for i in psfiles if '\\xa0' in i] for i, psfile in", "Elements=EDXimport.pickelemsGUI(EDXquantparams) # interactive element selection Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # meteorites Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # pristine", "kwargs={} Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, Backfitlog, Integlog, Peakfitlog, **kwargs) #", "(false allows skip of existing integrations and gauss peak fits # if quant", "alt save name (defaults to countsback_report.pdf) pkwargs.update({'savgol':True}) # include savgol differentiated plot (default", "Interferences=EDXimport.loadprocessfiles() #%% Elements=EDXimport.pickelemsGUI(EDXquantparams) # interactive element selection Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # meteorites Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si']", "Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, **kwargs) # Manual save of peakfitlog and integlog", "choose only summed files EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)] # choose only summed files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] #", "alt version that reacquires params from existing files 
EDXlog.to_csv('EDXparamlog.csv',index=False) # Creation of jpg", "EDX_plot_functions as EDXplot import EDX_refit_tk_gui as EDXrf import EDX_quantplotter_tk_gui as EDXqpl #%% #", "Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False} # do not refit or overwrite backgrounds...", "pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca', 'Fe', 'FeL']}) # list of elements to label on plots pkwargs.update({'plotelems':['O','Mg','Si',", "of integration on plot for significant peaks # plot subtracted data around major", "for existing EDXlogbook correlating filenames w/ sample EDXlog= EDXimport.getparams(filelist) EDXlog= EDXimport.getparams(filelist, reprocess=True) #", "existing files (if reprocessing data) from working directory EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams,", "fitting of SEM-EDX spectra # can drop or exclude files here if desired", "or psmsa parameter extraction # Create parameters log for all SEM-EDX files (autosaved", "with interactive refitter Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, **kwargs) # Manual", "of existing files (if reprocessing data) from working directory EDXlog, Backfitlog, Integlog, Peakfitlog,", "EDX_import_functions as EDXimport import EDX_quant_functions as EDXquant import EDX_plot_functions as EDXplot import EDX_refit_tk_gui", "# choose only summed files EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)] # choose only summed files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500]", "EDXqpl #%% # datapath = filedialog.askdirectorypwd # initialdir=\"H:\\\\Research_data\", title = \"choose data directory\")", "Fitregionsdf, EDXquantparams, Elements, **kwargs) # Manual save of peakfitlog and integlog are needed", "w/ sample EDXlog= EDXimport.getparams(filelist) EDXlog= EDXimport.getparams(filelist, reprocess=True) # alt version that reacquires 
params", "# default false for redo, redo of integration but not of background fits;", "background fits; no effect on new spectra kwargs.update({'redo_integration':False}) # defaults true (false allows", "quant of this data, load local version stored with data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv',", "counts EDXplot.reportsubdatamajor(EDXfiles, Integquantlog, PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles, Fitregionsdf, plotrange, plotelems, SEMquantparams) # Now proceed to", "version # If any modifications were made during quant of this data, load", "# returns df with spatial areas (automatically saved w/ backup) SpatialAreasLog=EDXimport.processpointshoot() #%% #", "gets prior used element set Elements.append('PtL2') Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2']) # Load energy ranges without", "read out pertinent header information from all emsa files within a folder. 
No", "not overwrite all backgrounds in csv files (defaults True) kwargs.update({'savegauss':False}) # optional save", "EDXlog=EDXimport.combineEDX(EDXlog) #%% Automated background fitting of SEM-EDX spectra # can drop or exclude", "from file) EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False} # do not refit", "# meteorites +FIB artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] # refractory analogs Elements=np.ndarray.tolist(Integlog.Element.unique())# gets prior used element", "EDXquantparams, Elements, Backfitlog, Integlog, Peakfitlog, **kwargs) # optional kwargs for above command kwargs.update({'redo_backfit':True})", "log list EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:] # grab first ten rows EDXfiles=EDXfiles.iloc[[0]] # select single", "adjustments, \\\\osition calcs, etc. # Renaming of troublesome p_s and psmsa files (i.e.", "etc. # Renaming of troublesome p_s and psmsa files (i.e. 
containing blanks) psfiles=glob.glob('*.p_s')", "integlog, peakfits if any backfits were changed (first reload saved changes from file)", "label on plots pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) # alt save name (defaults to countsback_report.pdf)", "skip of existing integrations and gauss peak fits # if quant rerun w/o", "plotrange, plotelems, SEMquantparams) # Now proceed to EDX_quant_main for interference adjustments, \\\\osition calcs,", "csv file; default true # Find/ Replace subset of files (processed in alternate", "row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] # choose only summed files EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)] # choose only summed", "# backfits fail with small timeconst #%% Reload of existing files (if reprocessing", "defaults true (false allows skip of existing integrations and gauss peak fits #", "list of elements to label on plots pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) # alt save", "EDXimport import EDX_quant_functions as EDXquant import EDX_plot_functions as EDXplot import EDX_refit_tk_gui as EDXrf", "of EDXlog) # Various ways of slicing up above full parameters log list", "troublesome p_s and psmsa files (i.e. containing blanks) psfiles=glob.glob('*.p_s') badpsfiles=[i for i in", "to check quality of background fits, peaks, etc. 
EDXfiles=EDXlog[0:5] # Selecting subsets of", "defaults to data range pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca', 'Fe', 'FeL']}) # list of elements to", "Spyder Editor SEM_batch_conversion script Extracts important header info into parameter log, designed to", "sample EDXlog= EDXimport.getparams(filelist) EDXlog= EDXimport.getparams(filelist, reprocess=True) # alt version that reacquires params from", "peakfitlog and integlog are needed Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) #%% PLOTTING to check", "savgol differentiated plot (default False) EDXplot.reportcounts(EDXfiles, EDXquantparams, **pkwargs) EDXplot.reportcounts(EDXlog, EDXquantparams, **pkwargs) # plot", "mods) skip clear of backfits kwargs.update({'clear_old_backfits':True}) # default false option to not overwrite", "# -*- coding: utf-8 -*- \"\"\" Spyder Editor SEM_batch_conversion script Extracts important header", "(i.e. containing blanks) psfiles=glob.glob('*.p_s') badpsfiles=[i for i in psfiles if '\\xa0' in i]", "(autosaves altered EDXlog with backup) EDXlog=EDXimport.combineEDX(EDXlog) #%% Automated background fitting of SEM-EDX spectra", "encoding='utf-8') # TEM version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8') # local version # If", "refit or overwrite backgrounds... 
use ones made with interactive refitter Backfitlog, Peakfitlog, Integlog=", "'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) # alt save name (defaults to countsback_report.pdf) pkwargs.update({'savgol':True}) # include savgol", "SpatialAreasLog=EDXimport.processpointshoot() #%% # Combine files with same basename/point name (autosaves altered EDXlog with", "backfits fail with small timeconst #%% Reload of existing files (if reprocessing data)", "points used to create background fit pkwargs.update({'backfitpts':False}) # skip background pts but include", "emsa or psmsa parameter extraction # Create parameters log for all SEM-EDX files", "psfiles=glob.glob('*.p_s') badpsfiles=[i for i in psfiles if '\\xa0' in i] for i, psfile", "fit column into spectrum's csv file; default true # Find/ Replace subset of", "reload saved changes from file) EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False} #", "prior backup) using parameter template # Checks for existing EDXlogbook correlating filenames w/", "are needed Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) #%% PLOTTING to check quality of background", "**kwargs) # optional kwargs for above command kwargs.update({'redo_backfit':True}) # default false for redo,", "range for plot (default is 0-10? 
) pkwargs.update({'backfitdf':Backfitlog}) # optional plotting of points", "as np if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not in sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import EDX_import_functions as EDXimport import", "around major elements including corrected counts EDXplot.reportsubdatamajor(EDXfiles, Integquantlog, PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles, Fitregionsdf, plotrange, plotelems,", "import EDX_quantplotter_tk_gui as EDXqpl #%% # datapath = filedialog.askdirectorypwd # initialdir=\"H:\\\\Research_data\", title =", "# refractory analogs Elements=np.ndarray.tolist(Integlog.Element.unique())# gets prior used element set Elements.append('PtL2') Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2']) #", "as EDXrf import EDX_quantplotter_tk_gui as EDXqpl #%% # datapath = filedialog.askdirectorypwd # initialdir=\"H:\\\\Research_data\",", "ten rows EDXfiles=EDXfiles.iloc[[0]] # select single row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] # choose only summed files", "backgrounds... 
use ones made with interactive refitter Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams,", "timeconst #%% Reload of existing files (if reprocessing data) from working directory EDXlog,", "after checks) kwargs={} Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, Backfitlog, Integlog, Peakfitlog,", "logbooks (saves after finishing) Backfitlog, Peakfitlog, Integlog = EDXimport.replacelogentries(EDXlog, Backfitlog, Peakfitlog, Integlog) #%%", "**kwargs) # Manual save of peakfitlog and integlog are needed Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv',", "Automated background fitting of SEM-EDX spectra # can drop or exclude files here", "if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not in sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import EDX_import_functions as EDXimport import EDX_quant_functions as", "finishing) Backfitlog, Peakfitlog, Integlog = EDXimport.replacelogentries(EDXlog, Backfitlog, Peakfitlog, Integlog) #%% Run interactive EDXrefitter", "plots pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) # alt save name (defaults to countsback_report.pdf) pkwargs.update({'savgol':True}) #", "subtracted data around major elements including corrected counts EDXplot.reportsubdatamajor(EDXfiles, Integquantlog, PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles, Fitregionsdf,", "\"\"\" Spyder Editor SEM_batch_conversion script Extracts important header info into parameter log, designed", "pkwargs.update({'backfitdf':Backfitlog}) # optional plotting of points used to create background fit pkwargs.update({'backfitpts':False}) #", "Fitregionsdf, EDXquantparams, Elements, Backfitlog, Integlog, Peakfitlog, **kwargs) # optional kwargs for above command", "of gaussian fit column into spectrum's csv file; 
default true # Find/ Replace", "files within a folder. No need to convert psmsa into csv ... just", "'Fe', 'FeL']}) # list of elements to label on plots pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'})", "EDX_quantplotter_tk_gui as EDXqpl #%% # datapath = filedialog.askdirectorypwd # initialdir=\"H:\\\\Research_data\", title = \"choose", "also create custom version) Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8') # Version for pristine grains", "# After successful refit of subset of files, find/replace entries in original logbooks", "Elements=np.ndarray.tolist(Integlog.Element.unique())# gets prior used element set Elements.append('PtL2') Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2']) # Load energy ranges", "peaks for background fitting (various options and can also create custom version) Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv',", "files with same basename/point name (autosaves altered EDXlog with backup) EDXlog=EDXimport.combineEDX(EDXlog) #%% Automated", "calcs, etc. # Renaming of troublesome p_s and psmsa files (i.e. 
containing blanks)", "version) Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8') # Version for pristine grains on graphene Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv',", "exclude files here if desired (filter of EDXlog) # Various ways of slicing", "default false option to not overwrite all backgrounds in csv files (defaults True)", "allows skip of existing integrations and gauss peak fits # if quant rerun", "#%% # datapath = filedialog.askdirectorypwd # initialdir=\"H:\\\\Research_data\", title = \"choose data directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa')", "(filter of EDXlog) # Various ways of slicing up above full parameters log", "optional y range for plot.. defaults to data range pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca', 'Fe', 'FeL']})", "'FeL']}) # list of elements to label on plots pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) #", "for emsa or psmsa parameter extraction # Create parameters log for all SEM-EDX", "EDXquant import EDX_plot_functions as EDXplot import EDX_refit_tk_gui as EDXrf import EDX_quantplotter_tk_gui as EDXqpl", "Redo integlog, peakfits if any backfits were changed (first reload saved changes from", "reacquires params from existing files EDXlog.to_csv('EDXparamlog.csv',index=False) # Creation of jpg images with points/areas", "save of peakfitlog and integlog are needed Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) #%% PLOTTING", "ranges without peaks for background fitting (various options and can also create custom", 
"Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8') # TEM version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8') # local version #", "interactive EDXrefitter (if any plots, backfit points, etc. are bad) EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd()) #", "existing files EDXlog.to_csv('EDXparamlog.csv',index=False) # Creation of jpg images with points/areas superimposed (from .psref", "i in psfiles if '\\xa0' in i] for i, psfile in enumerate(badpsfiles): EDXimport.renamePSset(psfile,", "Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # pristine SiC Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] # meteorites +FIB artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] #", "case=False)] # choose only summed files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] # backfits fail with small timeconst", "# TEM version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8') # local version # If any", "background fits, peaks, etc. EDXfiles=EDXlog[0:5] # Selecting subsets of all SEM files #", "After successful refit of subset of files, find/replace entries in original logbooks (saves", "images with points/areas superimposed (from .psref and .p_s files).. jpgs directly saved #", "fits pkwargs.update({'yrange':[-500,3000]}) # optional y range for plot.. defaults to data range pkwargs.update({'plotelems':['O','Mg','S','Si',", "= EDXimport.replacelogentries(EDXlog, Backfitlog, Peakfitlog, Integlog) #%% Run interactive EDXrefitter (if any plots, backfit", "Renaming of troublesome p_s and psmsa files (i.e. 
containing blanks) psfiles=glob.glob('*.p_s') badpsfiles=[i for", "pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) # alt save name (defaults to countsback_report.pdf) pkwargs.update({'savgol':True}) # include", "EDXimport.replacelogentries(EDXlog, Backfitlog, Peakfitlog, Integlog) #%% Run interactive EDXrefitter (if any plots, backfit points,", "of background fits, peaks, etc. EDXfiles=EDXlog[0:5] # Selecting subsets of all SEM files", "= filedialog.askdirectorypwd # initialdir=\"H:\\\\Research_data\", title = \"choose data directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') # psmsa option", "(first reload saved changes from file) EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False}", "backfit points, etc. are bad) EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd()) # Redo integlog, peakfits if any", "Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, Backfitlog, Integlog, Peakfitlog, **kwargs) # optional", "files here if desired (filter of EDXlog) # Various ways of slicing up", "option to not overwrite all backgrounds in csv files (defaults True) kwargs.update({'savegauss':False}) #", "redo, redo of integration but not of background fits; no effect on new", "pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) # alt save name (defaults to countsback_report.pdf) pkwargs.update({'savgol':True}) # include savgol differentiated", "Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8') # Version for pristine grains on graphene 
Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8')", "with same basename/point name (autosaves altered EDXlog with backup) EDXlog=EDXimport.combineEDX(EDXlog) #%% Automated background", "of subset of files, find/replace entries in original logbooks (saves after finishing) Backfitlog,", "for import into Excel or elsewhere \"\"\" #%% Load modules import glob, sys,", "EDXplot.reportcounts(EDXfiles, EDXquantparams, **pkwargs) EDXplot.reportcounts(EDXlog, EDXquantparams, **pkwargs) # plot report with subtracted counts and", "correlating filenames w/ sample EDXlog= EDXimport.getparams(filelist) EDXlog= EDXimport.getparams(filelist, reprocess=True) # alt version that", "save below after checks) kwargs={} Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, Backfitlog,", "quant rerun w/o changing backfits (i.e. after custom mods) skip clear of backfits", "EDX_refit_tk_gui as EDXrf import EDX_quantplotter_tk_gui as EDXqpl #%% # datapath = filedialog.askdirectorypwd #", "w/o changing backfits (i.e. after custom mods) skip clear of backfits kwargs.update({'clear_old_backfits':True}) #", "-*- \"\"\" Spyder Editor SEM_batch_conversion script Extracts important header info into parameter log,", "for pristine grains on graphene Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8') # TEM version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv',", "peaks, etc. 
EDXfiles=EDXlog[0:5] # Selecting subsets of all SEM files # Plot counts", "# list of elements to label on plots pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) # alt", "Load energy ranges without peaks for background fitting (various options and can also", ") pkwargs.update({'backfitdf':Backfitlog}) # optional plotting of points used to create background fit pkwargs.update({'backfitpts':False})", "of slicing up above full parameters log list EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:] # grab first", "first ten rows EDXfiles=EDXfiles.iloc[[0]] # select single row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] # choose only summed", "Replace subset of files (processed in alternate manner) from above log files.. refit", "elements to label on plots pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) # alt save name (defaults", "original logbooks (saves after finishing) Backfitlog, Peakfitlog, Integlog = EDXimport.replacelogentries(EDXlog, Backfitlog, Peakfitlog, Integlog)", "create custom version) Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8') # Version for pristine grains on", "checks) kwargs={} Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, Backfitlog, Integlog, Peakfitlog, **kwargs)", "EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8') #%% # Run main quant loop (not autosaved so", "plotelems, SEMquantparams) # Now proceed to EDX_quant_main for interference 
adjustments, \\\\osition calcs, etc.", "(defaults True) kwargs.update({'savegauss':False}) # optional save of gaussian fit column into spectrum's csv", "# optional y range for plot.. defaults to data range pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca', 'Fe',", "report with subtracted counts and optionally gaussian peak fits (if they exist) EDXplot.reportSEMpeaks(EDXfiles,", "Reload of existing files (if reprocessing data) from working directory EDXlog, Backfitlog, Integlog,", "psmsa parameter extraction # Create parameters log for all SEM-EDX files (autosaved with", "# Version for pristine grains on graphene Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8') # TEM version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv',", "Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False} # do not refit or overwrite backgrounds... use", "files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] # backfits fail with small timeconst #%% Reload of existing files", "import EDX_refit_tk_gui as EDXrf import EDX_quantplotter_tk_gui as EDXqpl #%% # datapath = filedialog.askdirectorypwd", "saved # returns df with spatial areas (automatically saved w/ backup) SpatialAreasLog=EDXimport.processpointshoot() #%%", "opening Output into single log file for import into Excel or elsewhere \"\"\"", "to_csv save below after checks) kwargs={} Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements,", "modifications were made during quant of this data, load local version stored with", "for plot (default is 0-10? 
) pkwargs.update({'backfitdf':Backfitlog}) # optional plotting of points used", "here if desired (filter of EDXlog) # Various ways of slicing up above", "log file for import into Excel or elsewhere \"\"\" #%% Load modules import", "default true # Find/ Replace subset of files (processed in alternate manner) from", "parameter template # Checks for existing EDXlogbook correlating filenames w/ sample EDXlog= EDXimport.getparams(filelist)", "center of integration on plot for significant peaks # plot subtracted data around", "Version for pristine grains on graphene Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8') # TEM version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8')", "EDXimport.getparams(filelist, reprocess=True) # alt version that reacquires params from existing files EDXlog.to_csv('EDXparamlog.csv',index=False) #", "energy ranges without peaks for background fitting (various options and can also create", "data, load local version stored with data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8') #%% #", "psmsa into csv ... 
just always strip header when opening Output into single", "EDX_quant_functions as EDXquant import EDX_plot_functions as EDXplot import EDX_refit_tk_gui as EDXrf import EDX_quantplotter_tk_gui", "filedialog.askdirectorypwd # initialdir=\"H:\\\\Research_data\", title = \"choose data directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') # psmsa option #%%", "all SEM-EDX files (autosaved with prior backup) using parameter template # Checks for", "SiC Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] # meteorites +FIB artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] # refractory analogs Elements=np.ndarray.tolist(Integlog.Element.unique())# gets", "y range for plot.. defaults to data range pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca', 'Fe', 'FeL']}) #", "as pd import numpy as np if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not in sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import", "into parameter log, designed to read out pertinent header information from all emsa", "and psmsa files (i.e. containing blanks) psfiles=glob.glob('*.p_s') badpsfiles=[i for i in psfiles if", "(autosaved with prior backup) using parameter template # Checks for existing EDXlogbook correlating", "EDXrefitter (if any plots, backfit points, etc. are bad) EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd()) # Redo", "(default is 0-10? ) pkwargs.update({'backfitdf':Backfitlog}) # optional plotting of points used to create", "optional plotting of points used to create background fit pkwargs.update({'backfitpts':False}) # skip background", "major elements including corrected counts EDXplot.reportsubdatamajor(EDXfiles, Integquantlog, PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles, Fitregionsdf, plotrange, plotelems, SEMquantparams)", "SEMquantparams) # Now proceed to EDX_quant_main for interference adjustments, \\\\osition calcs, etc. 
#", "grains on graphene Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8') # TEM version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8') #", "use to_csv save below after checks) kwargs={} Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams,", "fitting (various options and can also create custom version) Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8')", "pkwargs.update({'savgol':True}) # include savgol differentiated plot (default False) EDXplot.reportcounts(EDXfiles, EDXquantparams, **pkwargs) EDXplot.reportcounts(EDXlog, EDXquantparams,", "specified energy range pkwargs={} pkwargs.update({'xrange':'0.3-10'}) # optional x range for plot (default is", "info into parameter log, designed to read out pertinent header information from all", "not of background fits; no effect on new spectra kwargs.update({'redo_integration':False}) # defaults true", "import into Excel or elsewhere \"\"\" #%% Load modules import glob, sys, os", "import EDX_quant_functions as EDXquant import EDX_plot_functions as EDXplot import EDX_refit_tk_gui as EDXrf import", "ways of slicing up above full parameters log list EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:] # grab", "'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not in sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import EDX_import_functions as EDXimport import EDX_quant_functions as EDXquant", "datapath = filedialog.askdirectorypwd # initialdir=\"H:\\\\Research_data\", title = \"choose data 
directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') # psmsa", "\"\"\" #%% Load modules import glob, sys, os # already run with functions", "within a folder. No need to convert psmsa into csv ... just always", "pandas as pd import numpy as np if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not in sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX')", "Selecting subsets of all SEM files # Plot counts and background over specified", "TODO Place center of integration on plot for significant peaks # plot subtracted", ".psref and .p_s files).. jpgs directly saved # returns df with spatial areas", "quality of background fits, peaks, etc. EDXfiles=EDXlog[0:5] # Selecting subsets of all SEM", "counts and background over specified energy range pkwargs={} pkwargs.update({'xrange':'0.3-10'}) # optional x range", "all emsa files within a folder. No need to convert psmsa into csv", "Extracts important header info into parameter log, designed to read out pertinent header", "(various options and can also create custom version) Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8') #", "(if they exist) EDXplot.reportSEMpeaks(EDXfiles, plotelems, SEMquantparams, addgauss=True, PDFname='peak_report.pdf') # TODO Place center of", "version that reacquires params from existing files EDXlog.to_csv('EDXparamlog.csv',index=False) # Creation of jpg images", "this data, load local version stored with data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8') #%%", "false option to not overwrite all backgrounds in csv files (defaults 
True) kwargs.update({'savegauss':False})", "fits (if they exist) EDXplot.reportSEMpeaks(EDXfiles, plotelems, SEMquantparams, addgauss=True, PDFname='peak_report.pdf') # TODO Place center", "choose only summed files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] # backfits fail with small timeconst #%% Reload", "artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] # refractory analogs Elements=np.ndarray.tolist(Integlog.Element.unique())# gets prior used element set Elements.append('PtL2') Elements.extend(['GaL','PtM',", "files (defaults True) kwargs.update({'savegauss':False}) # optional save of gaussian fit column into spectrum's", "refit of failed fits Backfitlog.to_csv('Backfitparamslog.csv', index=False) Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) # After successful", "fits, peaks, etc. EDXfiles=EDXlog[0:5] # Selecting subsets of all SEM files # Plot", "subtracted counts and optionally gaussian peak fits (if they exist) EDXplot.reportSEMpeaks(EDXfiles, plotelems, SEMquantparams,", "# skip background pts but include fits pkwargs.update({'yrange':[-500,3000]}) # optional y range for", "EDXplot.reportSEMpeaks(EDXfiles, plotelems, SEMquantparams, addgauss=True, PDFname='peak_report.pdf') # TODO Place center of integration on plot", "fits Backfitlog.to_csv('Backfitparamslog.csv', index=False) Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) # After successful refit of subset", "encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8') #%% # Run main quant loop (not autosaved so use", "x range for plot (default is 0-10? 
) pkwargs.update({'backfitdf':Backfitlog}) # optional plotting of", "(automatically saved w/ backup) SpatialAreasLog=EDXimport.processpointshoot() #%% # Combine files with same basename/point name", "kwargs.update({'redo_integration':False}) # defaults true (false allows skip of existing integrations and gauss peak", "Integlog = EDXimport.replacelogentries(EDXlog, Backfitlog, Peakfitlog, Integlog) #%% Run interactive EDXrefitter (if any plots,", "df with spatial areas (automatically saved w/ backup) SpatialAreasLog=EDXimport.processpointshoot() #%% # Combine files", "**pkwargs) # plot report with subtracted counts and optionally gaussian peak fits (if", "that reacquires params from existing files EDXlog.to_csv('EDXparamlog.csv',index=False) # Creation of jpg images with", "# Create parameters log for all SEM-EDX files (autosaved with prior backup) using", "quant loop (not autosaved so use to_csv save below after checks) kwargs={} Backfitlog,", "plot (default is 0-10? ) pkwargs.update({'backfitdf':Backfitlog}) # optional plotting of points used to", "If any modifications were made during quant of this data, load local version", "# Checks for existing EDXlogbook correlating filenames w/ sample EDXlog= EDXimport.getparams(filelist) EDXlog= EDXimport.getparams(filelist,", "SEM-EDX files (autosaved with prior backup) using parameter template # Checks for existing", "select single row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] # choose only summed files EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)] # choose", "kwargs.update({'redo_backfit':True}) # default false for redo, redo of integration but not of background", "in psfiles if '\\xa0' in i] for i, psfile in enumerate(badpsfiles): EDXimport.renamePSset(psfile, '\\xa0',", "and background over specified energy range pkwargs={} pkwargs.update({'xrange':'0.3-10'}) # optional x range for", "import glob, sys, os # already run with functions import pandas 
as pd", "superimposed (from .psref and .p_s files).. jpgs directly saved # returns df with", "on plots pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) # alt save name (defaults to countsback_report.pdf) pkwargs.update({'savgol':True})", "Elements, Backfitlog, Integlog, Peakfitlog, **kwargs) # optional kwargs for above command kwargs.update({'redo_backfit':True}) #", "Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False} # do not refit or overwrite backgrounds... use ones", "index=False) Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) # After successful refit of subset of files,", "summed files EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)] # choose only summed files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] # backfits fail", "# alt save name (defaults to countsback_report.pdf) pkwargs.update({'savgol':True}) # include savgol differentiated plot", "Creation of jpg images with points/areas superimposed (from .psref and .p_s files).. 
jpgs", "Checks for existing EDXlogbook correlating filenames w/ sample EDXlog= EDXimport.getparams(filelist) EDXlog= EDXimport.getparams(filelist, reprocess=True)", "integrations and gauss peak fits # if quant rerun w/o changing backfits (i.e.", "of backfits kwargs.update({'clear_old_backfits':True}) # default false option to not overwrite all backgrounds in", "and optionally gaussian peak fits (if they exist) EDXplot.reportSEMpeaks(EDXfiles, plotelems, SEMquantparams, addgauss=True, PDFname='peak_report.pdf')", "they exist) EDXplot.reportSEMpeaks(EDXfiles, plotelems, SEMquantparams, addgauss=True, PDFname='peak_report.pdf') # TODO Place center of integration", "'FeL','Fe','Mg','Al','Si'] # meteorites Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # pristine SiC Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] # meteorites +FIB", "plotting of points used to create background fit pkwargs.update({'backfitpts':False}) # skip background pts", "with points/areas superimposed (from .psref and .p_s files).. jpgs directly saved # returns", "column into spectrum's csv file; default true # Find/ Replace subset of files", "backfits (i.e. after custom mods) skip clear of backfits kwargs.update({'clear_old_backfits':True}) # default false", "local version stored with data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8') #%% # Run main", "EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd()) # Redo integlog, peakfits if any backfits were changed (first reload", "files.. 
refit of failed fits Backfitlog.to_csv('Backfitparamslog.csv', index=False) Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) # After", "# optional save of gaussian fit column into spectrum's csv file; default true", "from all emsa files within a folder. No need to convert psmsa into", "Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] # refractory analogs Elements=np.ndarray.tolist(Integlog.Element.unique())# gets prior used element set Elements.append('PtL2') Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2'])", "# psmsa option #%% Main file processing loop for emsa or psmsa parameter", "main quant loop (not autosaved so use to_csv save below after checks) kwargs={}", "#%% Elements=EDXimport.pickelemsGUI(EDXquantparams) # interactive element selection Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # meteorites Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] #", "prior used element set Elements.append('PtL2') Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2']) # Load energy ranges without peaks", "EDXrf import EDX_quantplotter_tk_gui as EDXqpl #%% # datapath = filedialog.askdirectorypwd # initialdir=\"H:\\\\Research_data\", title", "elements including corrected counts EDXplot.reportsubdatamajor(EDXfiles, Integquantlog, PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles, Fitregionsdf, plotrange, plotelems, SEMquantparams) #", "above command kwargs.update({'redo_backfit':True}) # default false for redo, redo of integration but not", "log files.. 
refit of failed fits Backfitlog.to_csv('Backfitparamslog.csv', index=False) Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) #", "optional kwargs for above command kwargs.update({'redo_backfit':True}) # default false for redo, redo of", "# optional kwargs for above command kwargs.update({'redo_backfit':True}) # default false for redo, redo", "Editor SEM_batch_conversion script Extracts important header info into parameter log, designed to read", "if '\\xa0' in i] for i, psfile in enumerate(badpsfiles): EDXimport.renamePSset(psfile, '\\xa0', '_') train=pd.read_csv('Backfit_training.csv')", "csv ... just always strip header when opening Output into single log file", "graphene Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8') # TEM version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8') # local version", "gaussian peak fits (if they exist) EDXplot.reportSEMpeaks(EDXfiles, plotelems, SEMquantparams, addgauss=True, PDFname='peak_report.pdf') # TODO", "exist) EDXplot.reportSEMpeaks(EDXfiles, plotelems, SEMquantparams, addgauss=True, PDFname='peak_report.pdf') # TODO Place center of integration on", "Backfitlog, Integlog, Peakfitlog, **kwargs) # optional kwargs for above command kwargs.update({'redo_backfit':True}) # default", "for all SEM-EDX files (autosaved with prior backup) using parameter template # Checks", "interactive element selection Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # meteorites Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # pristine SiC Elements=['S','C','Ca','O','Cr',", "backfits kwargs.update({'clear_old_backfits':True}) # default false option to not overwrite all backgrounds in csv", "blanks) psfiles=glob.glob('*.p_s') 
badpsfiles=[i for i in psfiles if '\\xa0' in i] for i,", "clear of backfits kwargs.update({'clear_old_backfits':True}) # default false option to not overwrite all backgrounds", "altered EDXlog with backup) EDXlog=EDXimport.combineEDX(EDXlog) #%% Automated background fitting of SEM-EDX spectra #", "of files, find/replace entries in original logbooks (saves after finishing) Backfitlog, Peakfitlog, Integlog", "if quant rerun w/o changing backfits (i.e. after custom mods) skip clear of", "backfits were changed (first reload saved changes from file) EDXlog, Backfitlog, Integlog, Peakfitlog,", "= \"choose data directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') # psmsa option #%% Main file processing loop", "(if any plots, backfit points, etc. are bad) EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd()) # Redo integlog,", "directory EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() #%% Elements=EDXimport.pickelemsGUI(EDXquantparams) # interactive element selection", "autosaved so use to_csv save below after checks) kwargs={} Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog,", "changes from file) EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False} # do not", "and integlog are needed Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) #%% PLOTTING to check quality", "Fitregionsdf, plotrange, plotelems, SEMquantparams) # Now proceed to EDX_quant_main for interference adjustments, \\\\osition", "# optional plotting of points used to create background fit pkwargs.update({'backfitpts':False}) # skip", "files).. 
jpgs directly saved # returns df with spatial areas (automatically saved w/", "sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import EDX_import_functions as EDXimport import EDX_quant_functions as EDXquant import EDX_plot_functions as", "element set Elements.append('PtL2') Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2']) # Load energy ranges without peaks for background", "'FeL','Fe','Mg','Al','Si'] # pristine SiC Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] # meteorites +FIB artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] # refractory", "new spectra kwargs.update({'redo_integration':False}) # defaults true (false allows skip of existing integrations and", "element selection Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # meteorites Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # pristine SiC Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL']", "background over specified energy range pkwargs={} pkwargs.update({'xrange':'0.3-10'}) # optional x range for plot", "# include savgol differentiated plot (default False) EDXplot.reportcounts(EDXfiles, EDXquantparams, **pkwargs) EDXplot.reportcounts(EDXlog, EDXquantparams, **pkwargs)", "data around major elements including corrected counts EDXplot.reportsubdatamajor(EDXfiles, Integquantlog, PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles, Fitregionsdf, plotrange,", "name (autosaves altered EDXlog with backup) EDXlog=EDXimport.combineEDX(EDXlog) #%% Automated background fitting of SEM-EDX", "+FIB artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] # refractory analogs Elements=np.ndarray.tolist(Integlog.Element.unique())# gets prior used element set Elements.append('PtL2')", "elsewhere \"\"\" #%% Load modules import glob, sys, os # already run with", "files (i.e. 
containing blanks) psfiles=glob.glob('*.p_s') badpsfiles=[i for i in psfiles if '\\xa0' in", "false for redo, redo of integration but not of background fits; no effect", "encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8') # local version # If any modifications were made during", "of elements to label on plots pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) # alt save name", "\\\\osition calcs, etc. # Renaming of troublesome p_s and psmsa files (i.e. containing", "failed fits Backfitlog.to_csv('Backfitparamslog.csv', index=False) Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) # After successful refit of", "or exclude files here if desired (filter of EDXlog) # Various ways of", "significant peaks # plot subtracted data around major elements including corrected counts EDXplot.reportsubdatamajor(EDXfiles,", "single log file for import into Excel or elsewhere \"\"\" #%% Load modules", "during quant of this data, load local version stored with data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8')", "psmsa option #%% Main file processing loop for emsa or psmsa parameter extraction", "option #%% Main file processing loop for emsa or psmsa parameter extraction #", "pkwargs={} pkwargs.update({'xrange':'0.3-10'}) # optional x range for plot (default is 0-10? 
) pkwargs.update({'backfitdf':Backfitlog})", "EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:] # grab first ten rows EDXfiles=EDXfiles.iloc[[0]] # select single row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)]", "Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8') # Version for pristine grains on graphene Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8') # TEM", "any plots, backfit points, etc. are bad) EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd()) # Redo integlog, peakfits", "slicing up above full parameters log list EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:] # grab first ten", "all backgrounds in csv files (defaults True) kwargs.update({'savegauss':False}) # optional save of gaussian", "already run with functions import pandas as pd import numpy as np if", "saved changes from file) EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False} # do", "differentiated plot (default False) EDXplot.reportcounts(EDXfiles, EDXquantparams, **pkwargs) EDXplot.reportcounts(EDXlog, EDXquantparams, **pkwargs) # plot report", "points, etc. are bad) EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd()) # Redo integlog, peakfits if any backfits", "a folder. No need to convert psmsa into csv ... just always strip", "Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False} # do not refit or overwrite backgrounds... use ones made with", "over specified energy range pkwargs={} pkwargs.update({'xrange':'0.3-10'}) # optional x range for plot (default", "# Renaming of troublesome p_s and psmsa files (i.e. containing blanks) psfiles=glob.glob('*.p_s') badpsfiles=[i", "alternate manner) from above log files.. 
refit of failed fits Backfitlog.to_csv('Backfitparamslog.csv', index=False) Peakfitlog.to_csv('Peakfitlog.csv',", "custom mods) skip clear of backfits kwargs.update({'clear_old_backfits':True}) # default false option to not", "above full parameters log list EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:] # grab first ten rows EDXfiles=EDXfiles.iloc[[0]]", "# Find/ Replace subset of files (processed in alternate manner) from above log", "changing backfits (i.e. after custom mods) skip clear of backfits kwargs.update({'clear_old_backfits':True}) # default", "local version # If any modifications were made during quant of this data,", "important header info into parameter log, designed to read out pertinent header information", "extraction # Create parameters log for all SEM-EDX files (autosaved with prior backup)", "reprocess=True) # alt version that reacquires params from existing files EDXlog.to_csv('EDXparamlog.csv',index=False) # Creation", "bad) EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd()) # Redo integlog, peakfits if any backfits were changed (first", "for i in psfiles if '\\xa0' in i] for i, psfile in enumerate(badpsfiles):", "refractory analogs Elements=np.ndarray.tolist(Integlog.Element.unique())# gets prior used element set Elements.append('PtL2') Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2']) # Load", "always strip header when opening Output into single log file for import into", "any modifications were made during quant of this data, load local version stored", "EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, **kwargs) # Manual save of peakfitlog and integlog are", "0-10? 
) pkwargs.update({'backfitdf':Backfitlog}) # optional plotting of points used to create background fit", "were changed (first reload saved changes from file) EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams,", "with subtracted counts and optionally gaussian peak fits (if they exist) EDXplot.reportSEMpeaks(EDXfiles, plotelems,", "fit pkwargs.update({'backfitpts':False}) # skip background pts but include fits pkwargs.update({'yrange':[-500,3000]}) # optional y", "index=False) #%% PLOTTING to check quality of background fits, peaks, etc. EDXfiles=EDXlog[0:5] #", "range for plot.. defaults to data range pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca', 'Fe', 'FeL']}) # list", "TEM version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8') # local version # If any modifications", "points/areas superimposed (from .psref and .p_s files).. 
jpgs directly saved # returns df", "optionally gaussian peak fits (if they exist) EDXplot.reportSEMpeaks(EDXfiles, plotelems, SEMquantparams, addgauss=True, PDFname='peak_report.pdf') #", "encoding='utf-8') #%% # Run main quant loop (not autosaved so use to_csv save", "EDXplot.reportsubdatamajor(EDXfiles, Integquantlog, PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles, Fitregionsdf, plotrange, plotelems, SEMquantparams) # Now proceed to EDX_quant_main", "index=False) Integlog.to_csv('Integquantlog.csv', index=False) #%% PLOTTING to check quality of background fits, peaks, etc.", "sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import EDX_import_functions as EDXimport import EDX_quant_functions as EDXquant import EDX_plot_functions as EDXplot", "as EDXplot import EDX_refit_tk_gui as EDXrf import EDX_quantplotter_tk_gui as EDXqpl #%% # datapath", "with prior backup) using parameter template # Checks for existing EDXlogbook correlating filenames", "EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] # backfits fail with small timeconst #%% Reload of existing files (if", "import pandas as pd import numpy as np if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not in sys.path:", "files (processed in alternate manner) from above log files.. 
refit of failed fits", "files EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)] # choose only summed files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] # backfits fail with", "to create background fit pkwargs.update({'backfitpts':False}) # skip background pts but include fits pkwargs.update({'yrange':[-500,3000]})", "# initialdir=\"H:\\\\Research_data\", title = \"choose data directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') # psmsa option #%% Main", "true (false allows skip of existing integrations and gauss peak fits # if", "True) kwargs.update({'savegauss':False}) # optional save of gaussian fit column into spectrum's csv file;", "pkwargs.update({'backfitpts':False}) # skip background pts but include fits pkwargs.update({'yrange':[-500,3000]}) # optional y range", "for above command kwargs.update({'redo_backfit':True}) # default false for redo, redo of integration but", "into csv ... just always strip header when opening Output into single log", "areas (automatically saved w/ backup) SpatialAreasLog=EDXimport.processpointshoot() #%% # Combine files with same basename/point", "meteorites Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] # pristine SiC Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] # meteorites +FIB artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti']", "or overwrite backgrounds... 
use ones made with interactive refitter Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog,", "Elements.append('PtL2') Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2']) # Load energy ranges without peaks for background fitting (various", "import EDX_import_functions as EDXimport import EDX_quant_functions as EDXquant import EDX_plot_functions as EDXplot import", "# select single row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] # choose only summed files EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)] #", "pkwargs.update({'xrange':'0.3-10'}) # optional x range for plot (default is 0-10? ) pkwargs.update({'backfitdf':Backfitlog}) #", "Combine files with same basename/point name (autosaves altered EDXlog with backup) EDXlog=EDXimport.combineEDX(EDXlog) #%%", "of peakfitlog and integlog are needed Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) #%% PLOTTING to", "# do not refit or overwrite backgrounds... 
use ones made with interactive refitter", "made with interactive refitter Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, **kwargs) #", "False) EDXplot.reportcounts(EDXfiles, EDXquantparams, **pkwargs) EDXplot.reportcounts(EDXlog, EDXquantparams, **pkwargs) # plot report with subtracted counts", "skip clear of backfits kwargs.update({'clear_old_backfits':True}) # default false option to not overwrite all", "data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8') #%% # Run main quant loop (not autosaved", "backup) EDXlog=EDXimport.combineEDX(EDXlog) #%% Automated background fitting of SEM-EDX spectra # can drop or", "kwargs.update({'savegauss':False}) # optional save of gaussian fit column into spectrum's csv file; default", "above log files.. 
refit of failed fits Backfitlog.to_csv('Backfitparamslog.csv', index=False) Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False)", "energy range pkwargs={} pkwargs.update({'xrange':'0.3-10'}) # optional x range for plot (default is 0-10?", "files EDXlog.to_csv('EDXparamlog.csv',index=False) # Creation of jpg images with points/areas superimposed (from .psref and", "for significant peaks # plot subtracted data around major elements including corrected counts", "EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() #%% Elements=EDXimport.pickelemsGUI(EDXquantparams) # interactive element selection Elements=['S','C','Ca','O','Cr',", "of all SEM files # Plot counts and background over specified energy range", "header when opening Output into single log file for import into Excel or", "import numpy as np if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not in sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import EDX_import_functions as", "initialdir=\"H:\\\\Research_data\", title = \"choose data directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') # psmsa option #%% Main file", "true # Find/ Replace subset of files (processed in alternate manner) from above", "pd import numpy as np if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not in sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import EDX_import_functions", "changed (first reload saved changes from file) EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles()", "designed to read out pertinent header information from all emsa files within a", "to not overwrite all backgrounds in csv files (defaults True) kwargs.update({'savegauss':False}) # optional", "# Now proceed to EDX_quant_main for interference adjustments, \\\\osition calcs, etc. # Renaming", "etc. 
EDXfiles=EDXlog[0:5] # Selecting subsets of all SEM files # Plot counts and", "subset of files (processed in alternate manner) from above log files.. refit of", "EDXquantparams, **pkwargs) EDXplot.reportcounts(EDXlog, EDXquantparams, **pkwargs) # plot report with subtracted counts and optionally", "in sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import EDX_import_functions as EDXimport import EDX_quant_functions as EDXquant import EDX_plot_functions", "Plot counts and background over specified energy range pkwargs={} pkwargs.update({'xrange':'0.3-10'}) # optional x", "SEMquantparams, addgauss=True, PDFname='peak_report.pdf') # TODO Place center of integration on plot for significant", "log, designed to read out pertinent header information from all emsa files within", "just always strip header when opening Output into single log file for import", "glob, sys, os # already run with functions import pandas as pd import", "pristine grains on graphene Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8') # TEM version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8')", "stored with data EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEMquantparams.csv', encoding='utf-8') EDXquantparams=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEMquantparams.csv', encoding='utf-8') #%% # Run main quant loop", "include fits pkwargs.update({'yrange':[-500,3000]}) # optional y range for plot.. defaults to data range", "background pts but include fits pkwargs.update({'yrange':[-500,3000]}) # optional y range for plot.. 
defaults", "params from existing files EDXlog.to_csv('EDXparamlog.csv',index=False) # Creation of jpg images with points/areas superimposed", "pertinent header information from all emsa files within a folder. No need to", "list EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:] # grab first ten rows EDXfiles=EDXfiles.iloc[[0]] # select single row", "header info into parameter log, designed to read out pertinent header information from", "encoding='utf-8') # Version for pristine grains on graphene Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8') # TEM version", "into Excel or elsewhere \"\"\" #%% Load modules import glob, sys, os #", "of SEM-EDX spectra # can drop or exclude files here if desired (filter", "backup) using parameter template # Checks for existing EDXlogbook correlating filenames w/ sample", "range pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca', 'Fe', 'FeL']}) # list of elements to label on plots", "# local version # If any modifications were made during quant of this", "Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, **kwargs) # Manual save of peakfitlog and", "Integlog.to_csv('Integquantlog.csv', index=False) # After successful refit of subset of files, find/replace entries in", "save of gaussian fit column into spectrum's csv file; default true # Find/", "same basename/point name (autosaves altered EDXlog with backup) EDXlog=EDXimport.combineEDX(EDXlog) #%% Automated background fitting", "Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) # After successful refit of subset of files, find/replace", "of troublesome p_s and psmsa files (i.e. containing blanks) psfiles=glob.glob('*.p_s') badpsfiles=[i for i", "optional save of gaussian fit column into spectrum's csv file; default true #", "# optional x range for plot (default is 0-10? 
) pkwargs.update({'backfitdf':Backfitlog}) # optional", "Backfitlog, Peakfitlog, Integlog) #%% Run interactive EDXrefitter (if any plots, backfit points, etc.", "used to create background fit pkwargs.update({'backfitpts':False}) # skip background pts but include fits", "interference adjustments, \\\\osition calcs, etc. # Renaming of troublesome p_s and psmsa files", "subset of files, find/replace entries in original logbooks (saves after finishing) Backfitlog, Peakfitlog,", "Main file processing loop for emsa or psmsa parameter extraction # Create parameters", "data) from working directory EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() #%% Elements=EDXimport.pickelemsGUI(EDXquantparams) #", "only summed files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] # backfits fail with small timeconst #%% Reload of", "p_s and psmsa files (i.e. containing blanks) psfiles=glob.glob('*.p_s') badpsfiles=[i for i in psfiles", "filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') # psmsa option #%% Main file processing loop for emsa or psmsa", "out pertinent header information from all emsa files within a folder. 
No need", "reportcountspeakfits(EDXfiles, Fitregionsdf, plotrange, plotelems, SEMquantparams) # Now proceed to EDX_quant_main for interference adjustments,", "EDXfiles=EDXfiles[0:10][:] # grab first ten rows EDXfiles=EDXfiles.iloc[[0]] # select single row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] #", "successful refit of subset of files, find/replace entries in original logbooks (saves after", "in original logbooks (saves after finishing) Backfitlog, Peakfitlog, Integlog = EDXimport.replacelogentries(EDXlog, Backfitlog, Peakfitlog,", "up above full parameters log list EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:] # grab first ten rows", "np if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not in sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import EDX_import_functions as EDXimport import EDX_quant_functions", "meteorites +FIB artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] # refractory analogs Elements=np.ndarray.tolist(Integlog.Element.unique())# gets prior used element set", "'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] # meteorites +FIB artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] # refractory analogs Elements=np.ndarray.tolist(Integlog.Element.unique())# gets prior used", "existing EDXlogbook correlating filenames w/ sample EDXlog= EDXimport.getparams(filelist) EDXlog= EDXimport.getparams(filelist, reprocess=True) # alt", "integration but not of background fits; no effect on new spectra kwargs.update({'redo_integration':False}) #", "counts and optionally gaussian peak fits (if they exist) EDXplot.reportSEMpeaks(EDXfiles, plotelems, SEMquantparams, addgauss=True,", "plots, backfit points, etc. 
are bad) EDXrf.launch_refitter() EDXqpl.launch_plotter(os.getcwd()) # Redo integlog, peakfits if", "using parameter template # Checks for existing EDXlogbook correlating filenames w/ sample EDXlog=", "for background fitting (various options and can also create custom version) Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8')", "#%% # Combine files with same basename/point name (autosaves altered EDXlog with backup)", "import EDX_plot_functions as EDXplot import EDX_refit_tk_gui as EDXrf import EDX_quantplotter_tk_gui as EDXqpl #%%", "EDXlogbook correlating filenames w/ sample EDXlog= EDXimport.getparams(filelist) EDXlog= EDXimport.getparams(filelist, reprocess=True) # alt version", "basename/point name (autosaves altered EDXlog with backup) EDXlog=EDXimport.combineEDX(EDXlog) #%% Automated background fitting of", "background fitting (various options and can also create custom version) Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv',", "Output into single log file for import into Excel or elsewhere \"\"\" #%%", "existing integrations and gauss peak fits # if quant rerun w/o changing backfits", "# grab first ten rows EDXfiles=EDXfiles.iloc[[0]] # select single row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] # choose", "file processing loop for emsa or psmsa parameter extraction # Create parameters log", "# plot report with subtracted counts and optionally gaussian peak fits (if they", "directly saved # returns df with spatial areas (automatically saved w/ backup) SpatialAreasLog=EDXimport.processpointshoot()", "Peakfitlog, Integlog = EDXimport.replacelogentries(EDXlog, Backfitlog, Peakfitlog, Integlog) #%% Run interactive EDXrefitter (if any", "files # Plot counts 
and background over specified energy range pkwargs={} pkwargs.update({'xrange':'0.3-10'}) #", "spectra kwargs.update({'redo_integration':False}) # defaults true (false allows skip of existing integrations and gauss", "(from .psref and .p_s files).. jpgs directly saved # returns df with spatial", "analogs Elements=np.ndarray.tolist(Integlog.Element.unique())# gets prior used element set Elements.append('PtL2') Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2']) # Load energy", "EDX_quant_main for interference adjustments, \\\\osition calcs, etc. # Renaming of troublesome p_s and", "without peaks for background fitting (various options and can also create custom version)", "include savgol differentiated plot (default False) EDXplot.reportcounts(EDXfiles, EDXquantparams, **pkwargs) EDXplot.reportcounts(EDXlog, EDXquantparams, **pkwargs) #", "loop (not autosaved so use to_csv save below after checks) kwargs={} Backfitlog, Peakfitlog,", "# datapath = filedialog.askdirectorypwd # initialdir=\"H:\\\\Research_data\", title = \"choose data directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') #", "plot subtracted data around major elements including corrected counts EDXplot.reportsubdatamajor(EDXfiles, Integquantlog, PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles,", "to label on plots pkwargs.update({'plotelems':['O','Mg','Si', 'Fe']}) pkwargs.update({'PDFname':'counts_report_9Jan18.pdf'}) # alt save name (defaults to", "# Creation of jpg images with points/areas superimposed (from .psref and .p_s files)..", "to countsback_report.pdf) pkwargs.update({'savgol':True}) # include savgol differentiated plot (default False) EDXplot.reportcounts(EDXfiles, EDXquantparams, **pkwargs)", "-*- coding: utf-8 -*- \"\"\" Spyder Editor SEM_batch_conversion script Extracts important header info", "#%% Main file processing loop for emsa or psmsa parameter extraction # Create", "Load modules import glob, sys, os # already run with functions import pandas", "# 
default false option to not overwrite all backgrounds in csv files (defaults", "SEM files # Plot counts and background over specified energy range pkwargs={} pkwargs.update({'xrange':'0.3-10'})", "integlog are needed Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) #%% PLOTTING to check quality of", "Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8') # local version # If any modifications were made", "jpg images with points/areas superimposed (from .psref and .p_s files).. jpgs directly saved", "EDXlog= EDXimport.getparams(filelist, reprocess=True) # alt version that reacquires params from existing files EDXlog.to_csv('EDXparamlog.csv',index=False)", "EDXqpl.launch_plotter(os.getcwd()) # Redo integlog, peakfits if any backfits were changed (first reload saved", "needed Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv', index=False) #%% PLOTTING to check quality of background fits,", "plotelems, SEMquantparams, addgauss=True, PDFname='peak_report.pdf') # TODO Place center of integration on plot for", "of points used to create background fit pkwargs.update({'backfitpts':False}) # skip background pts but", "and can also create custom version) Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8') # Version for", "# If any modifications were made during quant of this data, load local", "if desired (filter of EDXlog) # Various ways of slicing up above full", "emsa files within a folder. 
No need to convert psmsa into csv ...", "desired (filter of EDXlog) # Various ways of slicing up above full parameters", "fail with small timeconst #%% Reload of existing files (if reprocessing data) from", "psfiles if '\\xa0' in i] for i, psfile in enumerate(badpsfiles): EDXimport.renamePSset(psfile, '\\xa0', '_')", "Elements, **kwargs) # Manual save of peakfitlog and integlog are needed Peakfitlog.to_csv('Peakfitlog.csv', index=False)", "parameters log for all SEM-EDX files (autosaved with prior backup) using parameter template", "utf-8 -*- \"\"\" Spyder Editor SEM_batch_conversion script Extracts important header info into parameter", "Peakfitlog, **kwargs) # optional kwargs for above command kwargs.update({'redo_backfit':True}) # default false for", "header information from all emsa files within a folder. No need to convert", "pts but include fits pkwargs.update({'yrange':[-500,3000]}) # optional y range for plot.. defaults to", "Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() #%% Elements=EDXimport.pickelemsGUI(EDXquantparams) # interactive element selection Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si']", "numpy as np if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not in sys.path: sys.path.append('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX') import EDX_import_functions as EDXimport", "EDXlog.to_csv('EDXparamlog.csv',index=False) # Creation of jpg images with points/areas superimposed (from .psref and .p_s", "#%% Run interactive EDXrefitter (if any plots, backfit points, etc. are bad) EDXrf.launch_refitter()", "Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2']) # Load energy ranges without peaks for background fitting (various options", ".p_s files).. 
jpgs directly saved # returns df with spatial areas (automatically saved", "(saves after finishing) Backfitlog, Peakfitlog, Integlog = EDXimport.replacelogentries(EDXlog, Backfitlog, Peakfitlog, Integlog) #%% Run", "set Elements.append('PtL2') Elements.extend(['GaL','PtM', 'Ga','PtL','PtL2']) # Load energy ranges without peaks for background fitting", "EDXlog= EDXimport.getparams(filelist) EDXlog= EDXimport.getparams(filelist, reprocess=True) # alt version that reacquires params from existing", "encoding='utf-8') # local version # If any modifications were made during quant of", "to EDX_quant_main for interference adjustments, \\\\osition calcs, etc. # Renaming of troublesome p_s", "as EDXquant import EDX_plot_functions as EDXplot import EDX_refit_tk_gui as EDXrf import EDX_quantplotter_tk_gui as", "EDXlog) # Various ways of slicing up above full parameters log list EDXfiles=EDXlog", "EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)] # choose only summed files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] # backfits fail with small", "small timeconst #%% Reload of existing files (if reprocessing data) from working directory", "directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') # psmsa option #%% Main file processing loop for emsa or", "returns df with spatial areas (automatically saved w/ backup) SpatialAreasLog=EDXimport.processpointshoot() #%% # Combine", "from above log files.. 
refit of failed fits Backfitlog.to_csv('Backfitparamslog.csv', index=False) Peakfitlog.to_csv('Peakfitlog.csv', index=False) Integlog.to_csv('Integquantlog.csv',", "jpgs directly saved # returns df with spatial areas (automatically saved w/ backup)", "EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, Backfitlog, Integlog, Peakfitlog, **kwargs) # optional kwargs for above", "into spectrum's csv file; default true # Find/ Replace subset of files (processed", "peakfits if any backfits were changed (first reload saved changes from file) EDXlog,", "of jpg images with points/areas superimposed (from .psref and .p_s files).. jpgs directly", "coding: utf-8 -*- \"\"\" Spyder Editor SEM_batch_conversion script Extracts important header info into", "Integlog, Peakfitlog, **kwargs) # optional kwargs for above command kwargs.update({'redo_backfit':True}) # default false", "'Ga','PtL','PtL2']) # Load energy ranges without peaks for background fitting (various options and", "on graphene Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_pristine.csv', encoding='utf-8') # TEM version Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\TEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('SEM_backfit_regions_alt.csv', encoding='utf-8') # local", "EDXquantparams, Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False} # do not refit or overwrite backgrounds... 
use ones made", "Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, Backfitlog, Integlog, Peakfitlog, **kwargs) # optional kwargs for", "effect on new spectra kwargs.update({'redo_integration':False}) # defaults true (false allows skip of existing", "# pristine SiC Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si','PtM','PtL','PtL2','Ga','GaL'] # meteorites +FIB artifact Elements=['N','C','O','FeL','Fe','S','Ca','Mg','Al','Si','Ti'] # refractory analogs", "summed files EDXfiles=EDXfiles[EDXfiles['Timeconst']>12500] # backfits fail with small timeconst #%% Reload of existing", "# Load energy ranges without peaks for background fitting (various options and can", "files, find/replace entries in original logbooks (saves after finishing) Backfitlog, Peakfitlog, Integlog =", "subsets of all SEM files # Plot counts and background over specified energy", "of existing integrations and gauss peak fits # if quant rerun w/o changing", "kwargs for above command kwargs.update({'redo_backfit':True}) # default false for redo, redo of integration", "(not autosaved so use to_csv save below after checks) kwargs={} Backfitlog, Peakfitlog, Integlog=", "grab first ten rows EDXfiles=EDXfiles.iloc[[0]] # select single row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] # choose only", "#%% # Run main quant loop (not autosaved so use to_csv save below", "with spatial areas (automatically saved w/ backup) SpatialAreasLog=EDXimport.processpointshoot() #%% # Combine files with", "kwargs={'newback':False,'overwrite':False} # do not refit or overwrite backgrounds... 
use ones made with interactive", "use ones made with interactive refitter Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements,", "background fitting of SEM-EDX spectra # can drop or exclude files here if", "Find/ Replace subset of files (processed in alternate manner) from above log files..", "backup) SpatialAreasLog=EDXimport.processpointshoot() #%% # Combine files with same basename/point name (autosaves altered EDXlog", "Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() #%% Elements=EDXimport.pickelemsGUI(EDXquantparams) # interactive element selection Elements=['S','C','Ca','O','Cr', 'FeL','Fe','Mg','Al','Si'] #", "manner) from above log files.. refit of failed fits Backfitlog.to_csv('Backfitparamslog.csv', index=False) Peakfitlog.to_csv('Peakfitlog.csv', index=False)", "on new spectra kwargs.update({'redo_integration':False}) # defaults true (false allows skip of existing integrations", "gaussian fit column into spectrum's csv file; default true # Find/ Replace subset", "data range pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca', 'Fe', 'FeL']}) # list of elements to label on", "template # Checks for existing EDXlogbook correlating filenames w/ sample EDXlog= EDXimport.getparams(filelist) EDXlog=", "log for all SEM-EDX files (autosaved with prior backup) using parameter template #", "reprocessing data) from working directory EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() #%% Elements=EDXimport.pickelemsGUI(EDXquantparams)", "file) EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() kwargs={'newback':False,'overwrite':False} # do not refit or", "file for import into Excel or elsewhere \"\"\" #%% Load modules import glob,", "Peakfitlog, Integlog) #%% Run interactive EDXrefitter (if any plots, backfit points, etc. 
are", "(if reprocessing data) from working directory EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() #%%", "saved w/ backup) SpatialAreasLog=EDXimport.processpointshoot() #%% # Combine files with same basename/point name (autosaves", "and gauss peak fits # if quant rerun w/o changing backfits (i.e. after", "into single log file for import into Excel or elsewhere \"\"\" #%% Load", "plot.. defaults to data range pkwargs.update({'plotelems':['O','Mg','S','Si', 'Ca', 'Fe', 'FeL']}) # list of elements", "Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, Backfitlog, Integlog, Peakfitlog, **kwargs) # optional kwargs", "EDXplot.reportcounts(EDXlog, EDXquantparams, **pkwargs) # plot report with subtracted counts and optionally gaussian peak", "**pkwargs) EDXplot.reportcounts(EDXlog, EDXquantparams, **pkwargs) # plot report with subtracted counts and optionally gaussian", "(default False) EDXplot.reportcounts(EDXfiles, EDXquantparams, **pkwargs) EDXplot.reportcounts(EDXlog, EDXquantparams, **pkwargs) # plot report with subtracted", "any backfits were changed (first reload saved changes from file) EDXlog, Backfitlog, Integlog,", "corrected counts EDXplot.reportsubdatamajor(EDXfiles, Integquantlog, PDFname='Subcounts_major_report.pdf') reportcountspeakfits(EDXfiles, Fitregionsdf, plotrange, plotelems, SEMquantparams) # Now proceed", "kwargs.update({'clear_old_backfits':True}) # default false option to not overwrite all backgrounds in csv files", "with functions import pandas as pd import numpy as np if 'C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX' not", "files (if reprocessing data) from working directory EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles()", "EDXfiles=EDXfiles.iloc[[0]] # select single row EDXfiles=EDXfiles[EDXfiles['Filenumber'].str.contains(\"\\+\",na=False)] # choose only summed files 
EDXfiles=EDXfiles[~EDXfiles['Comments'].str.contains(\"exclude\",na=False, case=False)]", "Integlog) #%% Run interactive EDXrefitter (if any plots, backfit points, etc. are bad)", "peaks # plot subtracted data around major elements including corrected counts EDXplot.reportsubdatamajor(EDXfiles, Integquantlog,", "for redo, redo of integration but not of background fits; no effect on", "# Selecting subsets of all SEM files # Plot counts and background over", "and .p_s files).. jpgs directly saved # returns df with spatial areas (automatically", "plot for significant peaks # plot subtracted data around major elements including corrected", "(i.e. after custom mods) skip clear of backfits kwargs.update({'clear_old_backfits':True}) # default false option", "ones made with interactive refitter Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, **kwargs)", "os # already run with functions import pandas as pd import numpy as", "containing blanks) psfiles=glob.glob('*.p_s') badpsfiles=[i for i in psfiles if '\\xa0' in i] for", "Various ways of slicing up above full parameters log list EDXfiles=EDXlog EDXfiles=EDXfiles[0:10][:] #", "working directory EDXlog, Backfitlog, Integlog, Peakfitlog, EDXquantparams, Interferences=EDXimport.loadprocessfiles() #%% Elements=EDXimport.pickelemsGUI(EDXquantparams) # interactive element", "Now proceed to EDX_quant_main for interference adjustments, \\\\osition calcs, etc. 
# Renaming of", "backgrounds in csv files (defaults True) kwargs.update({'savegauss':False}) # optional save of gaussian fit", "or elsewhere \"\"\" #%% Load modules import glob, sys, os # already run", "after custom mods) skip clear of backfits kwargs.update({'clear_old_backfits':True}) # default false option to", "custom version) Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions.csv', encoding='utf-8') Fitregionsdf=pd.read_csv('C:\\\\Users\\\\tkc\\\\Documents\\\\Python_Scripts\\\\EDX\\\\SEM_backfit_regions_alt.csv', encoding='utf-8') # Version for pristine grains on graphene", "# Run main quant loop (not autosaved so use to_csv save below after", "overwrite backgrounds... use ones made with interactive refitter Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf,", "\"choose data directory\") filelist=glob.glob('*.psmsa')+glob.glob('*.emsa') # psmsa option #%% Main file processing loop for", "need to convert psmsa into csv ... just always strip header when opening", "but not of background fits; no effect on new spectra kwargs.update({'redo_integration':False}) # defaults", "# Redo integlog, peakfits if any backfits were changed (first reload saved changes", "but include fits pkwargs.update({'yrange':[-500,3000]}) # optional y range for plot.. defaults to data", "proceed to EDX_quant_main for interference adjustments, \\\\osition calcs, etc. 
# Renaming of troublesome", "Backfitlog, Peakfitlog, Integlog = EDXimport.replacelogentries(EDXlog, Backfitlog, Peakfitlog, Integlog) #%% Run interactive EDXrefitter (if", "below after checks) kwargs={} Backfitlog, Peakfitlog, Integlog= EDXimport.batchEDXquant(EDXlog, Fitregionsdf, EDXquantparams, Elements, Backfitlog, Integlog,", "index=False) # After successful refit of subset of files, find/replace entries in original", "were made during quant of this data, load local version stored with data", "find/replace entries in original logbooks (saves after finishing) Backfitlog, Peakfitlog, Integlog = EDXimport.replacelogentries(EDXlog,", "#%% Load modules import glob, sys, os # already run with functions import", "not refit or overwrite backgrounds... use ones made with interactive refitter Backfitlog, Peakfitlog," ]
[ "dark_plots = True n_sig = 8 n_print_sigfigs = 3 if dark_plots: dark='darkbg/' q", "Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding = 'utf-8') as filehandle: Exp2_Opt_Params_3_sources = json.loads(filehandle.read()) # %% Exp2_Opt_Params_3_sources", "The other notebook stored the pickle in the same folder if SaveFitFigs: if", "%% # nowtext = now().strftime(\"%Y%m%d%H%M\") nowtext = '_15font' fitplotfilename = SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' # fitdatafilename", "= {} numsamples = n_reps*Field_At_He_location_for_FFT['time'].size binsize = Field_At_He_location_for_FFT['time'][2] - Field_At_He_location_for_FFT['time'][1] Field_At_He_location_for_FFT['freq']= fft.rfftfreq(n =", "SaveFitData = True dpiN = 1000 dark_plots = True n_sig = 8 n_print_sigfigs", "Hekey in FFT_amp_table[freq].keys(): print(\" {:0.1f} |\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\") print(\"-------------------\") # %% for Hekey in", "location {}\\n ({:0.1f} s measurement duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field (pT)') plt.grid() plt.legend(loc = 'upper", "scipy.signal.windows import hann from copy import deepcopy from scipy.stats import chi2 # %%", "}) else: # %matplotlib inline # mpl.rcParams.update(mpl.rcParamsDefault) font = { # 'weight' :", "%% with open('../Params/Exp2_dimensions_and_locations.json', 'r') as fp: params_dims_locs = json.load(fp) # %% params_dims_locs #", "domain'][Hekey][axiskey]).max() # %% print(\"FFT Amplitudes calculated at locations inside He spheroid\") for freq", "'font.size':13, }) else: # %matplotlib inline # mpl.rcParams.update(mpl.rcParamsDefault) font = { # 'weight'", "Exp2_data_filename = LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' # %% with open(Exp2_data_filename,'rb') as file_obj: Exp2_data_cut = pickle.load(file_obj) #", "in rtr_dims: 
rtr_dims[key] = signif(rtr_dims[key],n_sig) # %% He_sample_locations =deepcopy(params_dims_locs['3He locations']) for Hekey in", "= json.loads(filehandle.read()) # %% Exp2_Opt_Params_3_sources # %% Exp2_Opt_Params_3_sources_noDC_noBar = Exp2_Opt_Params_3_sources[:-5] Exp2_Opt_Params_3_sources_zeroDC = Exp2_Opt_Params_3_sources_noDC_noBar+", "domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey] =", "scipy.stats import chi2 # %% SaveFitFigs = True # SaveFitData = True dpiN", "'legend.labelspacing': 0.45, # 'text.usetex': True, 'font.size':13, }) else: # %matplotlib inline # mpl.rcParams.update(mpl.rcParamsDefault)", "# %% Sample_settings = { 'rotor dimensions':rtr_dims, 'sensor locations':He_sample_locations, # 'bar location':0, #", "# 'fit data filename':fitdatafilename } Exp2_all_settings = { 'experiment settings':Sample_settings, 'data':Data_At_Sample, 'optimization settings':Exp2_optimization_settings,", "(np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table = {} # FFT_amp_table['frequency'] .append(nu) FFT_amp_table[nu] = {} FFT_amp_table[11*nu] = {}", "# FFT_amp_table['frequency'] .append(nu) FFT_amp_table[nu] = {} FFT_amp_table[11*nu] = {} B_max_table = {} for", "freq domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey]", "plt.title('Contribution of impurities to field at $^3$He location \\n ({:0.1f} s measurement duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1]))", "$^3$He location {}\\n ({:0.1f} s measurement 
duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field (pT)') plt.grid() plt.legend(loc =", "import hann from copy import deepcopy from scipy.stats import chi2 # %% SaveFitFigs", "mpl.rcParams.update({ #'legend.borderpad': 0.3, #'legend.borderaxespad': 0.25, # 'legend.columnspacing': 0.6, # 'legend.handlelength': 0.7, #'legend.handleheight': 0.4,", "arrowprops=dict(color='red',alpha=0.5,width = 1.5,headwidth=6, shrink=0.),\\ horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy = (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width = 1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center') plt.ylim(0,B_max*1.5)", "%% from B_calc_script import FieldAtAnyLocation from B_calc_script import signif # %% # %autoreload", "= '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 # The other notebook stored the pickle in the same folder", "= Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B time domain'] = {} Field_At_He_location_for_FFT['B freq domain'] = {} numsamples", "data filename':fitdatafilename } Exp2_all_settings = { 'experiment settings':Sample_settings, 'data':Data_At_Sample, 'optimization settings':Exp2_optimization_settings, 'plot settings':Exp2_plot_settings,", "filehandle: # Exp2_Opt_Params_4_sources = pickle.load(filehandle) # %% [markdown] # # Calculate field at", "'plot settings':Exp2_plot_settings, 'save settings':Exp2_save_settings } Exp2_Opt_Params = Exp2_Opt_Params_3_sources_zeroDC field_at_sample = FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) # %%", "if not os.path.exists(SaveDataDir_Exp2): # os.makedirs(SaveDataDir_Exp2) # %% if dark_plots: mpl.rcParams.update(q) # %matplotlib inline", "# %% for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.figure() i_num = 0 B_max", "# %% from B_calc_script import FieldAtAnyLocation from B_calc_script import signif # %% #", 
"'../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 # The other notebook stored the pickle in the same folder if", "axiskey in field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey] = np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey] = 4*fft.rfft(Field_At_He_location_for_FFT['B", "filehandle: # Exp2_Opt_Params_3_sources = pickle.load(filehandle) # Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding = 'utf-8') as filehandle:", "} Exp2_save_settings ={ 'save fit data':False, # 'fit data filename':fitdatafilename } Exp2_all_settings =", "= (nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width = 1.5,headwidth=6, shrink=0.),\\ horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy = (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width = 1.5,headwidth=6,shrink=0.),\\", "domain'][Hekey][axiskey]) ,label = Hekey ,alpha = 1-i_num/4) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey])", "fp: params_dims_locs = json.load(fp) # %% params_dims_locs # %% rtr_dims = params_dims_locs['rotor_dims'] for", "= json.load(fp) # %% params_dims_locs # %% rtr_dims = params_dims_locs['rotor_dims'] for key in", "{} |'.format(Hekey), end = \" \") print(\"\\n\") for axiskey in FFT_amp_table[freq][Hekey].keys(): print(\" \"+axiskey+\"", "[markdown] # # FFT Field at Sample Location # %% n_reps = 50", "|\",end=\"\") for Hekey in FFT_amp_table[freq].keys(): print(\" {:0.1f} |\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\") print(\"-------------------\") # %% for", "settings':Exp2_optimization_settings, 'plot settings':Exp2_plot_settings, 'save settings':Exp2_save_settings } Exp2_Opt_Params = Exp2_Opt_Params_3_sources_zeroDC field_at_sample = 
FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) #", "0 max_at_11nu = 0 for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq", "Field_At_He_location_for_FFT['B freq domain'].keys(): FFT_amp_table[nu][Hekey] = {} FFT_amp_table[11*nu][Hekey] = {} B_max_table[Hekey] = {} for", "# Calculate field at sample location # # %% Sample_settings = { 'rotor", "mpl.rc_params_from_file('matplotlibrc_dark') else: dark = 'whitebg/' mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2 = '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' # SaveDataDir_Exp2 = '../Results/2021-11-16/Exp2/'+'Pickles/'", "'number of sources':3, 'location dimensions':3, 'moment dimensions':3, 'location coordinate system':'polar', 'moment coordinate system':'polar',", "New Roman'} plt.rc('font', **font) # mpl.rcParams.update({'font.family':'serif'}) # %% # %load_ext autoreload # %%", "%% Data_At_Sample = { 'theta':np.concatenate([Exp2_data_cut['theta avg'][nu] # ,360+Exp2_data_cut['theta avg'][nu] ]), #theta positive for", "spheroid\") for freq in FFT_amp_table.keys(): print('{} Hz'.format(freq), end = \"\\n\") print(\"-------------------\") print(\"Axis |\",", "Hz (AV X&Y inverted)'.format(nu), # 'memo':'{} Hz'.format(nu), 'doubleplot':False, 'saveplot':SaveFitFigs, 'dpi':dpiN, 'figname':fitplotfilename, 'print sigfigs':n_print_sigfigs", "notebook stored the pickle in the same folder if SaveFitFigs: if not os.path.exists(SavePlotDir_Exp2):", "parameters ## # %% nu = 5 # %% with open('../Params/Exp2_dimensions_and_locations.json', 'r') as", "as file_obj: Exp2_data_cut = pickle.load(file_obj) # %% [markdown] # ## Load parameters ##", "pickle.load(filehandle) # %% [markdown] # # Calculate field at sample location # #", "%% [markdown] # # FFT Field at Sample Location # %% n_reps =", "of impurities to field at $^3$He location \\n ({:0.1f} s measurement 
duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic", "domain'][Hekey] = {} Field_At_He_location_for_FFT['B freq domain'][Hekey] = {} for axiskey in field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B", "%% Sample_settings = { 'rotor dimensions':rtr_dims, 'sensor locations':He_sample_locations, # 'bar location':0, # 'DC", "component (pT)'.format(axiskey)) plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}_component.png'.format(axiskey),bbox_inches = 'tight',dpi =", "\" \") for Hekey in FFT_amp_table[freq].keys(): print('Sensor {} |'.format(Hekey), end = \" \")", "freq domain'][Hekey][axiskey]) ,label = Hekey ,alpha = 1-i_num/4) i_num +=1 B_max = max(B_max,", "# ,360+Exp2_data_cut['theta avg'][nu] ]), #theta positive for ac, negative for clockwise 'B':{ '3He", "} Exp2_Opt_Params = Exp2_Opt_Params_3_sources_zeroDC field_at_sample = FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) # %% [markdown] # # FFT", "= {} FFT_amp_table[11*nu] = {} B_max_table = {} for Hekey in Field_At_He_location_for_FFT['B freq", "domain'][Hekey][axiskey]) ,label = axiskey ,alpha = 1-i_num/3) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey])", "He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) # %% # with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as filehandle: # Exp2_Opt_Params_3_sources = pickle.load(filehandle) #", "plt import matplotlib as mpl import time import os import pickle import json", "positive for ac, negative for clockwise 'B':{ '3He 1':{ 'Z':np.array([]), 'Y':np.array([]), 'X':np.array([]) },", "= {} for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): FFT_amp_table[nu][Hekey] = {} FFT_amp_table[11*nu][Hekey] =", "= Hekey ,alpha = 1-i_num/4) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu =", 
"max_at_11nu = 0 for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey])", "plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities to field at $^3$He location \\n ({:0.1f} s", "plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = axiskey ,alpha = 1-i_num/3) i_num +=1 B_max", "FFT_amp_table = {} # FFT_amp_table['frequency'] .append(nu) FFT_amp_table[nu] = {} FFT_amp_table[11*nu] = {} B_max_table", "# %% print(\"FFT Amplitudes calculated at locations inside He spheroid\") for freq in", "'dpi':dpiN, 'figname':fitplotfilename, 'print sigfigs':n_print_sigfigs } Exp2_save_settings ={ 'save fit data':False, # 'fit data", "numsamples,d=binsize) # %% for Hekey in field_at_sample.keys(): Field_At_He_location_for_FFT['B time domain'][Hekey] = {} Field_At_He_location_for_FFT['B", "# %% # %load_ext autoreload # %% from B_calc_script import FieldAtAnyLocation from B_calc_script", "5 # %% with open('../Params/Exp2_dimensions_and_locations.json', 'r') as fp: params_dims_locs = json.load(fp) # %%", "= { 'print':True, 'number of sources':3, 'location dimensions':3, 'moment dimensions':3, 'location coordinate system':'polar',", "if dark_plots: mpl.rcParams.update(q) # %matplotlib inline mpl.rcParams.update({ #'legend.borderpad': 0.3, #'legend.borderaxespad': 0.25, # 'legend.columnspacing':", "{ 'plot':True, # 'memo':'{} Hz (AV X&Y inverted)'.format(nu), # 'memo':'{} Hz'.format(nu), 'doubleplot':False, 'saveplot':SaveFitFigs,", "print(\"-------------------\") print(\"Axis |\", end = \" \") for Hekey in FFT_amp_table[freq].keys(): print('Sensor {}", "max_at_nu = 0 max_at_11nu = 0 for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq']", "SaveFitData: # if not os.path.exists(SaveDataDir_Exp2): # 
os.makedirs(SaveDataDir_Exp2) # %% if dark_plots: mpl.rcParams.update(q) #", "(pT)') plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches = 'tight',dpi = dpiN)", "file_obj: Exp2_data_cut = pickle.load(file_obj) # %% [markdown] # ## Load parameters ## #", "# 'chi tolerance':10, 'optimize DC shifts':True, 'optimize bar location':True, 'significant figures':n_sig } Exp2_plot_settings", "Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey] = np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey] = 4*fft.rfft(Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]*hann(numsamples),norm =", "#'legend.borderpad': 0.3, #'legend.borderaxespad': 0.25, # 'legend.columnspacing': 0.6, # 'legend.handlelength': 0.7, #'legend.handleheight': 0.4, #'legend.handletextpad':", "# SaveDataDir_Exp2 = '../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2 = '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 # The other notebook stored the", "axiskey in FFT_amp_table[freq][Hekey].keys(): print(\" \"+axiskey+\" |\",end=\"\") for Hekey in FFT_amp_table[freq].keys(): print(\" {:0.1f} |\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\")", "params_dims_locs = json.load(fp) # %% params_dims_locs # %% rtr_dims = params_dims_locs['rotor_dims'] for key", "|\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\") print(\"-------------------\") # %% for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.figure() i_num", "of impurities to field at $^3$He location {}\\n ({:0.1f} s measurement duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic", "{ # 'weight' : 'normal', 'size' : 15, 'family': 'Times New Roman'} plt.rc('font',", "if SaveFitFigs: if not os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2) # if SaveFitData: # if not 
os.path.exists(SaveDataDir_Exp2):", "plt.ylabel('Magnetic field, {} component (pT)'.format(axiskey)) plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}_component.png'.format(axiskey),bbox_inches", "# # Calculate field at sample location # # %% Sample_settings = {", "'3He 1':{ 'Z':np.array([]), 'Y':np.array([]), 'X':np.array([]) }, } } # %% # nowtext =", "= {} Field_At_He_location_for_FFT['B freq domain'] = {} numsamples = n_reps*Field_At_He_location_for_FFT['time'].size binsize = Field_At_He_location_for_FFT['time'][2]", "He spheroid\") for freq in FFT_amp_table.keys(): print('{} Hz'.format(freq), end = \"\\n\") print(\"-------------------\") print(\"Axis", ",360+Exp2_data_cut['theta avg'][nu] ]), #theta positive for ac, negative for clockwise 'B':{ '3He 1':{", "Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.figure() i_num = 0 B_max = 0 max_at_nu", "= 0 max_at_11nu = 0 for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B", "if not os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2) # if SaveFitData: # if not os.path.exists(SaveDataDir_Exp2): # os.makedirs(SaveDataDir_Exp2)", "with open(Exp2_data_filename,'rb') as file_obj: Exp2_data_cut = pickle.load(file_obj) # %% [markdown] # ## Load", "# Load data # %% Exp2_data_filename = LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' # %% with open(Exp2_data_filename,'rb') as", "json.loads(filehandle.read()) # %% Exp2_Opt_Params_3_sources # %% Exp2_Opt_Params_3_sources_noDC_noBar = Exp2_Opt_Params_3_sources[:-5] Exp2_Opt_Params_3_sources_zeroDC = Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0]", "params_dims_locs['rotor_dims'] for key in rtr_dims: rtr_dims[key] = signif(rtr_dims[key],n_sig) # %% He_sample_locations =deepcopy(params_dims_locs['3He locations'])", "'legend.columnspacing': 0.6, # 'legend.handlelength': 0.7, 
#'legend.handleheight': 0.4, #'legend.handletextpad': 0.2, # 'legend.labelspacing': 0.45, #", "'_15font' fitplotfilename = SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' # fitdatafilename = SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings = { 'print':True, 'number", "import time import os import pickle import json from scipy import fft from", "params_dims_locs['3He locations'][Hekey]['location'] He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) # %% # with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as filehandle: # Exp2_Opt_Params_3_sources =", "Exp2_optimization_settings = { 'print':True, 'number of sources':3, 'location dimensions':3, 'moment dimensions':3, 'location coordinate", "SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' # fitdatafilename = SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings = { 'print':True, 'number of sources':3, 'location", "avg'][nu] ]), #theta positive for ac, negative for clockwise 'B':{ '3He 1':{ 'Z':np.array([]),", "max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy = (nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width = 1.5,headwidth=6, shrink=0.),\\ horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy = (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\", "domain']['1'].keys(): plt.figure() i_num = 0 B_max = 0 max_at_nu = 0 max_at_11nu =", "\"forward\") # %% indnu = (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu = (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table = {} #", "= {} # FFT_amp_table['frequency'] .append(nu) FFT_amp_table[nu] = {} FFT_amp_table[11*nu] = {} B_max_table =", "now = datetime.now import matplotlib.pyplot as plt 
import matplotlib as mpl import time", "from datetime import datetime now = datetime.now import matplotlib.pyplot as plt import matplotlib", "'optimize bar location':True, 'significant figures':n_sig } Exp2_plot_settings = { 'plot':True, # 'memo':'{} Hz", "X&Y inverted)'.format(nu), # 'memo':'{} Hz'.format(nu), 'doubleplot':False, 'saveplot':SaveFitFigs, 'dpi':dpiN, 'figname':fitplotfilename, 'print sigfigs':n_print_sigfigs } Exp2_save_settings", "'utf-8') as filehandle: Exp2_Opt_Params_3_sources = json.loads(filehandle.read()) # %% Exp2_Opt_Params_3_sources # %% Exp2_Opt_Params_3_sources_noDC_noBar =", "Exp2_plot_settings = { 'plot':True, # 'memo':'{} Hz (AV X&Y inverted)'.format(nu), # 'memo':'{} Hz'.format(nu),", "{:0.1f} |\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\") print(\"-------------------\") # %% for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.figure()", "Exp2_data_cut = pickle.load(file_obj) # %% [markdown] # ## Load parameters ## # %%", "binsize = Field_At_He_location_for_FFT['time'][2] - Field_At_He_location_for_FFT['time'][1] Field_At_He_location_for_FFT['freq']= fft.rfftfreq(n = numsamples,d=binsize) # %% for Hekey", "dark_plots: mpl.rcParams.update(q) # %matplotlib inline mpl.rcParams.update({ #'legend.borderpad': 0.3, #'legend.borderaxespad': 0.25, # 'legend.columnspacing': 0.6,", "# # %% Sample_settings = { 'rotor dimensions':rtr_dims, 'sensor locations':He_sample_locations, # 'bar location':0,", "freq domain'] = {} numsamples = n_reps*Field_At_He_location_for_FFT['time'].size binsize = Field_At_He_location_for_FFT['time'][2] - Field_At_He_location_for_FFT['time'][1] Field_At_He_location_for_FFT['freq']=", "# %% He_sample_locations =deepcopy(params_dims_locs['3He locations']) for Hekey in He_sample_locations.keys(): string_to_parse = params_dims_locs['3He locations'][Hekey]['location']", "= np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]).max() # %% print(\"FFT Amplitudes 
calculated at locations inside He", "LoadDataDir_Exp2 = '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 # The other notebook stored the pickle in the same", "scipy.optimize import minimize from datetime import datetime now = datetime.now import matplotlib.pyplot as", "#picoTesla } # %% Data_At_Sample = { 'theta':np.concatenate([Exp2_data_cut['theta avg'][nu] # ,360+Exp2_data_cut['theta avg'][nu] ]),", "50 # %% Field_At_He_location_for_FFT = {} Field_At_He_location_for_FFT['time'] = Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B time domain'] =", "else: # %matplotlib inline # mpl.rcParams.update(mpl.rcParamsDefault) font = { # 'weight' : 'normal',", "= '../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2 = '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 # The other notebook stored the pickle in", "import fft from scipy.signal.windows import hann from copy import deepcopy from scipy.stats import", ",label = axiskey ,alpha = 1-i_num/3) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu", "{ 'theta':np.concatenate([Exp2_data_cut['theta avg'][nu] # ,360+Exp2_data_cut['theta avg'][nu] ]), #theta positive for ac, negative for", "Field_At_He_location_for_FFT['B freq domain']['1'].keys(): plt.figure() i_num = 0 B_max = 0 max_at_nu = 0", "{} numsamples = n_reps*Field_At_He_location_for_FFT['time'].size binsize = Field_At_He_location_for_FFT['time'][2] - Field_At_He_location_for_FFT['time'][1] Field_At_He_location_for_FFT['freq']= fft.rfftfreq(n = numsamples,d=binsize)", "loadmat import numpy as np # from scipy.optimize import minimize from datetime import", "= {} Field_At_He_location_for_FFT['B freq domain'][Hekey] = {} for axiskey in field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B time", "chi2 # %% SaveFitFigs = True # SaveFitData = True dpiN = 1000", "Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey] = 4*fft.rfft(Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]*hann(numsamples),norm 
= \"forward\") # %% indnu =", "{} B_max_table[Hekey] = {} for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B", "= axiskey ,alpha = 1-i_num/3) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu =", "[Exp2_Opt_Params_3_sources[-1]] # %% # with open('../Params/Params_4sources.pk','rb') as filehandle: # Exp2_Opt_Params_4_sources = pickle.load(filehandle) #", "datetime now = datetime.now import matplotlib.pyplot as plt import matplotlib as mpl import", "for Hekey in He_sample_locations.keys(): string_to_parse = params_dims_locs['3He locations'][Hekey]['location'] He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) # %% # with", "from scipy.optimize import minimize from datetime import datetime now = datetime.now import matplotlib.pyplot", "fit data':False, # 'fit data filename':fitdatafilename } Exp2_all_settings = { 'experiment settings':Sample_settings, 'data':Data_At_Sample,", "for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = Hekey", "Hekey in He_sample_locations.keys(): string_to_parse = params_dims_locs['3He locations'][Hekey]['location'] He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) # %% # with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb')", "'optimization settings':Exp2_optimization_settings, 'plot settings':Exp2_plot_settings, 'save settings':Exp2_save_settings } Exp2_Opt_Params = Exp2_Opt_Params_3_sources_zeroDC field_at_sample = FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings)", "open('../Params/Exp2_dimensions_and_locations.json', 'r') as fp: 
params_dims_locs = json.load(fp) # %% params_dims_locs # %% rtr_dims", "'memo':'{} Hz (AV X&Y inverted)'.format(nu), # 'memo':'{} Hz'.format(nu), 'doubleplot':False, 'saveplot':SaveFitFigs, 'dpi':dpiN, 'figname':fitplotfilename, 'print", "Exp2_Opt_Params_3_sources[:-5] Exp2_Opt_Params_3_sources_zeroDC = Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0] + [Exp2_Opt_Params_3_sources[-1]] # %% # with open('../Params/Params_4sources.pk','rb') as", "in field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey] = np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey] = 4*fft.rfft(Field_At_He_location_for_FFT['B time", "for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): FFT_amp_table[nu][Hekey] = {} FFT_amp_table[11*nu][Hekey] = {} B_max_table[Hekey]", "avg'][nu] # ,360+Exp2_data_cut['theta avg'][nu] ]), #theta positive for ac, negative for clockwise 'B':{", "import chi2 # %% SaveFitFigs = True # SaveFitData = True dpiN =", "time domain'][Hekey][axiskey]*hann(numsamples),norm = \"forward\") # %% indnu = (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu = (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table", "FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu = max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy = (nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width = 1.5,headwidth=6, shrink=0.),\\ horizontalalignment='center')", "Location # %% n_reps = 50 # %% Field_At_He_location_for_FFT = {} Field_At_He_location_for_FFT['time'] =", "'Y':np.array([]), 'X':np.array([]) }, } } # %% # nowtext = now().strftime(\"%Y%m%d%H%M\") nowtext =", "'B':{ '3He 1':{ 'Z':np.array([]), 'Y':np.array([]), 'X':np.array([]) }, } } # %% # nowtext", "#theta positive for ac, negative for clockwise 'B':{ '3He 1':{ 'Z':np.array([]), 'Y':np.array([]), 'X':np.array([])", "# %load_ext autoreload # %% 
from B_calc_script import FieldAtAnyLocation from B_calc_script import signif", "= \" \") for Hekey in FFT_amp_table[freq].keys(): print('Sensor {} |'.format(Hekey), end = \"", "= np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]).max() # %% print(\"FFT Amplitudes", "in FFT_amp_table.keys(): print('{} Hz'.format(freq), end = \"\\n\") print(\"-------------------\") print(\"Axis |\", end = \"", "plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches = 'tight',dpi = dpiN) # %% for axiskey in Field_At_He_location_for_FFT['B freq domain']['1'].keys():", "other notebook stored the pickle in the same folder if SaveFitFigs: if not", "({:0.1f} s measurement duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field (pT)') plt.grid() plt.legend(loc = 'upper left') if", "} Exp2_plot_settings = { 'plot':True, # 'memo':'{} Hz (AV X&Y inverted)'.format(nu), # 'memo':'{}", "horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy = (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width = 1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution of", "inline mpl.rcParams.update({ #'legend.borderpad': 0.3, #'legend.borderaxespad': 0.25, # 'legend.columnspacing': 0.6, # 'legend.handlelength': 0.7, #'legend.handleheight':", "print(\"-------------------\") # %% for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.figure() i_num = 0", "Exp2_Opt_Params = Exp2_Opt_Params_3_sources_zeroDC field_at_sample = FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) # %% [markdown] # # FFT Field", "field at sample location # # %% Sample_settings = { 'rotor dimensions':rtr_dims, 'sensor", "data':False, # 'fit data filename':fitdatafilename } 
Exp2_all_settings = { 'experiment settings':Sample_settings, 'data':Data_At_Sample, 'optimization", "# %% # with open('../Params/Params_4sources.pk','rb') as filehandle: # Exp2_Opt_Params_4_sources = pickle.load(filehandle) # %%", "LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' # %% with open(Exp2_data_filename,'rb') as file_obj: Exp2_data_cut = pickle.load(file_obj) # %% [markdown]", "%% Exp2_Opt_Params_3_sources_noDC_noBar = Exp2_Opt_Params_3_sources[:-5] Exp2_Opt_Params_3_sources_zeroDC = Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0] + [Exp2_Opt_Params_3_sources[-1]] # %% #", "Hekey in FFT_amp_table[freq].keys(): print('Sensor {} |'.format(Hekey), end = \" \") print(\"\\n\") for axiskey", "import datetime now = datetime.now import matplotlib.pyplot as plt import matplotlib as mpl", "[markdown] # # Load data # %% Exp2_data_filename = LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' # %% with", "'../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' # SaveDataDir_Exp2 = '../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2 = '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 # The other notebook stored", "# 'DC shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] # 'deltaB':1 #picoTesla } # %% Data_At_Sample = { 'theta':np.concatenate([Exp2_data_cut['theta", "# %% for Hekey in field_at_sample.keys(): Field_At_He_location_for_FFT['B time domain'][Hekey] = {} Field_At_He_location_for_FFT['B freq", "Exp2_Opt_Params_3_sources = pickle.load(filehandle) # Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding = 'utf-8') as filehandle: Exp2_Opt_Params_3_sources =", "'fit data filename':fitdatafilename } Exp2_all_settings = { 'experiment settings':Sample_settings, 'data':Data_At_Sample, 'optimization settings':Exp2_optimization_settings, 'plot", "'tight',dpi = dpiN) # %% for axiskey in Field_At_He_location_for_FFT['B freq domain']['1'].keys(): plt.figure() i_num", 
"Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B time domain'] = {} Field_At_He_location_for_FFT['B freq domain'] = {} numsamples =", "from copy import deepcopy from scipy.stats import chi2 # %% SaveFitFigs = True", "%% # %load_ext autoreload # %% from B_calc_script import FieldAtAnyLocation from B_calc_script import", "{ 'print':True, 'number of sources':3, 'location dimensions':3, 'moment dimensions':3, 'location coordinate system':'polar', 'moment", "import loadmat import numpy as np # from scipy.optimize import minimize from datetime", "not os.path.exists(SaveDataDir_Exp2): # os.makedirs(SaveDataDir_Exp2) # %% if dark_plots: mpl.rcParams.update(q) # %matplotlib inline mpl.rcParams.update({", "s measurement duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field (pT)') plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs:", "Field_At_He_location_for_FFT['time'][1] Field_At_He_location_for_FFT['freq']= fft.rfftfreq(n = numsamples,d=binsize) # %% for Hekey in field_at_sample.keys(): Field_At_He_location_for_FFT['B time", "field (pT)') plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches = 'tight',dpi =", "for ac, negative for clockwise 'B':{ '3He 1':{ 'Z':np.array([]), 'Y':np.array([]), 'X':np.array([]) }, }", "ind11nu = (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table = {} # FFT_amp_table['frequency'] .append(nu) FFT_amp_table[nu] = {} FFT_amp_table[11*nu]", "locations']) for Hekey in He_sample_locations.keys(): string_to_parse = params_dims_locs['3He locations'][Hekey]['location'] He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) # %% #", "# %% for axiskey in Field_At_He_location_for_FFT['B freq domain']['1'].keys(): plt.figure() i_num = 0 B_max", "{} Field_At_He_location_for_FFT['B 
freq domain'][Hekey] = {} for axiskey in field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]", "freq domain'][Hekey][axiskey]) ,label = axiskey ,alpha = 1-i_num/3) i_num +=1 B_max = max(B_max,", "= 4*fft.rfft(Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]*hann(numsamples),norm = \"forward\") # %% indnu = (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu =", "= {} for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][indnu]).max()", "0 for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label =", "np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]).max()", "# %% with open(Exp2_data_filename,'rb') as file_obj: Exp2_data_cut = pickle.load(file_obj) # %% [markdown] #", "autoreload # %% from B_calc_script import FieldAtAnyLocation from B_calc_script import signif # %%", "{} FFT_amp_table[11*nu][Hekey] = {} B_max_table[Hekey] = {} for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys():", "\") for Hekey in FFT_amp_table[freq].keys(): print('Sensor {} |'.format(Hekey), end = \" \") print(\"\\n\")", "minimize from datetime import datetime now = datetime.now import matplotlib.pyplot as plt import", "freq in FFT_amp_table.keys(): print('{} Hz'.format(freq), end = \"\\n\") print(\"-------------------\") print(\"Axis |\", end =", "in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] 
,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = axiskey ,alpha =", "<gh_stars>0 # %% from scipy.io import loadmat import numpy as np # from", "with open('../Params/Params_4sources.pk','rb') as filehandle: # Exp2_Opt_Params_4_sources = pickle.load(filehandle) # %% [markdown] # #", "q = mpl.rc_params_from_file('matplotlibrc_dark') else: dark = 'whitebg/' mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2 = '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' # SaveDataDir_Exp2", "domain'].keys(): FFT_amp_table[nu][Hekey] = {} FFT_amp_table[11*nu][Hekey] = {} B_max_table[Hekey] = {} for axiskey in", "# %% n_reps = 50 # %% Field_At_He_location_for_FFT = {} Field_At_He_location_for_FFT['time'] = Data_At_Sample['theta']/360/nu", "# %matplotlib inline mpl.rcParams.update({ #'legend.borderpad': 0.3, #'legend.borderaxespad': 0.25, # 'legend.columnspacing': 0.6, # 'legend.handlelength':", "np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey] = 4*fft.rfft(Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]*hann(numsamples),norm = \"forward\") # %% indnu", "Sample_settings = { 'rotor dimensions':rtr_dims, 'sensor locations':He_sample_locations, # 'bar location':0, # 'DC shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz]", "Exp2_Opt_Params_3_sources_zeroDC = Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0] + [Exp2_Opt_Params_3_sources[-1]] # %% # with open('../Params/Params_4sources.pk','rb') as filehandle:", "%% SaveFitFigs = True # SaveFitData = True dpiN = 1000 dark_plots =", "from B_calc_script import FieldAtAnyLocation from B_calc_script import signif # %% # %autoreload 2", "+ [Exp2_Opt_Params_3_sources[-1]] # %% # with open('../Params/Params_4sources.pk','rb') as filehandle: # Exp2_Opt_Params_4_sources = pickle.load(filehandle)", "Sample Location # %% n_reps = 50 # %% Field_At_He_location_for_FFT = {} Field_At_He_location_for_FFT['time']", "# 
Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding = 'utf-8') as filehandle: Exp2_Opt_Params_3_sources = json.loads(filehandle.read()) # %%", "axiskey in Field_At_He_location_for_FFT['B freq domain']['1'].keys(): plt.figure() i_num = 0 B_max = 0 max_at_nu", "indnu = (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu = (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table = {} # FFT_amp_table['frequency'] .append(nu) FFT_amp_table[nu]", "## # %% nu = 5 # %% with open('../Params/Exp2_dimensions_and_locations.json', 'r') as fp:", "Hekey ,alpha = 1-i_num/4) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu,", "open(Exp2_data_filename,'rb') as file_obj: Exp2_data_cut = pickle.load(file_obj) # %% [markdown] # ## Load parameters", "# %% if dark_plots: mpl.rcParams.update(q) # %matplotlib inline mpl.rcParams.update({ #'legend.borderpad': 0.3, #'legend.borderaxespad': 0.25,", "sources':3, 'location dimensions':3, 'moment dimensions':3, 'location coordinate system':'polar', 'moment coordinate system':'polar', # 'chi", "4*fft.rfft(Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]*hann(numsamples),norm = \"forward\") # %% indnu = (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu = (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu)", "filename':fitdatafilename } Exp2_all_settings = { 'experiment settings':Sample_settings, 'data':Data_At_Sample, 'optimization settings':Exp2_optimization_settings, 'plot settings':Exp2_plot_settings, 'save", "True, 'font.size':13, }) else: # %matplotlib inline # mpl.rcParams.update(mpl.rcParamsDefault) font = { #", "%% Exp2_data_filename = LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' # %% with open(Exp2_data_filename,'rb') as file_obj: Exp2_data_cut = pickle.load(file_obj)", "# %% Exp2_Opt_Params_3_sources_noDC_noBar = Exp2_Opt_Params_3_sources[:-5] 
Exp2_Opt_Params_3_sources_zeroDC = Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0] + [Exp2_Opt_Params_3_sources[-1]] # %%", "= 1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities to field at $^3$He", "impurities to field at $^3$He location {}\\n ({:0.1f} s measurement duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field", "(pT)'.format(axiskey)) plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}_component.png'.format(axiskey),bbox_inches = 'tight',dpi = dpiN)", "= {} B_max_table = {} for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): FFT_amp_table[nu][Hekey] =", "freq domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = axiskey ,alpha = 1-i_num/3) i_num", "# os.makedirs(SaveDataDir_Exp2) # %% if dark_plots: mpl.rcParams.update(q) # %matplotlib inline mpl.rcParams.update({ #'legend.borderpad': 0.3,", "if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches = 'tight',dpi = dpiN) # %% for axiskey in Field_At_He_location_for_FFT['B", "Field_At_He_location_for_FFT['B freq domain'] = {} numsamples = n_reps*Field_At_He_location_for_FFT['time'].size binsize = Field_At_He_location_for_FFT['time'][2] - Field_At_He_location_for_FFT['time'][1]", "as mpl import time import os import pickle import json from scipy import", "import FieldAtAnyLocation from B_calc_script import signif # %% # %autoreload 2 # %%", "in FFT_amp_table[freq].keys(): print(\" {:0.1f} |\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\") print(\"-------------------\") # %% for Hekey in Field_At_He_location_for_FFT['B", "for key in rtr_dims: rtr_dims[key] = signif(rtr_dims[key],n_sig) # %% He_sample_locations =deepcopy(params_dims_locs['3He 
locations']) for", "as plt import matplotlib as mpl import time import os import pickle import", "= True n_sig = 8 n_print_sigfigs = 3 if dark_plots: dark='darkbg/' q =", "%% if dark_plots: mpl.rcParams.update(q) # %matplotlib inline mpl.rcParams.update({ #'legend.borderpad': 0.3, #'legend.borderaxespad': 0.25, #", "# %% # nowtext = now().strftime(\"%Y%m%d%H%M\") nowtext = '_15font' fitplotfilename = SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' #", "not os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2) # if SaveFitData: # if not os.path.exists(SaveDataDir_Exp2): # os.makedirs(SaveDataDir_Exp2) #", "'whitebg/' mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2 = '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' # SaveDataDir_Exp2 = '../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2 = '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 #", "= 0 for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label", "as fp: params_dims_locs = json.load(fp) # %% params_dims_locs # %% rtr_dims = params_dims_locs['rotor_dims']", "'Times New Roman'} plt.rc('font', **font) # mpl.rcParams.update({'font.family':'serif'}) # %% # %load_ext autoreload #", "in Field_At_He_location_for_FFT['B freq domain'].keys(): FFT_amp_table[nu][Hekey] = {} FFT_amp_table[11*nu][Hekey] = {} B_max_table[Hekey] = {}", "**font) # mpl.rcParams.update({'font.family':'serif'}) # %% # %load_ext autoreload # %% from B_calc_script import", "Field_At_He_location_for_FFT['time'] = Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B time domain'] = {} Field_At_He_location_for_FFT['B freq domain'] = {}", "0 for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) 
,label =", "end = \" \") print(\"\\n\") for axiskey in FFT_amp_table[freq][Hekey].keys(): print(\" \"+axiskey+\" |\",end=\"\") for", "'weight' : 'normal', 'size' : 15, 'family': 'Times New Roman'} plt.rc('font', **font) #", "{} Field_At_He_location_for_FFT['time'] = Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B time domain'] = {} Field_At_He_location_for_FFT['B freq domain'] =", "= params_dims_locs['3He locations'][Hekey]['location'] He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) # %% # with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as filehandle: # Exp2_Opt_Params_3_sources", "import pickle import json from scipy import fft from scipy.signal.windows import hann from", "0.2, # 'legend.labelspacing': 0.45, # 'text.usetex': True, 'font.size':13, }) else: # %matplotlib inline", "field at $^3$He location \\n ({:0.1f} s measurement duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field, {} component", "= 'utf-8') as filehandle: Exp2_Opt_Params_3_sources = json.loads(filehandle.read()) # %% Exp2_Opt_Params_3_sources # %% Exp2_Opt_Params_3_sources_noDC_noBar", "%% Field_At_He_location_for_FFT = {} Field_At_He_location_for_FFT['time'] = Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B time domain'] = {} Field_At_He_location_for_FFT['B", "= { 'rotor dimensions':rtr_dims, 'sensor locations':He_sample_locations, # 'bar location':0, # 'DC shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] #", "= max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu = max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy = (nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width = 1.5,headwidth=6,", "'moment dimensions':3, 'location coordinate system':'polar', 'moment coordinate system':'polar', # 'chi 
tolerance':10, 'optimize DC", "# %% Exp2_data_filename = LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' # %% with open(Exp2_data_filename,'rb') as file_obj: Exp2_data_cut =", "## Load parameters ## # %% nu = 5 # %% with open('../Params/Exp2_dimensions_and_locations.json',", "measurement duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field (pT)') plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches", "system':'polar', # 'chi tolerance':10, 'optimize DC shifts':True, 'optimize bar location':True, 'significant figures':n_sig }", "# %% from scipy.io import loadmat import numpy as np # from scipy.optimize", "# %% [markdown] # # Calculate field at sample location # # %%", "inside He spheroid\") for freq in FFT_amp_table.keys(): print('{} Hz'.format(freq), end = \"\\n\") print(\"-------------------\")", "Exp2_Opt_Params_4_sources = pickle.load(filehandle) # %% [markdown] # # Calculate field at sample location", "= { 'plot':True, # 'memo':'{} Hz (AV X&Y inverted)'.format(nu), # 'memo':'{} Hz'.format(nu), 'doubleplot':False,", "to field at $^3$He location {}\\n ({:0.1f} s measurement duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field (pT)')", "He_sample_locations.keys(): string_to_parse = params_dims_locs['3He locations'][Hekey]['location'] He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) # %% # with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as filehandle:", "deepcopy from scipy.stats import chi2 # %% SaveFitFigs = True # SaveFitData =", "at locations inside He spheroid\") for freq in FFT_amp_table.keys(): print('{} Hz'.format(freq), end =", "[0,0,0] + [Exp2_Opt_Params_3_sources[-1]] # %% # with open('../Params/Params_4sources.pk','rb') as filehandle: # 
Exp2_Opt_Params_4_sources =", "SaveFitFigs: if not os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2) # if SaveFitData: # if not os.path.exists(SaveDataDir_Exp2): #", "plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches = 'tight',dpi = dpiN) # %%", "{} Field_At_He_location_for_FFT['B freq domain'] = {} numsamples = n_reps*Field_At_He_location_for_FFT['time'].size binsize = Field_At_He_location_for_FFT['time'][2] -", "matplotlib as mpl import time import os import pickle import json from scipy", "Field_At_He_location_for_FFT['B freq domain'][Hekey] = {} for axiskey in field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey] =", "for Hekey in FFT_amp_table[freq].keys(): print('Sensor {} |'.format(Hekey), end = \" \") print(\"\\n\") for", "max_at_nu = 0 max_at_11nu = 0 for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq']", "plt.ylabel('Magnetic field (pT)') plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches = 'tight',dpi", "Field_At_He_location_for_FFT['B time domain'][Hekey] = {} Field_At_He_location_for_FFT['B freq domain'][Hekey] = {} for axiskey in", "{}\\n ({:0.1f} s measurement duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field (pT)') plt.grid() plt.legend(loc = 'upper left')", "B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu = max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy", "FFT_amp_table[11*nu][Hekey] = {} B_max_table[Hekey] = {} for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey]", "# 'weight' : 'normal', 'size' : 15, 'family': 'Times New Roman'} 
plt.rc('font', **font)", "import matplotlib as mpl import time import os import pickle import json from", "'plot':True, # 'memo':'{} Hz (AV X&Y inverted)'.format(nu), # 'memo':'{} Hz'.format(nu), 'doubleplot':False, 'saveplot':SaveFitFigs, 'dpi':dpiN,", "3 if dark_plots: dark='darkbg/' q = mpl.rc_params_from_file('matplotlibrc_dark') else: dark = 'whitebg/' mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2", "rtr_dims[key] = signif(rtr_dims[key],n_sig) # %% He_sample_locations =deepcopy(params_dims_locs['3He locations']) for Hekey in He_sample_locations.keys(): string_to_parse", "'deltaB':1 #picoTesla } # %% Data_At_Sample = { 'theta':np.concatenate([Exp2_data_cut['theta avg'][nu] # ,360+Exp2_data_cut['theta avg'][nu]", "location \\n ({:0.1f} s measurement duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field, {} component (pT)'.format(axiskey)) plt.grid() plt.legend(loc", "FFT_amp_table[11*nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]).max() # %% print(\"FFT", "= max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy = (nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width = 1.5,headwidth=6, shrink=0.),\\ horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy =", "domain'] = {} numsamples = n_reps*Field_At_He_location_for_FFT['time'].size binsize = Field_At_He_location_for_FFT['time'][2] - Field_At_He_location_for_FFT['time'][1] Field_At_He_location_for_FFT['freq']= fft.rfftfreq(n", "field_at_sample.keys(): Field_At_He_location_for_FFT['B time domain'][Hekey] = {} Field_At_He_location_for_FFT['B freq domain'][Hekey] = {} for axiskey", ",np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = Hekey ,alpha = 1-i_num/4) i_num +=1 B_max =", "'save 
settings':Exp2_save_settings } Exp2_Opt_Params = Exp2_Opt_Params_3_sources_zeroDC field_at_sample = FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) # %% [markdown] #", "print(\" {:0.1f} |\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\") print(\"-------------------\") # %% for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys():", "np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]).max() # %% print(\"FFT Amplitudes calculated", "from B_calc_script import signif # %% # %autoreload 2 # %% # %%", "%% with open(Exp2_data_filename,'rb') as file_obj: Exp2_data_cut = pickle.load(file_obj) # %% [markdown] # ##", "Exp2_Opt_Params_3_sources # %% Exp2_Opt_Params_3_sources_noDC_noBar = Exp2_Opt_Params_3_sources[:-5] Exp2_Opt_Params_3_sources_zeroDC = Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0] + [Exp2_Opt_Params_3_sources[-1]] #", "else: dark = 'whitebg/' mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2 = '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' # SaveDataDir_Exp2 = '../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2", "#'legend.handletextpad': 0.2, # 'legend.labelspacing': 0.45, # 'text.usetex': True, 'font.size':13, }) else: # %matplotlib", "field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey] = np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey] = 4*fft.rfft(Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]*hann(numsamples),norm", "os.makedirs(SaveDataDir_Exp2) # %% if dark_plots: mpl.rcParams.update(q) # %matplotlib inline mpl.rcParams.update({ #'legend.borderpad': 0.3, #'legend.borderaxespad':", "shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] # 'deltaB':1 #picoTesla } # %% Data_At_Sample = { 'theta':np.concatenate([Exp2_data_cut['theta 
avg'][nu] #", "in the same folder if SaveFitFigs: if not os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2) # if SaveFitData:", "# %% [markdown] # # Load data # %% Exp2_data_filename = LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' #", "# %% [markdown] # # FFT Field at Sample Location # %% n_reps", "'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}_component.png'.format(axiskey),bbox_inches = 'tight',dpi = dpiN) # %% # %%", "signif # %% # %autoreload 2 # %% # %% [markdown] # #", "# %% rtr_dims = params_dims_locs['rotor_dims'] for key in rtr_dims: rtr_dims[key] = signif(rtr_dims[key],n_sig) #", "dpiN) # %% for axiskey in Field_At_He_location_for_FFT['B freq domain']['1'].keys(): plt.figure() i_num = 0", "numsamples = n_reps*Field_At_He_location_for_FFT['time'].size binsize = Field_At_He_location_for_FFT['time'][2] - Field_At_He_location_for_FFT['time'][1] Field_At_He_location_for_FFT['freq']= fft.rfftfreq(n = numsamples,d=binsize) #", "%% for Hekey in field_at_sample.keys(): Field_At_He_location_for_FFT['B time domain'][Hekey] = {} Field_At_He_location_for_FFT['B freq domain'][Hekey]", "'moment coordinate system':'polar', # 'chi tolerance':10, 'optimize DC shifts':True, 'optimize bar location':True, 'significant", "for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.figure() i_num = 0 B_max = 0", "# if not os.path.exists(SaveDataDir_Exp2): # os.makedirs(SaveDataDir_Exp2) # %% if dark_plots: mpl.rcParams.update(q) # %matplotlib", "max_at_11nu = max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy = (nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width = 1.5,headwidth=6, shrink=0.),\\ horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy", "plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches = 'tight',dpi = dpiN) #", "if dark_plots: dark='darkbg/' 
q = mpl.rc_params_from_file('matplotlibrc_dark') else: dark = 'whitebg/' mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2 =", "rtr_dims: rtr_dims[key] = signif(rtr_dims[key],n_sig) # %% He_sample_locations =deepcopy(params_dims_locs['3He locations']) for Hekey in He_sample_locations.keys():", "# %matplotlib inline # mpl.rcParams.update(mpl.rcParamsDefault) font = { # 'weight' : 'normal', 'size'", "plt.rc('font', **font) # mpl.rcParams.update({'font.family':'serif'}) # %% # %load_ext autoreload # %% from B_calc_script", "%% # with open('../Params/Params_4sources.pk','rb') as filehandle: # Exp2_Opt_Params_4_sources = pickle.load(filehandle) # %% [markdown]", "n_print_sigfigs = 3 if dark_plots: dark='darkbg/' q = mpl.rc_params_from_file('matplotlibrc_dark') else: dark = 'whitebg/'", "#'legend.handleheight': 0.4, #'legend.handletextpad': 0.2, # 'legend.labelspacing': 0.45, # 'text.usetex': True, 'font.size':13, }) else:", "signif(rtr_dims[key],n_sig) # %% He_sample_locations =deepcopy(params_dims_locs['3He locations']) for Hekey in He_sample_locations.keys(): string_to_parse = params_dims_locs['3He", "=deepcopy(params_dims_locs['3He locations']) for Hekey in He_sample_locations.keys(): string_to_parse = params_dims_locs['3He locations'][Hekey]['location'] He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) # %%", "with open('../Params/Exp2_dimensions_and_locations.json', 'r') as fp: params_dims_locs = json.load(fp) # %% params_dims_locs # %%", "}, } } # %% # nowtext = now().strftime(\"%Y%m%d%H%M\") nowtext = '_15font' fitplotfilename", "'location dimensions':3, 'moment dimensions':3, 'location coordinate system':'polar', 'moment coordinate system':'polar', # 'chi tolerance':10,", "location':True, 'significant figures':n_sig } Exp2_plot_settings = { 'plot':True, # 'memo':'{} Hz (AV X&Y", "# nowtext = now().strftime(\"%Y%m%d%H%M\") nowtext = '_15font' 
fitplotfilename = SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' # fitdatafilename =", "'memo':'{} Hz'.format(nu), 'doubleplot':False, 'saveplot':SaveFitFigs, 'dpi':dpiN, 'figname':fitplotfilename, 'print sigfigs':n_print_sigfigs } Exp2_save_settings ={ 'save fit", "for Hekey in field_at_sample.keys(): Field_At_He_location_for_FFT['B time domain'][Hekey] = {} Field_At_He_location_for_FFT['B freq domain'][Hekey] =", "horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities to field at $^3$He location {}\\n", "\\n ({:0.1f} s measurement duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field, {} component (pT)'.format(axiskey)) plt.grid() plt.legend(loc =", "B_max = 0 max_at_nu = 0 max_at_11nu = 0 for axiskey in Field_At_He_location_for_FFT['B", "= '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' # SaveDataDir_Exp2 = '../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2 = '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 # The other notebook", "nu = 5 # %% with open('../Params/Exp2_dimensions_and_locations.json', 'r') as fp: params_dims_locs = json.load(fp)", "print(\"Axis |\", end = \" \") for Hekey in FFT_amp_table[freq].keys(): print('Sensor {} |'.format(Hekey),", "0 B_max = 0 max_at_nu = 0 max_at_11nu = 0 for Hekey in", "B_max_table = {} for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): FFT_amp_table[nu][Hekey] = {} FFT_amp_table[11*nu][Hekey]", "2 # %% # %% [markdown] # # Load data # %% Exp2_data_filename", "dimensions':3, 'location coordinate system':'polar', 'moment coordinate system':'polar', # 'chi tolerance':10, 'optimize DC shifts':True,", "# FFT Field at Sample Location # %% n_reps = 50 # %%", "FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy = (nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width = 1.5,headwidth=6, shrink=0.),\\ 
horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy = (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width", "rtr_dims = params_dims_locs['rotor_dims'] for key in rtr_dims: rtr_dims[key] = signif(rtr_dims[key],n_sig) # %% He_sample_locations", "Exp2_Opt_Params_3_sources_zeroDC field_at_sample = FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) # %% [markdown] # # FFT Field at Sample", "FFT_amp_table.keys(): print('{} Hz'.format(freq), end = \"\\n\") print(\"-------------------\") print(\"Axis |\", end = \" \")", "freq domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = Hekey ,alpha = 1-i_num/4) i_num", "{ 'rotor dimensions':rtr_dims, 'sensor locations':He_sample_locations, # 'bar location':0, # 'DC shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] # 'deltaB':1", "max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu = max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy = (nu,max_at_nu),xytext=(nu,B_max*1.4),\\", "(11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width = 1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities to field", "arrowprops=dict(color='fuchsia',alpha=0.5,width = 1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities to field at", "time domain'][Hekey] = {} Field_At_He_location_for_FFT['B freq domain'][Hekey] = {} for axiskey in field_at_sample[Hekey].keys():", "{} for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): FFT_amp_table[nu][Hekey] = {} FFT_amp_table[11*nu][Hekey] = {}", "%% He_sample_locations =deepcopy(params_dims_locs['3He 
locations']) for Hekey in He_sample_locations.keys(): string_to_parse = params_dims_locs['3He locations'][Hekey]['location'] He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']'))", "import os import pickle import json from scipy import fft from scipy.signal.windows import", "datetime.now import matplotlib.pyplot as plt import matplotlib as mpl import time import os", "0.6, # 'legend.handlelength': 0.7, #'legend.handleheight': 0.4, #'legend.handletextpad': 0.2, # 'legend.labelspacing': 0.45, # 'text.usetex':", "'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches = 'tight',dpi = dpiN) # %% for axiskey", "in Field_At_He_location_for_FFT['B freq domain']['1'].keys(): plt.figure() i_num = 0 B_max = 0 max_at_nu =", "= params_dims_locs['rotor_dims'] for key in rtr_dims: rtr_dims[key] = signif(rtr_dims[key],n_sig) # %% He_sample_locations =deepcopy(params_dims_locs['3He", "as filehandle: # Exp2_Opt_Params_3_sources = pickle.load(filehandle) # Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding = 'utf-8') as", "} # %% Data_At_Sample = { 'theta':np.concatenate([Exp2_data_cut['theta avg'][nu] # ,360+Exp2_data_cut['theta avg'][nu] ]), #theta", "for freq in FFT_amp_table.keys(): print('{} Hz'.format(freq), end = \"\\n\") print(\"-------------------\") print(\"Axis |\", end", "if SaveFitData: # if not os.path.exists(SaveDataDir_Exp2): # os.makedirs(SaveDataDir_Exp2) # %% if dark_plots: mpl.rcParams.update(q)", "np # from scipy.optimize import minimize from datetime import datetime now = datetime.now", "He_sample_locations =deepcopy(params_dims_locs['3He locations']) for Hekey in He_sample_locations.keys(): string_to_parse = params_dims_locs['3He locations'][Hekey]['location'] 
He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) #", "# # FFT Field at Sample Location # %% n_reps = 50 #", "= SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' # fitdatafilename = SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings = { 'print':True, 'number of sources':3,", "= { 'experiment settings':Sample_settings, 'data':Data_At_Sample, 'optimization settings':Exp2_optimization_settings, 'plot settings':Exp2_plot_settings, 'save settings':Exp2_save_settings } Exp2_Opt_Params", "|\", end = \" \") for Hekey in FFT_amp_table[freq].keys(): print('Sensor {} |'.format(Hekey), end", "Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = Hekey ,alpha", "in FFT_amp_table[freq][Hekey].keys(): print(\" \"+axiskey+\" |\",end=\"\") for Hekey in FFT_amp_table[freq].keys(): print(\" {:0.1f} |\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\")", "# 'text.usetex': True, 'font.size':13, }) else: # %matplotlib inline # mpl.rcParams.update(mpl.rcParamsDefault) font =", "# 'legend.handlelength': 0.7, #'legend.handleheight': 0.4, #'legend.handletextpad': 0.2, # 'legend.labelspacing': 0.45, # 'text.usetex': True,", "True n_sig = 8 n_print_sigfigs = 3 if dark_plots: dark='darkbg/' q = mpl.rc_params_from_file('matplotlibrc_dark')", "mpl import time import os import pickle import json from scipy import fft", ",alpha = 1-i_num/4) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey])", "FFT_amp_table[nu][Hekey] = {} FFT_amp_table[11*nu][Hekey] = {} B_max_table[Hekey] = {} for axiskey in Field_At_He_location_for_FFT['B", "B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, 
FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu = max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy = (nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width", "FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) # %% [markdown] # # FFT Field at Sample Location # %%", ": 15, 'family': 'Times New Roman'} plt.rc('font', **font) # mpl.rcParams.update({'font.family':'serif'}) # %% #", "with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding = 'utf-8') as filehandle: Exp2_Opt_Params_3_sources = json.loads(filehandle.read()) # %% Exp2_Opt_Params_3_sources #", "open('../Params/Params_4sources.pk','rb') as filehandle: # Exp2_Opt_Params_4_sources = pickle.load(filehandle) # %% [markdown] # # Calculate", "'figname':fitplotfilename, 'print sigfigs':n_print_sigfigs } Exp2_save_settings ={ 'save fit data':False, # 'fit data filename':fitdatafilename", "={ 'save fit data':False, # 'fit data filename':fitdatafilename } Exp2_all_settings = { 'experiment", "coordinate system':'polar', # 'chi tolerance':10, 'optimize DC shifts':True, 'optimize bar location':True, 'significant figures':n_sig", "= datetime.now import matplotlib.pyplot as plt import matplotlib as mpl import time import", "DC shifts':True, 'optimize bar location':True, 'significant figures':n_sig } Exp2_plot_settings = { 'plot':True, #", "= np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey] = 4*fft.rfft(Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]*hann(numsamples),norm = \"forward\") # %%", ",label = Hekey ,alpha = 1-i_num/4) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu", "= SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings = { 'print':True, 'number of sources':3, 'location dimensions':3, 'moment dimensions':3,", "fft.rfftfreq(n = numsamples,d=binsize) # %% for Hekey in 
field_at_sample.keys(): Field_At_He_location_for_FFT['B time domain'][Hekey] =", "%% # %autoreload 2 # %% # %% [markdown] # # Load data", "n_sig = 8 n_print_sigfigs = 3 if dark_plots: dark='darkbg/' q = mpl.rc_params_from_file('matplotlibrc_dark') else:", "'legend.handlelength': 0.7, #'legend.handleheight': 0.4, #'legend.handletextpad': 0.2, # 'legend.labelspacing': 0.45, # 'text.usetex': True, 'font.size':13,", "mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2 = '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' # SaveDataDir_Exp2 = '../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2 = '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 # The", "negative for clockwise 'B':{ '3He 1':{ 'Z':np.array([]), 'Y':np.array([]), 'X':np.array([]) }, } } #", "# with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as filehandle: # Exp2_Opt_Params_3_sources = pickle.load(filehandle) # Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding", "Data_At_Sample = { 'theta':np.concatenate([Exp2_data_cut['theta avg'][nu] # ,360+Exp2_data_cut['theta avg'][nu] ]), #theta positive for ac,", "= 0 max_at_11nu = 0 for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B", "%matplotlib inline # mpl.rcParams.update(mpl.rcParamsDefault) font = { # 'weight' : 'normal', 'size' :", "font = { # 'weight' : 'normal', 'size' : 15, 'family': 'Times New", "= 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches = 'tight',dpi = dpiN) # %% for", "domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]).max() 
# %%", "= signif(rtr_dims[key],n_sig) # %% He_sample_locations =deepcopy(params_dims_locs['3He locations']) for Hekey in He_sample_locations.keys(): string_to_parse =", "Exp2_Opt_Params_3_sources = json.loads(filehandle.read()) # %% Exp2_Opt_Params_3_sources # %% Exp2_Opt_Params_3_sources_noDC_noBar = Exp2_Opt_Params_3_sources[:-5] Exp2_Opt_Params_3_sources_zeroDC =", "0.4, #'legend.handletextpad': 0.2, # 'legend.labelspacing': 0.45, # 'text.usetex': True, 'font.size':13, }) else: #", "max_at_11nu = 0 for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey])", "# Exp2_Opt_Params_3_sources = pickle.load(filehandle) # Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding = 'utf-8') as filehandle: Exp2_Opt_Params_3_sources", "sample location # # %% Sample_settings = { 'rotor dimensions':rtr_dims, 'sensor locations':He_sample_locations, #", "in field_at_sample.keys(): Field_At_He_location_for_FFT['B time domain'][Hekey] = {} Field_At_He_location_for_FFT['B freq domain'][Hekey] = {} for", "plt.title('Contribution of impurities to field at $^3$He location {}\\n ({:0.1f} s measurement duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1]))", "pickle import json from scipy import fft from scipy.signal.windows import hann from copy", "Exp2_all_settings = { 'experiment settings':Sample_settings, 'data':Data_At_Sample, 'optimization settings':Exp2_optimization_settings, 'plot settings':Exp2_plot_settings, 'save settings':Exp2_save_settings }", "# ## Load parameters ## # %% nu = 5 # %% with", "at $^3$He location {}\\n ({:0.1f} s measurement duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field (pT)') plt.grid() plt.legend(loc", "1-i_num/4) i_num +=1 B_max = max(B_max, 
B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu =", "open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as filehandle: # Exp2_Opt_Params_3_sources = pickle.load(filehandle) # Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding = 'utf-8')", "'rotor dimensions':rtr_dims, 'sensor locations':He_sample_locations, # 'bar location':0, # 'DC shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] # 'deltaB':1 #picoTesla", "(AV X&Y inverted)'.format(nu), # 'memo':'{} Hz'.format(nu), 'doubleplot':False, 'saveplot':SaveFitFigs, 'dpi':dpiN, 'figname':fitplotfilename, 'print sigfigs':n_print_sigfigs }", "print(\"\\n\") print(\"-------------------\") # %% for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.figure() i_num =", "= Exp2_Opt_Params_3_sources_zeroDC field_at_sample = FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) # %% [markdown] # # FFT Field at", "freq domain'][Hekey][axiskey]).max() # %% print(\"FFT Amplitudes calculated at locations inside He spheroid\") for", "|'.format(Hekey), end = \" \") print(\"\\n\") for axiskey in FFT_amp_table[freq][Hekey].keys(): print(\" \"+axiskey+\" |\",end=\"\")", "1000 dark_plots = True n_sig = 8 n_print_sigfigs = 3 if dark_plots: dark='darkbg/'", "# %% # %% [markdown] # # Load data # %% Exp2_data_filename =", "plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities to field at $^3$He location \\n ({:0.1f}", "import deepcopy from scipy.stats import chi2 # %% SaveFitFigs = True # SaveFitData", "{} for axiskey in field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey] = np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]", "tolerance':10, 'optimize DC shifts':True, 'optimize bar location':True, 'significant 
figures':n_sig } Exp2_plot_settings = {", "freq domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]).max() # %% print(\"FFT Amplitudes calculated at", "B_max_table[Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]).max() # %% print(\"FFT Amplitudes calculated at locations inside", "0 max_at_nu = 0 max_at_11nu = 0 for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys():", "time domain'][Hekey][axiskey] = np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey] = 4*fft.rfft(Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]*hann(numsamples),norm = \"forward\")", "import json from scipy import fft from scipy.signal.windows import hann from copy import", "%% [markdown] # # Calculate field at sample location # # %% Sample_settings", "= 1-i_num/3) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu", "# %% nu = 5 # %% with open('../Params/Exp2_dimensions_and_locations.json', 'r') as fp: params_dims_locs", "# %% # with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as filehandle: # Exp2_Opt_Params_3_sources = pickle.load(filehandle) # Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist()", "for Hekey in FFT_amp_table[freq].keys(): print(\" {:0.1f} |\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\") print(\"-------------------\") # %% for Hekey", "i_num = 0 B_max = 0 max_at_nu = 0 max_at_11nu = 0 for", "max_at_nu = max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu = max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy = (nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width =", "settings':Exp2_plot_settings, 'save settings':Exp2_save_settings } Exp2_Opt_Params = 
Exp2_Opt_Params_3_sources_zeroDC field_at_sample = FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) # %% [markdown]", "{} component (pT)'.format(axiskey)) plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}_component.png'.format(axiskey),bbox_inches = 'tight',dpi", "sigfigs':n_print_sigfigs } Exp2_save_settings ={ 'save fit data':False, # 'fit data filename':fitdatafilename } Exp2_all_settings", "# %% indnu = (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu = (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table = {} # FFT_amp_table['frequency']", "import matplotlib.pyplot as plt import matplotlib as mpl import time import os import", "numpy as np # from scipy.optimize import minimize from datetime import datetime now", "%% params_dims_locs # %% rtr_dims = params_dims_locs['rotor_dims'] for key in rtr_dims: rtr_dims[key] =", ",alpha = 1-i_num/3) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey])", "Field_At_He_location_for_FFT = {} Field_At_He_location_for_FFT['time'] = Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B time domain'] = {} Field_At_He_location_for_FFT['B freq", "dark_plots: dark='darkbg/' q = mpl.rc_params_from_file('matplotlibrc_dark') else: dark = 'whitebg/' mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2 = '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/'", "fft from scipy.signal.windows import hann from copy import deepcopy from scipy.stats import chi2", "= numsamples,d=binsize) # %% for Hekey in field_at_sample.keys(): Field_At_He_location_for_FFT['B time domain'][Hekey] = {}", "end = \" \") for Hekey in FFT_amp_table[freq].keys(): print('Sensor {} |'.format(Hekey), end =", "'location coordinate system':'polar', 'moment coordinate system':'polar', # 'chi tolerance':10, 'optimize DC shifts':True, 'optimize", "stored the pickle in the same folder if 
SaveFitFigs: if not os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2)", "B_calc_script import signif # %% # %autoreload 2 # %% # %% [markdown]", "'optimize DC shifts':True, 'optimize bar location':True, 'significant figures':n_sig } Exp2_plot_settings = { 'plot':True,", "print(\"\\n\") for axiskey in FFT_amp_table[freq][Hekey].keys(): print(\" \"+axiskey+\" |\",end=\"\") for Hekey in FFT_amp_table[freq].keys(): print(\"", "Roman'} plt.rc('font', **font) # mpl.rcParams.update({'font.family':'serif'}) # %% # %load_ext autoreload # %% from", "in He_sample_locations.keys(): string_to_parse = params_dims_locs['3He locations'][Hekey]['location'] He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) # %% # with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as", "= 8 n_print_sigfigs = 3 if dark_plots: dark='darkbg/' q = mpl.rc_params_from_file('matplotlibrc_dark') else: dark", "= dpiN) # %% for axiskey in Field_At_He_location_for_FFT['B freq domain']['1'].keys(): plt.figure() i_num =", "= (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu = (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table = {} # FFT_amp_table['frequency'] .append(nu) FFT_amp_table[nu] =", "n_reps*Field_At_He_location_for_FFT['time'].size binsize = Field_At_He_location_for_FFT['time'][2] - Field_At_He_location_for_FFT['time'][1] Field_At_He_location_for_FFT['freq']= fft.rfftfreq(n = numsamples,d=binsize) # %% for", "for axiskey in field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey] = np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey] =", "nowtext = now().strftime(\"%Y%m%d%H%M\") nowtext = '_15font' fitplotfilename = SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' # fitdatafilename = 
SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk'", "= {} Field_At_He_location_for_FFT['time'] = Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B time domain'] = {} Field_At_He_location_for_FFT['B freq domain']", "domain'][Hekey][axiskey] = 4*fft.rfft(Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]*hann(numsamples),norm = \"forward\") # %% indnu = (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu", "for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey] =", "0.7, #'legend.handleheight': 0.4, #'legend.handletextpad': 0.2, # 'legend.labelspacing': 0.45, # 'text.usetex': True, 'font.size':13, })", "calculated at locations inside He spheroid\") for freq in FFT_amp_table.keys(): print('{} Hz'.format(freq), end", "%autoreload 2 # %% # %% [markdown] # # Load data # %%", "= \"\\n\") print(\"-------------------\") print(\"Axis |\", end = \" \") for Hekey in FFT_amp_table[freq].keys():", "params_dims_locs # %% rtr_dims = params_dims_locs['rotor_dims'] for key in rtr_dims: rtr_dims[key] = signif(rtr_dims[key],n_sig)", "end = \"\\n\") print(\"-------------------\") print(\"Axis |\", end = \" \") for Hekey in", "print('{} Hz'.format(freq), end = \"\\n\") print(\"-------------------\") print(\"Axis |\", end = \" \") for", "for axiskey in FFT_amp_table[freq][Hekey].keys(): print(\" \"+axiskey+\" |\",end=\"\") for Hekey in FFT_amp_table[freq].keys(): print(\" {:0.1f}", "True dpiN = 1000 dark_plots = True n_sig = 8 n_print_sigfigs = 3", "system':'polar', 'moment coordinate system':'polar', # 'chi tolerance':10, 'optimize DC shifts':True, 'optimize bar location':True,", "FFT_amp_table[freq][Hekey].keys(): print(\" \"+axiskey+\" |\",end=\"\") for Hekey in FFT_amp_table[freq].keys(): print(\" {:0.1f} 
|\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\") print(\"-------------------\")", "%% # %% [markdown] # # Load data # %% Exp2_data_filename = LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk'", "locations'][Hekey]['location'] He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) # %% # with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as filehandle: # Exp2_Opt_Params_3_sources = pickle.load(filehandle)", "= Exp2_Opt_Params_3_sources[:-5] Exp2_Opt_Params_3_sources_zeroDC = Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0] + [Exp2_Opt_Params_3_sources[-1]] # %% # with open('../Params/Params_4sources.pk','rb')", "# %% Data_At_Sample = { 'theta':np.concatenate([Exp2_data_cut['theta avg'][nu] # ,360+Exp2_data_cut['theta avg'][nu] ]), #theta positive", "= 0 for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label", "'chi tolerance':10, 'optimize DC shifts':True, 'optimize bar location':True, 'significant figures':n_sig } Exp2_plot_settings =", "= pickle.load(file_obj) # %% [markdown] # ## Load parameters ## # %% nu", ",np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = axiskey ,alpha = 1-i_num/3) i_num +=1 B_max =", "axiskey ,alpha = 1-i_num/3) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu,", "scipy.io import loadmat import numpy as np # from scipy.optimize import minimize from", "FFT_amp_table[nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B", "= '_15font' fitplotfilename 
= SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' # fitdatafilename = SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings = { 'print':True,", "domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]).max() # %% print(\"FFT Amplitudes calculated at locations", "for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = axiskey", "} # %% # nowtext = now().strftime(\"%Y%m%d%H%M\") nowtext = '_15font' fitplotfilename = SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png'", "'DC shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] # 'deltaB':1 #picoTesla } # %% Data_At_Sample = { 'theta':np.concatenate([Exp2_data_cut['theta avg'][nu]", "locations inside He spheroid\") for freq in FFT_amp_table.keys(): print('{} Hz'.format(freq), end = \"\\n\")", "location':0, # 'DC shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] # 'deltaB':1 #picoTesla } # %% Data_At_Sample = {", "to field at $^3$He location \\n ({:0.1f} s measurement duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field, {}", "for clockwise 'B':{ '3He 1':{ 'Z':np.array([]), 'Y':np.array([]), 'X':np.array([]) }, } } # %%", "[markdown] # ## Load parameters ## # %% nu = 5 # %%", "'print sigfigs':n_print_sigfigs } Exp2_save_settings ={ 'save fit data':False, # 'fit data filename':fitdatafilename }", "os import pickle import json from scipy import fft from scipy.signal.windows import hann", "%load_ext autoreload # %% from B_calc_script import FieldAtAnyLocation from B_calc_script import signif #", "%% [markdown] # ## Load parameters ## # %% nu = 5 #", "= mpl.rc_params_from_file('matplotlibrc_dark') else: dark = 'whitebg/' 
mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2 = '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' # SaveDataDir_Exp2 =", "# Exp2_Opt_Params_4_sources = pickle.load(filehandle) # %% [markdown] # # Calculate field at sample", "# from scipy.optimize import minimize from datetime import datetime now = datetime.now import", "settings':Exp2_save_settings } Exp2_Opt_Params = Exp2_Opt_Params_3_sources_zeroDC field_at_sample = FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) # %% [markdown] # #", "field_at_sample = FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) # %% [markdown] # # FFT Field at Sample Location", "print(\" \"+axiskey+\" |\",end=\"\") for Hekey in FFT_amp_table[freq].keys(): print(\" {:0.1f} |\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\") print(\"-------------------\") #", "Field_At_He_location_for_FFT['B freq domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = Hekey ,alpha = 1-i_num/4)", "Load data # %% Exp2_data_filename = LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' # %% with open(Exp2_data_filename,'rb') as file_obj:", "s measurement duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field, {} component (pT)'.format(axiskey)) plt.grid() plt.legend(loc = 'upper left')", "Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0] + [Exp2_Opt_Params_3_sources[-1]] # %% # with open('../Params/Params_4sources.pk','rb') as filehandle: # Exp2_Opt_Params_4_sources", "datetime import datetime now = datetime.now import matplotlib.pyplot as plt import matplotlib as", "as filehandle: Exp2_Opt_Params_3_sources = json.loads(filehandle.read()) # %% Exp2_Opt_Params_3_sources # %% Exp2_Opt_Params_3_sources_noDC_noBar = Exp2_Opt_Params_3_sources[:-5]", "= 1000 dark_plots = True n_sig = 8 n_print_sigfigs = 3 if dark_plots:", "# mpl.rcParams.update({'font.family':'serif'}) # %% # %load_ext 
autoreload # %% from B_calc_script import FieldAtAnyLocation", "= 50 # %% Field_At_He_location_for_FFT = {} Field_At_He_location_for_FFT['time'] = Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B time domain']", "= 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}_component.png'.format(axiskey),bbox_inches = 'tight',dpi = dpiN) # %% #", "folder if SaveFitFigs: if not os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2) # if SaveFitData: # if not", "= (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width = 1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities to", "SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings = { 'print':True, 'number of sources':3, 'location dimensions':3, 'moment dimensions':3, 'location", "plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = Hekey ,alpha = 1-i_num/4) i_num +=1 B_max", "= n_reps*Field_At_He_location_for_FFT['time'].size binsize = Field_At_He_location_for_FFT['time'][2] - Field_At_He_location_for_FFT['time'][1] Field_At_He_location_for_FFT['freq']= fft.rfftfreq(n = numsamples,d=binsize) # %%", "Hekey in field_at_sample.keys(): Field_At_He_location_for_FFT['B time domain'][Hekey] = {} Field_At_He_location_for_FFT['B freq domain'][Hekey] = {}", "]), #theta positive for ac, negative for clockwise 'B':{ '3He 1':{ 'Z':np.array([]), 'Y':np.array([]),", "fitplotfilename = SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' # fitdatafilename = SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings = { 'print':True, 'number of", "location # # %% Sample_settings = { 'rotor dimensions':rtr_dims, 'sensor locations':He_sample_locations, # 'bar", "$^3$He location \\n 
({:0.1f} s measurement duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field, {} component (pT)'.format(axiskey)) plt.grid()", "pickle in the same folder if SaveFitFigs: if not os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2) # if", "+=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu = max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey])", "= { # 'weight' : 'normal', 'size' : 15, 'family': 'Times New Roman'}", "bar location':True, 'significant figures':n_sig } Exp2_plot_settings = { 'plot':True, # 'memo':'{} Hz (AV", "1.5,headwidth=6, shrink=0.),\\ horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy = (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width = 1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)')", "dpiN = 1000 dark_plots = True n_sig = 8 n_print_sigfigs = 3 if", "from scipy import fft from scipy.signal.windows import hann from copy import deepcopy from", "# 'memo':'{} Hz (AV X&Y inverted)'.format(nu), # 'memo':'{} Hz'.format(nu), 'doubleplot':False, 'saveplot':SaveFitFigs, 'dpi':dpiN, 'figname':fitplotfilename,", "# %autoreload 2 # %% # %% [markdown] # # Load data #", "'Z':np.array([]), 'Y':np.array([]), 'X':np.array([]) }, } } # %% # nowtext = now().strftime(\"%Y%m%d%H%M\") nowtext", "# 'legend.labelspacing': 0.45, # 'text.usetex': True, 'font.size':13, }) else: # %matplotlib inline #", "# # Load data # %% Exp2_data_filename = LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' # %% with open(Exp2_data_filename,'rb')", "FFT_amp_table[nu] = {} FFT_amp_table[11*nu] = {} B_max_table = {} for Hekey in Field_At_He_location_for_FFT['B", "= \" \") print(\"\\n\") for axiskey in FFT_amp_table[freq][Hekey].keys(): print(\" \"+axiskey+\" |\",end=\"\") for Hekey", "locations':He_sample_locations, # 'bar location':0, # 'DC 
shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] # 'deltaB':1 #picoTesla } # %%", "fitdatafilename = SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings = { 'print':True, 'number of sources':3, 'location dimensions':3, 'moment", "print(\"FFT Amplitudes calculated at locations inside He spheroid\") for freq in FFT_amp_table.keys(): print('{}", "= max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu = max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy =", "open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding = 'utf-8') as filehandle: Exp2_Opt_Params_3_sources = json.loads(filehandle.read()) # %% Exp2_Opt_Params_3_sources # %%", "scipy import fft from scipy.signal.windows import hann from copy import deepcopy from scipy.stats", "B_max_table[Hekey] = {} for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq", "import numpy as np # from scipy.optimize import minimize from datetime import datetime", "} Exp2_all_settings = { 'experiment settings':Sample_settings, 'data':Data_At_Sample, 'optimization settings':Exp2_optimization_settings, 'plot settings':Exp2_plot_settings, 'save settings':Exp2_save_settings", "domain'][Hekey] = {} for axiskey in field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey] = np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B", "# 'deltaB':1 #picoTesla } # %% Data_At_Sample = { 'theta':np.concatenate([Exp2_data_cut['theta avg'][nu] # ,360+Exp2_data_cut['theta", "domain'].keys(): plt.figure() i_num = 0 B_max = 0 max_at_nu = 0 max_at_11nu =", "FFT_amp_table[freq].keys(): print(\" {:0.1f} |\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\") print(\"-------------------\") # %% for 
Hekey in Field_At_He_location_for_FFT['B freq", "= 0 B_max = 0 max_at_nu = 0 max_at_11nu = 0 for Hekey", "0 max_at_nu = 0 max_at_11nu = 0 for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys():", "measurement duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field, {} component (pT)'.format(axiskey)) plt.grid() plt.legend(loc = 'upper left') if", "hann from copy import deepcopy from scipy.stats import chi2 # %% SaveFitFigs =", "'significant figures':n_sig } Exp2_plot_settings = { 'plot':True, # 'memo':'{} Hz (AV X&Y inverted)'.format(nu),", "%% rtr_dims = params_dims_locs['rotor_dims'] for key in rtr_dims: rtr_dims[key] = signif(rtr_dims[key],n_sig) # %%", "\" \") print(\"\\n\") for axiskey in FFT_amp_table[freq][Hekey].keys(): print(\" \"+axiskey+\" |\",end=\"\") for Hekey in", "plt.annotate('$11f_\\mathrm{rot}$',xy = (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width = 1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities", "freq domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]).max() #", "%% for axiskey in Field_At_He_location_for_FFT['B freq domain']['1'].keys(): plt.figure() i_num = 0 B_max =", "%% n_reps = 50 # %% Field_At_He_location_for_FFT = {} Field_At_He_location_for_FFT['time'] = Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B", "Field_At_He_location_for_FFT['time'][2] - Field_At_He_location_for_FFT['time'][1] Field_At_He_location_for_FFT['freq']= fft.rfftfreq(n = numsamples,d=binsize) # %% for Hekey in field_at_sample.keys():", ": 'normal', 'size' : 15, 'family': 'Times New Roman'} plt.rc('font', **font) # 
mpl.rcParams.update({'font.family':'serif'})", "freq domain']['1'].keys(): plt.figure() i_num = 0 B_max = 0 max_at_nu = 0 max_at_11nu", "0.25, # 'legend.columnspacing': 0.6, # 'legend.handlelength': 0.7, #'legend.handleheight': 0.4, #'legend.handletextpad': 0.2, # 'legend.labelspacing':", "%matplotlib inline mpl.rcParams.update({ #'legend.borderpad': 0.3, #'legend.borderaxespad': 0.25, # 'legend.columnspacing': 0.6, # 'legend.handlelength': 0.7,", "'print':True, 'number of sources':3, 'location dimensions':3, 'moment dimensions':3, 'location coordinate system':'polar', 'moment coordinate", "os.path.exists(SaveDataDir_Exp2): # os.makedirs(SaveDataDir_Exp2) # %% if dark_plots: mpl.rcParams.update(q) # %matplotlib inline mpl.rcParams.update({ #'legend.borderpad':", "Field_At_He_location_for_FFT['B freq domain'].keys(): plt.figure() i_num = 0 B_max = 0 max_at_nu = 0", "mpl.rcParams.update(q) # %matplotlib inline mpl.rcParams.update({ #'legend.borderpad': 0.3, #'legend.borderaxespad': 0.25, # 'legend.columnspacing': 0.6, #", "Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = axiskey ,alpha = 1-i_num/3)", "as filehandle: # Exp2_Opt_Params_4_sources = pickle.load(filehandle) # %% [markdown] # # Calculate field", "= np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][ind11nu]).max() B_max_table[Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq", "Load parameters ## # %% nu = 5 # %% with open('../Params/Exp2_dimensions_and_locations.json', 'r')", "the same folder if SaveFitFigs: if not os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2) # if SaveFitData: #", "SavePlotDir_Exp2 = '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' # SaveDataDir_Exp2 = 
'../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2 = '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 # The other", "freq domain'][Hekey][axiskey] = 4*fft.rfft(Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]*hann(numsamples),norm = \"forward\") # %% indnu = (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu)", "{} # FFT_amp_table['frequency'] .append(nu) FFT_amp_table[nu] = {} FFT_amp_table[11*nu] = {} B_max_table = {}", "'size' : 15, 'family': 'Times New Roman'} plt.rc('font', **font) # mpl.rcParams.update({'font.family':'serif'}) # %%", "import minimize from datetime import datetime now = datetime.now import matplotlib.pyplot as plt", "np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]).max() # %% print(\"FFT Amplitudes calculated at locations inside He spheroid\")", "for axiskey in Field_At_He_location_for_FFT['B freq domain']['1'].keys(): plt.figure() i_num = 0 B_max = 0", "from scipy.io import loadmat import numpy as np # from scipy.optimize import minimize", "inline # mpl.rcParams.update(mpl.rcParamsDefault) font = { # 'weight' : 'normal', 'size' : 15,", "inverted)'.format(nu), # 'memo':'{} Hz'.format(nu), 'doubleplot':False, 'saveplot':SaveFitFigs, 'dpi':dpiN, 'figname':fitplotfilename, 'print sigfigs':n_print_sigfigs } Exp2_save_settings ={", "B_max = 0 max_at_nu = 0 max_at_11nu = 0 for Hekey in Field_At_He_location_for_FFT['B", "'sensor locations':He_sample_locations, # 'bar location':0, # 'DC shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] # 'deltaB':1 #picoTesla } #", "{} for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey]", "copy import deepcopy from scipy.stats import chi2 # %% SaveFitFigs = True #", "field at $^3$He location {}\\n ({:0.1f} s measurement 
duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field (pT)') plt.grid()", "} } # %% # nowtext = now().strftime(\"%Y%m%d%H%M\") nowtext = '_15font' fitplotfilename =", "dark = 'whitebg/' mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2 = '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' # SaveDataDir_Exp2 = '../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2 =", "# %% [markdown] # ## Load parameters ## # %% nu = 5", "left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches = 'tight',dpi = dpiN) # %% for axiskey in", "mpl.rcParams.update({'font.family':'serif'}) # %% # %load_ext autoreload # %% from B_calc_script import FieldAtAnyLocation from", "freq domain'].keys(): FFT_amp_table[nu][Hekey] = {} FFT_amp_table[11*nu][Hekey] = {} B_max_table[Hekey] = {} for axiskey", "%% from scipy.io import loadmat import numpy as np # from scipy.optimize import", "8 n_print_sigfigs = 3 if dark_plots: dark='darkbg/' q = mpl.rc_params_from_file('matplotlibrc_dark') else: dark =", "# with open('../Params/Params_4sources.pk','rb') as filehandle: # Exp2_Opt_Params_4_sources = pickle.load(filehandle) # %% [markdown] #", "max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu = max(max_at_11nu, FFT_amp_table[11*nu][Hekey][axiskey]) plt.annotate('$f_\\mathrm{rot}$',xy = (nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width = 1.5,headwidth=6, shrink=0.),\\", "shrink=0.),\\ horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy = (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width = 1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution", "'family': 'Times New Roman'} plt.rc('font', **font) # mpl.rcParams.update({'font.family':'serif'}) # %% # %load_ext autoreload", "json.load(fp) # %% params_dims_locs 
# %% rtr_dims = params_dims_locs['rotor_dims'] for key in rtr_dims:", "Amplitudes calculated at locations inside He spheroid\") for freq in FFT_amp_table.keys(): print('{} Hz'.format(freq),", "= \"forward\") # %% indnu = (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu = (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table = {}", "SaveFitFigs = True # SaveFitData = True dpiN = 1000 dark_plots = True", "(Hz)') plt.title('Contribution of impurities to field at $^3$He location \\n ({:0.1f} s measurement", "from scipy.stats import chi2 # %% SaveFitFigs = True # SaveFitData = True", "clockwise 'B':{ '3He 1':{ 'Z':np.array([]), 'Y':np.array([]), 'X':np.array([]) }, } } # %% #", "mpl.rcParams.update(mpl.rcParamsDefault) font = { # 'weight' : 'normal', 'size' : 15, 'family': 'Times", "= 'tight',dpi = dpiN) # %% for axiskey in Field_At_He_location_for_FFT['B freq domain']['1'].keys(): plt.figure()", "SaveDataDir_Exp2 = '../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2 = '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 # The other notebook stored the pickle", "{} FFT_amp_table[11*nu] = {} B_max_table = {} for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys():", "\"\\n\") print(\"-------------------\") print(\"Axis |\", end = \" \") for Hekey in FFT_amp_table[freq].keys(): print('Sensor", "= (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table = {} # FFT_amp_table['frequency'] .append(nu) FFT_amp_table[nu] = {} FFT_amp_table[11*nu] =", "FFT Field at Sample Location # %% n_reps = 50 # %% Field_At_He_location_for_FFT", "json from scipy import fft from scipy.signal.windows import hann from copy import deepcopy", "%% Exp2_Opt_Params_3_sources # %% Exp2_Opt_Params_3_sources_noDC_noBar = Exp2_Opt_Params_3_sources[:-5] Exp2_Opt_Params_3_sources_zeroDC = Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0] + [Exp2_Opt_Params_3_sources[-1]]", "'normal', 'size' : 15, 'family': 'Times New Roman'} plt.rc('font', 
**font) # mpl.rcParams.update({'font.family':'serif'}) #", "= now().strftime(\"%Y%m%d%H%M\") nowtext = '_15font' fitplotfilename = SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' # fitdatafilename = SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings", "at Sample Location # %% n_reps = 50 # %% Field_At_He_location_for_FFT = {}", "# 'bar location':0, # 'DC shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] # 'deltaB':1 #picoTesla } # %% Data_At_Sample", "Field_At_He_location_for_FFT['B time domain'] = {} Field_At_He_location_for_FFT['B freq domain'] = {} numsamples = n_reps*Field_At_He_location_for_FFT['time'].size", "%% for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.figure() i_num = 0 B_max =", "now().strftime(\"%Y%m%d%H%M\") nowtext = '_15font' fitplotfilename = SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' # fitdatafilename = SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings =", "time domain'] = {} Field_At_He_location_for_FFT['B freq domain'] = {} numsamples = n_reps*Field_At_He_location_for_FFT['time'].size binsize", "# 'memo':'{} Hz'.format(nu), 'doubleplot':False, 'saveplot':SaveFitFigs, 'dpi':dpiN, 'figname':fitplotfilename, 'print sigfigs':n_print_sigfigs } Exp2_save_settings ={ 'save", "data # %% Exp2_data_filename = LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' # %% with open(Exp2_data_filename,'rb') as file_obj: Exp2_data_cut", "= pickle.load(filehandle) # %% [markdown] # # Calculate field at sample location #", "shifts':True, 'optimize bar location':True, 'significant figures':n_sig } Exp2_plot_settings = { 'plot':True, # 'memo':'{}", "%% # with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as filehandle: # Exp2_Opt_Params_3_sources = pickle.load(filehandle) # Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with", "# 'legend.columnspacing': 0.6, 
# 'legend.handlelength': 0.7, #'legend.handleheight': 0.4, #'legend.handletextpad': 0.2, # 'legend.labelspacing': 0.45,", "Exp2_save_settings ={ 'save fit data':False, # 'fit data filename':fitdatafilename } Exp2_all_settings = {", "1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities to field at $^3$He location", "of sources':3, 'location dimensions':3, 'moment dimensions':3, 'location coordinate system':'polar', 'moment coordinate system':'polar', #", "horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities to field at $^3$He location \\n", "# %% Exp2_Opt_Params_3_sources # %% Exp2_Opt_Params_3_sources_noDC_noBar = Exp2_Opt_Params_3_sources[:-5] Exp2_Opt_Params_3_sources_zeroDC = Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0] +", "import signif # %% # %autoreload 2 # %% # %% [markdown] #", "in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq", "the pickle in the same folder if SaveFitFigs: if not os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2) #", "15, 'family': 'Times New Roman'} plt.rc('font', **font) # mpl.rcParams.update({'font.family':'serif'}) # %% # %load_ext", "\"+axiskey+\" |\",end=\"\") for Hekey in FFT_amp_table[freq].keys(): print(\" {:0.1f} |\".format(FFT_amp_table[freq][Hekey][axiskey]),end=\"\") print(\"\\n\") print(\"-------------------\") # %%", "0.45, # 'text.usetex': True, 'font.size':13, }) else: # %matplotlib inline # mpl.rcParams.update(mpl.rcParamsDefault) font", "= 1-i_num/4) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu", "- Field_At_He_location_for_FFT['time'][1] 
Field_At_He_location_for_FFT['freq']= fft.rfftfreq(n = numsamples,d=binsize) # %% for Hekey in field_at_sample.keys(): Field_At_He_location_for_FFT['B", "= 3 if dark_plots: dark='darkbg/' q = mpl.rc_params_from_file('matplotlibrc_dark') else: dark = 'whitebg/' mpl.rcParams.update(mpl.rcParamsDefault)", "field, {} component (pT)'.format(axiskey)) plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}_component.png'.format(axiskey),bbox_inches =", "%% [markdown] # # Load data # %% Exp2_data_filename = LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' # %%", "FFT_amp_table[11*nu] = {} B_max_table = {} for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): FFT_amp_table[nu][Hekey]", "({:0.1f} s measurement duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field, {} component (pT)'.format(axiskey)) plt.grid() plt.legend(loc = 'upper", "duration)'.format(Hekey,n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field (pT)') plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches =", "# %% # %autoreload 2 # %% # %% [markdown] # # Load", "[markdown] # # Calculate field at sample location # # %% Sample_settings =", "B_calc_script import FieldAtAnyLocation from B_calc_script import signif # %% # %autoreload 2 #", "FFT_amp_table['frequency'] .append(nu) FFT_amp_table[nu] = {} FFT_amp_table[11*nu] = {} B_max_table = {} for Hekey", "SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}.png'.format(Hekey),bbox_inches = 'tight',dpi = dpiN) # %% for axiskey in Field_At_He_location_for_FFT['B freq", "dimensions':rtr_dims, 'sensor locations':He_sample_locations, # 'bar location':0, # 'DC shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] # 'deltaB':1 #picoTesla }", "1-i_num/3) i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, 
FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu =", "= 0 max_at_nu = 0 max_at_11nu = 0 for Hekey in Field_At_He_location_for_FFT['B freq", "impurities to field at $^3$He location \\n ({:0.1f} s measurement duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field,", "= LoadDataDir_Exp2+'Exp2_cut_averaged_data.pk' # %% with open(Exp2_data_filename,'rb') as file_obj: Exp2_data_cut = pickle.load(file_obj) # %%", "(Hz)') plt.title('Contribution of impurities to field at $^3$He location {}\\n ({:0.1f} s measurement", "# %% params_dims_locs # %% rtr_dims = params_dims_locs['rotor_dims'] for key in rtr_dims: rtr_dims[key]", "domain'][Hekey][axiskey] = np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey] = 4*fft.rfft(Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey]*hann(numsamples),norm = \"forward\") #", "# %% with open('../Params/Exp2_dimensions_and_locations.json', 'r') as fp: params_dims_locs = json.load(fp) # %% params_dims_locs", "0 B_max = 0 max_at_nu = 0 max_at_11nu = 0 for axiskey in", "'text.usetex': True, 'font.size':13, }) else: # %matplotlib inline # mpl.rcParams.update(mpl.rcParamsDefault) font = {", "# SaveFitData = True dpiN = 1000 dark_plots = True n_sig = 8", "plt.annotate('$f_\\mathrm{rot}$',xy = (nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width = 1.5,headwidth=6, shrink=0.),\\ horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy = (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width =", "from scipy.signal.windows import hann from copy import deepcopy from scipy.stats import chi2 #", "= pickle.load(filehandle) # Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding = 'utf-8') as filehandle: Exp2_Opt_Params_3_sources = json.loads(filehandle.read())", "= 
Field_At_He_location_for_FFT['time'][2] - Field_At_He_location_for_FFT['time'][1] Field_At_He_location_for_FFT['freq']= fft.rfftfreq(n = numsamples,d=binsize) # %% for Hekey in", "domain'][Hekey][axiskey]*hann(numsamples),norm = \"forward\") # %% indnu = (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu = (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table =", "{} B_max_table = {} for Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): FFT_amp_table[nu][Hekey] = {}", "'doubleplot':False, 'saveplot':SaveFitFigs, 'dpi':dpiN, 'figname':fitplotfilename, 'print sigfigs':n_print_sigfigs } Exp2_save_settings ={ 'save fit data':False, #", "settings':Sample_settings, 'data':Data_At_Sample, 'optimization settings':Exp2_optimization_settings, 'plot settings':Exp2_plot_settings, 'save settings':Exp2_save_settings } Exp2_Opt_Params = Exp2_Opt_Params_3_sources_zeroDC field_at_sample", "(nu,max_at_nu),xytext=(nu,B_max*1.4),\\ arrowprops=dict(color='red',alpha=0.5,width = 1.5,headwidth=6, shrink=0.),\\ horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy = (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width = 1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center')", "pickle.load(filehandle) # Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding = 'utf-8') as filehandle: Exp2_Opt_Params_3_sources = json.loads(filehandle.read()) #", "in FFT_amp_table[freq].keys(): print('Sensor {} |'.format(Hekey), end = \" \") print(\"\\n\") for axiskey in", "= True dpiN = 1000 dark_plots = True n_sig = 8 n_print_sigfigs =", "Hz'.format(freq), end = \"\\n\") print(\"-------------------\") print(\"Axis |\", end = \" \") for Hekey", "duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field, {} component (pT)'.format(axiskey)) plt.grid() plt.legend(loc = 'upper 
left') if SaveFitFigs:", "'X':np.array([]) }, } } # %% # nowtext = now().strftime(\"%Y%m%d%H%M\") nowtext = '_15font'", "axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = axiskey ,alpha", "= { 'theta':np.concatenate([Exp2_data_cut['theta avg'][nu] # ,360+Exp2_data_cut['theta avg'][nu] ]), #theta positive for ac, negative", "dimensions':3, 'moment dimensions':3, 'location coordinate system':'polar', 'moment coordinate system':'polar', # 'chi tolerance':10, 'optimize", "plt.ylim(0,B_max*1.5) plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities to field at $^3$He location {}\\n ({:0.1f}", "plt.xlabel('Frequency (Hz)') plt.title('Contribution of impurities to field at $^3$He location {}\\n ({:0.1f} s", "same folder if SaveFitFigs: if not os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2) # if SaveFitData: # if", "'theta':np.concatenate([Exp2_data_cut['theta avg'][nu] # ,360+Exp2_data_cut['theta avg'][nu] ]), #theta positive for ac, negative for clockwise", "# %% Field_At_He_location_for_FFT = {} Field_At_He_location_for_FFT['time'] = Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B time domain'] = {}", "#'legend.borderaxespad': 0.25, # 'legend.columnspacing': 0.6, # 'legend.handlelength': 0.7, #'legend.handleheight': 0.4, #'legend.handletextpad': 0.2, #", "with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as filehandle: # Exp2_Opt_Params_3_sources = pickle.load(filehandle) # Exp2_Opt_Params_3_sources=Exp2_Opt_Params_3_sources.tolist() with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.json','r',encoding =", "domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = Hekey ,alpha = 1-i_num/4) i_num +=1", "Field_At_He_location_for_FFT['freq']= 
fft.rfftfreq(n = numsamples,d=binsize) # %% for Hekey in field_at_sample.keys(): Field_At_He_location_for_FFT['B time domain'][Hekey]", "'../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2 = '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2 # The other notebook stored the pickle in the", "Hekey in Field_At_He_location_for_FFT['B freq domain'].keys(): FFT_amp_table[nu][Hekey] = {} FFT_amp_table[11*nu][Hekey] = {} B_max_table[Hekey] =", "= Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0] + [Exp2_Opt_Params_3_sources[-1]] # %% # with open('../Params/Params_4sources.pk','rb') as filehandle: #", "%% nu = 5 # %% with open('../Params/Exp2_dimensions_and_locations.json', 'r') as fp: params_dims_locs =", "= FieldAtAnyLocation(Exp2_Opt_Params,Exp2_all_settings) # %% [markdown] # # FFT Field at Sample Location #", "time import os import pickle import json from scipy import fft from scipy.signal.windows", "%% print(\"FFT Amplitudes calculated at locations inside He spheroid\") for freq in FFT_amp_table.keys():", "freq domain'][Hekey] = {} for axiskey in field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey] = np.tile(field_at_sample[Hekey][axiskey],n_reps)", "string_to_parse = params_dims_locs['3He locations'][Hekey]['location'] He_sample_locations[Hekey]['location']=eval(string_to_parse.replace('rotor_dims','rtr_dims').replace('D_wheel_sample','params_dims_locs[\\'D_wheel_sample\\']')) # %% # with open('../Params/'+'FittedDipoles_{}Hz_'.format(nu)+'3sources.pk','rb') as filehandle: #", "= {} FFT_amp_table[11*nu][Hekey] = {} B_max_table[Hekey] = {} for axiskey in Field_At_He_location_for_FFT['B freq", "filehandle: Exp2_Opt_Params_3_sources = json.loads(filehandle.read()) # %% Exp2_Opt_Params_3_sources # %% Exp2_Opt_Params_3_sources_noDC_noBar = Exp2_Opt_Params_3_sources[:-5] Exp2_Opt_Params_3_sources_zeroDC", "Field at Sample Location # %% n_reps = 50 # %% Field_At_He_location_for_FFT =", "coordinate system':'polar', 'moment 
coordinate system':'polar', # 'chi tolerance':10, 'optimize DC shifts':True, 'optimize bar", "True # SaveFitData = True dpiN = 1000 dark_plots = True n_sig =", "# if SaveFitData: # if not os.path.exists(SaveDataDir_Exp2): # os.makedirs(SaveDataDir_Exp2) # %% if dark_plots:", "nowtext = '_15font' fitplotfilename = SavePlotDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.png' # fitdatafilename = SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings = {", "{ 'experiment settings':Sample_settings, 'data':Data_At_Sample, 'optimization settings':Exp2_optimization_settings, 'plot settings':Exp2_plot_settings, 'save settings':Exp2_save_settings } Exp2_Opt_Params =", "= {} B_max_table[Hekey] = {} for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey] =", "domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = axiskey ,alpha = 1-i_num/3) i_num +=1", "= True # SaveFitData = True dpiN = 1000 dark_plots = True n_sig", ".append(nu) FFT_amp_table[nu] = {} FFT_amp_table[11*nu] = {} B_max_table = {} for Hekey in", "plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}_component.png'.format(axiskey),bbox_inches = 'tight',dpi = dpiN) # %%", "key in rtr_dims: rtr_dims[key] = signif(rtr_dims[key],n_sig) # %% He_sample_locations =deepcopy(params_dims_locs['3He locations']) for Hekey", "i_num +=1 B_max = max(B_max, B_max_table[Hekey][axiskey]) max_at_nu = max(max_at_nu, FFT_amp_table[nu][Hekey][axiskey]) max_at_11nu = max(max_at_11nu,", "ac, negative for clockwise 'B':{ '3He 1':{ 'Z':np.array([]), 'Y':np.array([]), 'X':np.array([]) }, } }", "FFT_amp_table[freq].keys(): print('Sensor {} |'.format(Hekey), end = \" \") print(\"\\n\") for axiskey in FFT_amp_table[freq][Hekey].keys():", "0.3, #'legend.borderaxespad': 0.25, # 'legend.columnspacing': 
0.6, # 'legend.handlelength': 0.7, #'legend.handleheight': 0.4, #'legend.handletextpad': 0.2,", "Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][ind11nu]).max()", "= 1.5,headwidth=6, shrink=0.),\\ horizontalalignment='center') plt.annotate('$11f_\\mathrm{rot}$',xy = (11*nu,max_at_11nu),xytext=(11*nu,B_max*1.4),\\ arrowprops=dict(color='fuchsia',alpha=0.5,width = 1.5,headwidth=6,shrink=0.),\\ horizontalalignment='center') plt.ylim(0,B_max*1.5) plt.xlabel('Frequency", "Calculate field at sample location # # %% Sample_settings = { 'rotor dimensions':rtr_dims,", "os.path.exists(SavePlotDir_Exp2): os.makedirs(SavePlotDir_Exp2) # if SaveFitData: # if not os.path.exists(SaveDataDir_Exp2): # os.makedirs(SaveDataDir_Exp2) # %%", "'saveplot':SaveFitFigs, 'dpi':dpiN, 'figname':fitplotfilename, 'print sigfigs':n_print_sigfigs } Exp2_save_settings ={ 'save fit data':False, # 'fit", "os.makedirs(SavePlotDir_Exp2) # if SaveFitData: # if not os.path.exists(SaveDataDir_Exp2): # os.makedirs(SaveDataDir_Exp2) # %% if", "\") print(\"\\n\") for axiskey in FFT_amp_table[freq][Hekey].keys(): print(\" \"+axiskey+\" |\",end=\"\") for Hekey in FFT_amp_table[freq].keys():", "plt.figure() i_num = 0 B_max = 0 max_at_nu = 0 max_at_11nu = 0", "in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey]) ,label = Hekey ,alpha =", "axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): FFT_amp_table[nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B freq domain'][Hekey][axiskey][indnu]).max() FFT_amp_table[11*nu][Hekey][axiskey] = np.abs(Field_At_He_location_for_FFT['B", "'r') as fp: params_dims_locs = json.load(fp) # %% 
params_dims_locs # %% rtr_dims =", "'data':Data_At_Sample, 'optimization settings':Exp2_optimization_settings, 'plot settings':Exp2_plot_settings, 'save settings':Exp2_save_settings } Exp2_Opt_Params = Exp2_Opt_Params_3_sources_zeroDC field_at_sample =", "0 max_at_11nu = 0 for axiskey in Field_At_He_location_for_FFT['B freq domain'][Hekey].keys(): plt.semilogx(Field_At_He_location_for_FFT['freq'] ,np.abs(Field_At_He_location_for_FFT['B freq", "dark='darkbg/' q = mpl.rc_params_from_file('matplotlibrc_dark') else: dark = 'whitebg/' mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2 = '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' #", "= 'whitebg/' mpl.rcParams.update(mpl.rcParamsDefault) SavePlotDir_Exp2 = '../Results/2021-12-21_threesigfigs/Exp2/'+dark+'FittingFigs/' # SaveDataDir_Exp2 = '../Results/2021-11-16/Exp2/'+'Pickles/' LoadDataDir_Exp2 = '../Results/2021-12-20/Exp2/Pickles/'#SaveDataDir_Exp2", "n_reps = 50 # %% Field_At_He_location_for_FFT = {} Field_At_He_location_for_FFT['time'] = Data_At_Sample['theta']/360/nu Field_At_He_location_for_FFT['B time", "# The other notebook stored the pickle in the same folder if SaveFitFigs:", "as np # from scipy.optimize import minimize from datetime import datetime now =", "FieldAtAnyLocation from B_calc_script import signif # %% # %autoreload 2 # %% #", "at sample location # # %% Sample_settings = { 'rotor dimensions':rtr_dims, 'sensor locations':He_sample_locations,", "# %% SaveFitFigs = True # SaveFitData = True dpiN = 1000 dark_plots", "= {} for axiskey in field_at_sample[Hekey].keys(): Field_At_He_location_for_FFT['B time domain'][Hekey][axiskey] = np.tile(field_at_sample[Hekey][axiskey],n_reps) Field_At_He_location_for_FFT['B freq", "print('Sensor {} |'.format(Hekey), end = \" \") print(\"\\n\") for axiskey in FFT_amp_table[freq][Hekey].keys(): print(\"", "= 0 max_at_nu = 0 max_at_11nu = 0 for axiskey in Field_At_He_location_for_FFT['B freq", "at $^3$He location \\n ({:0.1f} s measurement 
duration)'.format(n_reps*Field_At_He_location_for_FFT['time'][-1])) plt.ylabel('Magnetic field, {} component (pT)'.format(axiskey))", "1':{ 'Z':np.array([]), 'Y':np.array([]), 'X':np.array([]) }, } } # %% # nowtext = now().strftime(\"%Y%m%d%H%M\")", "# fitdatafilename = SaveDataDir_Exp2+'FittedData_at_sample_{}Hz'.format(nu)+nowtext+'.pk' Exp2_optimization_settings = { 'print':True, 'number of sources':3, 'location dimensions':3,", "Hz'.format(nu), 'doubleplot':False, 'saveplot':SaveFitFigs, 'dpi':dpiN, 'figname':fitplotfilename, 'print sigfigs':n_print_sigfigs } Exp2_save_settings ={ 'save fit data':False,", "matplotlib.pyplot as plt import matplotlib as mpl import time import os import pickle", "Exp2_Opt_Params_3_sources_noDC_noBar = Exp2_Opt_Params_3_sources[:-5] Exp2_Opt_Params_3_sources_zeroDC = Exp2_Opt_Params_3_sources_noDC_noBar+ [0,0,0] + [Exp2_Opt_Params_3_sources[-1]] # %% # with", "plt.grid() plt.legend(loc = 'upper left') if SaveFitFigs: plt.savefig(SavePlotDir_Exp2+'BFFT_at_sample_{}_component.png'.format(axiskey),bbox_inches = 'tight',dpi = dpiN) #", "(np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu = (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table = {} # FFT_amp_table['frequency'] .append(nu) FFT_amp_table[nu] = {}", "freq domain'].keys(): plt.figure() i_num = 0 B_max = 0 max_at_nu = 0 max_at_11nu", "figures':n_sig } Exp2_plot_settings = { 'plot':True, # 'memo':'{} Hz (AV X&Y inverted)'.format(nu), #", "'bar location':0, # 'DC shifts':[DC_shift_AVx,DC_shift_AVy,DC_shift_AWy,DC_shift_AWz] # 'deltaB':1 #picoTesla } # %% Data_At_Sample =", "= 5 # %% with open('../Params/Exp2_dimensions_and_locations.json', 'r') as fp: params_dims_locs = json.load(fp) #", "= 0 B_max = 0 max_at_nu = 0 max_at_11nu = 0 for axiskey", "domain'] = {} Field_At_He_location_for_FFT['B freq domain'] = {} numsamples = n_reps*Field_At_He_location_for_FFT['time'].size binsize =", "pickle.load(file_obj) # %% [markdown] # ## Load parameters ## 
# %% nu =", "# mpl.rcParams.update(mpl.rcParamsDefault) font = { # 'weight' : 'normal', 'size' : 15, 'family':", "%% indnu = (np.abs(Field_At_He_location_for_FFT['freq']-nu)<0.5*nu) ind11nu = (np.abs(Field_At_He_location_for_FFT['freq']-11*nu)<0.5*nu) FFT_amp_table = {} # FFT_amp_table['frequency'] .append(nu)", "'experiment settings':Sample_settings, 'data':Data_At_Sample, 'optimization settings':Exp2_optimization_settings, 'plot settings':Exp2_plot_settings, 'save settings':Exp2_save_settings } Exp2_Opt_Params = Exp2_Opt_Params_3_sources_zeroDC", "in Field_At_He_location_for_FFT['B freq domain'].keys(): plt.figure() i_num = 0 B_max = 0 max_at_nu =", "'save fit data':False, # 'fit data filename':fitdatafilename } Exp2_all_settings = { 'experiment settings':Sample_settings," ]
[ "charset=utf-8\"}) csslink1 = ET.SubElement(head, 'link', {'type': 'text/css', 'rel': 'stylesheet', 'href': 'iptcspecs1.css'}) body =", "thcol4 = ET.SubElement(throw, 'th', {'class':'hdrcol4'}) thcol4.text = 'Basic Type/Cardinality' \"\"\" thcol5 = ET.SubElement(throw,", "+ '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][6] colcode", "green or amber headers are defined by other standards or tools.' propnote1 =", "' ' xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr", "ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text = 'EBUcore' thcol6 = ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text =", "'specification of Video Metadata Hub properties' docdate = ET.SubElement(body, 'p', {'class':'note1'}) docdate.text =", "Metadata Hub Documentation Generator' # Constant values StdVersion = \"1.3\" HeaderAppendix = \"\"", "xroot = ET.Element('html') head = ET.SubElement(xroot, 'head') title = ET.SubElement(head, 'title') title.text =", "for the next run with open('token.pickle', 'wb') as token: pickle.dump(creds, token) return creds", "the ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'specification of Video Metadata", "{'class':'hdrcol4'}) thcol4.text = '' \"\"\" moreatlink = valuesProp[0][4] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a", "in. 
if not creds or not creds.valid: if creds and creds.expired and creds.refresh_token:", "target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\">", "= ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][9] if moreatlink != '':", "186): xrow = ET.SubElement(tbody, 'tr') teststr = valuesProp[rowcounter][0] if teststr == 'Property Structures", "= ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][14] except: valstr = ' '", "= 'specification of Video Metadata Hub properties' docdate = ET.SubElement(body, 'p', {'class':'note1'}) docdate.text", "try: valstr = valuesProp[rowcounter][9] except: valstr = ' ' xcell9.text = valstr xcell10", "valstr = ' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try:", "properties for Photo and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) if not", "valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td',", "= valstr xcell15 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][15] except: valstr", "'tr') teststr = valuesProp[rowcounter][0] if teststr == 'Property Structures (PS)': xrow.set('style', 'background-color: #009999;')", "= ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text = 'NewsML-G2' thcol11 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol11link =", "= 'NewsML-G2' thcol11 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol11link = ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text =", "' xcell9.text = valstr xcell10 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][10]", 
"'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME = 'Video Metadata Hub Documentation Generator' #", "+ '.' copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © ' + CopyrightYear + ', <a", "= ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it", "Hub mapping data from a Google sheet The retrieved data are transformed in", "thcol13link.text = 'Sony XDCAM & Planning' thcol14 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol14link =", "= valuesProp[0][6] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find", "for retrieving IPTC Video Metadata Hub mapping data from a Google sheet The", "16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix", "= valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except:", "= 'exiftool field ids' thcol17 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol17link = ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'})", "defined by other standards or tools.' 
propnote1 = ET.fromstring('<p class=\"note1\">Note on the column", "= ' ' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr", "thcol16link.text = 'exiftool field ids' thcol17 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol17link = ET.SubElement(thcol17,'a',", "creds = pickle.load(token) # If there are no (valid) credentials available, let the", "properties for Photo and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) docnote1 =", "values StdVersion = \"1.3\" HeaderAppendix = \"\" # could be \" - D-R-A-F-T", "D-R-A-F-T - \" IPTCApprovalDate = \"13 May 2020\" IPTCRevisionDate = \"13 May 2020\"", "= ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol14link = ET.SubElement(thcol14,'a', {'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text = 'Panasonic/SMPTE P2' thcol15", "on ' + IPTCApprovalDate + '. Document revision as of ' + IPTCRevisionDate", "'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr = ' ' xcell5.text =", "'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text =", "= valstr filename = \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink", "id', moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion", "= valuesProp[rowcounter][0] if teststr == 'Property Structures (PS)': xrow.set('style', 'background-color: #009999;') if teststr.find('PS',", "body.append(copyrightnotice) mappedstdnote = ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text = 'In this table the columns", "moreatlink = valuesProp[0][5] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\"", "'td', {'class':'bgdcolIptc'}) try: 
valstr = valuesProp[rowcounter][4] except: valstr = ' ' xcell5.text =", "ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8link = ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text = 'Apple Quicktime' thcol9 =", "ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = '", "else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][13] if moreatlink", "= valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow,", "href=\"' + moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink", "'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text =", "ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text =", "run with open('token.pickle', 'wb') as token: pickle.dump(creds, token) return creds def createSpecificMapping(valuesProp, headingtext1,", "ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text = 'Schema.org' # new in 2018-03 thcol13 = ET.SubElement(throw, 'th',", "May 2020\" IPTCRevisionDate = \"13 May 2020\" CopyrightYear = \"2020\" def get_credentials(): \"\"\"Gets", "Mapping VMHub - EIDR Data Fields 2.0', 'EIDR Data Fields 2.0', moreatlink, 17,'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html')", "throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][15] if", "{'class':'hdrcolNoniptc'}) thcol12link = ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text = 'Schema.org' # new in 2018-03 thcol13", "thcol13 = ET.SubElement(throw, 'th', 
{'class':'hdrcolNoniptc2'}) thcol13link = ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text = 'Sony XDCAM", "mappedstdnote.text = 'In this table the columns with a blue header are defined", "the EBU Core Metadata Standard.<br />XMP: based on the ISO XMP standard.<br />PVMD:", "OAuth2 flow is completed to obtain the new credentials. Returns: Credentials, the obtained", "' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][7]", "- Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Sony", "xcell12 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][12] except: valstr = '", "Mappings' + HeaderAppendix seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'See the '", "Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) docnote1 = ET.SubElement(body, 'p', {'class':'smallnote1'}) docnote1.text", "thcol8link.text = 'Apple Quicktime' thcol9 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol9link = ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'})", "{'class':'hdrcol1'}) thcol1.text = ' ' thcol2 = ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text = '", "throw.append(colcode) moreatlink = valuesProp[0][17] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a", "' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr =", "about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode)", "'th', {'class':'hdrcolNoniptc2'}) thcol17link = ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text = 'EIDR Data Fields 2.0' #", "StdVersion = \"1.3\" HeaderAppendix = \"\" # could be \" - 
D-R-A-F-T -", "= ET.SubElement(table, 'tbody') for rowcounter in range(2, 186): xrow = ET.SubElement(tbody, 'tr') teststr", "Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - exiftool', 'exiftool", "2.0' # second row with \"find more at ...\" links throw = ET.SubElement(thead,", "{'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text = 'Sony XDCAM & Planning' thcol14 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol14link", "{'type': 'text/css', 'rel': 'stylesheet', 'href': 'iptcspecs1.css'}) body = ET.SubElement(xroot, 'body') pageheader = ET.SubElement(body,", "= ' ' xcell9.text = valstr xcell10 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr", "Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Apple Quicktime',", "+ '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][7] if", "header are defined by the Video Metadata Hub, the columns with the green", "valstr = valuesProp[rowcounter][12] except: valstr = ' ' xcell12.text = valstr xcell13 =", "ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol16link = ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text = 'exiftool field ids' thcol17", "= ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text = ' ' thcol3 = ET.SubElement(throw, 'td', {'class':'hdrcol3'})", "pageheader.text = 'IPTC Video Metadata Hub - Recommendation '+ StdVersion +' / all", "xcell11.text = valstr xcell12 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][12] except:", "valuesProp[rowcounter][7] except: valstr = ' ' xcell8.text = valstr xcell9 = ET.SubElement(xrow, 'td',", "valstr = ' ' xcell10.text = valstr xcell11 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try:", "= ET.fromstring('<p class=\"smallnote1\">Copyright © ' + CopyrightYear + ', <a 
href=\"https://iptc.org\">IPTC</a> - all", "= ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = ' '", "= ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = headingtext1 seeotherdoc1 = ET.SubElement(body, 'p',", "' xcell15.text = valstr xcell16 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][16]", "StdVersion + HeaderAppendix + '/ Mapping VMHub - Canon Cameras', 'Canon VideoClip XML',", "xcell2 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr", "valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr", "= ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr =", "{'class':'note1'}) seeotherdoc1.text = 'Return to ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text =", "'.' 
copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © ' + CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a>", "\"\"\" xcell4 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except:", "{'class':'hdrcol1'}) thcol1.text = 'Property Group' thcol2 = ET.SubElement(throw, 'th', {'class':'hdrcol2'}) thcol2.text = 'Property", "'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text = valstr", "thcol11 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol11link = ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text = 'PB Core", "= 'IPTC PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8link = ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'})", "= ' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr", "from __future__ import print_function import pickle import os import sys from googleapiclient.discovery import", "try: valstr = valuesProp[rowcounter][12] except: valstr = ' ' xcell12.text = valstr xcell13", "{ 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text", "'\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][mapIdx] if moreatlink", "Core 2.1', 'PB Core 2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub -", "teststr = valuesProp[rowcounter][0] if teststr == 'Property Structures (PS)': xrow.set('style', 'background-color: #009999;') if", "valstr = ' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try:", "#00cccc;') xcell1 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except:", "ET.fromstring( '<td 
class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in range(2,", "pickle.dump(creds, token) return creds def createSpecificMapping(valuesProp, headingtext1, headingtext2, findmoreaturl, mapIdx, filename): # create", "IPTCRevisionDate = \"13 May 2020\" CopyrightYear = \"2020\" def get_credentials(): \"\"\"Gets valid user", "all Mappings' + HeaderAppendix seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'See the", "Save the credentials for the next run with open('token.pickle', 'wb') as token: pickle.dump(creds,", "xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr =", "other standards or tools.' propnote1 = ET.fromstring('<p class=\"note1\">Note on the column headers:<br />EBUcore:", "from a Google sheet The retrieved data are transformed in HTML as saved", "= ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol15link = ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text = 'Canon VideoClip XML'", "open('token.pickle', 'rb') as token: creds = pickle.load(token) # If there are no (valid)", "as token: pickle.dump(creds, token) return creds def createSpecificMapping(valuesProp, headingtext1, headingtext2, findmoreaturl, mapIdx, filename):", "seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'Return to ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1,", "sys from googleapiclient.discovery import build from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import Request", "\"\"\" xcell5 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr =", "= valstr xcell7 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6]", "ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][14] 
except: valstr = ' ' xcell14.text", "'Basic Type/Cardinality' \"\"\" thcol5 = ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text = 'EBUcore' thcol6 =", "creds = flow.run_local_server(port=0) # Save the credentials for the next run with open('token.pickle',", "{'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][9] except: valstr = ' ' xcell9.text = valstr", "'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text = valstr", "recommended on ' + IPTCApprovalDate + '. Document revision as of ' +", "the first # time. if os.path.exists('token.pickle'): with open('token.pickle', 'rb') as token: creds =", "nothing has been stored, or if the stored credentials are invalid, the OAuth2", "Panasonic Cameras', 'Panasonic/SMPTE P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation", "' + StdVersion + HeaderAppendix + '/ Mapping VMHub - PB Core 2.1',", "iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = headingtext1 seeotherdoc1 = ET.SubElement(body,", "\" - D-R-A-F-T - \" IPTCApprovalDate = \"13 May 2020\" IPTCRevisionDate = \"13", "= valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr =", "= valstr with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) def main(): credentials =", "2.1' thcol12 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol12link = ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text = 'Schema.org'", "+ HeaderAppendix + '/ Mapping VMHub - exiftool', 'exiftool field id', moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html')", "moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') 
throw.append(colcode) else: colcode =", "if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES)", "'EBUcore' thcol6 = ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th',", "valstr xcell6 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr =", "+ '/ Mapping VMHub - Panasonic Cameras', 'Panasonic/SMPTE P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC", "ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text = 'exiftool field ids' thcol17 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol17link", "thcol1.text = ' ' thcol2 = ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text = ' '", "</td>') throw.append(colcode) moreatlink = valuesProp[0][11] if moreatlink != '': colcode = ET.fromstring( '<td", "throw.append(colcode) moreatlink = valuesProp[0][10] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a", "' + CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> - all rights reserved. 
Published under", "ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][12] if moreatlink != '': colcode", "field id', moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' +", "{'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][10] except: valstr = ' ' xcell10.text = valstr", "moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new in 2018-03 createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation", "1.3.1!A4:R' result1 = service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp = result1.get('values', []) # create the", "valuesProp[rowcounter][14] except: valstr = ' ' xcell14.text = valstr xcell15 = ET.SubElement(xrow, 'td',", "thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'PVMD JSON' thcol8 = ET.SubElement(throw, 'th',", "valstr = valuesProp[rowcounter][15] except: valstr = ' ' xcell15.text = valstr xcell16 =", "obtain the new credentials. Returns: Credentials, the obtained credential. 
\"\"\" creds = None", "xcell10 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][10] except: valstr = '", "'<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][11] if moreatlink != '': colcode =", "= ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = ' '", "= valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except:", "\"13 May 2020\" CopyrightYear = \"2020\" def get_credentials(): \"\"\"Gets valid user credentials from", "and creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES) creds = flow.run_local_server(port=0) #", "the columns with the green or amber headers are defined by other standards", "+ HeaderAppendix + '/ Mapping VMHub - Panasonic Cameras', 'Panasonic/SMPTE P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html')", "else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][17] if moreatlink", "...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][mapIdx] if moreatlink != '': colcode = ET.fromstring( '<td", "valid user credentials from storage. 
If nothing has been stored, or if the", "valstr = valuesProp[rowcounter][13] except: valstr = ' ' xcell13.text = valstr xcell14 =", "= ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'See the ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a',", "creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES) creds", "throw.append(colcode) moreatlink = valuesProp[0][12] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a", "valuesProp[rowcounter][4] except: valstr = ' ' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td',", "# time. if os.path.exists('token.pickle'): with open('token.pickle', 'rb') as token: creds = pickle.load(token) #", "ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text = '' \"\"\" moreatlink = valuesProp[0][4] colcode = ET.fromstring(", "thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'PVMD JSON' thcol8", "def main(): credentials = get_credentials() service = build('sheets', 'v4', credentials=credentials) spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk'", "'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][11] except: valstr = ' ' xcell11.text =", "'iptcspecs1.css'}) body = ET.SubElement(xroot, 'body') pageheader = ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc = ET.SubElement(pageheader,", "'Property Name' thcol3 = ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text = 'Definition / Semantics' \"\"\"", "- Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Schema.org',", "\"13 May 2020\" IPTCRevisionDate = \"13 May 2020\" CopyrightYear = \"2020\" def get_credentials():", "Metadata Hub - Recommendation '+ StdVersion +' / all Mappings' + HeaderAppendix seeotherdoc1", "thcol14 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol14link = ET.SubElement(thcol14,'a', 
{'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text = 'Panasonic/SMPTE P2'", "+ '/ Mapping VMHub - Sony Cameras ', 'Sony XDCAM & Planning', moreatlink,", "with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink = valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC Video", "Hub.' seeotherdoc2 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc2.text = 'See the ' seeotherdoc1link2 =", "throw.append(colcode) moreatlink = valuesProp[0][5] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink +", "= valuesProp[0][12] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' +", "BQ: Updated and checked into GitHub \"\"\" from __future__ import print_function import pickle", "Metadata Standard.<br />XMP: based on the ISO XMP standard.<br />PVMD: a specification of", "class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][13] if moreatlink != '': colcode = ET.fromstring(", "= ' ' xcell17.text = valstr filename = \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with open(filename, 'w') as", "HeaderAppendix + '/ Mapping VMHub - Apple Quicktime', 'Apple Quicktime', moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html')", "= 'Return to ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'all recommended", "Group' thcol2 = ET.SubElement(throw, 'th', {'class':'hdrcol2'}) thcol2.text = 'Property Name' thcol3 = ET.SubElement(throw,", "Video Metadata Hub - Recommendation '+ StdVersion +' / all Mappings' + HeaderAppendix", "</td>') throw.append(colcode) moreatlink = valuesProp[0][12] if moreatlink != '': colcode = ET.fromstring( '<td", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][13] if moreatlink !=", 
"the ISO XMP standard.<br />PVMD: a specification of JSON properties for Photo and", "\"Content-Type\", 'content': \"text/html; charset=utf-8\"}) csslink1 = ET.SubElement(head, 'link', {'type': 'text/css', 'rel': 'stylesheet', 'href':", "{'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text = valstr", "valstr = ' ' xcell4.text = valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'})", "{'class':'spec1 vmhmapping'}) thead = ET.SubElement(table, 'thead') throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw,", "'body') pageheader = ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc = ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg =", "valstr = ' ' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try:", "'Definition / Semantics' \"\"\" thcol4 = ET.SubElement(throw, 'th', {'class':'hdrcol4'}) thcol4.text = 'Basic Type/Cardinality'", "throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][16] if", "StdVersion + HeaderAppendix + '/ Mapping VMHub - PB Core 2.1', 'PB Core", "valstr = ' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', { 'class':", "sheet The retrieved data are transformed in HTML as saved as HTML page.", "thcol15 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol15link = ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text = 'Canon VideoClip", "- Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - MPEG", "Video Metadata Hub, the column with the green header is defined by '", "= valstr xcell2 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1]", "' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Canon Cameras', 'Canon", "about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][7] 
if moreatlink != '': colcode", "the HTML document xroot = ET.Element('html') head = ET.SubElement(xroot, 'head') title = ET.SubElement(head,", "the column headers:<br />EBUcore: based on the EBU Core Metadata Standard.<br />XMP: based", "valstr = valuesProp[rowcounter][7] except: valstr = ' ' xcell8.text = valstr xcell9 =", "' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Schema.org', 'Schema.org', moreatlink,", "= 'Canon VideoClip XML' thcol16 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol16link = ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'})", "target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][6] colcode = ET.fromstring(", "'p', {'class':'note1'}) mappedstdnote.text = 'In this table the columns with a blue header", "result1.get('values', []) # create the HTML document xroot = ET.Element('html') head = ET.SubElement(xroot,", "= valstr xcell6 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr", "moreatlink = valuesProp[0][9] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"'", "be \" - D-R-A-F-T - \" IPTCApprovalDate = \"13 May 2020\" IPTCRevisionDate =", "= ' ' xcell14.text = valstr xcell15 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr", "== 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try:", "xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][7] except:", "as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) def main(): credentials = get_credentials() service = build('sheets', 'v4',", "except: valstr = ' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', {", "IPTCApprovalDate = \"13 May 2020\" IPTCRevisionDate = \"13 May 2020\" CopyrightYear = \"2020\"", "thcol9 = 
ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol9link = ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text = 'MPEG 7'", "= ET.SubElement(xroot, 'head') title = ET.SubElement(head, 'title') title.text = 'Video Metadata Hub Mapping'", "ET.fromstring('<p class=\"smallnote1\">Copyright © '+ CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> - all rights reserved.", "thcol6 = ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'})", "are defined by the Video Metadata Hub, the columns with the green or", "class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode)", "= ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = 'IPTC Video Metadata Hub -", "seeotherdoc1link2.text = 'specification of Video Metadata Hub properties' docdate = ET.SubElement(body, 'p', {'class':'note1'})", "copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © ' + CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> -", "# create the HTML document xroot = ET.Element('html') head = ET.SubElement(xroot, 'head') title", "xcell13.text = valstr xcell14 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][14] except:", "= 'Video Metadata Hub Mapping' metachset = ET.SubElement(head, 'meta', {'http-equiv': \"Content-Type\", 'content': \"text/html;", "mappings of the Video Metadata Hub.' 
seeotherdoc2 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc2.text =", "' ' \"\"\" thcol4 = ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text = '' \"\"\" moreatlink", "'rb') as token: creds = pickle.load(token) # If there are no (valid) credentials", "pageheader = ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc = ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc,", "HeaderAppendix + '/ Mapping VMHub - NewsML-G2', 'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video", "'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][7] except: valstr = ' ' xcell8.text =", "'In this table the columns with a blue header are defined by the", "valstr = ' ' xcell16.text = valstr xcell17 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try:", "it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][5] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"'", "data are transformed in HTML as saved as HTML page. 
For IPTC-internal use", "valstr = ' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try:", "= get_credentials() service = build('sheets', 'v4', credentials=credentials) spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName = 'MappingRec", "= valuesProp[0][16] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' +", "Cameras', 'Panasonic/SMPTE P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation '", "Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping", "valuesProp[0][14] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink", "= valuesProp[rowcounter][17] except: valstr = ' ' xcell17.text = valstr filename = \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\"", "= ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text = 'EIDR Data Fields 2.0' # second row with", "flow completes for the first # time. 
if os.path.exists('token.pickle'): with open('token.pickle', 'rb') as", "blue header are defined by the Video Metadata Hub, the column with the", "XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion", "flow.run_local_server(port=0) # Save the credentials for the next run with open('token.pickle', 'wb') as", "' ' xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'})", "Recommendation '+ StdVersion +' / all Mappings' + HeaderAppendix seeotherdoc1 = ET.SubElement(body, 'p',", "'tbody') for rowcounter in range(2, 186): xrow = ET.SubElement(tbody, 'tr') teststr = valuesProp[rowcounter][0]", "head = ET.SubElement(xroot, 'head') title = ET.SubElement(head, 'title') title.text = 'Video Metadata Hub", "== 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr =", "Constant values StdVersion = \"1.3\" HeaderAppendix = \"\" # could be \" -", "started, download and HTML output ok 2020-06-15 BQ: Updated and checked into GitHub", "are no (valid) credentials available, let the user log in. 
if not creds", "valstr xcell11 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][11] except: valstr =", "valstr xcell2 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except:", "links throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text = '", "{ 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text", "file.write(ET.tostring(xroot, pretty_print=True).decode()) def main(): credentials = get_credentials() service = build('sheets', 'v4', credentials=credentials) spreadsheetId", "Core 2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' +", "= 'Apple Quicktime' thcol9 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol9link = ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text", "= valuesProp[rowcounter][14] except: valstr = ' ' xcell14.text = valstr xcell15 = ET.SubElement(xrow,", "as of ' + IPTCRevisionDate + '.' copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © '", "ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][11] if moreatlink != '': colcode", "'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text = valstr", "xrow.set('style', 'background-color: #009999;') if teststr.find('PS', 0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1 =", "+ HeaderAppendix + '/ Mapping VMHub - Apple Quicktime', 'Apple Quicktime', moreatlink, 7,", "rights reserved. 
Published under the Creative Commons Attribution 4.0 license <a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice)", "flow = InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES) creds = flow.run_local_server(port=0) # Save the credentials for", "'See the ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'specification of Video", "there are no (valid) credentials available, let the user log in. if not", "at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][5] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' +", "ET.fromstring('<p class=\"note1\">Note on the column headers:<br />EBUcore: based on the EBU Core Metadata", "file: file.write(ET.tostring(xroot, pretty_print=True).decode()) def main(): credentials = get_credentials() service = build('sheets', 'v4', credentials=credentials)", "class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode)", "# Constant values StdVersion = \"1.3\" HeaderAppendix = \"\" # could be \"", "valuesProp: print('No Property data found.') else: table = ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'}) thead", "try: valstr = valuesProp[rowcounter][7] except: valstr = ' ' xcell8.text = valstr xcell9", "'class': 'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][mapIdx] except: valstr = ' ' xcell8.text =", "for the first # time. 
if os.path.exists('token.pickle'): with open('token.pickle', 'rb') as token: creds", "ET SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME = 'Video Metadata Hub", "xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr =", "- Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - EIDR", "'\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][7] if moreatlink", "= valstr xcell13 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][13] except: valstr", "', 'Sony XDCAM & Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub -", "class=\"note1\">Note on the column headers:<br />EBUcore: based on the EBU Core Metadata Standard.<br", "range(2, 186): xrow = ET.SubElement(tbody, 'tr') teststr = valuesProp[rowcounter][0] if teststr == 'Property", "+ '/ Mapping VMHub - exiftool', 'exiftool field id', moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC", "valstr xcell13 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][13] except: valstr =", "Mapping VMHub - MPEG 7', 'MPEG 7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata", "- Panasonic Cameras', 'Panasonic/SMPTE P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub -", "{'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = headingtext1 seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) 
seeotherdoc1.text =", "or tools.' propnote1 = ET.fromstring('<p class=\"note1\">Note on the column headers:<br />EBUcore: based on", "valstr = ' ' xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'})", "= ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][9] except: valstr = ' '", "valstr xcell8 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][7] except: valstr =", "{ 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text", "= valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow,", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][14] if moreatlink !=", "seeotherdoc1link1.text = 'all recommended mappings of the Video Metadata Hub.' seeotherdoc2 = ET.SubElement(body,", "thcol2 = ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text = ' ' thcol3 = ET.SubElement(throw, 'td',", "filename): # create the HTML document xroot = ET.Element('html') head = ET.SubElement(xroot, 'head')", "pretty_print=True).decode()) def main(): credentials = get_credentials() service = build('sheets', 'v4', credentials=credentials) spreadsheetId =", "ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][17] except: valstr = ' ' xcell17.text", "= ET.SubElement(thcol14,'a', {'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text = 'Panasonic/SMPTE P2' thcol15 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol15link", "ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][9] except: valstr = ' ' xcell9.text", "'rel': 'stylesheet', 'href': 'iptcspecs1.css'}) body = ET.SubElement(xroot, 'body') pageheader = ET.SubElement(body, 'h1', {'class':'pageheader'})", "xcell9.text = valstr xcell10 = 
ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][10] except:", "googleapiclient.discovery import build from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import Request from lxml", "\"\"\" thcol4 = ET.SubElement(throw, 'th', {'class':'hdrcol4'}) thcol4.text = 'Basic Type/Cardinality' \"\"\" thcol5 =", "credential. \"\"\" creds = None # The file token.pickle stores the user's access", "python3 \"\"\" Python script for retrieving IPTC Video Metadata Hub mapping data from", "'\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][6] colcode =", "xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except:", "valstr = ' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', { 'class':", "amber headers are defined by other standards or tools.' propnote1 = ET.fromstring('<p class=\"note1\">Note", "{'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text = valstr", "not creds or not creds.valid: if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else:", "' xcell12.text = valstr xcell13 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][13]", "= ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr =", "{'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text = 'Canon VideoClip XML' thcol16 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol16link =", "'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][10] except: valstr = ' ' xcell10.text =", "tools.' 
propnote1 = ET.fromstring('<p class=\"note1\">Note on the column headers:<br />EBUcore: based on the", "...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][13]", "VMHub - PB Core 2.1', 'PB Core 2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video", "XML' thcol16 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol16link = ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text = 'exiftool", "ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][7] except: valstr = ' ' xcell8.text", "' xcell16.text = valstr xcell17 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][17]", "moreatlink = valuesProp[0][15] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"'", "else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][16] if moreatlink", "+ StdVersion + HeaderAppendix + '/ Mapping VMHub - Canon Cameras', 'Canon VideoClip", "os.path.exists('token.pickle'): with open('token.pickle', 'rb') as token: creds = pickle.load(token) # If there are", "= valuesProp[rowcounter][9] except: valstr = ' ' xcell9.text = valstr xcell10 = ET.SubElement(xrow,", "when the authorization flow completes for the first # time. 
if os.path.exists('token.pickle'): with", "thcol15link.text = 'Canon VideoClip XML' thcol16 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol16link = ET.SubElement(thcol16,'a',", "StdVersion +' / all Mappings' + HeaderAppendix seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text", "- NewsML-G2', 'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation '", "ET.Element('html') head = ET.SubElement(xroot, 'head') title = ET.SubElement(head, 'title') title.text = 'Video Metadata", "ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr = '", "'text/css', 'rel': 'stylesheet', 'href': 'iptcspecs1.css'}) body = ET.SubElement(xroot, 'body') pageheader = ET.SubElement(body, 'h1',", "= flow.run_local_server(port=0) # Save the credentials for the next run with open('token.pickle', 'wb')", "= service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp = result1.get('values', []) # create the HTML document", "xcell6 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr", "valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3]", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][9] if moreatlink !=", "about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][5] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a", "valstr xcell10 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][10] except: valstr =", "= ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol12link = ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text = 'Schema.org' 
# new", "'/ Mapping VMHub - Canon Cameras', 'Canon VideoClip XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC", "' ' xcell8.text = valstr with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) def", "{'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text = valstr", "' ' thcol3 = ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text = ' ' \"\"\" thcol4", "header are defined by the Video Metadata Hub, the column with the green", "ISO XMP standard.<br />PVMD: a specification of JSON properties for Photo and Video", "title.text = 'Video Metadata Hub Mapping' metachset = ET.SubElement(head, 'meta', {'http-equiv': \"Content-Type\", 'content':", "= ET.SubElement(head, 'title') title.text = 'Video Metadata Hub Mapping' metachset = ET.SubElement(head, 'meta',", "'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][14] except: valstr = ' ' xcell14.text =", "pretty_print=True).decode()) moreatlink = valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' +", "xcell11 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][11] except: valstr = '", "#009999;') if teststr.find('PS', 0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td',", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][17] if moreatlink !=", "with open('token.pickle', 'wb') as token: pickle.dump(creds, token) return creds def createSpecificMapping(valuesProp, headingtext1, headingtext2,", "'Panasonic/SMPTE P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' +", "xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 
'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except:", "thcol16link = ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text = 'exiftool field ids' thcol17 = ET.SubElement(throw, 'th',", "= ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text = 'EBUcore' thcol6 = ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text", "and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) if not valuesProp: print('No Property", "HeaderAppendix = \"\" # could be \" - D-R-A-F-T - \" IPTCApprovalDate =", "'1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName = 'MappingRec 1.3.1!A4:R' result1 = service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp = result1.get('values',", "throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][9] if", "'th', {'class':'hdrcolNoniptc2'}) thcol15link = ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text = 'Canon VideoClip XML' thcol16 =", "valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text = valstr \"\"\" xcell5 = ET.SubElement(xrow,", "ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it at", "{'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'specification of Video Metadata Hub properties' docdate = ET.SubElement(body, 'p',", "thcol2 = ET.SubElement(throw, 'th', {'class':'hdrcol2'}) thcol2.text = 'Property Name' thcol3 = ET.SubElement(throw, 'th',", "'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text =", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter", "...</a></td>') throw.append(colcode) 
else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][16]", "script for retrieving IPTC Video Metadata Hub mapping data from a Google sheet", "- Apple Quicktime', 'Apple Quicktime', moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub", "= valstr xcell10 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][10] except: valstr", "thcol14link = ET.SubElement(thcol14,'a', {'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text = 'Panasonic/SMPTE P2' thcol15 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'})", "thcol3 = ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text = 'Definition / Semantics' \"\"\" thcol4 =", "refresh tokens, and is # created automatically when the authorization flow completes for", "xcell15 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][15] except: valstr = '", "more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][7] if moreatlink != '':", "thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'IPTC PVMD JSON'", "'th', {'class':'hdrcol5'}) thcol5.text = 'EBUcore' thcol6 = ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text = 'XMP'", "valstr = ' ' xcell14.text = valstr xcell15 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try:", "except: valstr = ' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', {", "valuesProp[0][10] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink", "including only this mapping (better for printing)' if not valuesProp: print('No Property data", "PB Core 2.1', 'PB Core 2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video 
Metadata Hub", "(valid) credentials available, let the user log in. if not creds or not", "'\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][5] colcode =", "'p', {'class':'note1'}) seeotherdoc1.text = 'See the ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text", "HeaderAppendix + '/ Mapping VMHub - EIDR Data Fields 2.0', 'EIDR Data Fields", "= ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc = ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img',", "ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][15] except: valstr = ' ' xcell15.text", "= 'EBUcore' thcol6 = ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 = ET.SubElement(throw,", "CLIENT_SECRET_FILE, SCOPES) creds = flow.run_local_server(port=0) # Save the credentials for the next run", "{'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][17] except: valstr = ' ' xcell17.text = valstr", "{'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text = 'MPEG 7' thcol10 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol10link = ET.SubElement(thcol10,'a',", "os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME = 'Video Metadata Hub Documentation Generator' # Constant values StdVersion", "= valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion +", "new in 2018-03 createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion", "Video Metadata Hub mapping data from a Google sheet The retrieved data are", "it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) tbody", "stored credentials are invalid, the 
OAuth2 flow is completed to obtain the new", "'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr", "import pickle import os import sys from googleapiclient.discovery import build from google_auth_oauthlib.flow import", "moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion +", "xcell4.text = valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr", "{'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'all recommended mappings of the Video Metadata Hub.' seeotherdoc2 =", "HTML page. For IPTC-internal use Creator: <NAME> History: 2016-11-25 mws: project started, download", "'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text =", "' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr", "xcell7 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = '", "header of mappings to other standards provides a link to a table including", "= ' ' xcell12.text = valstr xcell13 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr", "try: valstr = valuesProp[rowcounter][10] except: valstr = ' ' xcell10.text = valstr xcell11", "'/ Mapping VMHub - EIDR Data Fields 2.0', 'EIDR Data Fields 2.0', moreatlink,", "column headers:<br />EBUcore: based on the EBU Core Metadata Standard.<br />XMP: based on", "ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = '", "google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import Request from lxml import etree as ET", "valstr = valuesProp[rowcounter][16] except: valstr = ' ' xcell16.text = valstr xcell17 =", "'class': 
'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text =", "' xcell4.text = valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr =", "Sony Cameras ', 'Sony XDCAM & Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata", "a Google sheet The retrieved data are transformed in HTML as saved as", "headers:<br />EBUcore: based on the EBU Core Metadata Standard.<br />XMP: based on the", "ET.SubElement(xroot, 'body') pageheader = ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc = ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg", "valstr xcell15 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][15] except: valstr =", "'Panasonic/SMPTE P2' thcol15 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol15link = ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text =", "moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion", "ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text = ' ' \"\"\" thcol4 = ET.SubElement(throw, 'td', {'class':'hdrcol4'})", "valstr = ' ' xcell12.text = valstr xcell13 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try:", "more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][6] colcode = ET.fromstring( '<td", "valstr = ' ' xcell17.text = valstr filename = \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with open(filename, 'w')", "& Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' +", "'th', {'class':'hdrcolNoniptc'}) thcol8link = ET.SubElement(thcol8,'a', 
{'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text = 'Apple Quicktime' thcol9 = ET.SubElement(throw,", "Quicktime' thcol9 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol9link = ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text = 'MPEG", "let the user log in. if not creds or not creds.valid: if creds", "ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][10] if moreatlink != '': colcode", "VMHub - MPEG 7', 'MPEG 7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub", "</td>') throw.append(colcode) moreatlink = valuesProp[0][9] if moreatlink != '': colcode = ET.fromstring( '<td", "the user log in. if not creds or not creds.valid: if creds and", "else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][10] if moreatlink", "{'class':'hdrcol3'}) thcol3.text = 'Definition / Semantics' \"\"\" thcol4 = ET.SubElement(throw, 'th', {'class':'hdrcol4'}) thcol4.text", "moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion +", "VMHub - exiftool', 'exiftool field id', moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub", "thcol11link = ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text = 'PB Core 2.1' thcol12 = ET.SubElement(throw, 'th',", "class=\"smallnote1\">Copyright © '+ CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> - all rights reserved. 
Published", "thcol3.text = ' ' \"\"\" thcol4 = ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text = ''", "GitHub \"\"\" from __future__ import print_function import pickle import os import sys from", "'/ Mapping VMHub - MPEG 7', 'MPEG 7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video", "xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr =", "mapIdx, filename): # create the HTML document xroot = ET.Element('html') head = ET.SubElement(xroot,", "'See the ' seeotherdoc1link2 = ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text = 'specification of Video", "IPTC Video Metadata Hub mapping data from a Google sheet The retrieved data", "'border':'0'}) pageheader.text = 'IPTC Video Metadata Hub - Recommendation '+ StdVersion +' /", "blue header are defined by the Video Metadata Hub, the columns with the", "+ HeaderAppendix + '/ Mapping VMHub - EIDR Data Fields 2.0', 'EIDR Data", "xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except:", "ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol14link = ET.SubElement(thcol14,'a', {'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text = 'Panasonic/SMPTE P2' thcol15 =", "Standard.<br />XMP: based on the ISO XMP standard.<br />PVMD: a specification of JSON", "and creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES) creds =", "the columns with a blue header are defined by the Video Metadata Hub,", "Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - MPEG 7',", "0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'})", 
"'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text =", "ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = '", "IPTCRevisionDate + '.' copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © '+ CopyrightYear + ', <a", "valuesProp[0][12] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink", "are invalid, the OAuth2 flow is completed to obtain the new credentials. Returns:", "ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in range(2,", "...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][5] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink", "= ' ' \"\"\" thcol4 = ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text = '' \"\"\"", "Creator: <NAME> History: 2016-11-25 mws: project started, download and HTML output ok 2020-06-15", "table = ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'}) thead = ET.SubElement(table, 'thead') throw = ET.SubElement(thead,", "= valstr xcell3 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2]", "ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol15link = ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text = 'Canon VideoClip XML' thcol16", "valuesProp[0][17] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink", "{'class':'hdrcolNoniptc'}) thcol10link = ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text = 'NewsML-G2' thcol11 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'})", "= valuesProp[0][5] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find", "= 
ET.SubElement(head, 'meta', {'http-equiv': \"Content-Type\", 'content': \"text/html; charset=utf-8\"}) csslink1 = ET.SubElement(head, 'link', {'type':", "= valstr xcell9 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][9] except: valstr", "'<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][15] if moreatlink != '': colcode =", "{'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'PVMD JSON'", "'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'specification of Video Metadata Hub properties' docdate = ET.SubElement(body,", "seeotherdoc1link2 = ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text = 'specification of Video Metadata Hub properties'", "= ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = ' '", "of the Video Metadata Hub.' seeotherdoc2 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc2.text = 'See", "'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text =", "target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][mapIdx] if moreatlink !=", "HeaderAppendix + '/ Mapping VMHub - Canon Cameras', 'Canon VideoClip XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html')", "storage. 
If nothing has been stored, or if the stored credentials are invalid,", "valstr filename = \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink =", "{'class':'pageheader'}) iptcanc = ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'})", "</td>') throw.append(colcode) moreatlink = valuesProp[0][16] if moreatlink != '': colcode = ET.fromstring( '<td", "'meta', {'http-equiv': \"Content-Type\", 'content': \"text/html; charset=utf-8\"}) csslink1 = ET.SubElement(head, 'link', {'type': 'text/css', 'rel':", "defined by the Video Metadata Hub, the columns with the green or amber", "= ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][14] if moreatlink != '':", "this table the columns with a blue header are defined by the Video", "2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion", "<NAME> History: 2016-11-25 mws: project started, download and HTML output ok 2020-06-15 BQ:", "xcell1 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr", "the green header is defined by ' + headingtext2 propnote1 = ET.fromstring('<p class=\"note1\">Note", "print_function import pickle import os import sys from googleapiclient.discovery import build from google_auth_oauthlib.flow", "' ' xcell8.text = valstr xcell9 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr =", "& Planning' thcol14 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol14link = ET.SubElement(thcol14,'a', {'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text =", 
"'th', {'class':'hdrcolNoniptc'}) thcol10link = ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text = 'NewsML-G2' thcol11 = ET.SubElement(throw, 'th',", "xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr =", "'align':'left', 'border':'0'}) pageheader.text = headingtext1 seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'Return", "xcell16.text = valstr xcell17 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][17] except:", "ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = '", "except: valstr = ' ' xcell12.text = valstr xcell13 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'})", "'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text = valstr", "'Canon VideoClip XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation '", "'PB Core 2.1' thcol12 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol12link = ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text", "except: valstr = ' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'})", "ET.SubElement(xroot, 'head') title = ET.SubElement(head, 'title') title.text = 'Video Metadata Hub Mapping' metachset", "{'class':'hdrcol3'}) thcol3.text = ' ' \"\"\" thcol4 = ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text =", "thcol10link = ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text = 'NewsML-G2' thcol11 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol11link", "by IPTC (aka phovidmd).</p>') body.append(propnote1) if not valuesProp: print('No Property 
data found.') else:", "'MappingRec 1.3.1!A4:R' result1 = service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp = result1.get('values', []) # create", "{'class':'hdrcolNoniptc2'}) thcol13link = ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text = 'Sony XDCAM & Planning' thcol14 =", "thcol4.text = 'Basic Type/Cardinality' \"\"\" thcol5 = ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text = 'EBUcore'", "valuesProp[0][7] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink", "JSON properties for Photo and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) docnote1", "= ' ' xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', { 'class':", "moreatlink = valuesProp[0][14] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"'", "'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0]", "{'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text = 'NewsML-G2' thcol11 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol11link = ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'})", "P2' thcol15 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol15link = ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text = 'Canon", "xcell3 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr", "= ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][13] except: valstr = ' '", "Schema.org', 'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new in 2018-03 
createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub", "\"\"\" moreatlink = valuesProp[0][4] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink +", "= ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in", "2.1', 'PB Core 2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation", "- Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Panasonic", "+ '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][5] colcode", "throw.append(colcode) moreatlink = valuesProp[0][mapIdx] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a", "HTML output ok 2020-06-15 BQ: Updated and checked into GitHub \"\"\" from __future__", "{'class':'hdrcolNoniptc'}) thcol8.text = headingtext2 # second row with \"find more at ...\" links", "= '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName = 'MappingRec 1.3.1!A4:R' result1 = service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp =", "Quicktime', moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' +", "throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text = ' '", "= ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr =", "'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = ' '", "a link to a table including only this mapping (better for printing)' if", "ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text = 'PB Core 2.1' thcol12 = ET.SubElement(throw, 'th', 
{'class':'hdrcolNoniptc'}) thcol12link", "' thcol2 = ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text = ' ' thcol3 = ET.SubElement(throw,", "{'class':'hdrcolNoniptc2'}) thcol11link = ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text = 'PB Core 2.1' thcol12 = ET.SubElement(throw,", "ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][11] except: valstr = ' ' xcell11.text", "Data Fields 2.0' # second row with \"find more at ...\" links throw", "ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text = 'NewsML-G2' thcol11 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol11link = ET.SubElement(thcol11,'a',", "thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8.text = headingtext2 # second row with \"find", "Photo and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) docnote1 = ET.SubElement(body, 'p',", "= valuesProp[rowcounter][11] except: valstr = ' ' xcell11.text = valstr xcell12 = ET.SubElement(xrow,", "= ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it", "{'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][14] except: valstr = ' ' xcell14.text = valstr", "MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) docnote1 = ET.SubElement(body, 'p', {'class':'smallnote1'}) docnote1.text =", "ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text", "of ' + IPTCRevisionDate + '.' 
copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © ' +", "'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = ' '", "ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = 'IPTC", "Core 2.1' thcol12 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol12link = ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text =", "'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][9] except: valstr = ' ' xcell9.text =", "throw.append(colcode) moreatlink = valuesProp[0][9] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a", "...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][17]", "{'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][16] except: valstr = ' ' xcell16.text = valstr", "main(): credentials = get_credentials() service = build('sheets', 'v4', credentials=credentials) spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName", "the stored credentials are invalid, the OAuth2 flow is completed to obtain the", "in HTML as saved as HTML page. 
For IPTC-internal use Creator: <NAME> History:", "valstr = ' ' xcell11.text = valstr xcell12 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try:", "StdVersion + HeaderAppendix + '/ Mapping VMHub - Panasonic Cameras', 'Panasonic/SMPTE P2', moreatlink,", "= ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol13link = ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text = 'Sony XDCAM &", "+ HeaderAppendix + '/ Mapping VMHub - Schema.org', 'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new", "'href': 'iptcspecs1.css'}) body = ET.SubElement(xroot, 'body') pageheader = ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc =", "Photo and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) if not valuesProp: print('No", "as of ' + IPTCRevisionDate + '.' copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © '+", "ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][16] if moreatlink != '': colcode", "= ET.SubElement(head, 'link', {'type': 'text/css', 'rel': 'stylesheet', 'href': 'iptcspecs1.css'}) body = ET.SubElement(xroot, 'body')", "= valuesProp[0][17] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' +", "'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = ' '", "throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][17] if", "about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode)", "credentials = get_credentials() service = build('sheets', 'v4', credentials=credentials) spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName =", "'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) 
thcol7.text = 'PVMD JSON' thcol8 = ET.SubElement(throw,", "found.') else: table = ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'}) thead = ET.SubElement(table, 'thead') throw", "= ET.SubElement(xroot, 'body') pageheader = ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc = ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'})", "thcol7.text = 'IPTC PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8link = ET.SubElement(thcol8,'a',", "= 'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME = 'Video Metadata Hub Documentation Generator'", "xcell4 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr", "'exiftool field ids' thcol17 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol17link = ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text", "try: valstr = valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text = valstr \"\"\"", "createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix +", "= 'IPTC Video Metadata Hub - Recommendation '+ StdVersion +' / all Mappings'", "transformed in HTML as saved as HTML page. For IPTC-internal use Creator: <NAME>", "thcol1.text = 'Property Group' thcol2 = ET.SubElement(throw, 'th', {'class':'hdrcol2'}) thcol2.text = 'Property Name'", "obtained credential. 
\"\"\" creds = None # The file token.pickle stores the user's", "= 'The header of mappings to other standards provides a link to a", "= ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text = ' ' \"\"\" thcol4 = ET.SubElement(throw, 'td',", "valstr = valuesProp[rowcounter][14] except: valstr = ' ' xcell14.text = valstr xcell15 =", "{ 'class': 'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][mapIdx] except: valstr = ' ' xcell8.text", "valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td',", "ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = '", "defined by ' + headingtext2 propnote1 = ET.fromstring('<p class=\"note1\">Note on the column headers:<br", "valuesProp[0][6] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more", "VMHub - Schema.org', 'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new in 2018-03 createSpecificMapping(valuesProp, 'IPTC Video", "thcol4 = ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text = '' \"\"\" moreatlink = valuesProp[0][4] colcode", "= valuesProp[rowcounter][16] except: valstr = ' ' xcell16.text = valstr xcell17 = ET.SubElement(xrow,", "headingtext2 # second row with \"find more at ...\" links throw = ET.SubElement(thead,", "docdate.text = 'Mapping recommended on ' + IPTCApprovalDate + '. 
Document revision as", "service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp = result1.get('values', []) # create the HTML document xroot", "# Save the credentials for the next run with open('token.pickle', 'wb') as token:", "xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolNoniptc'}) try: valstr =", "= ET.SubElement(xrow, 'td', { 'class': 'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][mapIdx] except: valstr =", "' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Sony Cameras ',", "'thead') throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text = 'Property", "= ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text = 'specification of Video Metadata Hub properties' docdate", "'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new in 2018-03 createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub -", "(PS)': xrow.set('style', 'background-color: #009999;') if teststr.find('PS', 0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1", "{'class':'hdrcolNoniptc2'}) thcol9link = ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text = 'MPEG 7' thcol10 = ET.SubElement(throw, 'th',", "'<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][9] if moreatlink != '': colcode =", "' xcell8.text = valstr xcell9 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][9]", "saved as HTML page. 
For IPTC-internal use Creator: <NAME> History: 2016-11-25 mws: project", "valstr with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) def main(): credentials = get_credentials()", "thcol9link.text = 'MPEG 7' thcol10 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol10link = ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'})", "ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol12link = ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text = 'Schema.org' # new in", "open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) def main(): credentials = get_credentials() service =", "' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2]", "valstr xcell17 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][17] except: valstr =", "'Canon VideoClip XML' thcol16 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol16link = ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text", "- Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Canon", "thcol2.text = 'Property Name' thcol3 = ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text = 'Definition /", "if not creds or not creds.valid: if creds and creds.expired and creds.refresh_token: creds.refresh(Request())", "'p', {'class':'note1'}) seeotherdoc2.text = 'See the ' seeotherdoc1link2 = ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text", "'th', {'class':'hdrcolNoniptc'}) thcol8.text = headingtext2 # second row with \"find more at ...\"", "standard.<br />PVMD: a specification of JSON properties for Photo and Video MetaData by", "recommended mappings of the 
Video Metadata Hub.' seeotherdoc2 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc2.text", "= None # The file token.pickle stores the user's access and refresh tokens,", "ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][10] except: valstr = ' ' xcell10.text", "' + StdVersion + HeaderAppendix + '/ Mapping VMHub - MPEG 7', 'MPEG", "with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) def main(): credentials = get_credentials() service", "ET.SubElement(table, 'thead') throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text =", "+ StdVersion + HeaderAppendix + '/ Mapping VMHub - EIDR Data Fields 2.0',", "standards provides a link to a table including only this mapping (better for", "valstr xcell9 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][9] except: valstr =", "'stylesheet', 'href': 'iptcspecs1.css'}) body = ET.SubElement(xroot, 'body') pageheader = ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc", "from googleapiclient.discovery import build from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import Request from", "ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][12] except: valstr = ' ' xcell12.text", "try: valstr = valuesProp[rowcounter][mapIdx] except: valstr = ' ' xcell8.text = valstr with", "\"\" # could be \" - D-R-A-F-T - \" IPTCApprovalDate = \"13 May", "= ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'all recommended mappings of the Video Metadata", "</td>') throw.append(colcode) moreatlink = valuesProp[0][15] if moreatlink != '': colcode = ET.fromstring( '<td", "\"\"\" xcell4 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr =", "= ET.SubElement(throw, 
'th', {'class':'hdrcol2'}) thcol2.text = 'Property Name' thcol3 = ET.SubElement(throw, 'th', {'class':'hdrcol3'})", "valstr = ' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', { 'class':", "= ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][13] if moreatlink != '':", "+ StdVersion + HeaderAppendix + '/ Mapping VMHub - PB Core 2.1', 'PB", "if os.path.exists('token.pickle'): with open('token.pickle', 'rb') as token: creds = pickle.load(token) # If there", "= ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][10] if moreatlink != '':", "valstr xcell7 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr =", "15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix", "\"text/html; charset=utf-8\"}) csslink1 = ET.SubElement(head, 'link', {'type': 'text/css', 'rel': 'stylesheet', 'href': 'iptcspecs1.css'}) body", "href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote = ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text = 'In this table the", "+ moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink =", "valstr = ' ' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', { 'class':", "VMHub - Canon Cameras', 'Canon VideoClip XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata", "the new credentials. Returns: Credentials, the obtained credential. \"\"\" creds = None #", "+ IPTCRevisionDate + '.' 
copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © '+ CopyrightYear + ',", "= ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][16] if moreatlink != '':", "reserved. Published under the Creative Commons Attribution 4.0 license <a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote", "'th', {'class':'hdrcol3'}) thcol3.text = 'Definition / Semantics' \"\"\" thcol4 = ET.SubElement(throw, 'th', {'class':'hdrcol4'})", "= ' ' thcol3 = ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text = ' ' \"\"\"", "= valuesProp[rowcounter][13] except: valstr = ' ' xcell13.text = valstr xcell14 = ET.SubElement(xrow,", "creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES) creds = flow.run_local_server(port=0) # Save", "ET.SubElement(head, 'title') title.text = 'Video Metadata Hub Mapping' metachset = ET.SubElement(head, 'meta', {'http-equiv':", "' ' xcell11.text = valstr xcell12 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr =", "'PB Core 2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation '", "automatically when the authorization flow completes for the first # time. if os.path.exists('token.pickle'):", "Metadata Hub.' seeotherdoc2 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc2.text = 'See the ' seeotherdoc1link2", "the Video Metadata Hub.' 
seeotherdoc2 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc2.text = 'See the", "print('No Property data found.') else: table = ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'}) thead =", "= valstr xcell7 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr", "+' / all Mappings' + HeaderAppendix seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text =", "file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink = valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation '", "output ok 2020-06-15 BQ: Updated and checked into GitHub \"\"\" from __future__ import", "'/ Mapping VMHub - Panasonic Cameras', 'Panasonic/SMPTE P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video", "= valuesProp[rowcounter][10] except: valstr = ' ' xcell10.text = valstr xcell11 = ET.SubElement(xrow,", "token: pickle.dump(creds, token) return creds def createSpecificMapping(valuesProp, headingtext1, headingtext2, findmoreaturl, mapIdx, filename): #", "for Photo and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) docnote1 = ET.SubElement(body,", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][11] if moreatlink !=", "= valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow,", "ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][13] except: valstr = ' ' xcell13.text", "valstr xcell12 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][12] except: valstr =", "rowcounter in range(2, 186): xrow = ET.SubElement(tbody, 'tr') teststr = valuesProp[rowcounter][0] if teststr", "seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', 
{'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'all recommended mappings of the Video", "created automatically when the authorization flow completes for the first # time. if", "thcol4.text = '' \"\"\" moreatlink = valuesProp[0][4] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"'", "ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text = 'Property Group' thcol2 = ET.SubElement(throw, 'th', {'class':'hdrcol2'}) thcol2.text", "'PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8.text = headingtext2 # second row", "/>EBUcore: based on the EBU Core Metadata Standard.<br />XMP: based on the ISO", "properties' docdate = ET.SubElement(body, 'p', {'class':'note1'}) docdate.text = 'Mapping recommended on ' +", "' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try:", "'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text =", "thcol17link.text = 'EIDR Data Fields 2.0' # second row with \"find more at", "Attribution 4.0 license <a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote = ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text =", "ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text", "'<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>')", "= 'Video Metadata Hub Documentation Generator' # Constant values StdVersion = \"1.3\" HeaderAppendix", "are transformed in HTML as saved as HTML page. 
For IPTC-internal use Creator:", "throw.append(colcode) moreatlink = valuesProp[0][13] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a", "...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][6] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink", "...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][12]", "= ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = ' '", "valuesProp[0][mapIdx] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink", "standards or tools.' propnote1 = ET.fromstring('<p class=\"note1\">Note on the column headers:<br />EBUcore: based", "provides a link to a table including only this mapping (better for printing)'", "= headingtext2 # second row with \"find more at ...\" links throw =", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][12] if moreatlink !=", "import Request from lxml import etree as ET SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE =", "and checked into GitHub \"\"\" from __future__ import print_function import pickle import os", "valuesProp[rowcounter][0] if teststr == 'Property Structures (PS)': xrow.set('style', 'background-color: #009999;') if teststr.find('PS', 0)", "= ' ' xcell8.text = valstr with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode())", "with \"find more at ...\" links throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw,", "EIDR Data Fields 2.0', 'EIDR Data Fields 2.0', moreatlink, 17,'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html') if __name__ ==", "field ids' thcol17 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol17link = ET.SubElement(thcol17,'a', 
{'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text =", "ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'specification of Video Metadata Hub properties' docdate =", "= ' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr", "2018-03 thcol13 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol13link = ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text = 'Sony", "= 'EIDR Data Fields 2.0' # second row with \"find more at ...\"", "return creds def createSpecificMapping(valuesProp, headingtext1, headingtext2, findmoreaturl, mapIdx, filename): # create the HTML", "time. if os.path.exists('token.pickle'): with open('token.pickle', 'rb') as token: creds = pickle.load(token) # If", "href=\"https://iptc.org\">IPTC</a> - all rights reserved. Published under the Creative Commons Attribution 4.0 license", "= ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol10link = ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text = 'NewsML-G2' thcol11 =", "{'class':'hdrcol7'}) thcol7.text = 'IPTC PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8link =", "+ '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][mapIdx] if", "'v4', credentials=credentials) spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName = 'MappingRec 1.3.1!A4:R' result1 = service.spreadsheets().values().get( spreadsheetId=spreadsheetId,", "not valuesProp: print('No Property data found.') else: table = ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'})", "'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text = valstr", "try: valstr = valuesProp[rowcounter][4] except: valstr = 
' ' xcell5.text = valstr xcell6", "valstr = ' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', { 'class':", "{'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text = 'Panasonic/SMPTE P2' thcol15 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol15link = ET.SubElement(thcol15,'a',", "throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][10] if", "= valuesProp[0][mapIdx] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' +", "= ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr =", "= 'Sony XDCAM & Planning' thcol14 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol14link = ET.SubElement(thcol14,'a',", "'. Document revision as of ' + IPTCRevisionDate + '.' copyrightnotice = ET.fromstring('<p", "Fields 2.0' # second row with \"find more at ...\" links throw =", "os import sys from googleapiclient.discovery import build from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests", "class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][14] if moreatlink != '': colcode = ET.fromstring(", "{'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][7] except: valstr = ' ' xcell8.text = valstr", "in 2018-03 createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion +", "teststr.find('PS', 0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', { 'class':", "ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text = 'specification of Video Metadata Hub properties' docdate =", "{'class':'smallnote1'}) docnote1.text = 'The header of mappings to other standards provides a link", "IPTC-internal use Creator: <NAME> History: 2016-11-25 
mws: project started, download and HTML output", "first # time. if os.path.exists('token.pickle'): with open('token.pickle', 'rb') as token: creds = pickle.load(token)", "Hub properties' docdate = ET.SubElement(body, 'p', {'class':'note1'}) docdate.text = 'Mapping recommended on '", "xcell13 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][13] except: valstr = '", "Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) if not valuesProp: print('No Property data", "ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8.text = headingtext2 # second row with \"find more at", "ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text", "moreatlink = valuesProp[0][7] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"'", "= valstr xcell12 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][12] except: valstr", "\"\"\" from __future__ import print_function import pickle import os import sys from googleapiclient.discovery", "Mapping' metachset = ET.SubElement(head, 'meta', {'http-equiv': \"Content-Type\", 'content': \"text/html; charset=utf-8\"}) csslink1 = ET.SubElement(head,", "\"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink = valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC", "\"1.3\" HeaderAppendix = \"\" # could be \" - D-R-A-F-T - \" IPTCApprovalDate", "'' \"\"\" moreatlink = valuesProp[0][4] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink", "from google.auth.transport.requests import Request from lxml import etree as ET SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly'", "InstalledAppFlow from google.auth.transport.requests import Request from lxml import etree as ET SCOPES =", "result1 = 
service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp = result1.get('values', []) # create the HTML", "0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr", "valstr = valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text = valstr xcell8 =", "other standards provides a link to a table including only this mapping (better", "'Sony XDCAM & Planning' thcol14 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol14link = ET.SubElement(thcol14,'a', {'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'})", "'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = headingtext1 seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text", "= ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr =", "7' thcol10 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol10link = ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text = 'NewsML-G2'", "' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Apple Quicktime', 'Apple", "from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import Request from lxml import etree as", "0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr", "more at ...\" links throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'td', {'class':'hdrcol1'})", "Mapping VMHub - Canon Cameras', 'Canon VideoClip XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video", "'Apple Quicktime' thcol9 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol9link = ET.SubElement(thcol9,'a', 
{'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text =", "ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text = 'Apple Quicktime' thcol9 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol9link =", "= 'In this table the columns with a blue header are defined by", "href=\"' + moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) else:", "' ' xcell9.text = valstr xcell10 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr =", "'The header of mappings to other standards provides a link to a table", "= ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text = ' ' thcol2 = ET.SubElement(throw, 'td', {'class':'hdrcol2'})", "throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody')", "by other standards or tools.' propnote1 = ET.fromstring('<p class=\"note1\">Note on the column headers:<br", "+ StdVersion + HeaderAppendix + '/ Mapping VMHub - Apple Quicktime', 'Apple Quicktime',", "valstr = ' ' xcell9.text = valstr xcell10 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try:", "xcell4 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = '", "= 'PB Core 2.1' thcol12 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol12link = ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'})", "new in 2018-03 thcol13 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol13link = ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text", "CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME = 'Video Metadata Hub Documentation Generator' # Constant", "Hub Documentation Generator' 
# Constant values StdVersion = \"1.3\" HeaderAppendix = \"\" #", "' \"\"\" thcol4 = ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text = '' \"\"\" moreatlink =", "specification of JSON properties for Photo and Video MetaData by IPTC (aka phovidmd).</p>')", "' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolNoniptc'}) try: valstr", "' xcell13.text = valstr xcell14 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][14]", "= ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][12] except: valstr = ' '", "moreatlink = valuesProp[0][4] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\"", "xcell5 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr = '", "Documentation Generator' # Constant values StdVersion = \"1.3\" HeaderAppendix = \"\" # could", "{'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr = ' ' xcell5.text = valstr", "thcol11link.text = 'PB Core 2.1' thcol12 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol12link = ET.SubElement(thcol12,'a',", "= valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow,", "/>XMP: based on the ISO XMP standard.<br />PVMD: a specification of JSON properties", "{'class':'hdrcol7'}) thcol7.text = 'PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8.text = headingtext2", "by ' + headingtext2 propnote1 = ET.fromstring('<p class=\"note1\">Note on the column headers:<br />EBUcore:", "by IPTC (aka phovidmd).</p>') body.append(propnote1) docnote1 = ET.SubElement(body, 'p', {'class':'smallnote1'}) docnote1.text = 'The", "'Property Structures (PS)': xrow.set('style', 'background-color: #009999;') if teststr.find('PS', 0) == 0: xrow.set('style', 'background-color:", "= ET.SubElement(thcol12,'a', 
{'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text = 'Schema.org' # new in 2018-03 thcol13 = ET.SubElement(throw,", "Fields 2.0', 'EIDR Data Fields 2.0', moreatlink, 17,'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html') if __name__ == '__main__': main()", "by the Video Metadata Hub, the column with the green header is defined", "class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][9] if moreatlink != '': colcode = ET.fromstring(", "valuesProp = result1.get('values', []) # create the HTML document xroot = ET.Element('html') head", "'wb') as token: pickle.dump(creds, token) return creds def createSpecificMapping(valuesProp, headingtext1, headingtext2, findmoreaturl, mapIdx,", "ET.fromstring('<p class=\"smallnote1\">Copyright © ' + CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> - all rights", "Cameras', 'Canon VideoClip XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation", "the Creative Commons Attribution 4.0 license <a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote = ET.SubElement(body, 'p',", "throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][11] if", "second row with \"find more at ...\" links throw = ET.SubElement(thead, 'tr') thcol1", "ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text = ' ' thcol2 =", "' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6]", "valstr = ' ' xcell13.text = valstr xcell14 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try:", "(aka phovidmd).</p>') body.append(propnote1) if not valuesProp: print('No Property data found.') 
else: table =", "2016-11-25 mws: project started, download and HTML output ok 2020-06-15 BQ: Updated and", "Metadata Hub, the column with the green header is defined by ' +", "'Video Metadata Hub Mapping' metachset = ET.SubElement(head, 'meta', {'http-equiv': \"Content-Type\", 'content': \"text/html; charset=utf-8\"})", "...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) tbody = ET.SubElement(table,", "10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix", "= ' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr", "CopyrightYear = \"2020\" def get_credentials(): \"\"\"Gets valid user credentials from storage. If nothing", "= ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'})", "ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text = 'Property Group' thcol2 =", "else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][15] if moreatlink", "ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text = 'Sony XDCAM & Planning' thcol14 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'})", "VMHub - Sony Cameras ', 'Sony XDCAM & Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC", "= ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr =", "= \"2020\" def get_credentials(): \"\"\"Gets valid user credentials from storage. 
If nothing has", "+ '/ Mapping VMHub - PB Core 2.1', 'PB Core 2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html')", "- \" IPTCApprovalDate = \"13 May 2020\" IPTCRevisionDate = \"13 May 2020\" CopyrightYear", "+ StdVersion + HeaderAppendix + '/ Mapping VMHub - Panasonic Cameras', 'Panasonic/SMPTE P2',", "to ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'all recommended mappings of", "class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in range(2, 186): xrow", "ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][17] if moreatlink != '': colcode", "'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][16] except: valstr = ' ' xcell16.text =", "{'class':'hdrcolNoniptc2'}) thcol15link = ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text = 'Canon VideoClip XML' thcol16 = ET.SubElement(throw,", "= ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][15] except: valstr = ' '", "Google sheet The retrieved data are transformed in HTML as saved as HTML", "ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc2.text = 'See the ' seeotherdoc1link2 = ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'})", "ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol13link = ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text = 'Sony XDCAM & Planning'", "try: valstr = valuesProp[rowcounter][14] except: valstr = ' ' xcell14.text = valstr xcell15", "' ' xcell12.text = valstr xcell13 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr =", "+ '.' 
copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © '+ CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a>", "= valstr xcell2 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr", "= ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text", "Property data found.') else: table = ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'}) thead = ET.SubElement(table,", "'th', {'class':'hdrcol7'}) thcol7.text = 'PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8.text =", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][10] if moreatlink !=", "seeotherdoc1.text = 'Return to ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'all", "Mapping VMHub - Sony Cameras ', 'Sony XDCAM & Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp,", "</td>') throw.append(colcode) moreatlink = valuesProp[0][17] if moreatlink != '': colcode = ET.fromstring( '<td", "if teststr == 'Property Structures (PS)': xrow.set('style', 'background-color: #009999;') if teststr.find('PS', 0) ==", "- Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - exiftool',", "= ' ' xcell8.text = valstr xcell9 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr", "download and HTML output ok 2020-06-15 BQ: Updated and checked into GitHub \"\"\"", "thcol12link = ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text = 'Schema.org' # new in 2018-03 thcol13 =", "+ HeaderAppendix + '/ Mapping VMHub - Canon Cameras', 'Canon VideoClip XML', moreatlink,", "xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', 
{'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except:", "and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) docnote1 = ET.SubElement(body, 'p', {'class':'smallnote1'})", "ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][16] except: valstr = ' ' xcell16.text", "(aka phovidmd).</p>') body.append(propnote1) docnote1 = ET.SubElement(body, 'p', {'class':'smallnote1'}) docnote1.text = 'The header of", "= result1.get('values', []) # create the HTML document xroot = ET.Element('html') head =", "= valuesProp[0][14] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' +", "Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Sony Cameras", "valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr", "only this mapping (better for printing)' if not valuesProp: print('No Property data found.')", "= ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text = 'Apple Quicktime' thcol9 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol9link", "= valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr =", "xrow = ET.SubElement(tbody, 'tr') teststr = valuesProp[rowcounter][0] if teststr == 'Property Structures (PS)':", "headingtext2, findmoreaturl, mapIdx, filename): # create the HTML document xroot = ET.Element('html') head", "more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>')", "' xcell4.text = valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try:", "throw.append(colcode) moreatlink = valuesProp[0][15] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a", "= 'MPEG 7' thcol10 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol10link = 
ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text", "StdVersion + HeaderAppendix + '/ Mapping VMHub - MPEG 7', 'MPEG 7', moreatlink,", "{'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text = valstr", "' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr =", "</td>') throw.append(colcode) moreatlink = valuesProp[0][13] if moreatlink != '': colcode = ET.fromstring( '<td", "Request from lxml import etree as ET SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)),", "if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink +", "except: valstr = ' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'})", "'Schema.org' # new in 2018-03 thcol13 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol13link = ET.SubElement(thcol13,'a',", "ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][13] if moreatlink != '': colcode", "pickle import os import sys from googleapiclient.discovery import build from google_auth_oauthlib.flow import InstalledAppFlow", "open('token.pickle', 'wb') as token: pickle.dump(creds, token) return creds def createSpecificMapping(valuesProp, headingtext1, headingtext2, findmoreaturl,", "'td', {'class':'hdrcol4'}) thcol4.text = '' \"\"\" moreatlink = valuesProp[0][4] colcode = ET.fromstring( '<td", "'th', {'class':'hdrcol1'}) thcol1.text = 'Property Group' thcol2 = ET.SubElement(throw, 'th', {'class':'hdrcol2'}) thcol2.text =", "ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it at", "to a table including only this mapping (better for printing)' if not valuesProp:", "+ HeaderAppendix seeotherdoc1 = ET.SubElement(body, 'p', 
{'class':'note1'}) seeotherdoc1.text = 'See the ' seeotherdoc1link1", "= ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol9link = ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text = 'MPEG 7' thcol10", "\"\"\" thcol4 = ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text = '' \"\"\" moreatlink = valuesProp[0][4]", "Mapping VMHub - Schema.org', 'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new in 2018-03 createSpecificMapping(valuesProp, 'IPTC", "if not valuesProp: print('No Property data found.') else: table = ET.SubElement(body, 'table', {'class':'spec1", "class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in range(2, 186): xrow", "{'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text = 'exiftool field ids' thcol17 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol17link =", "' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr", "'tr') thcol1 = ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text = 'Property Group' thcol2 = ET.SubElement(throw,", "valstr xcell2 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr =", "vmhmapping'}) thead = ET.SubElement(table, 'thead') throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'th',", "else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for", "else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][9] if moreatlink", "valstr xcell3 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr =", "valuesProp[0][4] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink 
+ '\" target=\"_blank\">Find more", "{ 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text", "' ' xcell13.text = valstr xcell14 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr =", "EBU Core Metadata Standard.<br />XMP: based on the ISO XMP standard.<br />PVMD: a", "= ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol16link = ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text = 'exiftool field ids'", "throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][14] if", "Planning' thcol14 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol14link = ET.SubElement(thcol14,'a', {'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text = 'Panasonic/SMPTE", "'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/", "could be \" - D-R-A-F-T - \" IPTCApprovalDate = \"13 May 2020\" IPTCRevisionDate", "+ '/ Mapping VMHub - Schema.org', 'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new in 2018-03", "'+ CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> - all rights reserved. 
Published under the", "+ HeaderAppendix + '/ Mapping VMHub - MPEG 7', 'MPEG 7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html')", "= valuesProp[0][4] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find", "valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4]", "moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink + '\"", "valuesProp[0][15] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink", "{'class':'hdrcol4'}) thcol4.text = 'Basic Type/Cardinality' \"\"\" thcol5 = ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text =", "body.append(propnote1) docnote1 = ET.SubElement(body, 'p', {'class':'smallnote1'}) docnote1.text = 'The header of mappings to", "{'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text = 'Apple Quicktime' thcol9 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol9link = ET.SubElement(thcol9,'a',", "ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text = 'Canon VideoClip XML' thcol16 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol16link", "'align':'left', 'border':'0'}) pageheader.text = 'IPTC Video Metadata Hub - Recommendation '+ StdVersion +'", "been stored, or if the stored credentials are invalid, the OAuth2 flow is", "+ '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring(", "{'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = 'IPTC Video Metadata Hub - Recommendation '+ StdVersion", "or not creds.valid: if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow =", "{'class':'bgdcolNoniptc2'}) try: valstr = 
valuesProp[rowcounter][11] except: valstr = ' ' xcell11.text = valstr", "- Recommendation '+ StdVersion +' / all Mappings' + HeaderAppendix seeotherdoc1 = ET.SubElement(body,", "of Video Metadata Hub properties' docdate = ET.SubElement(body, 'p', {'class':'note1'}) docdate.text = 'Mapping", "14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix", "at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][mapIdx] if moreatlink != '': colcode = ET.fromstring(", "Video Metadata Hub, the columns with the green or amber headers are defined", "= ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text = 'In this table the columns with a", "None # The file token.pickle stores the user's access and refresh tokens, and", "if teststr.find('PS', 0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', {", "else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][14] if moreatlink", "SCOPES) creds = flow.run_local_server(port=0) # Save the credentials for the next run with", "retrieving IPTC Video Metadata Hub mapping data from a Google sheet The retrieved", "thcol14link.text = 'Panasonic/SMPTE P2' thcol15 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol15link = ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'})", "= ET.Element('html') head = ET.SubElement(xroot, 'head') title = ET.SubElement(head, 'title') title.text = 'Video", "thcol8.text = headingtext2 # second row with \"find more at ...\" links throw", "class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][12] if moreatlink != '': colcode = ET.fromstring(", "Metadata Hub properties' docdate = ET.SubElement(body, 'p', {'class':'note1'}) docdate.text = 'Mapping recommended on", "7', 'MPEG 
7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation '", "import build from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import Request from lxml import", "moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion +", "valstr = valuesProp[rowcounter][11] except: valstr = ' ' xcell11.text = valstr xcell12 =", "= ' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr", "VideoClip XML' thcol16 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol16link = ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text =", "' + StdVersion + HeaderAppendix + '/ Mapping VMHub - exiftool', 'exiftool field", "...\" links throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text =", "valuesProp[rowcounter][16] except: valstr = ' ' xcell16.text = valstr xcell17 = ET.SubElement(xrow, 'td',", "ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = '", "phovidmd).</p>') body.append(propnote1) docnote1 = ET.SubElement(body, 'p', {'class':'smallnote1'}) docnote1.text = 'The header of mappings", "+ '/ Mapping VMHub - EIDR Data Fields 2.0', 'EIDR Data Fields 2.0',", "class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][15] if moreatlink != '': colcode = ET.fromstring(", "table including only this mapping (better for printing)' if not valuesProp: print('No Property", "' + IPTCRevisionDate + '.' copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © ' + CopyrightYear", "based on the ISO XMP standard.<br />PVMD: a specification of JSON properties for", "Video Metadata Hub.' 
seeotherdoc2 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc2.text = 'See the '", "= valstr xcell8 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][7] except: valstr", "try: valstr = valuesProp[rowcounter][11] except: valstr = ' ' xcell11.text = valstr xcell12", "valstr xcell7 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except:", "create the HTML document xroot = ET.Element('html') head = ET.SubElement(xroot, 'head') title =", "'table', {'class':'spec1 vmhmapping'}) thead = ET.SubElement(table, 'thead') throw = ET.SubElement(thead, 'tr') thcol1 =", "= ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][17] if moreatlink != '':", "= 'Schema.org' # new in 2018-03 thcol13 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol13link =", "Mapping VMHub - Apple Quicktime', 'Apple Quicktime', moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video", "revision as of ' + IPTCRevisionDate + '.' copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright ©", "xcell15.text = valstr xcell16 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][16] except:", "throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][12] if", "'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = 'IPTC Video", "completes for the first # time. 
if os.path.exists('token.pickle'): with open('token.pickle', 'rb') as token:", "valstr = ' ' xcell15.text = valstr xcell16 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try:", "as ET SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME = 'Video Metadata", "= 'Mapping recommended on ' + IPTCApprovalDate + '. Document revision as of", "ET.SubElement(head, 'meta', {'http-equiv': \"Content-Type\", 'content': \"text/html; charset=utf-8\"}) csslink1 = ET.SubElement(head, 'link', {'type': 'text/css',", "valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td',", "moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion +", "Metadata Hub, the columns with the green or amber headers are defined by", "and is # created automatically when the authorization flow completes for the first", "class=\"smallnote1\">Copyright © ' + CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> - all rights reserved.", "= valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text = valstr \"\"\" xcell5 =", "'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion", "'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr = ' ' xcell5.text = valstr", "StdVersion + HeaderAppendix + '/ Mapping VMHub - NewsML-G2', 'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp,", "Cameras ', 'Sony XDCAM & Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub", "target=\"_blank\">Find more about it at 
...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][5] colcode = ET.fromstring(", "it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][7] if moreatlink != '': colcode =", "'Apple Quicktime', moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation '", "= \"1.3\" HeaderAppendix = \"\" # could be \" - D-R-A-F-T - \"", "StdVersion + HeaderAppendix + '/ Mapping VMHub - Sony Cameras ', 'Sony XDCAM", "xcell8.text = valstr xcell9 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][9] except:", "ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = headingtext1", "HTML document xroot = ET.Element('html') head = ET.SubElement(xroot, 'head') title = ET.SubElement(head, 'title')", "MPEG 7', 'MPEG 7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation", "Apple Quicktime', 'Apple Quicktime', moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub -", "' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1]", "import os import sys from googleapiclient.discovery import build from google_auth_oauthlib.flow import InstalledAppFlow from", "Updated and checked into GitHub \"\"\" from __future__ import print_function import pickle import", "ET.SubElement(head, 'link', {'type': 'text/css', 'rel': 'stylesheet', 'href': 'iptcspecs1.css'}) body = ET.SubElement(xroot, 'body') pageheader", "'/ Mapping VMHub - NewsML-G2', 'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') 
createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub", "\"\"\" xcell5 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except:", "= ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][16] except: valstr = ' '", "' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5]", "IPTCRevisionDate + '.' copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © ' + CopyrightYear + ',", "Metadata Hub mapping data from a Google sheet The retrieved data are transformed", "valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td',", "for rowcounter in range(2, 186): xrow = ET.SubElement(tbody, 'tr') teststr = valuesProp[rowcounter][0] if", "SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME = 'Video Metadata Hub Documentation", "Published under the Creative Commons Attribution 4.0 license <a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote =", "valstr = valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text = valstr \"\"\" xcell4", "= ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text = 'PB Core 2.1' thcol12 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'})", "except: valstr = ' ' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', {", "' ' xcell15.text = valstr xcell16 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr =", "= ET.fromstring('<p class=\"smallnote1\">Copyright © '+ CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> - all rights", "'<td class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it at 
...</a></td>')", "findmoreaturl, mapIdx, filename): # create the HTML document xroot = ET.Element('html') head =", "= ' ' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'})", "thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8link = ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text = 'Apple Quicktime'", "'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = ' '", "'a', {'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'all recommended mappings of the Video Metadata Hub.' seeotherdoc2", "valuesProp[rowcounter][11] except: valstr = ' ' xcell11.text = valstr xcell12 = ET.SubElement(xrow, 'td',", "teststr == 'Property Structures (PS)': xrow.set('style', 'background-color: #009999;') if teststr.find('PS', 0) == 0:", "<a href=\"https://iptc.org\">IPTC</a> - all rights reserved. Published under the Creative Commons Attribution 4.0", "= ' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolNoniptc'})", "Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - EIDR Data", "xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr =", "HeaderAppendix seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'See the ' seeotherdoc1link1 =", "except: valstr = ' ' xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td',", "If nothing has been stored, or if the stored credentials are invalid, the", "defined by the Video Metadata Hub, the column with the green header is", "Hub - Recommendation '+ StdVersion +' / all Mappings' + HeaderAppendix seeotherdoc1 =", "class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][17] if moreatlink != '': colcode = ET.fromstring(", "on the EBU Core Metadata Standard.<br />XMP: based on the ISO XMP standard.<br", "= valuesProp[0][9] if 
moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' +", "= \"13 May 2020\" IPTCRevisionDate = \"13 May 2020\" CopyrightYear = \"2020\" def", "- MPEG 7', 'MPEG 7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub -", "'td', { 'class': 'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][mapIdx] except: valstr = ' '", "= 'MappingRec 1.3.1!A4:R' result1 = service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp = result1.get('values', []) #", "valstr xcell16 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][16] except: valstr =", "= headingtext1 seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'Return to ' seeotherdoc1link1", "thcol3.text = 'Definition / Semantics' \"\"\" thcol4 = ET.SubElement(throw, 'th', {'class':'hdrcol4'}) thcol4.text =", "'th', {'class':'hdrcolNoniptc'}) thcol12link = ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text = 'Schema.org' # new in 2018-03", "user credentials from storage. If nothing has been stored, or if the stored", "' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr", "xcell14.text = valstr xcell15 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][15] except:", "Generator' # Constant values StdVersion = \"1.3\" HeaderAppendix = \"\" # could be", "as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink = valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub -", "xcell6 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = '", "\"\"\"Gets valid user credentials from storage. 
If nothing has been stored, or if", "'EIDR Data Fields 2.0' # second row with \"find more at ...\" links", "= ' ' xcell4.text = valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', { 'class':", "pageheader.text = headingtext1 seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'Return to '", "' ' xcell14.text = valstr xcell15 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr =", "'': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more", "ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text", "= ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][17] except: valstr = ' '", "stored, or if the stored credentials are invalid, the OAuth2 flow is completed", "ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8.text", "= ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'Return to ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a',", "body = ET.SubElement(xroot, 'body') pageheader = ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc = ET.SubElement(pageheader, 'a',", "= ' ' xcell10.text = valstr xcell11 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr", "retrieved data are transformed in HTML as saved as HTML page. For IPTC-internal", "= ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol11link = ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text = 'PB Core 2.1'", "completed to obtain the new credentials. Returns: Credentials, the obtained credential. 
\"\"\" creds", "Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub -", "# If there are no (valid) credentials available, let the user log in.", "' ' xcell4.text = valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'})", "'/ Mapping VMHub - Schema.org', 'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new in 2018-03 createSpecificMapping(valuesProp,", "HTML as saved as HTML page. For IPTC-internal use Creator: <NAME> History: 2016-11-25", "lxml import etree as ET SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME", "' thcol3 = ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text = ' ' \"\"\" thcol4 =", "= ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = ' '", "except: valstr = ' ' xcell16.text = valstr xcell17 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'})", "valstr = ' ' xcell8.text = valstr with open(filename, 'w') as file: file.write(ET.tostring(xroot,", "= valuesProp[rowcounter][mapIdx] except: valstr = ' ' xcell8.text = valstr with open(filename, 'w')", "more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>')", "ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text = ' ' thcol3 = ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text", "'/ Mapping VMHub - Sony Cameras ', 'Sony XDCAM & Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html')", "'+ StdVersion +' / all Mappings' + HeaderAppendix seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'})", "the next run with open('token.pickle', 'wb') as token: pickle.dump(creds, token) return creds def", "valuesProp[0][9] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + 
moreatlink", "+ ', <a href=\"https://iptc.org\">IPTC</a> - all rights reserved. Published under the Creative Commons", "token: creds = pickle.load(token) # If there are no (valid) credentials available, let", "try: valstr = valuesProp[rowcounter][17] except: valstr = ' ' xcell17.text = valstr filename", "ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc = ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg',", "'all recommended mappings of the Video Metadata Hub.' seeotherdoc2 = ET.SubElement(body, 'p', {'class':'note1'})", "this mapping (better for printing)' if not valuesProp: print('No Property data found.') else:", "valstr xcell8 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][mapIdx] except:", "project started, download and HTML output ok 2020-06-15 BQ: Updated and checked into", "Mapping VMHub - PB Core 2.1', 'PB Core 2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC", "StdVersion + HeaderAppendix + '/ Mapping VMHub - exiftool', 'exiftool field id', moreatlink,", "into GitHub \"\"\" from __future__ import print_function import pickle import os import sys", "== 'Property Structures (PS)': xrow.set('style', 'background-color: #009999;') if teststr.find('PS', 0) == 0: xrow.set('style',", "moreatlink = valuesProp[0][16] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"'", "else: flow = InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES) creds = flow.run_local_server(port=0) # Save the credentials", "license <a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote = ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text = 'In this", "...</a></td>') 
throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][11]", "Structures (PS)': xrow.set('style', 'background-color: #009999;') if teststr.find('PS', 0) == 0: xrow.set('style', 'background-color: #00cccc;')", "+ '. Document revision as of ' + IPTCRevisionDate + '.' copyrightnotice =", "= valuesProp[0][7] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' +", "__future__ import print_function import pickle import os import sys from googleapiclient.discovery import build", "' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'specification of Video Metadata Hub", "mapping data from a Google sheet The retrieved data are transformed in HTML", "access and refresh tokens, and is # created automatically when the authorization flow", "= ' ' thcol2 = ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text = ' ' thcol3", "' + StdVersion + HeaderAppendix + '/ Mapping VMHub - NewsML-G2', 'NewsML-G2', moreatlink,", "creds def createSpecificMapping(valuesProp, headingtext1, headingtext2, findmoreaturl, mapIdx, filename): # create the HTML document", "= ET.SubElement(throw, 'th', {'class':'hdrcol4'}) thcol4.text = 'Basic Type/Cardinality' \"\"\" thcol5 = ET.SubElement(throw, 'th',", "ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol9link = ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text = 'MPEG 7' thcol10 =", "{ 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text", "'client_secret.json') APPLICATION_NAME = 'Video Metadata Hub Documentation Generator' # Constant values StdVersion =", "ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'Return to ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', 
{'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'})", "the green or amber headers are defined by other standards or tools.' propnote1", "valstr = valuesProp[rowcounter][mapIdx] except: valstr = ' ' xcell8.text = valstr with open(filename,", "'background-color: #009999;') if teststr.find('PS', 0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow,", "the user's access and refresh tokens, and is # created automatically when the", "{'class':'hdrcolNoniptc'}) thcol8link = ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text = 'Apple Quicktime' thcol9 = ET.SubElement(throw, 'th',", "next run with open('token.pickle', 'wb') as token: pickle.dump(creds, token) return creds def createSpecificMapping(valuesProp,", "= valstr xcell6 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5]", "except: valstr = ' ' xcell10.text = valstr xcell11 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'})", "ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it at", "<a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote = ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text = 'In this table", "' xcell17.text = valstr filename = \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with open(filename, 'w') as file: file.write(ET.tostring(xroot,", "HeaderAppendix + '/ Mapping VMHub - PB Core 2.1', 'PB Core 2.1', moreatlink,", "at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) tbody =", "= ' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'})", "Semantics' \"\"\" thcol4 = ET.SubElement(throw, 'th', {'class':'hdrcol4'}) thcol4.text = 'Basic Type/Cardinality' 
\"\"\" thcol5", "= ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][15] if moreatlink != '':", "'p', {'class':'note1'}) seeotherdoc1.text = 'Return to ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text", "'<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][13] if moreatlink != '': colcode =", "try: valstr = valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text = valstr xcell7", "moreatlink = valuesProp[0][17] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"'", "valuesProp[rowcounter][10] except: valstr = ' ' xcell10.text = valstr xcell11 = ET.SubElement(xrow, 'td',", "columns with a blue header are defined by the Video Metadata Hub, the", "seeotherdoc2.text = 'See the ' seeotherdoc1link2 = ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text = 'specification", "valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about", "data found.') else: table = ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'}) thead = ET.SubElement(table, 'thead')", "# The file token.pickle stores the user's access and refresh tokens, and is", "xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except:", "= ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text = 'Property Group' thcol2", "'<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in range(2, 186):", "2018-03 createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' 
+ StdVersion + HeaderAppendix", "ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'}) thead = ET.SubElement(table, 'thead') throw = ET.SubElement(thead, 'tr') thcol1", "'/ Mapping VMHub - PB Core 2.1', 'PB Core 2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp,", "= ET.fromstring('<p class=\"note1\">Note on the column headers:<br />EBUcore: based on the EBU Core", "' ' xcell17.text = valstr filename = \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with open(filename, 'w') as file:", "'MPEG 7' thcol10 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol10link = ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text =", "build from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import Request from lxml import etree", "xcell10.text = valstr xcell11 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][11] except:", "4.0 license <a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote = ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text = 'In", "11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix", "valstr = valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text = valstr xcell2 =", "thcol8link = ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text = 'Apple Quicktime' thcol9 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'})", "...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][7] if moreatlink != '': colcode = ET.fromstring( '<td", "at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td 
class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink =", "on the ISO XMP standard.<br />PVMD: a specification of JSON properties for Photo", "# could be \" - D-R-A-F-T - \" IPTCApprovalDate = \"13 May 2020\"", "' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr =", "xcell1 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = '", "invalid, the OAuth2 flow is completed to obtain the new credentials. Returns: Credentials,", "{'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][12] except: valstr = ' ' xcell12.text = valstr", "'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][12] except: valstr = ' ' xcell12.text =", "thcol7.text = 'PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8.text = headingtext2 #", "= valstr xcell17 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][17] except: valstr", "the Video Metadata Hub, the column with the green header is defined by", "Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Schema.org', 'Schema.org',", "moreatlink = valuesProp[0][13] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"'", "'<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>')", "for printing)' if not valuesProp: print('No Property data found.') else: table = ET.SubElement(body,", "mappings to other standards provides a link to a table including only this", "= ' ' xcell4.text = valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try:", "= ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text = 'Sony XDCAM & Planning' thcol14 = ET.SubElement(throw, 'th',", "Core Metadata Standard.<br />XMP: based on the ISO XMP standard.<br />PVMD: a 
specification", "ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol11link = ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text = 'PB Core 2.1' thcol12", "Mapping VMHub - exiftool', 'exiftool field id', moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata", "thcol9link = ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text = 'MPEG 7' thcol10 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'})", "'<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][12] if moreatlink != '': colcode =", "= 'PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8.text = headingtext2 # second", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about", "valstr = valuesProp[rowcounter][4] except: valstr = ' ' xcell5.text = valstr xcell6 =", "are defined by the Video Metadata Hub, the column with the green header", "column with the green header is defined by ' + headingtext2 propnote1 =", "= ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text =", "= ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr = ' '", "xcell7 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr", "and HTML output ok 2020-06-15 BQ: Updated and checked into GitHub \"\"\" from", "valuesProp[rowcounter][15] except: valstr = ' ' xcell15.text = valstr xcell16 = ET.SubElement(xrow, 'td',", "Canon Cameras', 'Canon VideoClip XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') 
createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub -", "columns with the green or amber headers are defined by other standards or", "credentials for the next run with open('token.pickle', 'wb') as token: pickle.dump(creds, token) return", "filename = \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink = valuesProp[0][7]", "'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text =", "as HTML page. For IPTC-internal use Creator: <NAME> History: 2016-11-25 mws: project started,", "the authorization flow completes for the first # time. if os.path.exists('token.pickle'): with open('token.pickle',", "= ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'specification of Video Metadata Hub properties' docdate", "ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = 'IPTC Video Metadata Hub - Recommendation", "'<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][10] if moreatlink != '': colcode =", "thcol12link.text = 'Schema.org' # new in 2018-03 thcol13 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol13link", "= 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'IPTC PVMD JSON' thcol8", "import sys from googleapiclient.discovery import build from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import", "= 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'PVMD JSON' thcol8 =", "seeotherdoc1link1.text = 'specification of Video Metadata Hub properties' docdate = ET.SubElement(body, 'p', {'class':'note1'})", "valuesProp[rowcounter][12] except: valstr = ' ' xcell12.text = valstr xcell13 = ET.SubElement(xrow, 'td',", 
"ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][9] if moreatlink != '': colcode", "0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0]", "'border':'0'}) pageheader.text = headingtext1 seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'Return to", "xcell8.text = valstr with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) def main(): credentials", "a blue header are defined by the Video Metadata Hub, the columns with", "+ StdVersion + HeaderAppendix + '/ Mapping VMHub - Sony Cameras ', 'Sony", "import print_function import pickle import os import sys from googleapiclient.discovery import build from", "docdate = ET.SubElement(body, 'p', {'class':'note1'}) docdate.text = 'Mapping recommended on ' + IPTCApprovalDate", "moreatlink = valuesProp[0][12] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"'", "+ '/ Mapping VMHub - MPEG 7', 'MPEG 7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC", "if teststr.find('PS', 0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'})", "ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text", "throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][13] if", "= ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text = ' ' thcol2", "'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) def main(): credentials = get_credentials() service = build('sheets',", "' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try:", 
"tbody = ET.SubElement(table, 'tbody') for rowcounter in range(2, 186): xrow = ET.SubElement(tbody, 'tr')", "open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink = valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC Video Metadata", "as saved as HTML page. For IPTC-internal use Creator: <NAME> History: 2016-11-25 mws:", "VMHub - Apple Quicktime', 'Apple Quicktime', moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata", "' + IPTCApprovalDate + '. Document revision as of ' + IPTCRevisionDate +", "12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new in 2018-03 createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation '", "' xcell8.text = valstr with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) def main():", "'th', {'class':'hdrcolNoniptc'}) thcol16link = ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text = 'exiftool field ids' thcol17 =", "7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion", "= ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][7] except: valstr = ' '", "except: valstr = ' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'})", "from storage. 
If nothing has been stored, or if the stored credentials are", "xcell3 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = '", "Type/Cardinality' \"\"\" thcol5 = ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text = 'EBUcore' thcol6 = ET.SubElement(throw,", "moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][6]", "throw.append(colcode) moreatlink = valuesProp[0][14] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a", "= ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text = 'Definition / Semantics' \"\"\" thcol4 = ET.SubElement(throw,", "= 'Property Name' thcol3 = ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text = 'Definition / Semantics'", "' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try:", "9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix", "= 'Panasonic/SMPTE P2' thcol15 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol15link = ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text", "throw.append(colcode) moreatlink = valuesProp[0][6] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink +", "- Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Apple", "on the column headers:<br />EBUcore: based on the EBU Core Metadata Standard.<br />XMP:", "© '+ CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> - all rights reserved. 
Published under", "= ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][11] if moreatlink != '':", "link to a table including only this mapping (better for printing)' if not", "'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text =", "Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - PB Core", "= valuesProp[rowcounter][4] except: valstr = ' ' xcell5.text = valstr xcell6 = ET.SubElement(xrow,", "= valuesProp[0][13] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' +", "'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = headingtext1 seeotherdoc1", "XDCAM & Planning' thcol14 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol14link = ET.SubElement(thcol14,'a', {'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text", "= \"\" # could be \" - D-R-A-F-T - \" IPTCApprovalDate = \"13", "'th', {'class':'hdrcol2'}) thcol2.text = 'Property Name' thcol3 = ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text =", "ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol10link = ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text = 'NewsML-G2' thcol11 = ET.SubElement(throw,", "= 'See the ' seeotherdoc1link2 = ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text = 'specification of", "exiftool', 'exiftool field id', moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][16] if 
moreatlink !=", "= \"13 May 2020\" CopyrightYear = \"2020\" def get_credentials(): \"\"\"Gets valid user credentials", "and refresh tokens, and is # created automatically when the authorization flow completes", "ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'See the ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'})", "propnote1 = ET.fromstring('<p class=\"note1\">Note on the column headers:<br />EBUcore: based on the EBU", "#!/usr/bin/env python3 \"\"\" Python script for retrieving IPTC Video Metadata Hub mapping data", "- all rights reserved. Published under the Creative Commons Attribution 4.0 license <a", "ids' thcol17 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol17link = ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text = 'EIDR", "moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][mapIdx]", "green header is defined by ' + headingtext2 propnote1 = ET.fromstring('<p class=\"note1\">Note on", "except: valstr = ' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', {", "it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][mapIdx] if moreatlink != '': colcode =", "'content': \"text/html; charset=utf-8\"}) csslink1 = ET.SubElement(head, 'link', {'type': 'text/css', 'rel': 'stylesheet', 'href': 'iptcspecs1.css'})", "seeotherdoc2 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc2.text = 'See the ' seeotherdoc1link2 = ET.SubElement(seeotherdoc2,", "CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> - all rights reserved. 
Published under the Creative", "'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text = valstr", "valstr xcell6 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except:", "= ET.SubElement(body, 'p', {'class':'smallnote1'}) docnote1.text = 'The header of mappings to other standards", "MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) if not valuesProp: print('No Property data found.')", "'<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][16] if moreatlink != '': colcode =", "'MPEG 7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' +", "creds.valid: if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE,", "= ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8.text = headingtext2 # second row with \"find more", "with open('token.pickle', 'rb') as token: creds = pickle.load(token) # If there are no", "xcell17.text = valstr filename = \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode())", "Mapping VMHub - NewsML-G2', 'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub -", "mws: project started, download and HTML output ok 2020-06-15 BQ: Updated and checked", "= 'all recommended mappings of the Video Metadata Hub.' 
seeotherdoc2 = ET.SubElement(body, 'p',", "valuesProp[0][5] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more", "ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = headingtext1 seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'})", "JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8.text = headingtext2 # second row with", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter", "'td', {'class':'hdrcol1'}) thcol1.text = ' ' thcol2 = ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text =", "= valuesProp[0][10] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' +", "class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][10] if moreatlink != '': colcode = ET.fromstring(", "{'class':'hdrcol2'}) thcol2.text = ' ' thcol3 = ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text = '", "creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES) creds = flow.run_local_server(port=0)", "- Canon Cameras', 'Canon VideoClip XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub", "{'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text = 'specification of Video Metadata Hub properties' docdate = ET.SubElement(body, 'p',", "If there are no (valid) credentials available, let the user log in. 
if", "' + headingtext2 propnote1 = ET.fromstring('<p class=\"note1\">Note on the column headers:<br />EBUcore: based", "= ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in", "= ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][10] except: valstr = ' '", "' seeotherdoc1link2 = ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text = 'specification of Video Metadata Hub", "= ' ' xcell16.text = valstr xcell17 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr", "range=rangeName).execute() valuesProp = result1.get('values', []) # create the HTML document xroot = ET.Element('html')", "createSpecificMapping(valuesProp, headingtext1, headingtext2, findmoreaturl, mapIdx, filename): # create the HTML document xroot =", "header is defined by ' + headingtext2 propnote1 = ET.fromstring('<p class=\"note1\">Note on the", "{'class':'hdrcol5'}) thcol5.text = 'EBUcore' thcol6 = ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7", "= valuesProp[rowcounter][7] except: valstr = ' ' xcell8.text = valstr xcell9 = ET.SubElement(xrow,", "valstr = ' ' xcell4.text = valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', {", "valuesProp[0][11] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink", "as token: creds = pickle.load(token) # If there are no (valid) credentials available,", "= ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc2.text = 'See the ' seeotherdoc1link2 = ET.SubElement(seeotherdoc2, 'a',", "'th', {'class':'hdrcolNoniptc'}) thcol14link = ET.SubElement(thcol14,'a', {'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text = 'Panasonic/SMPTE P2' thcol15 = ET.SubElement(throw,", "' + StdVersion + HeaderAppendix + '/ Mapping VMHub - EIDR Data Fields", "'Return to ' 
seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'all recommended mappings", "valstr = valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text = valstr \"\"\" xcell5", "+ StdVersion + HeaderAppendix + '/ Mapping VMHub - Schema.org', 'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html')", "to obtain the new credentials. Returns: Credentials, the obtained credential. \"\"\" creds =", "' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr =", "iptcanc = ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text", "xcell16 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][16] except: valstr = '", "of JSON properties for Photo and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1)", "' ' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr =", "+ HeaderAppendix + '/ Mapping VMHub - PB Core 2.1', 'PB Core 2.1',", "= 'Basic Type/Cardinality' \"\"\" thcol5 = ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text = 'EBUcore' thcol6", "= ' ' xcell13.text = valstr xcell14 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr", "credentials=credentials) spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName = 'MappingRec 1.3.1!A4:R' result1 = service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute()", "file token.pickle stores the user's access and refresh tokens, and is # created", "valuesProp[0][13] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink", "- EIDR Data Fields 2.0', 'EIDR Data Fields 2.0', moreatlink, 
17,'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html') if __name__", "service = build('sheets', 'v4', credentials=credentials) spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName = 'MappingRec 1.3.1!A4:R' result1", "with the green or amber headers are defined by other standards or tools.'", "{'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][13] except: valstr = ' ' xcell13.text = valstr", "{'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = 'IPTC Video Metadata", "HeaderAppendix + '/ Mapping VMHub - Schema.org', 'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new in", "[]) # create the HTML document xroot = ET.Element('html') head = ET.SubElement(xroot, 'head')", "docnote1.text = 'The header of mappings to other standards provides a link to", "= ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it", "= 'Property Group' thcol2 = ET.SubElement(throw, 'th', {'class':'hdrcol2'}) thcol2.text = 'Property Name' thcol3", "at ...\" links throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text", "moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][5]", "= ' ' xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try:", "= ET.SubElement(body, 'p', {'class':'note1'}) docdate.text = 'Mapping recommended on ' + IPTCApprovalDate +", "headers are defined by other standards or tools.' 
propnote1 = ET.fromstring('<p class=\"note1\">Note on", "'/ Mapping VMHub - Apple Quicktime', 'Apple Quicktime', moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC", "about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][6] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a", "not creds.valid: if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file(", "Mapping VMHub - Panasonic Cameras', 'Panasonic/SMPTE P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata", "rangeName = 'MappingRec 1.3.1!A4:R' result1 = service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp = result1.get('values', [])", "VideoClip XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' +", "ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr = ' ' xcell5.text", "by the Video Metadata Hub, the columns with the green or amber headers", "valuesProp[0][16] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink", "= build('sheets', 'v4', credentials=credentials) spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName = 'MappingRec 1.3.1!A4:R' result1 =", "a specification of JSON properties for Photo and Video MetaData by IPTC (aka", "try: valstr = valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text = valstr xcell2", "xcell17 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][17] except: valstr = '", "copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © '+ CopyrightYear + ', <a 
href=\"https://iptc.org\">IPTC</a> - all", "a table including only this mapping (better for printing)' if not valuesProp: print('No", "credentials are invalid, the OAuth2 flow is completed to obtain the new credentials.", "'tr') thcol1 = ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text = ' ' thcol2 = ET.SubElement(throw,", "class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][11] if moreatlink != '': colcode = ET.fromstring(", "...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][14]", "ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text = 'Definition / Semantics' \"\"\" thcol4 = ET.SubElement(throw, 'th',", "google.auth.transport.requests import Request from lxml import etree as ET SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE", "thcol5 = ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text = 'EBUcore' thcol6 = ET.SubElement(throw, 'th', {'class':'hdrcol6'})", "valstr xcell14 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][14] except: valstr =", "'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text = 'specification of Video Metadata Hub properties' docdate = ET.SubElement(body,", "new credentials. Returns: Credentials, the obtained credential. 
\"\"\" creds = None # The", "# created automatically when the authorization flow completes for the first # time.", "'th', {'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'PVMD", "= 'Definition / Semantics' \"\"\" thcol4 = ET.SubElement(throw, 'th', {'class':'hdrcol4'}) thcol4.text = 'Basic", "in 2018-03 thcol13 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol13link = ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text =", "+ '/ Mapping VMHub - Canon Cameras', 'Canon VideoClip XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp,", "document xroot = ET.Element('html') head = ET.SubElement(xroot, 'head') title = ET.SubElement(head, 'title') title.text", "data from a Google sheet The retrieved data are transformed in HTML as", "= ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][12] if moreatlink != '':", "mappedstdnote = ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text = 'In this table the columns with", "thcol17link = ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text = 'EIDR Data Fields 2.0' # second row", "no (valid) credentials available, let the user log in. 
if not creds or", "{'class':'hdrcolNoniptc'}) thcol14link = ET.SubElement(thcol14,'a', {'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text = 'Panasonic/SMPTE P2' thcol15 = ET.SubElement(throw, 'th',", "Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Canon Cameras',", "VMHub - NewsML-G2', 'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation", "row with \"find more at ...\" links throw = ET.SubElement(thead, 'tr') thcol1 =", "else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][12] if moreatlink", "{'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = headingtext1 seeotherdoc1 =", "+ HeaderAppendix + '/ Mapping VMHub - NewsML-G2', 'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC", "' ' xcell4.text = valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr", "= valuesProp[rowcounter][15] except: valstr = ' ' xcell15.text = valstr xcell16 = ET.SubElement(xrow,", "'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text =", "StdVersion + HeaderAppendix + '/ Mapping VMHub - EIDR Data Fields 2.0', 'EIDR", "use Creator: <NAME> History: 2016-11-25 mws: project started, download and HTML output ok", "#00cccc;') xcell1 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr =", "- PB Core 2.1', 'PB Core 2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata", "Hub Mapping' metachset = ET.SubElement(head, 'meta', 
{'http-equiv': \"Content-Type\", 'content': \"text/html; charset=utf-8\"}) csslink1 =", "'link', {'type': 'text/css', 'rel': 'stylesheet', 'href': 'iptcspecs1.css'}) body = ET.SubElement(xroot, 'body') pageheader =", "'th', {'class':'hdrcol4'}) thcol4.text = 'Basic Type/Cardinality' \"\"\" thcol5 = ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text", "from lxml import etree as ET SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json')", "xcell8 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][7] except: valstr = '", "available, let the user log in. if not creds or not creds.valid: if", "metachset = ET.SubElement(head, 'meta', {'http-equiv': \"Content-Type\", 'content': \"text/html; charset=utf-8\"}) csslink1 = ET.SubElement(head, 'link',", "'h1', {'class':'pageheader'}) iptcanc = ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left',", "'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink = valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub", "'th', {'class':'hdrcolNoniptc2'}) thcol13link = ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text = 'Sony XDCAM & Planning' thcol14", "' xcell10.text = valstr xcell11 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][11]", "except: valstr = ' ' xcell13.text = valstr xcell14 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'})", "</td>') throw.append(colcode) moreatlink = valuesProp[0][14] if moreatlink != '': colcode = ET.fromstring( '<td", "'<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][14] if moreatlink != '': colcode =", "/ all 
Mappings' + HeaderAppendix seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'See", "get_credentials() service = build('sheets', 'v4', credentials=credentials) spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName = 'MappingRec 1.3.1!A4:R'", "credentials. Returns: Credentials, the obtained credential. \"\"\" creds = None # The file", "JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8link = ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text = 'Apple", "JSON properties for Photo and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) if", "{'class':'note1'}) seeotherdoc1.text = 'See the ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text =", "+ IPTCRevisionDate + '.' copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © ' + CopyrightYear +", "'th', {'class':'hdrcol7'}) thcol7.text = 'IPTC PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8link", "= ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'}) thead = ET.SubElement(table, 'thead') throw = ET.SubElement(thead, 'tr')", "ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text = ' ' thcol2 = ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text", "page. 
For IPTC-internal use Creator: <NAME> History: 2016-11-25 mws: project started, download and", "- exiftool', 'exiftool field id', moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub -", "class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][16] if moreatlink != '': colcode = ET.fromstring(", "ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][14] if moreatlink != '': colcode", "= ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text = 'Canon VideoClip XML' thcol16 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'})", "moreatlink = valuesProp[0][10] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"'", "'title') title.text = 'Video Metadata Hub Mapping' metachset = ET.SubElement(head, 'meta', {'http-equiv': \"Content-Type\",", "- Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - PB", "if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink +", "'td', {'class':'hdrcol3'}) thcol3.text = ' ' \"\"\" thcol4 = ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text", "thcol10link.text = 'NewsML-G2' thcol11 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol11link = ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text", "+ StdVersion + HeaderAppendix + '/ Mapping VMHub - NewsML-G2', 'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html')", "' ' thcol2 = ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text = ' ' thcol3 =", "'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = ' '", "ET.SubElement(throw, 'th', {'class':'hdrcol4'}) thcol4.text = 'Basic Type/Cardinality' \"\"\" thcol5 = 
ET.SubElement(throw, 'th', {'class':'hdrcol5'})", "{'class':'hdrcolNoniptc2'}) thcol17link = ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text = 'EIDR Data Fields 2.0' # second", "token) return creds def createSpecificMapping(valuesProp, headingtext1, headingtext2, findmoreaturl, mapIdx, filename): # create the", "credentials available, let the user log in. if not creds or not creds.valid:", "valstr = valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text = valstr xcell3 =", "Commons Attribution 4.0 license <a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote = ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text", "' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Panasonic Cameras', 'Panasonic/SMPTE", "HeaderAppendix + '/ Mapping VMHub - Panasonic Cameras', 'Panasonic/SMPTE P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp,", "'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][17] except: valstr = ' ' xcell17.text =", "ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text", "spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp = result1.get('values', []) # create the HTML document xroot =", "thcol10 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol10link = ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text = 'NewsML-G2' thcol11", "'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix", "seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) 
seeotherdoc1link1.text = 'specification of Video Metadata Hub properties'", "= 'See the ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'specification of", "the obtained credential. \"\"\" creds = None # The file token.pickle stores the", "are defined by other standards or tools.' propnote1 = ET.fromstring('<p class=\"note1\">Note on the", "+ HeaderAppendix + '/ Mapping VMHub - Sony Cameras ', 'Sony XDCAM &", "ET.SubElement(table, 'tbody') for rowcounter in range(2, 186): xrow = ET.SubElement(tbody, 'tr') teststr =", "thcol5.text = 'EBUcore' thcol6 = ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 =", "teststr.find('PS', 0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try:", "thcol12 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol12link = ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text = 'Schema.org' #", "it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][6] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"'", "throw.append(colcode) moreatlink = valuesProp[0][16] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a", "= ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][11] except: valstr = ' '", "it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink", "= InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES) creds = flow.run_local_server(port=0) # Save the credentials for the", "xcell14 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][14] except: valstr = '", "ET.SubElement(xrow, 'td', { 'class': 'bgdcolNoniptc'}) try: valstr = 
valuesProp[rowcounter][mapIdx] except: valstr = '", "has been stored, or if the stored credentials are invalid, the OAuth2 flow", "= ET.SubElement(table, 'thead') throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text", "class=\"hdrcolNoniptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode)", "Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - NewsML-G2', 'NewsML-G2',", "+ IPTCApprovalDate + '. Document revision as of ' + IPTCRevisionDate + '.'", "ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text = 'In this table the columns with a blue", "at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) tbody =", "HeaderAppendix + '/ Mapping VMHub - exiftool', 'exiftool field id', moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp,", "= ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text = 'MPEG 7' thcol10 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol10link", "except: valstr = ' ' xcell8.text = valstr with open(filename, 'w') as file:", "'<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in range(2, 186):", "\" IPTCApprovalDate = \"13 May 2020\" IPTCRevisionDate = \"13 May 2020\" CopyrightYear =", "+ StdVersion + HeaderAppendix + '/ Mapping VMHub - MPEG 7', 'MPEG 7',", "if the stored credentials are invalid, the OAuth2 flow is completed to obtain", "{'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'IPTC PVMD", "ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'all recommended mappings of the Video Metadata Hub.'", "xcell4.text = 
valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4]", "'IPTC PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8link = ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text", "XDCAM & Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation '", "= ' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'})", "valstr = ' ' xcell8.text = valstr xcell9 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try:", "= pickle.load(token) # If there are no (valid) credentials available, let the user", "more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][5] colcode = ET.fromstring( '<td", "= ' ' xcell15.text = valstr xcell16 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr", "'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][13] except: valstr = ' ' xcell13.text =", "else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][11] if moreatlink", "xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr", "= ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8link = ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text = 'Apple Quicktime' thcol9", "title = ET.SubElement(head, 'title') title.text = 'Video Metadata Hub Mapping' metachset = ET.SubElement(head,", "spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName = 'MappingRec 1.3.1!A4:R' result1 = service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp", "or amber headers are defined by other standards or tools.' 
propnote1 = ET.fromstring('<p", "is completed to obtain the new credentials. Returns: Credentials, the obtained credential. \"\"\"", "seeotherdoc1.text = 'See the ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'specification", "user log in. if not creds or not creds.valid: if creds and creds.expired", "© ' + CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> - all rights reserved. Published", "Name' thcol3 = ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text = 'Definition / Semantics' \"\"\" thcol4", "' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'all recommended mappings of the", "\"\"\" thcol5 = ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text = 'EBUcore' thcol6 = ET.SubElement(throw, 'th',", "'th', {'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'IPTC", "</td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in range(2, 186): xrow =", "{'class':'hdrcolNoniptc'}) thcol16link = ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text = 'exiftool field ids' thcol17 = ET.SubElement(throw,", "tokens, and is # created automatically when the authorization flow completes for the", "try: valstr = valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text = valstr xcell3", "= valstr xcell16 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][16] except: valstr", "the OAuth2 flow is completed to obtain the new credentials. 
Returns: Credentials, the", "' xcell14.text = valstr xcell15 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][15]", "body.append(propnote1) if not valuesProp: print('No Property data found.') else: table = ET.SubElement(body, 'table',", "{'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text = 'Schema.org' # new in 2018-03 thcol13 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'})", "\"\"\" creds = None # The file token.pickle stores the user's access and", "'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text =", "= ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text = 'Property Group' thcol2 = ET.SubElement(throw, 'th', {'class':'hdrcol2'})", "'\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td", "it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) tbody", "ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol17link = ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text = 'EIDR Data Fields 2.0'", "= '' \"\"\" moreatlink = valuesProp[0][4] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' +", "at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][7] if moreatlink != '': colcode = ET.fromstring(", "the column with the green header is defined by ' + headingtext2 propnote1", "a blue header are defined by the Video Metadata Hub, the column with", "ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text = 'EIDR Data Fields 2.0' # second row with \"find", "(better for printing)' if not valuesProp: print('No Property data found.') else: table =", "import etree as ET SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly' 
CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME =", "xcell12.text = valstr xcell13 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][13] except:", "else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for", "valstr = ' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try:", "moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion +", "HeaderAppendix + '/ Mapping VMHub - MPEG 7', 'MPEG 7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp,", "ok 2020-06-15 BQ: Updated and checked into GitHub \"\"\" from __future__ import print_function", "IPTC (aka phovidmd).</p>') body.append(propnote1) docnote1 = ET.SubElement(body, 'p', {'class':'smallnote1'}) docnote1.text = 'The header", "Python script for retrieving IPTC Video Metadata Hub mapping data from a Google", "thead = ET.SubElement(table, 'thead') throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'th', {'class':'hdrcol1'})", "+ '/ Mapping VMHub - NewsML-G2', 'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata", "xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3]", "Metadata Hub Mapping' metachset = ET.SubElement(head, 'meta', {'http-equiv': \"Content-Type\", 'content': \"text/html; charset=utf-8\"}) csslink1", "' xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr =", "except: valstr = ' ' xcell14.text = valstr xcell15 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'})", "else: table = 
ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'}) thead = ET.SubElement(table, 'thead') throw =", "except: valstr = ' ' xcell17.text = valstr filename = \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with open(filename,", "'p', {'class':'note1'}) docdate.text = 'Mapping recommended on ' + IPTCApprovalDate + '. Document", "ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][15] if moreatlink != '': colcode", "headingtext1, headingtext2, findmoreaturl, mapIdx, filename): # create the HTML document xroot = ET.Element('html')", "valstr xcell3 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except:", "table the columns with a blue header are defined by the Video Metadata", "try: valstr = valuesProp[rowcounter][16] except: valstr = ' ' xcell16.text = valstr xcell17", "etree as ET SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME = 'Video", "target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\">", "ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text = 'MPEG 7' thcol10 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol10link =", "!= '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find", "StdVersion + HeaderAppendix + '/ Mapping VMHub - Schema.org', 'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') #", "= valstr xcell11 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][11] except: valstr", "+ StdVersion + HeaderAppendix + '/ Mapping VMHub - exiftool', 'exiftool field id',", "'/ Mapping VMHub - exiftool', 'exiftool field id', 
moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video", "= ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr =", "...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][10]", "= ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = ' '", "try: valstr = valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text = valstr \"\"\"", "thcol17 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol17link = ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text = 'EIDR Data", "{ 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr = ' ' xcell5.text", "{'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text = valstr", "' + IPTCRevisionDate + '.' 
copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © '+ CopyrightYear +", "def createSpecificMapping(valuesProp, headingtext1, headingtext2, findmoreaturl, mapIdx, filename): # create the HTML document xroot", "headingtext1 seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'Return to ' seeotherdoc1link1 =", "IPTC (aka phovidmd).</p>') body.append(propnote1) if not valuesProp: print('No Property data found.') else: table", "thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'IPTC PVMD JSON' thcol8 = ET.SubElement(throw,", "thcol1 = ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text = 'Property Group' thcol2 = ET.SubElement(throw, 'th',", "thcol2.text = ' ' thcol3 = ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text = ' '", "Hub, the columns with the green or amber headers are defined by other", "= valstr xcell14 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][14] except: valstr", "2020\" IPTCRevisionDate = \"13 May 2020\" CopyrightYear = \"2020\" def get_credentials(): \"\"\"Gets valid", "\"2020\" def get_credentials(): \"\"\"Gets valid user credentials from storage. If nothing has been", "def get_credentials(): \"\"\"Gets valid user credentials from storage. If nothing has been stored,", "...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][15]", "The file token.pickle stores the user's access and refresh tokens, and is #", "/>PVMD: a specification of JSON properties for Photo and Video MetaData by IPTC", "' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr", "...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][9]", "IPTCApprovalDate + '. Document revision as of ' + IPTCRevisionDate + '.' 
copyrightnotice", "log in. if not creds or not creds.valid: if creds and creds.expired and", "file: file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink = valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation", "# new in 2018-03 createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' +", "XMP standard.<br />PVMD: a specification of JSON properties for Photo and Video MetaData", "+ moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) else: colcode", "the credentials for the next run with open('token.pickle', 'wb') as token: pickle.dump(creds, token)", "seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'See the ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1,", "' ' xcell10.text = valstr xcell11 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr =", "The retrieved data are transformed in HTML as saved as HTML page. For", "xcell9 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][9] except: valstr = '", "try: valstr = valuesProp[rowcounter][13] except: valstr = ' ' xcell13.text = valstr xcell14", "'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = ' '", "{'class':'hdrcol2'}) thcol2.text = 'Property Name' thcol3 = ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text = 'Definition", "= valuesProp[0][15] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' +", "InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES) creds = flow.run_local_server(port=0) # Save the credentials for the next", "'.' 
copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © '+ CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> -", "'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'IPTC PVMD JSON' thcol8 =", "or if the stored credentials are invalid, the OAuth2 flow is completed to", "throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in range(2, 186): xrow = ET.SubElement(tbody,", "{'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text = valstr", "'th', {'class':'hdrcolNoniptc2'}) thcol9link = ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text = 'MPEG 7' thcol10 = ET.SubElement(throw,", "flow is completed to obtain the new credentials. Returns: Credentials, the obtained credential.", "'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][mapIdx] except: valstr = ' ' xcell8.text = valstr", "the ' seeotherdoc1link2 = ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text = 'specification of Video Metadata", "</td>') throw.append(colcode) moreatlink = valuesProp[0][10] if moreatlink != '': colcode = ET.fromstring( '<td", "'NewsML-G2' thcol11 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol11link = ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text = 'PB", "csslink1 = ET.SubElement(head, 'link', {'type': 'text/css', 'rel': 'stylesheet', 'href': 'iptcspecs1.css'}) body = ET.SubElement(xroot,", "History: 2016-11-25 mws: project started, download and HTML output ok 2020-06-15 BQ: Updated", "# new in 2018-03 thcol13 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol13link = ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'})", "{'http-equiv': \"Content-Type\", 'content': \"text/html; charset=utf-8\"}) 
csslink1 = ET.SubElement(head, 'link', {'type': 'text/css', 'rel': 'stylesheet',", "HeaderAppendix + '/ Mapping VMHub - Sony Cameras ', 'Sony XDCAM & Planning',", "import InstalledAppFlow from google.auth.transport.requests import Request from lxml import etree as ET SCOPES", "get_credentials(): \"\"\"Gets valid user credentials from storage. If nothing has been stored, or", "except: valstr = ' ' xcell8.text = valstr xcell9 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'})", "ET.SubElement(body, 'p', {'class':'smallnote1'}) docnote1.text = 'The header of mappings to other standards provides", "= valstr xcell3 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr", "under the Creative Commons Attribution 4.0 license <a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote = ET.SubElement(body,", "NewsML-G2', 'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' +", "valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow,", "= os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME = 'Video Metadata Hub Documentation Generator' # Constant values", "throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text = 'Property Group'", "target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][7] if moreatlink !=", "- Sony Cameras ', 'Sony XDCAM & Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video", "'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][15] except: valstr = ' ' xcell15.text =", "except: valstr = ' ' xcell5.text = 
valstr xcell6 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'})", "'head') title = ET.SubElement(head, 'title') title.text = 'Video Metadata Hub Mapping' metachset =", "ET.SubElement(throw, 'th', {'class':'hdrcol2'}) thcol2.text = 'Property Name' thcol3 = ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text", "'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = 'IPTC Video Metadata Hub - Recommendation '+", "creds = None # The file token.pickle stores the user's access and refresh", "authorization flow completes for the first # time. if os.path.exists('token.pickle'): with open('token.pickle', 'rb')", "Video Metadata Hub properties' docdate = ET.SubElement(body, 'p', {'class':'note1'}) docdate.text = 'Mapping recommended", "the Video Metadata Hub, the columns with the green or amber headers are", "of ' + IPTCRevisionDate + '.' copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright © '+ CopyrightYear", "= valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text = valstr \"\"\" xcell4 =", "Document revision as of ' + IPTCRevisionDate + '.' copyrightnotice = ET.fromstring('<p class=\"smallnote1\">Copyright", "thcol16 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol16link = ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text = 'exiftool field", "creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES) creds = flow.run_local_server(port=0) # Save the", "= ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text = 'exiftool field ids' thcol17 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'})", "checked into GitHub \"\"\" from __future__ import print_function import pickle import os import", "', <a href=\"https://iptc.org\">IPTC</a> - all rights reserved. 
Published under the Creative Commons Attribution", "ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'IPTC PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'})", "moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion +", "xcell5 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr", "Creative Commons Attribution 4.0 license <a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote = ET.SubElement(body, 'p', {'class':'note1'})", "{'class':'note1'}) seeotherdoc2.text = 'See the ' seeotherdoc1link2 = ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text =", "throw.append(colcode) moreatlink = valuesProp[0][7] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a", "Data Fields 2.0', 'EIDR Data Fields 2.0', moreatlink, 17,'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html') if __name__ == '__main__':", "'exiftool field id', moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation '", "'Sony XDCAM & Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation", "Credentials, the obtained credential. \"\"\" creds = None # The file token.pickle stores", "{'class':'note1'}) docdate.text = 'Mapping recommended on ' + IPTCApprovalDate + '. 
Document revision", "P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion", "valstr = valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text = valstr xcell7 =", "thcol3 = ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text = ' ' \"\"\" thcol4 = ET.SubElement(throw,", "mapping (better for printing)' if not valuesProp: print('No Property data found.') else: table", "at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink =", "for Photo and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) if not valuesProp:", "' xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try:", "valstr = valuesProp[rowcounter][17] except: valstr = ' ' xcell17.text = valstr filename =", "7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion +", "except: valstr = ' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', {", "xcell8 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][mapIdx] except: valstr", "/ Semantics' \"\"\" thcol4 = ET.SubElement(throw, 'th', {'class':'hdrcol4'}) thcol4.text = 'Basic Type/Cardinality' \"\"\"", "thcol15link = ET.SubElement(thcol15,'a', {'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text = 'Canon VideoClip XML' thcol16 = ET.SubElement(throw, 'th',", "'<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][17] if moreatlink != '': colcode =", "For IPTC-internal use Creator: <NAME> History: 2016-11-25 mws: project started, download and HTML", "{'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text = 'EIDR Data Fields 2.0' # 
second row with \"find more", "with a blue header are defined by the Video Metadata Hub, the columns", "13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix", "' ' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try:", "xcell2 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = '", "\"\"\" Python script for retrieving IPTC Video Metadata Hub mapping data from a", "Returns: Credentials, the obtained credential. \"\"\" creds = None # The file token.pickle", "Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Panasonic Cameras',", "+ headingtext2 propnote1 = ET.fromstring('<p class=\"note1\">Note on the column headers:<br />EBUcore: based on", "= ' ' xcell11.text = valstr xcell12 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr", "' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolNoniptc'}) try:", "Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub", "valstr = ' ' xcell3.text = valstr \"\"\" xcell4 = ET.SubElement(xrow, 'td', {", "user's access and refresh tokens, and is # created automatically when the authorization", "is defined by ' + headingtext2 propnote1 = ET.fromstring('<p class=\"note1\">Note on the column", "throw.append(colcode) moreatlink = valuesProp[0][11] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a", "build('sheets', 'v4', credentials=credentials) spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName = 'MappingRec 1.3.1!A4:R' result1 = service.spreadsheets().values().get(", "it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) moreatlink", "'': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' + 
moreatlink + '\" target=\"_blank\">Find more", "= ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'IPTC PVMD JSON' thcol8 = ET.SubElement(throw, 'th',", "moreatlink + '\" target=\"_blank\">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][7]", "colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find more about", "valuesProp[rowcounter][mapIdx] except: valstr = ' ' xcell8.text = valstr with open(filename, 'w') as", "pickle.load(token) # If there are no (valid) credentials available, let the user log", "except: valstr = ' ' xcell15.text = valstr xcell16 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'})", "StdVersion + HeaderAppendix + '/ Mapping VMHub - Apple Quicktime', 'Apple Quicktime', moreatlink,", "- Schema.org', 'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new in 2018-03 createSpecificMapping(valuesProp, 'IPTC Video Metadata", "to other standards provides a link to a table including only this mapping", "= valuesProp[rowcounter][12] except: valstr = ' ' xcell12.text = valstr xcell13 = ET.SubElement(xrow,", "docnote1 = ET.SubElement(body, 'p', {'class':'smallnote1'}) docnote1.text = 'The header of mappings to other", "except: valstr = ' ' xcell9.text = valstr xcell10 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'})", "= valuesProp[0][11] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"' +", "except: valstr = ' ' xcell4.text = valstr \"\"\" xcell5 = ET.SubElement(xrow, 'td',", "VMHub - Panasonic Cameras', 'Panasonic/SMPTE P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub", "'Mapping recommended on ' + IPTCApprovalDate + '. 
Document revision as of '", "moreatlink = valuesProp[0][11] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"><a href=\"'", "'th', {'class':'hdrcolNoniptc2'}) thcol11link = ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text = 'PB Core 2.1' thcol12 =", "+ '/ Mapping VMHub - Apple Quicktime', 'Apple Quicktime', moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp,", "valuesProp[rowcounter][9] except: valstr = ' ' xcell9.text = valstr xcell10 = ET.SubElement(xrow, 'td',", "thcol13link = ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text = 'Sony XDCAM & Planning' thcol14 = ET.SubElement(throw,", "= ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text = '' \"\"\" moreatlink = valuesProp[0][4] colcode =", "= ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol17link = ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text = 'EIDR Data Fields", "...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) tbody = ET.SubElement(table,", "moreatlink = valuesProp[0][mapIdx] if moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"'", "except: valstr = ' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'})", "# second row with \"find more at ...\" links throw = ET.SubElement(thead, 'tr')", "moreatlink != '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink + '\"", "VMHub - EIDR Data Fields 2.0', 'EIDR Data Fields 2.0', moreatlink, 17,'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html') if", "+ CopyrightYear + ', <a href=\"https://iptc.org\">IPTC</a> - all rights reserved. 
Published under the", "moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion +", "at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][6] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' +", "thcol1 = ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text = ' ' thcol2 = ET.SubElement(throw, 'td',", "{'class':'note1'}) mappedstdnote.text = 'In this table the columns with a blue header are", "moreatlink = valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion", "phovidmd).</p>') body.append(propnote1) if not valuesProp: print('No Property data found.') else: table = ET.SubElement(body,", "'IPTC Video Metadata Hub - Recommendation '+ StdVersion +' / all Mappings' +", "' ' xcell16.text = valstr xcell17 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr =", "with the green header is defined by ' + headingtext2 propnote1 = ET.fromstring('<p", "' xcell11.text = valstr xcell12 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][12]", "2020\" CopyrightYear = \"2020\" def get_credentials(): \"\"\"Gets valid user credentials from storage. 
If", "based on the EBU Core Metadata Standard.<br />XMP: based on the ISO XMP", "Hub, the column with the green header is defined by ' + headingtext2", "!= '': colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"><a href=\"' + moreatlink + '\" target=\"_blank\">Find", "in range(2, 186): xrow = ET.SubElement(tbody, 'tr') teststr = valuesProp[rowcounter][0] if teststr ==", "except: valstr = ' ' xcell11.text = valstr xcell12 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'})", "= ' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'})", "\"find more at ...\" links throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'td',", "'p', {'class':'smallnote1'}) docnote1.text = 'The header of mappings to other standards provides a", "is # created automatically when the authorization flow completes for the first #", "about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][mapIdx] if moreatlink != '': colcode", "= ET.SubElement(tbody, 'tr') teststr = valuesProp[rowcounter][0] if teststr == 'Property Structures (PS)': xrow.set('style',", "Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion", "APPLICATION_NAME = 'Video Metadata Hub Documentation Generator' # Constant values StdVersion = \"1.3\"", "creds or not creds.valid: if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow", "valuesProp[rowcounter][17] except: valstr = ' ' xcell17.text = valstr filename = \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with", "valstr = valuesProp[rowcounter][10] except: valstr = ' ' xcell10.text = valstr xcell11 =", "= \"IPTC-VideoMetadataHub-mapping-Rec_\"+StdVersion+\".html\" with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink = valuesProp[0][7] createSpecificMapping(valuesProp,", "'td', { 'class': 
'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr = ' '", "all rights reserved. Published under the Creative Commons Attribution 4.0 license <a href=\"http://creativecommons.org/licenses/by/4.0/\">http://creativecommons.org/licenses/by/4.0/</a></p>')", "{'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][15] except: valstr = ' ' xcell15.text = valstr", "iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = 'IPTC Video Metadata Hub", "valuesProp[rowcounter][13] except: valstr = ' ' xcell13.text = valstr xcell14 = ET.SubElement(xrow, 'td',", "= valstr xcell8 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][mapIdx]", "token.pickle stores the user's access and refresh tokens, and is # created automatically", "{'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text = 'PB Core 2.1' thcol12 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol12link =", "ET.SubElement(body, 'p', {'class':'note1'}) docdate.text = 'Mapping recommended on ' + IPTCApprovalDate + '.", "moreatlink = valuesProp[0][6] colcode = ET.fromstring( '<td class=\"hdrcolIptc\"><a href=\"' + moreatlink + '\"", "'Video Metadata Hub Documentation Generator' # Constant values StdVersion = \"1.3\" HeaderAppendix =", "more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][mapIdx] if moreatlink != '':", "try: valstr = valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text = valstr xcell8", "'td', {'class':'hdrcol2'}) thcol2.text = ' ' thcol3 = ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text =", "- Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - NewsML-G2',", "headingtext2 propnote1 = ET.fromstring('<p class=\"note1\">Note on the column headers:<br />EBUcore: based on the", "2020-06-15 BQ: Updated and checked into GitHub \"\"\" 
from __future__ import print_function import", "printing)' if not valuesProp: print('No Property data found.') else: table = ET.SubElement(body, 'table',", "credentials from storage. If nothing has been stored, or if the stored credentials", "May 2020\" CopyrightYear = \"2020\" def get_credentials(): \"\"\"Gets valid user credentials from storage.", "Quicktime', 'Apple Quicktime', moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation", "{ 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text", "try: valstr = valuesProp[rowcounter][15] except: valstr = ' ' xcell15.text = valstr xcell16", "throw.append(colcode) else: colcode = ET.fromstring( '<td class=\"hdrcolNoniptc2\"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody')", "stores the user's access and refresh tokens, and is # created automatically when", "colcode = ET.fromstring( '<td class=\"hdrcolNoniptc\"> </td>') throw.append(colcode) moreatlink = valuesProp[0][15] if moreatlink !=", "PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8link = ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text =", "of mappings to other standards provides a link to a table including only", "with a blue header are defined by the Video Metadata Hub, the column", "'Property Group' thcol2 = ET.SubElement(throw, 'th', {'class':'hdrcol2'}) thcol2.text = 'Property Name' thcol3 =", "valstr = valuesProp[rowcounter][9] except: valstr = ' ' xcell9.text = valstr xcell10 =", "ET.SubElement(tbody, 'tr') teststr = valuesProp[rowcounter][0] if teststr == 'Property Structures (PS)': xrow.set('style', 'background-color:", "- D-R-A-F-T - \" IPTCApprovalDate = \"13 May 2020\" IPTCRevisionDate = \"13 May", "ET.SubElement(thcol14,'a', 
{'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text = 'Panasonic/SMPTE P2' thcol15 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol15link =" ]
[]
[ "glob import numpy as np from pprint import pprint as pp g =", "from pprint import pprint as pp g = glob.glob(\"data/*/*/audio/*.wav\") wavpath = g[:10] pp(wavpath)", "wavpath = g[:10] pp(wavpath) res = [(\"/\".join(wp.split(\"/\")[:-2]), \"/\".join(wp.split(\"/\")[-2:])) for wp in wavpath] pp(res)", "pp g = glob.glob(\"data/*/*/audio/*.wav\") wavpath = g[:10] pp(wavpath) res = [(\"/\".join(wp.split(\"/\")[:-2]), \"/\".join(wp.split(\"/\")[-2:])) for", "import glob import numpy as np from pprint import pprint as pp g", "import pprint as pp g = glob.glob(\"data/*/*/audio/*.wav\") wavpath = g[:10] pp(wavpath) res =", "as np from pprint import pprint as pp g = glob.glob(\"data/*/*/audio/*.wav\") wavpath =", "g = glob.glob(\"data/*/*/audio/*.wav\") wavpath = g[:10] pp(wavpath) res = [(\"/\".join(wp.split(\"/\")[:-2]), \"/\".join(wp.split(\"/\")[-2:])) for wp", "numpy as np from pprint import pprint as pp g = glob.glob(\"data/*/*/audio/*.wav\") wavpath", "= glob.glob(\"data/*/*/audio/*.wav\") wavpath = g[:10] pp(wavpath) res = [(\"/\".join(wp.split(\"/\")[:-2]), \"/\".join(wp.split(\"/\")[-2:])) for wp in", "glob.glob(\"data/*/*/audio/*.wav\") wavpath = g[:10] pp(wavpath) res = [(\"/\".join(wp.split(\"/\")[:-2]), \"/\".join(wp.split(\"/\")[-2:])) for wp in wavpath]", "as pp g = glob.glob(\"data/*/*/audio/*.wav\") wavpath = g[:10] pp(wavpath) res = [(\"/\".join(wp.split(\"/\")[:-2]), \"/\".join(wp.split(\"/\")[-2:]))", "pprint import pprint as pp g = glob.glob(\"data/*/*/audio/*.wav\") wavpath = g[:10] pp(wavpath) res", "import numpy as np from pprint import pprint as pp g = glob.glob(\"data/*/*/audio/*.wav\")", "np from pprint import pprint as pp g = glob.glob(\"data/*/*/audio/*.wav\") wavpath = g[:10]", "pprint as pp g = glob.glob(\"data/*/*/audio/*.wav\") wavpath = g[:10] pp(wavpath) res = [(\"/\".join(wp.split(\"/\")[:-2])," ]
[ "} ) collection.update_one( {\"_id\": str(member.id)}, {\"$set\": {\"startTime\": 0}} ) def update_join(member: discord.Member, _before_flag,", "\"\"\" Adds new entry in database for new members. :param member: The member", "discord JsonData = Dict[str, Union[str, int]] cluster = MongoClient(config.mongo_client) db: MongoClient = cluster[config.cluster_name]", "current_minutes = int(current_hour) * 60 + int(current_minutes) if current_minutes < join_minutes: daily_time =", "\"name#\": str(member.name + \"#\" + member.discriminator) } collection.insert_one(post) def join(member: discord.Member, before_flag, after_flag):", "config if TYPE_CHECKING: import discord JsonData = Dict[str, Union[str, int]] cluster = MongoClient(config.mongo_client)", "the study channel \"\"\" if before_flag == after_flag: return user_exist = str(collection.find_one({\"_id\": str(member.id)}))", "Updates join data for existing members :param member: The member who joined the", "cluster = MongoClient(config.mongo_client) db: MongoClient = cluster[config.cluster_name] collection: MongoClient = db[config.collection_name] def daily_leaderboard()", "else difference monthly_time = current_minutes if int(now.day) == 1 else difference else: difference", "def resetWeekly(): \"\"\" Resets weekly time of all members \"\"\" collection.update_many({}, {\"$set\": {\"weeklyTime\":", "== 1 else difference else: difference = current_minutes - join_minutes daily_time = difference", "channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post = { \"_id\": str(member.id), \"memberTime\": 0,", "int(now.day) == 1 else difference else: difference = current_minutes - join_minutes daily_time =", "new entry in database for new members. 
:param member: The member who joined", "= datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one( {\"_id\": str(member.id)}, { \"$set\": { \"startTime\": now, \"name#\": str(member.name +", "user_exist = str(collection.find_one({\"_id\": str(member.id)})) if user_exist == \"None\": add(member, before_flag, after_flag) else: update_join(member,", "\"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator) } collection.insert_one(post) def join(member: discord.Member,", "\"#\" + member.discriminator) } } ) def add(member: discord.Member, _before_flag, _after_flag): \"\"\" Adds", "int(now.weekday()) == 0 else difference monthly_time = current_minutes if int(now.day) == 1 else", "int]] cluster = MongoClient(config.mongo_client) db: MongoClient = cluster[config.cluster_name] collection: MongoClient = db[config.collection_name] def", "monthly time of all members. \"\"\" collection.update_many({}, {\"$set\": {\"monthlyTime\": 0}}) def end(member: discord.Member):", "user = collection.find_one({\"_id\": str(member.id)}) join_time = str(user[\"startTime\"]) join_hour, join_minutes = join_time.split(':') join_minutes =", "if str(member) != \"none\" else None def resetDaily(): \"\"\" Resets daily time of", "member who joined the study channel :param _before_flag: The flag before the member", "collection.find_one({\"_id\": str(member.id)}) join_time = str(user[\"startTime\"]) join_hour, join_minutes = join_time.split(':') join_minutes = int(join_hour) *", "join(member: discord.Member, before_flag, after_flag): \"\"\" Called once member joins study channel. :param member:", ")[:10] def weekly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"weeklyTime\", pymongo.DESCENDING) )[:10] def monthly_leaderboard() ->", "entry in database for new members. 
:param member: The member who joined the", "the study channel :param before_flag: The flag before the member joined the study", "str(member.id)}, {\"$set\": {\"startTime\": 0}} ) def update_join(member: discord.Member, _before_flag, _after_flag): \"\"\" Updates join", "member = collection.find_one({\"_id\": member_id}) return member if str(member) != \"none\" else None def", "before the member joined the study channel :param _after_flag: The flag after the", "TYPE_CHECKING import pymongo from pymongo import MongoClient from pytz import timezone import config", "\"\"\" Called once member joins study channel. :param member: The member who joined", ") collection.update_one( {\"_id\": str(member.id)}, {\"$set\": {\"startTime\": 0}} ) def update_join(member: discord.Member, _before_flag, _after_flag):", "channel. \"\"\" now: datetime = datetime.now(timezone('Asia/Kolkata')) now_str: str = now.strftime(\"%H:%M\") user = collection.find_one({\"_id\":", "= join_time.split(':') join_minutes = int(join_hour) * 60 + int(join_minutes) current_hour, current_minutes = now_str.split(':')", "The member that left the voice channel. 
\"\"\" now: datetime = datetime.now(timezone('Asia/Kolkata')) now_str:", "(1440 - join_minutes) + current_minutes weekly_time = current_minutes if int(now.weekday()) == 0 else", "* 60 + int(join_minutes) current_hour, current_minutes = now_str.split(':') current_minutes = int(current_hour) * 60", "pymongo.DESCENDING) )[:10] def monthly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"monthlyTime\", pymongo.DESCENDING) )[:10] def member_leaderboard()", "list(collection.find({}).sort( \"weeklyTime\", pymongo.DESCENDING) )[:10] def monthly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"monthlyTime\", pymongo.DESCENDING) )[:10]", "None def resetDaily(): \"\"\" Resets daily time of all members \"\"\" collection.update_many({}, {\"$set\":", "now, \"name#\": str(member.name + \"#\" + member.discriminator) } } ) def add(member: discord.Member,", "\"\"\" now: datetime = datetime.now(timezone('Asia/Kolkata')) now_str: str = now.strftime(\"%H:%M\") user = collection.find_one({\"_id\": str(member.id)})", "import List, Dict, Union, Optional, TYPE_CHECKING import pymongo from pymongo import MongoClient from", "def weekly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"weeklyTime\", pymongo.DESCENDING) )[:10] def monthly_leaderboard() -> List[JsonData]:", "list(collection.find({}).sort( \"monthlyTime\", pymongo.DESCENDING) )[:10] def member_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"memberTime\", pymongo.DESCENDING) )[:10]", "_before_flag, _after_flag): \"\"\" Adds new entry in database for new members. 
:param member:", "Union[str, int]] cluster = MongoClient(config.mongo_client) db: MongoClient = cluster[config.cluster_name] collection: MongoClient = db[config.collection_name]", "{\"$set\": {\"dailyTime\": 0}}) def resetWeekly(): \"\"\" Resets weekly time of all members \"\"\"", "\"\"\" Resets weekly time of all members \"\"\" collection.update_many({}, {\"$set\": {\"weeklyTime\": 0}}) def", "def end(member: discord.Member): \"\"\" Updates total Study time for members when they leave.", "pymongo.DESCENDING) )[:10] def member_details(member_id) -> Optional[JsonData]: member = collection.find_one({\"_id\": member_id}) return member if", "time of all members. \"\"\" collection.update_many({}, {\"$set\": {\"monthlyTime\": 0}}) def end(member: discord.Member): \"\"\"", "+ \"#\" + member.discriminator) } collection.insert_one(post) def join(member: discord.Member, before_flag, after_flag): \"\"\" Called", "str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post = { \"_id\": str(member.id), \"memberTime\": 0, \"monthlyTime\": 0, \"weeklyTime\":", "collection.insert_one(post) def join(member: discord.Member, before_flag, after_flag): \"\"\" Called once member joins study channel.", "current_minutes if int(now.day) == 1 else difference else: difference = current_minutes - join_minutes", "pymongo import MongoClient from pytz import timezone import config if TYPE_CHECKING: import discord", "= current_minutes - join_minutes daily_time = difference weekly_time = difference monthly_time = difference", "from datetime import datetime from typing import List, Dict, Union, Optional, TYPE_CHECKING import", "{ \"$inc\": { \"memberTime\": int(difference), \"monthlyTime\": int(monthly_time), \"weeklyTime\": int(weekly_time), \"dailyTime\": int(daily_time) } }", "collection.update_one( {\"_id\": str(member.id)}, { \"$set\": { \"startTime\": now, \"name#\": str(member.name + \"#\" +", "daily_leaderboard() -> List[JsonData]: print( list(collection.find({}).sort( 
\"dailyTime\", pymongo.DESCENDING) )[:10] ) return list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING)", "time of all members \"\"\" collection.update_many({}, {\"$set\": {\"weeklyTime\": 0}}) def resetMonthly(): \"\"\" Resets", "str(user[\"startTime\"]) join_hour, join_minutes = join_time.split(':') join_minutes = int(join_hour) * 60 + int(join_minutes) current_hour,", "int(current_minutes) if current_minutes < join_minutes: daily_time = current_minutes difference = (1440 - join_minutes)", "{\"_id\": str(member.id)}, {\"$set\": {\"startTime\": 0}} ) def update_join(member: discord.Member, _before_flag, _after_flag): \"\"\" Updates", "that left the voice channel. \"\"\" now: datetime = datetime.now(timezone('Asia/Kolkata')) now_str: str =", "existing members :param member: The member who joined the study channel :param _before_flag:", "voice channel. \"\"\" now: datetime = datetime.now(timezone('Asia/Kolkata')) now_str: str = now.strftime(\"%H:%M\") user =", "import datetime from typing import List, Dict, Union, Optional, TYPE_CHECKING import pymongo from", "1 else difference else: difference = current_minutes - join_minutes daily_time = difference weekly_time", "-> List[JsonData]: return list(collection.find({}).sort( \"weeklyTime\", pymongo.DESCENDING) )[:10] def monthly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort(", "members. 
:param member: The member who joined the study channel :param _before_flag: The", ")[:10] ) return list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] def weekly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort(", "after the member joined the study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post", "= str(collection.find_one({\"_id\": str(member.id)})) if user_exist == \"None\": add(member, before_flag, after_flag) else: update_join(member, before_flag,", "List[JsonData]: return list(collection.find({}).sort( \"memberTime\", pymongo.DESCENDING) )[:10] def member_details(member_id) -> Optional[JsonData]: member = collection.find_one({\"_id\":", "-> List[JsonData]: return list(collection.find({}).sort( \"monthlyTime\", pymongo.DESCENDING) )[:10] def member_leaderboard() -> List[JsonData]: return list(collection.find({}).sort(", "weekly_time = current_minutes if int(now.weekday()) == 0 else difference monthly_time = current_minutes if", "member joined the study channel :param _after_flag: The flag after the member joined", "\"#\" + member.discriminator) } collection.insert_one(post) def join(member: discord.Member, before_flag, after_flag): \"\"\" Called once", "= collection.find_one({\"_id\": str(member.id)}) join_time = str(user[\"startTime\"]) join_hour, join_minutes = join_time.split(':') join_minutes = int(join_hour)", "member.discriminator) } collection.insert_one(post) def join(member: discord.Member, before_flag, after_flag): \"\"\" Called once member joins", "== 0 else difference monthly_time = current_minutes if int(now.day) == 1 else difference", "\"monthlyTime\": 0, \"weeklyTime\": 0, \"dailyTime\": 0, \"startTime\": now, \"name#\": str(member.name + \"#\" +", "member joined the study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post = {", "after_flag): \"\"\" Called once member joins study channel. 
:param member: The member who", "study channel :param before_flag: The flag before the member joined the study channel", "before the member joined the study channel :param after_flag: The flag after the", "import MongoClient from pytz import timezone import config if TYPE_CHECKING: import discord JsonData", "Dict[str, Union[str, int]] cluster = MongoClient(config.mongo_client) db: MongoClient = cluster[config.cluster_name] collection: MongoClient =", "joined the study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one( {\"_id\": str(member.id)}, {", "the member joined the study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one( {\"_id\":", "Resets weekly time of all members \"\"\" collection.update_many({}, {\"$set\": {\"weeklyTime\": 0}}) def resetMonthly():", "int(weekly_time), \"dailyTime\": int(daily_time) } } ) collection.update_one( {\"_id\": str(member.id)}, {\"$set\": {\"startTime\": 0}} )", "from pymongo import MongoClient from pytz import timezone import config if TYPE_CHECKING: import", "pymongo.DESCENDING) )[:10] def member_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"memberTime\", pymongo.DESCENDING) )[:10] def member_details(member_id)", "= str(user[\"startTime\"]) join_hour, join_minutes = join_time.split(':') join_minutes = int(join_hour) * 60 + int(join_minutes)", "The flag before the member joined the study channel :param _after_flag: The flag", "= current_minutes if int(now.weekday()) == 0 else difference monthly_time = current_minutes if int(now.day)", "= cluster[config.cluster_name] collection: MongoClient = db[config.collection_name] def daily_leaderboard() -> List[JsonData]: print( list(collection.find({}).sort( \"dailyTime\",", "discord.Member, _before_flag, _after_flag): \"\"\" Adds new entry in database for new members. 
:param", "def daily_leaderboard() -> List[JsonData]: print( list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] ) return list(collection.find({}).sort( \"dailyTime\",", "current_minutes difference = (1440 - join_minutes) + current_minutes weekly_time = current_minutes if int(now.weekday())", "\"\"\" collection.update_many({}, {\"$set\": {\"monthlyTime\": 0}}) def end(member: discord.Member): \"\"\" Updates total Study time", "channel. :param member: The member who joined the study channel :param before_flag: The", "else None def resetDaily(): \"\"\" Resets daily time of all members \"\"\" collection.update_many({},", "= int(join_hour) * 60 + int(join_minutes) current_hour, current_minutes = now_str.split(':') current_minutes = int(current_hour)", "weekly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"weeklyTime\", pymongo.DESCENDING) )[:10] def monthly_leaderboard() -> List[JsonData]: return", "+ current_minutes weekly_time = current_minutes if int(now.weekday()) == 0 else difference monthly_time =", "-> List[JsonData]: return list(collection.find({}).sort( \"memberTime\", pymongo.DESCENDING) )[:10] def member_details(member_id) -> Optional[JsonData]: member =", "difference else: difference = current_minutes - join_minutes daily_time = difference weekly_time = difference", "return list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] def weekly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"weeklyTime\", pymongo.DESCENDING)", "the study channel :param after_flag: The flag after the member joined the study", "import discord JsonData = Dict[str, Union[str, int]] cluster = MongoClient(config.mongo_client) db: MongoClient =", "list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] def weekly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"weeklyTime\", pymongo.DESCENDING) )[:10]", "{\"$set\": {\"startTime\": 0}} ) def 
update_join(member: discord.Member, _before_flag, _after_flag): \"\"\" Updates join data", "\"weeklyTime\": 0, \"dailyTime\": 0, \"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator) }", "difference monthly_time = difference collection.update_one( {\"_id\": str(member.id)}, { \"$inc\": { \"memberTime\": int(difference), \"monthlyTime\":", "str(member.name + \"#\" + member.discriminator) } } ) def add(member: discord.Member, _before_flag, _after_flag):", "TYPE_CHECKING: import discord JsonData = Dict[str, Union[str, int]] cluster = MongoClient(config.mongo_client) db: MongoClient", "before_flag == after_flag: return user_exist = str(collection.find_one({\"_id\": str(member.id)})) if user_exist == \"None\": add(member,", "difference monthly_time = current_minutes if int(now.day) == 1 else difference else: difference =", "members \"\"\" collection.update_many({}, {\"$set\": {\"weeklyTime\": 0}}) def resetMonthly(): \"\"\" Resets monthly time of", ":param after_flag: The flag after the member joined the study channel \"\"\" if", ") def add(member: discord.Member, _before_flag, _after_flag): \"\"\" Adds new entry in database for", "the study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one( {\"_id\": str(member.id)}, { \"$set\":", "The flag after the member joined the study channel \"\"\" now: str =", "joined the study channel :param after_flag: The flag after the member joined the", "\"monthlyTime\", pymongo.DESCENDING) )[:10] def member_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"memberTime\", pymongo.DESCENDING) )[:10] def", "members. 
\"\"\" collection.update_many({}, {\"$set\": {\"monthlyTime\": 0}}) def end(member: discord.Member): \"\"\" Updates total Study", "str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one( {\"_id\": str(member.id)}, { \"$set\": { \"startTime\": now, \"name#\": str(member.name", "\"memberTime\", pymongo.DESCENDING) )[:10] def member_details(member_id) -> Optional[JsonData]: member = collection.find_one({\"_id\": member_id}) return member", "JsonData = Dict[str, Union[str, int]] cluster = MongoClient(config.mongo_client) db: MongoClient = cluster[config.cluster_name] collection:", "{\"_id\": str(member.id)}, { \"$inc\": { \"memberTime\": int(difference), \"monthlyTime\": int(monthly_time), \"weeklyTime\": int(weekly_time), \"dailyTime\": int(daily_time)", "channel :param _after_flag: The flag after the member joined the study channel \"\"\"", "daily_time = difference weekly_time = difference monthly_time = difference collection.update_one( {\"_id\": str(member.id)}, {", "left the voice channel. \"\"\" now: datetime = datetime.now(timezone('Asia/Kolkata')) now_str: str = now.strftime(\"%H:%M\")", "resetDaily(): \"\"\" Resets daily time of all members \"\"\" collection.update_many({}, {\"$set\": {\"dailyTime\": 0}})", "leave. :param member: The member that left the voice channel. \"\"\" now: datetime", "now, \"name#\": str(member.name + \"#\" + member.discriminator) } collection.insert_one(post) def join(member: discord.Member, before_flag,", "after the member joined the study channel \"\"\" if before_flag == after_flag: return", "discord.Member): \"\"\" Updates total Study time for members when they leave. 
:param member:", "the study channel :param _before_flag: The flag before the member joined the study", "\"\"\" collection.update_many({}, {\"$set\": {\"dailyTime\": 0}}) def resetWeekly(): \"\"\" Resets weekly time of all", "\"\"\" Updates join data for existing members :param member: The member who joined", "join_minutes: daily_time = current_minutes difference = (1440 - join_minutes) + current_minutes weekly_time =", "member who joined the study channel :param before_flag: The flag before the member", "the study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post = { \"_id\": str(member.id),", "The member who joined the study channel :param before_flag: The flag before the", "List, Dict, Union, Optional, TYPE_CHECKING import pymongo from pymongo import MongoClient from pytz", "the voice channel. \"\"\" now: datetime = datetime.now(timezone('Asia/Kolkata')) now_str: str = now.strftime(\"%H:%M\") user", "\"\"\" if before_flag == after_flag: return user_exist = str(collection.find_one({\"_id\": str(member.id)})) if user_exist ==", "datetime.now(timezone('Asia/Kolkata')) now_str: str = now.strftime(\"%H:%M\") user = collection.find_one({\"_id\": str(member.id)}) join_time = str(user[\"startTime\"]) join_hour,", "0}}) def end(member: discord.Member): \"\"\" Updates total Study time for members when they", "\"\"\" collection.update_many({}, {\"$set\": {\"weeklyTime\": 0}}) def resetMonthly(): \"\"\" Resets monthly time of all", "now_str.split(':') current_minutes = int(current_hour) * 60 + int(current_minutes) if current_minutes < join_minutes: daily_time", "\"dailyTime\": int(daily_time) } } ) collection.update_one( {\"_id\": str(member.id)}, {\"$set\": {\"startTime\": 0}} ) def", "weekly_time = difference monthly_time = difference collection.update_one( {\"_id\": str(member.id)}, { \"$inc\": { \"memberTime\":", "members \"\"\" collection.update_many({}, {\"$set\": {\"dailyTime\": 0}}) def resetWeekly(): \"\"\" Resets weekly time 
of", "= int(current_hour) * 60 + int(current_minutes) if current_minutes < join_minutes: daily_time = current_minutes", "Called once member joins study channel. :param member: The member who joined the", "current_minutes if int(now.weekday()) == 0 else difference monthly_time = current_minutes if int(now.day) ==", "str(member.id), \"memberTime\": 0, \"monthlyTime\": 0, \"weeklyTime\": 0, \"dailyTime\": 0, \"startTime\": now, \"name#\": str(member.name", "\"dailyTime\": 0, \"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator) } collection.insert_one(post) def", "0 else difference monthly_time = current_minutes if int(now.day) == 1 else difference else:", "MongoClient = db[config.collection_name] def daily_leaderboard() -> List[JsonData]: print( list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] )", "\"\"\" Updates total Study time for members when they leave. :param member: The", "Resets daily time of all members \"\"\" collection.update_many({}, {\"$set\": {\"dailyTime\": 0}}) def resetWeekly():", "joined the study channel :param before_flag: The flag before the member joined the", "after_flag: The flag after the member joined the study channel \"\"\" if before_flag", "- join_minutes) + current_minutes weekly_time = current_minutes if int(now.weekday()) == 0 else difference", "return list(collection.find({}).sort( \"monthlyTime\", pymongo.DESCENDING) )[:10] def member_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"memberTime\", pymongo.DESCENDING)", "joined the study channel :param _before_flag: The flag before the member joined the", "channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one( {\"_id\": str(member.id)}, { \"$set\": { \"startTime\":", "{\"monthlyTime\": 0}}) def end(member: discord.Member): \"\"\" Updates total Study time for members when", "_after_flag): \"\"\" Adds new entry in database for new members. 
:param member: The", "post = { \"_id\": str(member.id), \"memberTime\": 0, \"monthlyTime\": 0, \"weeklyTime\": 0, \"dailyTime\": 0,", "Adds new entry in database for new members. :param member: The member who", "join_time.split(':') join_minutes = int(join_hour) * 60 + int(join_minutes) current_hour, current_minutes = now_str.split(':') current_minutes", "== after_flag: return user_exist = str(collection.find_one({\"_id\": str(member.id)})) if user_exist == \"None\": add(member, before_flag,", "joined the study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post = { \"_id\":", "{ \"memberTime\": int(difference), \"monthlyTime\": int(monthly_time), \"weeklyTime\": int(weekly_time), \"dailyTime\": int(daily_time) } } ) collection.update_one(", "database for new members. :param member: The member who joined the study channel", "List[JsonData]: return list(collection.find({}).sort( \"monthlyTime\", pymongo.DESCENDING) )[:10] def member_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"memberTime\",", "int(daily_time) } } ) collection.update_one( {\"_id\": str(member.id)}, {\"$set\": {\"startTime\": 0}} ) def update_join(member:", "datetime import datetime from typing import List, Dict, Union, Optional, TYPE_CHECKING import pymongo", "join_minutes = int(join_hour) * 60 + int(join_minutes) current_hour, current_minutes = now_str.split(':') current_minutes =", "= datetime.now(timezone('Asia/Kolkata')) now_str: str = now.strftime(\"%H:%M\") user = collection.find_one({\"_id\": str(member.id)}) join_time = str(user[\"startTime\"])", "current_hour, current_minutes = now_str.split(':') current_minutes = int(current_hour) * 60 + int(current_minutes) if current_minutes", "\"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post = { \"_id\": str(member.id), \"memberTime\": 0, \"monthlyTime\":", "str(member.id)}) join_time = str(user[\"startTime\"]) join_hour, join_minutes = join_time.split(':') 
join_minutes = int(join_hour) * 60", "joined the study channel :param _after_flag: The flag after the member joined the", "study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one( {\"_id\": str(member.id)}, { \"$set\": {", "= datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post = { \"_id\": str(member.id), \"memberTime\": 0, \"monthlyTime\": 0, \"weeklyTime\": 0,", "def member_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"memberTime\", pymongo.DESCENDING) )[:10] def member_details(member_id) -> Optional[JsonData]:", "def resetDaily(): \"\"\" Resets daily time of all members \"\"\" collection.update_many({}, {\"$set\": {\"dailyTime\":", "\"weeklyTime\", pymongo.DESCENDING) )[:10] def monthly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"monthlyTime\", pymongo.DESCENDING) )[:10] def", "collection.find_one({\"_id\": member_id}) return member if str(member) != \"none\" else None def resetDaily(): \"\"\"", "0}}) def resetMonthly(): \"\"\" Resets monthly time of all members. \"\"\" collection.update_many({}, {\"$set\":", "collection: MongoClient = db[config.collection_name] def daily_leaderboard() -> List[JsonData]: print( list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10]", "from __future__ import annotations from datetime import datetime from typing import List, Dict,", "The flag after the member joined the study channel \"\"\" if before_flag ==", "discord.Member, before_flag, after_flag): \"\"\" Called once member joins study channel. :param member: The", "once member joins study channel. 
:param member: The member who joined the study", "current_minutes weekly_time = current_minutes if int(now.weekday()) == 0 else difference monthly_time = current_minutes", "+ member.discriminator) } } ) def add(member: discord.Member, _before_flag, _after_flag): \"\"\" Adds new", "import annotations from datetime import datetime from typing import List, Dict, Union, Optional,", "list(collection.find({}).sort( \"memberTime\", pymongo.DESCENDING) )[:10] def member_details(member_id) -> Optional[JsonData]: member = collection.find_one({\"_id\": member_id}) return", "member joined the study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one( {\"_id\": str(member.id)},", "for existing members :param member: The member who joined the study channel :param", "= current_minutes difference = (1440 - join_minutes) + current_minutes weekly_time = current_minutes if", "else: difference = current_minutes - join_minutes daily_time = difference weekly_time = difference monthly_time", "join_minutes daily_time = difference weekly_time = difference monthly_time = difference collection.update_one( {\"_id\": str(member.id)},", "join_hour, join_minutes = join_time.split(':') join_minutes = int(join_hour) * 60 + int(join_minutes) current_hour, current_minutes", "\"weeklyTime\": int(weekly_time), \"dailyTime\": int(daily_time) } } ) collection.update_one( {\"_id\": str(member.id)}, {\"$set\": {\"startTime\": 0}}", "pymongo from pymongo import MongoClient from pytz import timezone import config if TYPE_CHECKING:", "{\"weeklyTime\": 0}}) def resetMonthly(): \"\"\" Resets monthly time of all members. 
\"\"\" collection.update_many({},", "members :param member: The member who joined the study channel :param _before_flag: The", ")[:10] def member_details(member_id) -> Optional[JsonData]: member = collection.find_one({\"_id\": member_id}) return member if str(member)", "int(join_hour) * 60 + int(join_minutes) current_hour, current_minutes = now_str.split(':') current_minutes = int(current_hour) *", "member_id}) return member if str(member) != \"none\" else None def resetDaily(): \"\"\" Resets", "{\"$set\": {\"monthlyTime\": 0}}) def end(member: discord.Member): \"\"\" Updates total Study time for members", "member_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"memberTime\", pymongo.DESCENDING) )[:10] def member_details(member_id) -> Optional[JsonData]: member", "difference = current_minutes - join_minutes daily_time = difference weekly_time = difference monthly_time =", "import timezone import config if TYPE_CHECKING: import discord JsonData = Dict[str, Union[str, int]]", "resetWeekly(): \"\"\" Resets weekly time of all members \"\"\" collection.update_many({}, {\"$set\": {\"weeklyTime\": 0}})", "joins study channel. :param member: The member who joined the study channel :param", "member joined the study channel \"\"\" if before_flag == after_flag: return user_exist =", "total Study time for members when they leave. :param member: The member that", "study channel :param _before_flag: The flag before the member joined the study channel", "for new members. :param member: The member who joined the study channel :param", "< join_minutes: daily_time = current_minutes difference = (1440 - join_minutes) + current_minutes weekly_time", "difference collection.update_one( {\"_id\": str(member.id)}, { \"$inc\": { \"memberTime\": int(difference), \"monthlyTime\": int(monthly_time), \"weeklyTime\": int(weekly_time),", "} } ) def add(member: discord.Member, _before_flag, _after_flag): \"\"\" Adds new entry in", "they leave. 
:param member: The member that left the voice channel. \"\"\" now:", "Dict, Union, Optional, TYPE_CHECKING import pymongo from pymongo import MongoClient from pytz import", "\"$set\": { \"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator) } } )", "def join(member: discord.Member, before_flag, after_flag): \"\"\" Called once member joins study channel. :param", "Optional, TYPE_CHECKING import pymongo from pymongo import MongoClient from pytz import timezone import", "\"memberTime\": int(difference), \"monthlyTime\": int(monthly_time), \"weeklyTime\": int(weekly_time), \"dailyTime\": int(daily_time) } } ) collection.update_one( {\"_id\":", "= { \"_id\": str(member.id), \"memberTime\": 0, \"monthlyTime\": 0, \"weeklyTime\": 0, \"dailyTime\": 0, \"startTime\":", "of all members \"\"\" collection.update_many({}, {\"$set\": {\"dailyTime\": 0}}) def resetWeekly(): \"\"\" Resets weekly", "join_minutes = join_time.split(':') join_minutes = int(join_hour) * 60 + int(join_minutes) current_hour, current_minutes =", ":param before_flag: The flag before the member joined the study channel :param after_flag:", "} } ) collection.update_one( {\"_id\": str(member.id)}, {\"$set\": {\"startTime\": 0}} ) def update_join(member: discord.Member,", "Study time for members when they leave. 
:param member: The member that left", "_before_flag: The flag before the member joined the study channel :param _after_flag: The", "= difference collection.update_one( {\"_id\": str(member.id)}, { \"$inc\": { \"memberTime\": int(difference), \"monthlyTime\": int(monthly_time), \"weeklyTime\":", "= Dict[str, Union[str, int]] cluster = MongoClient(config.mongo_client) db: MongoClient = cluster[config.cluster_name] collection: MongoClient", "pymongo.DESCENDING) )[:10] ) return list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] def weekly_leaderboard() -> List[JsonData]: return", "{\"$set\": {\"weeklyTime\": 0}}) def resetMonthly(): \"\"\" Resets monthly time of all members. \"\"\"", "end(member: discord.Member): \"\"\" Updates total Study time for members when they leave. :param", "int(monthly_time), \"weeklyTime\": int(weekly_time), \"dailyTime\": int(daily_time) } } ) collection.update_one( {\"_id\": str(member.id)}, {\"$set\": {\"startTime\":", "the member joined the study channel \"\"\" if before_flag == after_flag: return user_exist", "db: MongoClient = cluster[config.cluster_name] collection: MongoClient = db[config.collection_name] def daily_leaderboard() -> List[JsonData]: print(", "joined the study channel \"\"\" if before_flag == after_flag: return user_exist = str(collection.find_one({\"_id\":", "{ \"$set\": { \"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator) } }", "The flag before the member joined the study channel :param after_flag: The flag", "datetime from typing import List, Dict, Union, Optional, TYPE_CHECKING import pymongo from pymongo", ":param _before_flag: The flag before the member joined the study channel :param _after_flag:", "MongoClient = cluster[config.cluster_name] collection: MongoClient = db[config.collection_name] def daily_leaderboard() -> List[JsonData]: print( list(collection.find({}).sort(", "monthly_time = difference collection.update_one( {\"_id\": str(member.id)}, { \"$inc\": { 
\"memberTime\": int(difference), \"monthlyTime\": int(monthly_time),", "time for members when they leave. :param member: The member that left the", "str(member.name + \"#\" + member.discriminator) } collection.insert_one(post) def join(member: discord.Member, before_flag, after_flag): \"\"\"", "member_details(member_id) -> Optional[JsonData]: member = collection.find_one({\"_id\": member_id}) return member if str(member) != \"none\"", "member: The member who joined the study channel :param before_flag: The flag before", "now_str: str = now.strftime(\"%H:%M\") user = collection.find_one({\"_id\": str(member.id)}) join_time = str(user[\"startTime\"]) join_hour, join_minutes", "} ) def add(member: discord.Member, _before_flag, _after_flag): \"\"\" Adds new entry in database", "channel :param _before_flag: The flag before the member joined the study channel :param", "monthly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"monthlyTime\", pymongo.DESCENDING) )[:10] def member_leaderboard() -> List[JsonData]: return", "of all members \"\"\" collection.update_many({}, {\"$set\": {\"weeklyTime\": 0}}) def resetMonthly(): \"\"\" Resets monthly", "if current_minutes < join_minutes: daily_time = current_minutes difference = (1440 - join_minutes) +", "_after_flag: The flag after the member joined the study channel \"\"\" now: str", "difference = (1440 - join_minutes) + current_minutes weekly_time = current_minutes if int(now.weekday()) ==", "before_flag: The flag before the member joined the study channel :param after_flag: The", "\"$inc\": { \"memberTime\": int(difference), \"monthlyTime\": int(monthly_time), \"weeklyTime\": int(weekly_time), \"dailyTime\": int(daily_time) } } )", "members when they leave. 
:param member: The member that left the voice channel.", "str(member.id)}, { \"$inc\": { \"memberTime\": int(difference), \"monthlyTime\": int(monthly_time), \"weeklyTime\": int(weekly_time), \"dailyTime\": int(daily_time) }", "MongoClient from pytz import timezone import config if TYPE_CHECKING: import discord JsonData =", "__future__ import annotations from datetime import datetime from typing import List, Dict, Union,", "list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] ) return list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] def weekly_leaderboard() ->", "MongoClient(config.mongo_client) db: MongoClient = cluster[config.cluster_name] collection: MongoClient = db[config.collection_name] def daily_leaderboard() -> List[JsonData]:", "= MongoClient(config.mongo_client) db: MongoClient = cluster[config.cluster_name] collection: MongoClient = db[config.collection_name] def daily_leaderboard() ->", "+ int(join_minutes) current_hour, current_minutes = now_str.split(':') current_minutes = int(current_hour) * 60 + int(current_minutes)", "collection.update_many({}, {\"$set\": {\"dailyTime\": 0}}) def resetWeekly(): \"\"\" Resets weekly time of all members", "60 + int(join_minutes) current_hour, current_minutes = now_str.split(':') current_minutes = int(current_hour) * 60 +", "The member who joined the study channel :param _before_flag: The flag before the", "if TYPE_CHECKING: import discord JsonData = Dict[str, Union[str, int]] cluster = MongoClient(config.mongo_client) db:", "+ member.discriminator) } collection.insert_one(post) def join(member: discord.Member, before_flag, after_flag): \"\"\" Called once member", "current_minutes < join_minutes: daily_time = current_minutes difference = (1440 - join_minutes) + current_minutes", "new members. :param member: The member who joined the study channel :param _before_flag:", "\"\"\" Resets monthly time of all members. 
\"\"\" collection.update_many({}, {\"$set\": {\"monthlyTime\": 0}}) def", "} collection.insert_one(post) def join(member: discord.Member, before_flag, after_flag): \"\"\" Called once member joins study", "now: datetime = datetime.now(timezone('Asia/Kolkata')) now_str: str = now.strftime(\"%H:%M\") user = collection.find_one({\"_id\": str(member.id)}) join_time", "channel :param before_flag: The flag before the member joined the study channel :param", "!= \"none\" else None def resetDaily(): \"\"\" Resets daily time of all members", "def resetMonthly(): \"\"\" Resets monthly time of all members. \"\"\" collection.update_many({}, {\"$set\": {\"monthlyTime\":", "after_flag: return user_exist = str(collection.find_one({\"_id\": str(member.id)})) if user_exist == \"None\": add(member, before_flag, after_flag)", "study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post = { \"_id\": str(member.id), \"memberTime\":", "update_join(member: discord.Member, _before_flag, _after_flag): \"\"\" Updates join data for existing members :param member:", "def update_join(member: discord.Member, _before_flag, _after_flag): \"\"\" Updates join data for existing members :param", "member joined the study channel :param after_flag: The flag after the member joined", "datetime = datetime.now(timezone('Asia/Kolkata')) now_str: str = now.strftime(\"%H:%M\") user = collection.find_one({\"_id\": str(member.id)}) join_time =", "List[JsonData]: print( list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] ) return list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] def", "str(member) != \"none\" else None def resetDaily(): \"\"\" Resets daily time of all", "when they leave. :param member: The member that left the voice channel. 
\"\"\"", "collection.update_one( {\"_id\": str(member.id)}, { \"$inc\": { \"memberTime\": int(difference), \"monthlyTime\": int(monthly_time), \"weeklyTime\": int(weekly_time), \"dailyTime\":", "after the member joined the study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one(", "= current_minutes if int(now.day) == 1 else difference else: difference = current_minutes -", "datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post = { \"_id\": str(member.id), \"memberTime\": 0, \"monthlyTime\": 0, \"weeklyTime\": 0, \"dailyTime\":", "for members when they leave. :param member: The member that left the voice", "if int(now.weekday()) == 0 else difference monthly_time = current_minutes if int(now.day) == 1", "current_minutes = now_str.split(':') current_minutes = int(current_hour) * 60 + int(current_minutes) if current_minutes <", "Updates total Study time for members when they leave. :param member: The member", "collection.update_one( {\"_id\": str(member.id)}, {\"$set\": {\"startTime\": 0}} ) def update_join(member: discord.Member, _before_flag, _after_flag): \"\"\"", "= difference weekly_time = difference monthly_time = difference collection.update_one( {\"_id\": str(member.id)}, { \"$inc\":", "all members \"\"\" collection.update_many({}, {\"$set\": {\"dailyTime\": 0}}) def resetWeekly(): \"\"\" Resets weekly time", "\"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one( {\"_id\": str(member.id)}, { \"$set\": { \"startTime\": now,", "{ \"_id\": str(member.id), \"memberTime\": 0, \"monthlyTime\": 0, \"weeklyTime\": 0, \"dailyTime\": 0, \"startTime\": now,", "from typing import List, Dict, Union, Optional, TYPE_CHECKING import pymongo from pymongo import", ":param member: The member that left the voice channel. 
\"\"\" now: datetime =", "{ \"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator) } } ) def", "= (1440 - join_minutes) + current_minutes weekly_time = current_minutes if int(now.weekday()) == 0", "now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post = { \"_id\": str(member.id), \"memberTime\": 0, \"monthlyTime\": 0,", "the study channel :param _after_flag: The flag after the member joined the study", "all members \"\"\" collection.update_many({}, {\"$set\": {\"weeklyTime\": 0}}) def resetMonthly(): \"\"\" Resets monthly time", "0, \"dailyTime\": 0, \"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator) } collection.insert_one(post)", "study channel :param _after_flag: The flag after the member joined the study channel", "int(join_minutes) current_hour, current_minutes = now_str.split(':') current_minutes = int(current_hour) * 60 + int(current_minutes) if", "db[config.collection_name] def daily_leaderboard() -> List[JsonData]: print( list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] ) return list(collection.find({}).sort(", "= db[config.collection_name] def daily_leaderboard() -> List[JsonData]: print( list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] ) return", "of all members. 
\"\"\" collection.update_many({}, {\"$set\": {\"monthlyTime\": 0}}) def end(member: discord.Member): \"\"\" Updates", "def add(member: discord.Member, _before_flag, _after_flag): \"\"\" Adds new entry in database for new", ")[:10] def member_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"memberTime\", pymongo.DESCENDING) )[:10] def member_details(member_id) ->", "0, \"weeklyTime\": 0, \"dailyTime\": 0, \"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator)", "print( list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] ) return list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] def weekly_leaderboard()", "collection.update_many({}, {\"$set\": {\"monthlyTime\": 0}}) def end(member: discord.Member): \"\"\" Updates total Study time for", ")[:10] def monthly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"monthlyTime\", pymongo.DESCENDING) )[:10] def member_leaderboard() ->", "\"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator) } } ) def add(member:", "typing import List, Dict, Union, Optional, TYPE_CHECKING import pymongo from pymongo import MongoClient", "pymongo.DESCENDING) )[:10] def weekly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"weeklyTime\", pymongo.DESCENDING) )[:10] def monthly_leaderboard()", "return user_exist = str(collection.find_one({\"_id\": str(member.id)})) if user_exist == \"None\": add(member, before_flag, after_flag) else:", "member that left the voice channel. \"\"\" now: datetime = datetime.now(timezone('Asia/Kolkata')) now_str: str", "0}}) def resetWeekly(): \"\"\" Resets weekly time of all members \"\"\" collection.update_many({}, {\"$set\":", "str(member.id)}, { \"$set\": { \"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator) }", "Resets monthly time of all members. 
\"\"\" collection.update_many({}, {\"$set\": {\"monthlyTime\": 0}}) def end(member:", "in database for new members. :param member: The member who joined the study", "join_time = str(user[\"startTime\"]) join_hour, join_minutes = join_time.split(':') join_minutes = int(join_hour) * 60 +", "str = now.strftime(\"%H:%M\") user = collection.find_one({\"_id\": str(member.id)}) join_time = str(user[\"startTime\"]) join_hour, join_minutes =", "now.strftime(\"%H:%M\") user = collection.find_one({\"_id\": str(member.id)}) join_time = str(user[\"startTime\"]) join_hour, join_minutes = join_time.split(':') join_minutes", "0, \"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator) } collection.insert_one(post) def join(member:", "\"dailyTime\", pymongo.DESCENDING) )[:10] ) return list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] def weekly_leaderboard() -> List[JsonData]:", "-> Optional[JsonData]: member = collection.find_one({\"_id\": member_id}) return member if str(member) != \"none\" else", "all members. 
\"\"\" collection.update_many({}, {\"$set\": {\"monthlyTime\": 0}}) def end(member: discord.Member): \"\"\" Updates total", ":param member: The member who joined the study channel :param _before_flag: The flag", "\"dailyTime\", pymongo.DESCENDING) )[:10] def weekly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"weeklyTime\", pymongo.DESCENDING) )[:10] def", "discord.Member, _before_flag, _after_flag): \"\"\" Updates join data for existing members :param member: The", "\"none\" else None def resetDaily(): \"\"\" Resets daily time of all members \"\"\"", "= now.strftime(\"%H:%M\") user = collection.find_one({\"_id\": str(member.id)}) join_time = str(user[\"startTime\"]) join_hour, join_minutes = join_time.split(':')", "str(collection.find_one({\"_id\": str(member.id)})) if user_exist == \"None\": add(member, before_flag, after_flag) else: update_join(member, before_flag, after_flag)", "\"\"\" Resets daily time of all members \"\"\" collection.update_many({}, {\"$set\": {\"dailyTime\": 0}}) def", "who joined the study channel :param before_flag: The flag before the member joined", "pytz import timezone import config if TYPE_CHECKING: import discord JsonData = Dict[str, Union[str,", "join_minutes) + current_minutes weekly_time = current_minutes if int(now.weekday()) == 0 else difference monthly_time", "weekly time of all members \"\"\" collection.update_many({}, {\"$set\": {\"weeklyTime\": 0}}) def resetMonthly(): \"\"\"", "+ \"#\" + member.discriminator) } } ) def add(member: discord.Member, _before_flag, _after_flag): \"\"\"", "member: The member who joined the study channel :param _before_flag: The flag before", "monthly_time = current_minutes if int(now.day) == 1 else difference else: difference = current_minutes", ":param member: The member who joined the study channel :param before_flag: The flag", "def monthly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"monthlyTime\", pymongo.DESCENDING) )[:10] def 
member_leaderboard() -> List[JsonData]:", "return member if str(member) != \"none\" else None def resetDaily(): \"\"\" Resets daily", "channel \"\"\" if before_flag == after_flag: return user_exist = str(collection.find_one({\"_id\": str(member.id)})) if user_exist", "from pytz import timezone import config if TYPE_CHECKING: import discord JsonData = Dict[str,", "who joined the study channel :param _before_flag: The flag before the member joined", "-> List[JsonData]: print( list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] ) return list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10]", "daily_time = current_minutes difference = (1440 - join_minutes) + current_minutes weekly_time = current_minutes", "return list(collection.find({}).sort( \"weeklyTime\", pymongo.DESCENDING) )[:10] def monthly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"monthlyTime\", pymongo.DESCENDING)", "60 + int(current_minutes) if current_minutes < join_minutes: daily_time = current_minutes difference = (1440", "resetMonthly(): \"\"\" Resets monthly time of all members. 
\"\"\" collection.update_many({}, {\"$set\": {\"monthlyTime\": 0}})", "member.discriminator) } } ) def add(member: discord.Member, _before_flag, _after_flag): \"\"\" Adds new entry", "flag after the member joined the study channel \"\"\" if before_flag == after_flag:", "= collection.find_one({\"_id\": member_id}) return member if str(member) != \"none\" else None def resetDaily():", "difference weekly_time = difference monthly_time = difference collection.update_one( {\"_id\": str(member.id)}, { \"$inc\": {", "= now_str.split(':') current_minutes = int(current_hour) * 60 + int(current_minutes) if current_minutes < join_minutes:", ":param _after_flag: The flag after the member joined the study channel \"\"\" now:", "daily time of all members \"\"\" collection.update_many({}, {\"$set\": {\"dailyTime\": 0}}) def resetWeekly(): \"\"\"", "now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one( {\"_id\": str(member.id)}, { \"$set\": { \"startTime\": now, \"name#\":", "- join_minutes daily_time = difference weekly_time = difference monthly_time = difference collection.update_one( {\"_id\":", "= difference monthly_time = difference collection.update_one( {\"_id\": str(member.id)}, { \"$inc\": { \"memberTime\": int(difference),", "Optional[JsonData]: member = collection.find_one({\"_id\": member_id}) return member if str(member) != \"none\" else None", "add(member: discord.Member, _before_flag, _after_flag): \"\"\" Adds new entry in database for new members.", "import config if TYPE_CHECKING: import discord JsonData = Dict[str, Union[str, int]] cluster =", "if before_flag == after_flag: return user_exist = str(collection.find_one({\"_id\": str(member.id)})) if user_exist == \"None\":", "annotations from datetime import datetime from typing import List, Dict, Union, Optional, TYPE_CHECKING", "timezone import config if TYPE_CHECKING: import discord JsonData = Dict[str, Union[str, int]] cluster", "data for existing members :param member: 
The member who joined the study channel", "member if str(member) != \"none\" else None def resetDaily(): \"\"\" Resets daily time", "else difference else: difference = current_minutes - join_minutes daily_time = difference weekly_time =", "cluster[config.cluster_name] collection: MongoClient = db[config.collection_name] def daily_leaderboard() -> List[JsonData]: print( list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING)", "flag before the member joined the study channel :param after_flag: The flag after", "import pymongo from pymongo import MongoClient from pytz import timezone import config if", "_after_flag): \"\"\" Updates join data for existing members :param member: The member who", "{\"dailyTime\": 0}}) def resetWeekly(): \"\"\" Resets weekly time of all members \"\"\" collection.update_many({},", "{\"_id\": str(member.id)}, { \"$set\": { \"startTime\": now, \"name#\": str(member.name + \"#\" + member.discriminator)", "study channel :param after_flag: The flag after the member joined the study channel", "study channel. 
:param member: The member who joined the study channel :param before_flag:", "0}} ) def update_join(member: discord.Member, _before_flag, _after_flag): \"\"\" Updates join data for existing", "time of all members \"\"\" collection.update_many({}, {\"$set\": {\"dailyTime\": 0}}) def resetWeekly(): \"\"\" Resets", "if int(now.day) == 1 else difference else: difference = current_minutes - join_minutes daily_time", "study channel \"\"\" if before_flag == after_flag: return user_exist = str(collection.find_one({\"_id\": str(member.id)})) if", ") def update_join(member: discord.Member, _before_flag, _after_flag): \"\"\" Updates join data for existing members", "\"memberTime\": 0, \"monthlyTime\": 0, \"weeklyTime\": 0, \"dailyTime\": 0, \"startTime\": now, \"name#\": str(member.name +", "+ int(current_minutes) if current_minutes < join_minutes: daily_time = current_minutes difference = (1440 -", "flag before the member joined the study channel :param _after_flag: The flag after", "the member joined the study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") post =", "the member joined the study channel :param _after_flag: The flag after the member", "\"name#\": str(member.name + \"#\" + member.discriminator) } } ) def add(member: discord.Member, _before_flag,", ") return list(collection.find({}).sort( \"dailyTime\", pymongo.DESCENDING) )[:10] def weekly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"weeklyTime\",", "\"monthlyTime\": int(monthly_time), \"weeklyTime\": int(weekly_time), \"dailyTime\": int(daily_time) } } ) collection.update_one( {\"_id\": str(member.id)}, {\"$set\":", "the member joined the study channel :param after_flag: The flag after the member", "return list(collection.find({}).sort( \"memberTime\", pymongo.DESCENDING) )[:10] def member_details(member_id) -> Optional[JsonData]: member = collection.find_one({\"_id\": member_id})", "def member_details(member_id) -> Optional[JsonData]: member = 
collection.find_one({\"_id\": member_id}) return member if str(member) !=", "\"_id\": str(member.id), \"memberTime\": 0, \"monthlyTime\": 0, \"weeklyTime\": 0, \"dailyTime\": 0, \"startTime\": now, \"name#\":", "before_flag, after_flag): \"\"\" Called once member joins study channel. :param member: The member", "channel :param after_flag: The flag after the member joined the study channel \"\"\"", "flag after the member joined the study channel \"\"\" now: str = datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\")", "0, \"monthlyTime\": 0, \"weeklyTime\": 0, \"dailyTime\": 0, \"startTime\": now, \"name#\": str(member.name + \"#\"", "{\"startTime\": 0}} ) def update_join(member: discord.Member, _before_flag, _after_flag): \"\"\" Updates join data for", "member: The member that left the voice channel. \"\"\" now: datetime = datetime.now(timezone('Asia/Kolkata'))", "member joins study channel. :param member: The member who joined the study channel", "current_minutes - join_minutes daily_time = difference weekly_time = difference monthly_time = difference collection.update_one(", "join data for existing members :param member: The member who joined the study", "int(difference), \"monthlyTime\": int(monthly_time), \"weeklyTime\": int(weekly_time), \"dailyTime\": int(daily_time) } } ) collection.update_one( {\"_id\": str(member.id)},", "int(current_hour) * 60 + int(current_minutes) if current_minutes < join_minutes: daily_time = current_minutes difference", "Union, Optional, TYPE_CHECKING import pymongo from pymongo import MongoClient from pytz import timezone", "collection.update_many({}, {\"$set\": {\"weeklyTime\": 0}}) def resetMonthly(): \"\"\" Resets monthly time of all members.", "* 60 + int(current_minutes) if current_minutes < join_minutes: daily_time = current_minutes difference =", "datetime.now(timezone('Asia/Kolkata')).strftime(\"%H:%M\") collection.update_one( {\"_id\": str(member.id)}, { \"$set\": { \"startTime\": now, \"name#\": str(member.name + 
\"#\"", "_before_flag, _after_flag): \"\"\" Updates join data for existing members :param member: The member", "List[JsonData]: return list(collection.find({}).sort( \"weeklyTime\", pymongo.DESCENDING) )[:10] def monthly_leaderboard() -> List[JsonData]: return list(collection.find({}).sort( \"monthlyTime\"," ]
[ "uniprot_list: if len(uniprot) != 6: continue uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set) with open('uniprot2string.tsv', 'r') as uniprot2string_file:", "+= 1 print('Done storing. Number of errors: {}. Mapped uniprots: {}'.format(num_errors, len(uniprot_set) -", "uniprot_set = set() with open(assay2target_fname, 'r') as assay2target_file: assay2target_file.readline() for line in assay2target_file:", "Mapped uniprots: {}'.format(num_errors, len(uniprot_set) - num_errors)) if __name__ == '__main__': ''' Assay ID\\tTarget", "line in uniprot2string_file: line = line.strip().split('\\t') uniprot = line[0] string_id = line[1] valid_string_set.add(string_id)", "uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set) with open('uniprot2string.tsv', 'r') as uniprot2string_file: valid_string_set = set() for line in", "print('Done storing. Number of errors: {}. Mapped uniprots: {}'.format(num_errors, len(uniprot_set) - num_errors)) if", "import pandas as pd import requests import urllib import argparse import urllib.request import", "as p: string_id_set = p.map(query_stringid, tqdm(uniprot_set)) num_errors = 0 with open('uniprot_without_strid.txt', 'w') as", "with open('uniprot2string.tsv', 'r') as uniprot2string_file: valid_string_set = set() for line in uniprot2string_file: line", "uniprots: {}'.format(num_errors, len(uniprot_set) - num_errors)) if __name__ == '__main__': ''' Assay ID\\tTarget ID\\tTarget", "'' def store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing mapping from uniprot to string to uniprot2string.tsv...') with Pool(args.n_proc)", "string_id_set = p.map(query_stringid, tqdm(uniprot_set)) num_errors = 0 with open('uniprot_without_strid.txt', 'w') as r, open('uniprot2string.tsv',", "= conn.read().decode(\"utf-8\") except HTTPError: data = '' if data: root = ET.fromstring(data) string_id_result", "in uniprot2string_file: line = line.strip().split('\\t') uniprot = line[0] string_id = 
line[1] valid_string_set.add(string_id) mapping_to_string_API(valid_string_set)", "\"/\".join([string_api_url, output_format, method]) print('request_url\\t', request_url) valid_string_set = list(valid_string_set) params = { \"identifiers\": \"%0d\".join(valid_string_set),", "string_id_result.text print('error on {}: {}'.format(uniprot, data)) return '' def store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing mapping from", "target information') args = parser.parse_args() def mapping_to_string_API(valid_string_set): string_api_url = \"https://version-11-0.string-db.org/api\" output_format = \"tsv-no-header\"", "Pool from tqdm import tqdm from time import sleep from requests.models import HTTPError", "argparse.ArgumentParser() parser.add_argument('--n-proc', type=int, default=12, help='number of processes to run when downloading assay &", "open(assay2target_fname, 'r') as assay2target_file: assay2target_file.readline() for line in assay2target_file: line = line.strip().split('\\t') uniprot_list", "p2, experimental_score), file=string_ppi_file) pair_count += 1 if experimental_score > 0.2: pos_pair_count += 1", "= float(l[10]) # print(\"\\t\".join([p1, p2, \"experimentally confirmed (prob. 
%.3f)\" % experimental_score])) print('{}\\t{}\\t{}'.format(p1, p2,", "output_format, method]) print('request_url\\t', request_url) valid_string_set = list(valid_string_set) params = { \"identifiers\": \"%0d\".join(valid_string_set), #", "len(uniprot_set) - num_errors)) if __name__ == '__main__': ''' Assay ID\\tTarget ID\\tTarget Name\\tOrganism\\t{UniProt list}", "ET.fromstring(data) string_id_result = root.find('record/stringId') if string_id_result is not None: return string_id_result.text print('error on", "open('string_ppi_score.tsv', 'w') as string_ppi_file: pair_count, pos_pair_count = 0, 0 for line in response.text.strip().split(\"\\n\"):", "if data: root = ET.fromstring(data) string_id_result = root.find('record/stringId') if string_id_result is not None:", "root = ET.fromstring(data) string_id_result = root.find('record/stringId') if string_id_result is not None: return string_id_result.text", "print('{}\\t{}\\t{}'.format(p1, p2, experimental_score), file=string_ppi_file) pair_count += 1 if experimental_score > 0.2: pos_pair_count +=", "run when downloading assay & target information') args = parser.parse_args() def mapping_to_string_API(valid_string_set): string_api_url", "'\\t', pos_pair_count) print() def query_stringid(uniprot): website = 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try: with urllib.request.urlopen(website) as conn:", "(prob. %.3f)\" % experimental_score])) print('{}\\t{}\\t{}'.format(p1, p2, experimental_score), file=string_ppi_file) pair_count += 1 if experimental_score", "= '{}'.format(l[0]), '{}'.format(l[1]) experimental_score = float(l[10]) # print(\"\\t\".join([p1, p2, \"experimentally confirmed (prob. 
%.3f)\"", "to string to uniprot2string.tsv...') with Pool(args.n_proc) as p: string_id_set = p.map(query_stringid, tqdm(uniprot_set)) num_errors", "r, open('uniprot2string.tsv', 'w') as g: for uniprot, string_id in zip(uniprot_set, string_id_set): if string_id:", "num_errors = 0 with open('uniprot_without_strid.txt', 'w') as r, open('uniprot2string.tsv', 'w') as g: for", "import sleep from requests.models import HTTPError ''' http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check this:", "default=12, help='number of processes to run when downloading assay & target information') args", "https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check this: https://string-db.org/cgi/access ''' parser = argparse.ArgumentParser() parser.add_argument('--n-proc', type=int, default=12, help='number of", "downloading assay & target information') args = parser.parse_args() def mapping_to_string_API(valid_string_set): string_api_url = \"https://version-11-0.string-db.org/api\"", "= 0, 0 for line in response.text.strip().split(\"\\n\"): l = line.strip().split(\"\\t\") p1, p2 =", "num_errors)) if __name__ == '__main__': ''' Assay ID\\tTarget ID\\tTarget Name\\tOrganism\\t{UniProt list} ''' assay2target_fname", "in uniprot_list: if len(uniprot) != 6: continue uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set) with open('uniprot2string.tsv', 'r') as", "pair_count, pos_pair_count = 0, 0 for line in response.text.strip().split(\"\\n\"): l = line.strip().split(\"\\t\") p1,", "uniprot2string.tsv...') with Pool(args.n_proc) as p: string_id_set = p.map(query_stringid, tqdm(uniprot_set)) num_errors = 0 with", "of errors: {}. 
Mapped uniprots: {}'.format(num_errors, len(uniprot_set) - num_errors)) if __name__ == '__main__':", "uniprot to string to uniprot2string.tsv...') with Pool(args.n_proc) as p: string_id_set = p.map(query_stringid, tqdm(uniprot_set))", "l = line.strip().split(\"\\t\") p1, p2 = '{}'.format(l[0]), '{}'.format(l[1]) experimental_score = float(l[10]) # print(\"\\t\".join([p1,", "on {}: {}'.format(uniprot, data)) return '' def store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing mapping from uniprot to", "= requests.post(request_url, data=params) print(response) with open('string_ppi_score.tsv', 'w') as string_ppi_file: pair_count, pos_pair_count = 0,", "with open('uniprot_without_strid.txt', 'w') as r, open('uniprot2string.tsv', 'w') as g: for uniprot, string_id in", "multiprocessing import Pool from tqdm import tqdm from time import sleep from requests.models", "'\\n') num_errors += 1 print('Done storing. Number of errors: {}. Mapped uniprots: {}'.format(num_errors,", "print('Storing mapping from uniprot to string to uniprot2string.tsv...') with Pool(args.n_proc) as p: string_id_set", "string to uniprot2string.tsv...') with Pool(args.n_proc) as p: string_id_set = p.map(query_stringid, tqdm(uniprot_set)) num_errors =", "= \"network\" request_url = \"/\".join([string_api_url, output_format, method]) print('request_url\\t', request_url) valid_string_set = list(valid_string_set) params", "from tqdm import tqdm from time import sleep from requests.models import HTTPError '''", "not None: return string_id_result.text print('error on {}: {}'.format(uniprot, data)) return '' def store_mapping_from_uniprot_to_string_id(uniprot_set):", "\"%0d\".join(valid_string_set), # your protein \"species\": 9606, # species NCBI identifier } print('len of", "line.strip().split(\"\\t\") p1, p2 = '{}'.format(l[0]), '{}'.format(l[1]) experimental_score = float(l[10]) # print(\"\\t\".join([p1, p2, \"experimentally", "from uniprot to string to uniprot2string.tsv...') with 
Pool(args.n_proc) as p: string_id_set = p.map(query_stringid,", "9606, # species NCBI identifier } print('len of genes\\t', len(valid_string_set)) response = requests.post(request_url,", "line in assay2target_file: line = line.strip().split('\\t') uniprot_list = line[-1].strip().split(',') # print(uniprot_list) for uniprot", "} print('len of genes\\t', len(valid_string_set)) response = requests.post(request_url, data=params) print(response) with open('string_ppi_score.tsv', 'w')", "data=params) print(response) with open('string_ppi_score.tsv', 'w') as string_ppi_file: pair_count, pos_pair_count = 0, 0 for", "len(uniprot) != 6: continue uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set) with open('uniprot2string.tsv', 'r') as uniprot2string_file: valid_string_set =", "sleep from requests.models import HTTPError ''' http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check this: https://string-db.org/cgi/access", "if string_id: g.write('{}\\t{}\\n'.format(uniprot, string_id)) else: r.write(uniprot + '\\n') num_errors += 1 print('Done storing.", "processes to run when downloading assay & target information') args = parser.parse_args() def", "file=string_ppi_file) pair_count += 1 if experimental_score > 0.2: pos_pair_count += 1 print(pair_count, '\\t',", "to uniprot2string.tsv...') with Pool(args.n_proc) as p: string_id_set = p.map(query_stringid, tqdm(uniprot_set)) num_errors = 0", "as pd import requests import urllib import argparse import urllib.request import xml.etree.ElementTree as", "'w') as r, open('uniprot2string.tsv', 'w') as g: for uniprot, string_id in zip(uniprot_set, string_id_set):", "open('uniprot2string.tsv', 'r') as uniprot2string_file: valid_string_set = set() for line in uniprot2string_file: line =", 
"store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing mapping from uniprot to string to uniprot2string.tsv...') with Pool(args.n_proc) as p:", "import xml.etree.ElementTree as ET from multiprocessing import Pool from tqdm import tqdm from", "parser.add_argument('--n-proc', type=int, default=12, help='number of processes to run when downloading assay & target", "!= 6: continue uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set) with open('uniprot2string.tsv', 'r') as uniprot2string_file: valid_string_set = set()", "data = conn.read().decode(\"utf-8\") except HTTPError: data = '' if data: root = ET.fromstring(data)", "zip(uniprot_set, string_id_set): if string_id: g.write('{}\\t{}\\n'.format(uniprot, string_id)) else: r.write(uniprot + '\\n') num_errors += 1", "response.text.strip().split(\"\\n\"): l = line.strip().split(\"\\t\") p1, p2 = '{}'.format(l[0]), '{}'.format(l[1]) experimental_score = float(l[10]) #", "'{}'.format(l[0]), '{}'.format(l[1]) experimental_score = float(l[10]) # print(\"\\t\".join([p1, p2, \"experimentally confirmed (prob. 
%.3f)\" %", "open('uniprot2string.tsv', 'w') as g: for uniprot, string_id in zip(uniprot_set, string_id_set): if string_id: g.write('{}\\t{}\\n'.format(uniprot,", "0.2: pos_pair_count += 1 print(pair_count, '\\t', pos_pair_count) print() def query_stringid(uniprot): website = 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot)", "for line in uniprot2string_file: line = line.strip().split('\\t') uniprot = line[0] string_id = line[1]", "tqdm import tqdm from time import sleep from requests.models import HTTPError ''' http://www.uniprot.org/uniprot/O75713", "of genes\\t', len(valid_string_set)) response = requests.post(request_url, data=params) print(response) with open('string_ppi_score.tsv', 'w') as string_ppi_file:", "ET from multiprocessing import Pool from tqdm import tqdm from time import sleep", "to run when downloading assay & target information') args = parser.parse_args() def mapping_to_string_API(valid_string_set):", "pos_pair_count = 0, 0 for line in response.text.strip().split(\"\\n\"): l = line.strip().split(\"\\t\") p1, p2", "experimental_score > 0.2: pos_pair_count += 1 print(pair_count, '\\t', pos_pair_count) print() def query_stringid(uniprot): website", "pos_pair_count += 1 print(pair_count, '\\t', pos_pair_count) print() def query_stringid(uniprot): website = 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try:", "pd import requests import urllib import argparse import urllib.request import xml.etree.ElementTree as ET", "== '__main__': ''' Assay ID\\tTarget ID\\tTarget Name\\tOrganism\\t{UniProt list} ''' assay2target_fname = 'assay2target.tsv' uniprot_set", "= line.strip().split('\\t') uniprot_list = line[-1].strip().split(',') # print(uniprot_list) for uniprot in uniprot_list: if len(uniprot)", "Number of errors: {}. 
Mapped uniprots: {}'.format(num_errors, len(uniprot_set) - num_errors)) if __name__ ==", "'' if data: root = ET.fromstring(data) string_id_result = root.find('record/stringId') if string_id_result is not", "'r') as uniprot2string_file: valid_string_set = set() for line in uniprot2string_file: line = line.strip().split('\\t')", "valid_string_set = set() for line in uniprot2string_file: line = line.strip().split('\\t') uniprot = line[0]", "your protein \"species\": 9606, # species NCBI identifier } print('len of genes\\t', len(valid_string_set))", "\"tsv-no-header\" method = \"network\" request_url = \"/\".join([string_api_url, output_format, method]) print('request_url\\t', request_url) valid_string_set =", "website = 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try: with urllib.request.urlopen(website) as conn: data = conn.read().decode(\"utf-8\") except HTTPError:", "print('len of genes\\t', len(valid_string_set)) response = requests.post(request_url, data=params) print(response) with open('string_ppi_score.tsv', 'w') as", "''' assay2target_fname = 'assay2target.tsv' uniprot_set = set() with open(assay2target_fname, 'r') as assay2target_file: assay2target_file.readline()", "data)) return '' def store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing mapping from uniprot to string to uniprot2string.tsv...')", "for line in response.text.strip().split(\"\\n\"): l = line.strip().split(\"\\t\") p1, p2 = '{}'.format(l[0]), '{}'.format(l[1]) experimental_score", "p2 = '{}'.format(l[0]), '{}'.format(l[1]) experimental_score = float(l[10]) # print(\"\\t\".join([p1, p2, \"experimentally confirmed (prob.", "data = '' if data: root = ET.fromstring(data) string_id_result = root.find('record/stringId') if string_id_result", "list} ''' assay2target_fname = 'assay2target.tsv' uniprot_set = set() with open(assay2target_fname, 'r') as assay2target_file:", "''' parser = argparse.ArgumentParser() 
parser.add_argument('--n-proc', type=int, default=12, help='number of processes to run when", "http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check this: https://string-db.org/cgi/access ''' parser = argparse.ArgumentParser() parser.add_argument('--n-proc', type=int,", "= line.strip().split(\"\\t\") p1, p2 = '{}'.format(l[0]), '{}'.format(l[1]) experimental_score = float(l[10]) # print(\"\\t\".join([p1, p2,", "= 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try: with urllib.request.urlopen(website) as conn: data = conn.read().decode(\"utf-8\") except HTTPError: data", "print(pair_count, '\\t', pos_pair_count) print() def query_stringid(uniprot): website = 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try: with urllib.request.urlopen(website) as", "string_id_result = root.find('record/stringId') if string_id_result is not None: return string_id_result.text print('error on {}:", "{}'.format(uniprot, data)) return '' def store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing mapping from uniprot to string to", "query_stringid(uniprot): website = 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try: with urllib.request.urlopen(website) as conn: data = conn.read().decode(\"utf-8\") except", "experimental_score), file=string_ppi_file) pair_count += 1 if experimental_score > 0.2: pos_pair_count += 1 print(pair_count,", "if len(uniprot) != 6: continue uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set) with open('uniprot2string.tsv', 'r') as uniprot2string_file: valid_string_set", "print(\"\\t\".join([p1, p2, \"experimentally confirmed (prob. 
%.3f)\" % experimental_score])) print('{}\\t{}\\t{}'.format(p1, p2, experimental_score), file=string_ppi_file) pair_count", "https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check this: https://string-db.org/cgi/access ''' parser = argparse.ArgumentParser() parser.add_argument('--n-proc', type=int, default=12, help='number", "requests.post(request_url, data=params) print(response) with open('string_ppi_score.tsv', 'w') as string_ppi_file: pair_count, pos_pair_count = 0, 0", "0, 0 for line in response.text.strip().split(\"\\n\"): l = line.strip().split(\"\\t\") p1, p2 = '{}'.format(l[0]),", "valid_string_set = list(valid_string_set) params = { \"identifiers\": \"%0d\".join(valid_string_set), # your protein \"species\": 9606,", "argparse import urllib.request import xml.etree.ElementTree as ET from multiprocessing import Pool from tqdm", "p.map(query_stringid, tqdm(uniprot_set)) num_errors = 0 with open('uniprot_without_strid.txt', 'w') as r, open('uniprot2string.tsv', 'w') as", "%.3f)\" % experimental_score])) print('{}\\t{}\\t{}'.format(p1, p2, experimental_score), file=string_ppi_file) pair_count += 1 if experimental_score >", "= \"/\".join([string_api_url, output_format, method]) print('request_url\\t', request_url) valid_string_set = list(valid_string_set) params = { \"identifiers\":", "print() def query_stringid(uniprot): website = 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try: with urllib.request.urlopen(website) as conn: data =", "errors: {}. Mapped uniprots: {}'.format(num_errors, len(uniprot_set) - num_errors)) if __name__ == '__main__': '''", "p2, \"experimentally confirmed (prob. %.3f)\" % experimental_score])) print('{}\\t{}\\t{}'.format(p1, p2, experimental_score), file=string_ppi_file) pair_count +=", "+ '\\n') num_errors += 1 print('Done storing. Number of errors: {}. 
Mapped uniprots:", "if __name__ == '__main__': ''' Assay ID\\tTarget ID\\tTarget Name\\tOrganism\\t{UniProt list} ''' assay2target_fname =", "as conn: data = conn.read().decode(\"utf-8\") except HTTPError: data = '' if data: root", "print('request_url\\t', request_url) valid_string_set = list(valid_string_set) params = { \"identifiers\": \"%0d\".join(valid_string_set), # your protein", "store_mapping_from_uniprot_to_string_id(uniprot_set) with open('uniprot2string.tsv', 'r') as uniprot2string_file: valid_string_set = set() for line in uniprot2string_file:", "conn.read().decode(\"utf-8\") except HTTPError: data = '' if data: root = ET.fromstring(data) string_id_result =", "assay2target_fname = 'assay2target.tsv' uniprot_set = set() with open(assay2target_fname, 'r') as assay2target_file: assay2target_file.readline() for", "print(response) with open('string_ppi_score.tsv', 'w') as string_ppi_file: pair_count, pos_pair_count = 0, 0 for line", "string_id_set): if string_id: g.write('{}\\t{}\\n'.format(uniprot, string_id)) else: r.write(uniprot + '\\n') num_errors += 1 print('Done", "urllib.request.urlopen(website) as conn: data = conn.read().decode(\"utf-8\") except HTTPError: data = '' if data:", "request_url = \"/\".join([string_api_url, output_format, method]) print('request_url\\t', request_url) valid_string_set = list(valid_string_set) params = {", "= parser.parse_args() def mapping_to_string_API(valid_string_set): string_api_url = \"https://version-11-0.string-db.org/api\" output_format = \"tsv-no-header\" method = \"network\"", "string_id)) else: r.write(uniprot + '\\n') num_errors += 1 print('Done storing. 
Number of errors:", "& target information') args = parser.parse_args() def mapping_to_string_API(valid_string_set): string_api_url = \"https://version-11-0.string-db.org/api\" output_format =", "time import sleep from requests.models import HTTPError ''' http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check", "{}. Mapped uniprots: {}'.format(num_errors, len(uniprot_set) - num_errors)) if __name__ == '__main__': ''' Assay", "\"species\": 9606, # species NCBI identifier } print('len of genes\\t', len(valid_string_set)) response =", "None: return string_id_result.text print('error on {}: {}'.format(uniprot, data)) return '' def store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing", "line in response.text.strip().split(\"\\n\"): l = line.strip().split(\"\\t\") p1, p2 = '{}'.format(l[0]), '{}'.format(l[1]) experimental_score =", "with urllib.request.urlopen(website) as conn: data = conn.read().decode(\"utf-8\") except HTTPError: data = '' if", "if string_id_result is not None: return string_id_result.text print('error on {}: {}'.format(uniprot, data)) return", "conn: data = conn.read().decode(\"utf-8\") except HTTPError: data = '' if data: root =", "mapping_to_string_API(valid_string_set): string_api_url = \"https://version-11-0.string-db.org/api\" output_format = \"tsv-no-header\" method = \"network\" request_url = \"/\".join([string_api_url,", "'r') as assay2target_file: assay2target_file.readline() for line in assay2target_file: line = line.strip().split('\\t') uniprot_list =", "with open(assay2target_fname, 'r') as assay2target_file: assay2target_file.readline() for line in assay2target_file: line = line.strip().split('\\t')", "as ET from multiprocessing import Pool from tqdm import tqdm from time import", "= p.map(query_stringid, tqdm(uniprot_set)) num_errors = 0 with 
open('uniprot_without_strid.txt', 'w') as r, open('uniprot2string.tsv', 'w')", "'w') as g: for uniprot, string_id in zip(uniprot_set, string_id_set): if string_id: g.write('{}\\t{}\\n'.format(uniprot, string_id))", "def query_stringid(uniprot): website = 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try: with urllib.request.urlopen(website) as conn: data = conn.read().decode(\"utf-8\")", "experimental_score])) print('{}\\t{}\\t{}'.format(p1, p2, experimental_score), file=string_ppi_file) pair_count += 1 if experimental_score > 0.2: pos_pair_count", "def store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing mapping from uniprot to string to uniprot2string.tsv...') with Pool(args.n_proc) as", "as uniprot2string_file: valid_string_set = set() for line in uniprot2string_file: line = line.strip().split('\\t') uniprot", "import urllib.request import xml.etree.ElementTree as ET from multiprocessing import Pool from tqdm import", "string_id: g.write('{}\\t{}\\n'.format(uniprot, string_id)) else: r.write(uniprot + '\\n') num_errors += 1 print('Done storing. 
Number", "len(valid_string_set)) response = requests.post(request_url, data=params) print(response) with open('string_ppi_score.tsv', 'w') as string_ppi_file: pair_count, pos_pair_count", "{}: {}'.format(uniprot, data)) return '' def store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing mapping from uniprot to string", "this: https://string-db.org/cgi/access ''' parser = argparse.ArgumentParser() parser.add_argument('--n-proc', type=int, default=12, help='number of processes to", "pair_count += 1 if experimental_score > 0.2: pos_pair_count += 1 print(pair_count, '\\t', pos_pair_count)", "= list(valid_string_set) params = { \"identifiers\": \"%0d\".join(valid_string_set), # your protein \"species\": 9606, #", "with Pool(args.n_proc) as p: string_id_set = p.map(query_stringid, tqdm(uniprot_set)) num_errors = 0 with open('uniprot_without_strid.txt',", "return string_id_result.text print('error on {}: {}'.format(uniprot, data)) return '' def store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing mapping", "as r, open('uniprot2string.tsv', 'w') as g: for uniprot, string_id in zip(uniprot_set, string_id_set): if", "uniprot2string_file: valid_string_set = set() for line in uniprot2string_file: line = line.strip().split('\\t') uniprot =", "line.strip().split('\\t') uniprot_list = line[-1].strip().split(',') # print(uniprot_list) for uniprot in uniprot_list: if len(uniprot) !=", "urllib import argparse import urllib.request import xml.etree.ElementTree as ET from multiprocessing import Pool", "print(uniprot_list) for uniprot in uniprot_list: if len(uniprot) != 6: continue uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set) with", "species NCBI identifier } print('len of genes\\t', len(valid_string_set)) response = requests.post(request_url, data=params) print(response)", "when downloading assay & target information') args = parser.parse_args() def mapping_to_string_API(valid_string_set): string_api_url =", 
"xml.etree.ElementTree as ET from multiprocessing import Pool from tqdm import tqdm from time", "mapping from uniprot to string to uniprot2string.tsv...') with Pool(args.n_proc) as p: string_id_set =", "import HTTPError ''' http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check this: https://string-db.org/cgi/access ''' parser =", "string_id in zip(uniprot_set, string_id_set): if string_id: g.write('{}\\t{}\\n'.format(uniprot, string_id)) else: r.write(uniprot + '\\n') num_errors", "p: string_id_set = p.map(query_stringid, tqdm(uniprot_set)) num_errors = 0 with open('uniprot_without_strid.txt', 'w') as r,", "response = requests.post(request_url, data=params) print(response) with open('string_ppi_score.tsv', 'w') as string_ppi_file: pair_count, pos_pair_count =", "requests import urllib import argparse import urllib.request import xml.etree.ElementTree as ET from multiprocessing", "g: for uniprot, string_id in zip(uniprot_set, string_id_set): if string_id: g.write('{}\\t{}\\n'.format(uniprot, string_id)) else: r.write(uniprot", "HTTPError ''' http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check this: https://string-db.org/cgi/access ''' parser = argparse.ArgumentParser()", "data: root = ET.fromstring(data) string_id_result = root.find('record/stringId') if string_id_result is not None: return", "6: continue uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set) with open('uniprot2string.tsv', 'r') as uniprot2string_file: valid_string_set = set() for", "information') args = parser.parse_args() def mapping_to_string_API(valid_string_set): string_api_url = 
\"https://version-11-0.string-db.org/api\" output_format = \"tsv-no-header\" method", "set() for line in uniprot2string_file: line = line.strip().split('\\t') uniprot = line[0] string_id =", "pos_pair_count) print() def query_stringid(uniprot): website = 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try: with urllib.request.urlopen(website) as conn: data", "identifier } print('len of genes\\t', len(valid_string_set)) response = requests.post(request_url, data=params) print(response) with open('string_ppi_score.tsv',", "of processes to run when downloading assay & target information') args = parser.parse_args()", "check this: https://string-db.org/cgi/access ''' parser = argparse.ArgumentParser() parser.add_argument('--n-proc', type=int, default=12, help='number of processes", "string_id_result is not None: return string_id_result.text print('error on {}: {}'.format(uniprot, data)) return ''", "print('error on {}: {}'.format(uniprot, data)) return '' def store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing mapping from uniprot", "try: with urllib.request.urlopen(website) as conn: data = conn.read().decode(\"utf-8\") except HTTPError: data = ''", "\"experimentally confirmed (prob. 
%.3f)\" % experimental_score])) print('{}\\t{}\\t{}'.format(p1, p2, experimental_score), file=string_ppi_file) pair_count += 1", "= '' if data: root = ET.fromstring(data) string_id_result = root.find('record/stringId') if string_id_result is", "in zip(uniprot_set, string_id_set): if string_id: g.write('{}\\t{}\\n'.format(uniprot, string_id)) else: r.write(uniprot + '\\n') num_errors +=", "from multiprocessing import Pool from tqdm import tqdm from time import sleep from", "def mapping_to_string_API(valid_string_set): string_api_url = \"https://version-11-0.string-db.org/api\" output_format = \"tsv-no-header\" method = \"network\" request_url =", "list(valid_string_set) params = { \"identifiers\": \"%0d\".join(valid_string_set), # your protein \"species\": 9606, # species", "args = parser.parse_args() def mapping_to_string_API(valid_string_set): string_api_url = \"https://version-11-0.string-db.org/api\" output_format = \"tsv-no-header\" method =", "= \"tsv-no-header\" method = \"network\" request_url = \"/\".join([string_api_url, output_format, method]) print('request_url\\t', request_url) valid_string_set", "requests.models import HTTPError ''' http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check this: https://string-db.org/cgi/access ''' parser", "for uniprot, string_id in zip(uniprot_set, string_id_set): if string_id: g.write('{}\\t{}\\n'.format(uniprot, string_id)) else: r.write(uniprot +", "assay2target_file: line = line.strip().split('\\t') uniprot_list = line[-1].strip().split(',') # print(uniprot_list) for uniprot in uniprot_list:", "output_format = \"tsv-no-header\" method = \"network\" request_url = \"/\".join([string_api_url, output_format, method]) print('request_url\\t', request_url)", "0 with open('uniprot_without_strid.txt', 'w') as r, open('uniprot2string.tsv', 'w') 
as g: for uniprot, string_id", "uniprot in uniprot_list: if len(uniprot) != 6: continue uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set) with open('uniprot2string.tsv', 'r')", "= { \"identifiers\": \"%0d\".join(valid_string_set), # your protein \"species\": 9606, # species NCBI identifier", "float(l[10]) # print(\"\\t\".join([p1, p2, \"experimentally confirmed (prob. %.3f)\" % experimental_score])) print('{}\\t{}\\t{}'.format(p1, p2, experimental_score),", "pandas as pd import requests import urllib import argparse import urllib.request import xml.etree.ElementTree", "return '' def store_mapping_from_uniprot_to_string_id(uniprot_set): print('Storing mapping from uniprot to string to uniprot2string.tsv...') with", "''' http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check this: https://string-db.org/cgi/access ''' parser = argparse.ArgumentParser() parser.add_argument('--n-proc',", "is not None: return string_id_result.text print('error on {}: {}'.format(uniprot, data)) return '' def", "# your protein \"species\": 9606, # species NCBI identifier } print('len of genes\\t',", "0 for line in response.text.strip().split(\"\\n\"): l = line.strip().split(\"\\t\") p1, p2 = '{}'.format(l[0]), '{}'.format(l[1])", "parser.parse_args() def mapping_to_string_API(valid_string_set): string_api_url = \"https://version-11-0.string-db.org/api\" output_format = \"tsv-no-header\" method = \"network\" request_url", "1 if experimental_score > 0.2: pos_pair_count += 1 print(pair_count, '\\t', pos_pair_count) print() def", "for uniprot in uniprot_list: if len(uniprot) != 6: continue uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set) with open('uniprot2string.tsv',", "tqdm(uniprot_set)) num_errors = 0 with open('uniprot_without_strid.txt', 'w') as r, 
open('uniprot2string.tsv', 'w') as g:", "= \"https://version-11-0.string-db.org/api\" output_format = \"tsv-no-header\" method = \"network\" request_url = \"/\".join([string_api_url, output_format, method])", "urllib.request import xml.etree.ElementTree as ET from multiprocessing import Pool from tqdm import tqdm", "type=int, default=12, help='number of processes to run when downloading assay & target information')", "g.write('{}\\t{}\\n'.format(uniprot, string_id)) else: r.write(uniprot + '\\n') num_errors += 1 print('Done storing. Number of", "\"https://version-11-0.string-db.org/api\" output_format = \"tsv-no-header\" method = \"network\" request_url = \"/\".join([string_api_url, output_format, method]) print('request_url\\t',", "'__main__': ''' Assay ID\\tTarget ID\\tTarget Name\\tOrganism\\t{UniProt list} ''' assay2target_fname = 'assay2target.tsv' uniprot_set =", "in assay2target_file: line = line.strip().split('\\t') uniprot_list = line[-1].strip().split(',') # print(uniprot_list) for uniprot in", "ID\\tTarget Name\\tOrganism\\t{UniProt list} ''' assay2target_fname = 'assay2target.tsv' uniprot_set = set() with open(assay2target_fname, 'r')", "'assay2target.tsv' uniprot_set = set() with open(assay2target_fname, 'r') as assay2target_file: assay2target_file.readline() for line in", "import argparse import urllib.request import xml.etree.ElementTree as ET from multiprocessing import Pool from", "r.write(uniprot + '\\n') num_errors += 1 print('Done storing. Number of errors: {}. 
Mapped", "ID\\tTarget ID\\tTarget Name\\tOrganism\\t{UniProt list} ''' assay2target_fname = 'assay2target.tsv' uniprot_set = set() with open(assay2target_fname,", "parser = argparse.ArgumentParser() parser.add_argument('--n-proc', type=int, default=12, help='number of processes to run when downloading", "string_api_url = \"https://version-11-0.string-db.org/api\" output_format = \"tsv-no-header\" method = \"network\" request_url = \"/\".join([string_api_url, output_format,", "except HTTPError: data = '' if data: root = ET.fromstring(data) string_id_result = root.find('record/stringId')", "import Pool from tqdm import tqdm from time import sleep from requests.models import", "+= 1 print(pair_count, '\\t', pos_pair_count) print() def query_stringid(uniprot): website = 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try: with", "Pool(args.n_proc) as p: string_id_set = p.map(query_stringid, tqdm(uniprot_set)) num_errors = 0 with open('uniprot_without_strid.txt', 'w')", "= set() with open(assay2target_fname, 'r') as assay2target_file: assay2target_file.readline() for line in assay2target_file: line", "else: r.write(uniprot + '\\n') num_errors += 1 print('Done storing. 
Number of errors: {}.", "line = line.strip().split('\\t') uniprot_list = line[-1].strip().split(',') # print(uniprot_list) for uniprot in uniprot_list: if", "= 'assay2target.tsv' uniprot_set = set() with open(assay2target_fname, 'r') as assay2target_file: assay2target_file.readline() for line", "= 0 with open('uniprot_without_strid.txt', 'w') as r, open('uniprot2string.tsv', 'w') as g: for uniprot,", "with open('string_ppi_score.tsv', 'w') as string_ppi_file: pair_count, pos_pair_count = 0, 0 for line in", "+= 1 if experimental_score > 0.2: pos_pair_count += 1 print(pair_count, '\\t', pos_pair_count) print()", "uniprot_list = line[-1].strip().split(',') # print(uniprot_list) for uniprot in uniprot_list: if len(uniprot) != 6:", "as assay2target_file: assay2target_file.readline() for line in assay2target_file: line = line.strip().split('\\t') uniprot_list = line[-1].strip().split(',')", "<filename>datasets/ChEMBL_STRING/step_02.py<gh_stars>0 import pandas as pd import requests import urllib import argparse import urllib.request", "http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check this: https://string-db.org/cgi/access ''' parser = argparse.ArgumentParser() parser.add_argument('--n-proc', type=int, default=12,", "string_ppi_file: pair_count, pos_pair_count = 0, 0 for line in response.text.strip().split(\"\\n\"): l = line.strip().split(\"\\t\")", "protein \"species\": 9606, # species NCBI identifier } print('len of genes\\t', len(valid_string_set)) response", "num_errors += 1 print('Done storing. Number of errors: {}. 
Mapped uniprots: {}'.format(num_errors, len(uniprot_set)", "if experimental_score > 0.2: pos_pair_count += 1 print(pair_count, '\\t', pos_pair_count) print() def query_stringid(uniprot):", "# print(uniprot_list) for uniprot in uniprot_list: if len(uniprot) != 6: continue uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set)", "method = \"network\" request_url = \"/\".join([string_api_url, output_format, method]) print('request_url\\t', request_url) valid_string_set = list(valid_string_set)", "1 print('Done storing. Number of errors: {}. Mapped uniprots: {}'.format(num_errors, len(uniprot_set) - num_errors))", "# print(\"\\t\".join([p1, p2, \"experimentally confirmed (prob. %.3f)\" % experimental_score])) print('{}\\t{}\\t{}'.format(p1, p2, experimental_score), file=string_ppi_file)", "set() with open(assay2target_fname, 'r') as assay2target_file: assay2target_file.readline() for line in assay2target_file: line =", "import urllib import argparse import urllib.request import xml.etree.ElementTree as ET from multiprocessing import", "= root.find('record/stringId') if string_id_result is not None: return string_id_result.text print('error on {}: {}'.format(uniprot,", "for line in assay2target_file: line = line.strip().split('\\t') uniprot_list = line[-1].strip().split(',') # print(uniprot_list) for", "confirmed (prob. 
%.3f)\" % experimental_score])) print('{}\\t{}\\t{}'.format(p1, p2, experimental_score), file=string_ppi_file) pair_count += 1 if", "root.find('record/stringId') if string_id_result is not None: return string_id_result.text print('error on {}: {}'.format(uniprot, data))", "= argparse.ArgumentParser() parser.add_argument('--n-proc', type=int, default=12, help='number of processes to run when downloading assay", "__name__ == '__main__': ''' Assay ID\\tTarget ID\\tTarget Name\\tOrganism\\t{UniProt list} ''' assay2target_fname = 'assay2target.tsv'", "tqdm from time import sleep from requests.models import HTTPError ''' http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2", "\"network\" request_url = \"/\".join([string_api_url, output_format, method]) print('request_url\\t', request_url) valid_string_set = list(valid_string_set) params =", "help='number of processes to run when downloading assay & target information') args =", "in response.text.strip().split(\"\\n\"): l = line.strip().split(\"\\t\") p1, p2 = '{}'.format(l[0]), '{}'.format(l[1]) experimental_score = float(l[10])", "assay2target_file.readline() for line in assay2target_file: line = line.strip().split('\\t') uniprot_list = line[-1].strip().split(',') # print(uniprot_list)", "'w') as string_ppi_file: pair_count, pos_pair_count = 0, 0 for line in response.text.strip().split(\"\\n\"): l", "% experimental_score])) print('{}\\t{}\\t{}'.format(p1, p2, experimental_score), file=string_ppi_file) pair_count += 1 if experimental_score > 0.2:", "{}'.format(num_errors, len(uniprot_set) - num_errors)) if __name__ == '__main__': ''' Assay ID\\tTarget ID\\tTarget Name\\tOrganism\\t{UniProt", "'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try: with urllib.request.urlopen(website) as conn: data = conn.read().decode(\"utf-8\") except HTTPError: data =", "# species NCBI identifier } print('len 
of genes\\t', len(valid_string_set)) response = requests.post(request_url, data=params)", "import tqdm from time import sleep from requests.models import HTTPError ''' http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2", "- num_errors)) if __name__ == '__main__': ''' Assay ID\\tTarget ID\\tTarget Name\\tOrganism\\t{UniProt list} '''", "assay2target_file: assay2target_file.readline() for line in assay2target_file: line = line.strip().split('\\t') uniprot_list = line[-1].strip().split(',') #", "= line[-1].strip().split(',') # print(uniprot_list) for uniprot in uniprot_list: if len(uniprot) != 6: continue", "Assay ID\\tTarget ID\\tTarget Name\\tOrganism\\t{UniProt list} ''' assay2target_fname = 'assay2target.tsv' uniprot_set = set() with", "= ET.fromstring(data) string_id_result = root.find('record/stringId') if string_id_result is not None: return string_id_result.text print('error", "= set() for line in uniprot2string_file: line = line.strip().split('\\t') uniprot = line[0] string_id", "import requests import urllib import argparse import urllib.request import xml.etree.ElementTree as ET from", "NCBI identifier } print('len of genes\\t', len(valid_string_set)) response = requests.post(request_url, data=params) print(response) with", "uniprot, string_id in zip(uniprot_set, string_id_set): if string_id: g.write('{}\\t{}\\n'.format(uniprot, string_id)) else: r.write(uniprot + '\\n')", "https://string-db.org/cgi/access ''' parser = argparse.ArgumentParser() parser.add_argument('--n-proc', type=int, default=12, help='number of processes to run", "as string_ppi_file: pair_count, pos_pair_count = 0, 0 for line in response.text.strip().split(\"\\n\"): l =", "''' Assay ID\\tTarget ID\\tTarget Name\\tOrganism\\t{UniProt list} ''' assay2target_fname = 'assay2target.tsv' uniprot_set = set()", "p1, p2 = '{}'.format(l[0]), '{}'.format(l[1]) experimental_score = float(l[10]) # print(\"\\t\".join([p1, p2, \"experimentally confirmed", "request_url) 
valid_string_set = list(valid_string_set) params = { \"identifiers\": \"%0d\".join(valid_string_set), # your protein \"species\":", "method]) print('request_url\\t', request_url) valid_string_set = list(valid_string_set) params = { \"identifiers\": \"%0d\".join(valid_string_set), # your", "1 print(pair_count, '\\t', pos_pair_count) print() def query_stringid(uniprot): website = 'https://version-11-0.string-db.org/api/xml/get_string_ids?identifiers={}'.format(uniprot) try: with urllib.request.urlopen(website)", "assay & target information') args = parser.parse_args() def mapping_to_string_API(valid_string_set): string_api_url = \"https://version-11-0.string-db.org/api\" output_format", "experimental_score = float(l[10]) # print(\"\\t\".join([p1, p2, \"experimentally confirmed (prob. %.3f)\" % experimental_score])) print('{}\\t{}\\t{}'.format(p1,", "open('uniprot_without_strid.txt', 'w') as r, open('uniprot2string.tsv', 'w') as g: for uniprot, string_id in zip(uniprot_set,", "line[-1].strip().split(',') # print(uniprot_list) for uniprot in uniprot_list: if len(uniprot) != 6: continue uniprot_set.add(uniprot)", "params = { \"identifiers\": \"%0d\".join(valid_string_set), # your protein \"species\": 9606, # species NCBI", "storing. Number of errors: {}. 
Mapped uniprots: {}'.format(num_errors, len(uniprot_set) - num_errors)) if __name__", "> 0.2: pos_pair_count += 1 print(pair_count, '\\t', pos_pair_count) print() def query_stringid(uniprot): website =", "from requests.models import HTTPError ''' http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters] check this: https://string-db.org/cgi/access '''", "{ \"identifiers\": \"%0d\".join(valid_string_set), # your protein \"species\": 9606, # species NCBI identifier }", "from time import sleep from requests.models import HTTPError ''' http://www.uniprot.org/uniprot/O75713 http://www.uniprot.org/uniprot/D3DTF2 https://string-db.org/api/tsv/get_string_ids?identifiers=D3DTF2 https://string-db.org/api/json/network?identifiers=[your_identifiers]&[optional_parameters]", "continue uniprot_set.add(uniprot) store_mapping_from_uniprot_to_string_id(uniprot_set) with open('uniprot2string.tsv', 'r') as uniprot2string_file: valid_string_set = set() for line", "Name\\tOrganism\\t{UniProt list} ''' assay2target_fname = 'assay2target.tsv' uniprot_set = set() with open(assay2target_fname, 'r') as", "HTTPError: data = '' if data: root = ET.fromstring(data) string_id_result = root.find('record/stringId') if", "genes\\t', len(valid_string_set)) response = requests.post(request_url, data=params) print(response) with open('string_ppi_score.tsv', 'w') as string_ppi_file: pair_count,", "'{}'.format(l[1]) experimental_score = float(l[10]) # print(\"\\t\".join([p1, p2, \"experimentally confirmed (prob. %.3f)\" % experimental_score]))", "as g: for uniprot, string_id in zip(uniprot_set, string_id_set): if string_id: g.write('{}\\t{}\\n'.format(uniprot, string_id)) else:", "\"identifiers\": \"%0d\".join(valid_string_set), # your protein \"species\": 9606, # species NCBI identifier } print('len" ]
[ "one def union(subsets, u, v): if subsets[u].rank > subsets[v].rank: subsets[v].parent = u elif", "# The structure to represent a subset class Subset: def __init__(self, parent, rank):", "find(subsets, subsets[node].parent) return subsets[node].parent # Check if there is an cycle in the", "graph class Graph: def __init__(self, vertices): self.V = vertices self.edges = defaultdict(list) def", "g.add_edge(1, 4) g.add_edge(2, 3) g.add_edge(3, 4) g.add_edge(3, 5) if is_cycle(g): print('Cycle') else: print('Not", "subsets.append(Subset(u, 0)) # Iterate over all edges of the graph # If the", "union(subsets, x, y) def main(): g = Graph(6) g.add_edge(0, 1) g.add_edge(0, 4) g.add_edge(1,", "bigger rank becomes the parent of the smaller one # If both ranks", "i) for j in graph.edges[i]: y = find(subsets, j) if x == y:", "node): if subsets[node].parent != node: subsets[node].parent = find(subsets, subsets[node].parent) return subsets[node].parent # Check", "Subset: def __init__(self, parent, rank): self.parent = parent self.rank = rank # This", "parent self.rank = rank # This function unite sets # The bigger rank", "Iterate over all edges of the graph # If the parents of both", "over all edges of the graph # If the parents of both vertices", "in graph.edges[i]: y = find(subsets, j) if x == y: return True union(subsets,", "one as parent of the other # and increment its rank by one", "add_edge(self, u, v): self.edges[u].append(v) # The structure to represent a subset class Subset:", "parent, rank): self.parent = parent self.rank = rank # This function unite sets", "the parent of the smaller one # If both ranks are the same", "ranks are the same then make one as parent of the other #", "an cycle in the graph def is_cycle(graph): subsets = [] for u in", "self.edges[u].append(v) # The structure to represent a subset class Subset: def __init__(self, parent,", "subsets[v].parent = u elif subsets[v].rank > subsets[u].rank: subsets[u].parent = v else: subsets[v].parent =", "= 
find(subsets, subsets[node].parent) return subsets[node].parent # Check if there is an cycle in", "The structure to represent the graph class Graph: def __init__(self, vertices): self.V =", "subsets[node].parent != node: subsets[node].parent = find(subsets, subsets[node].parent) return subsets[node].parent # Check if there", "g.add_edge(3, 5) if is_cycle(g): print('Cycle') else: print('Not Cycle') if __name__ == '__main__': main()", "self.V = vertices self.edges = defaultdict(list) def add_edge(self, u, v): self.edges[u].append(v) # The", "compression if needed def find(subsets, node): if subsets[node].parent != node: subsets[node].parent = find(subsets,", "# If both ranks are the same then make one as parent of", "set's parent and make the path compression if needed def find(subsets, node): if", "# Then there is a cycle for i in graph.edges: x = find(subsets,", "__init__(self, vertices): self.V = vertices self.edges = defaultdict(list) def add_edge(self, u, v): self.edges[u].append(v)", "of both vertices are the same # Then there is a cycle for", "def __init__(self, parent, rank): self.parent = parent self.rank = rank # This function", "= v else: subsets[v].parent = u subsets[u].rank += 1 # Find the set's", "subsets[v].parent = u subsets[u].rank += 1 # Find the set's parent and make", "for j in graph.edges[i]: y = find(subsets, j) if x == y: return", "there is an cycle in the graph def is_cycle(graph): subsets = [] for", "the other # and increment its rank by one def union(subsets, u, v):", "subsets[node].parent) return subsets[node].parent # Check if there is an cycle in the graph", "True union(subsets, x, y) def main(): g = Graph(6) g.add_edge(0, 1) g.add_edge(0, 4)", "g = Graph(6) g.add_edge(0, 1) g.add_edge(0, 4) g.add_edge(1, 2) g.add_edge(1, 4) g.add_edge(2, 3)", "both ranks are the same then make one as parent of the other", "4) g.add_edge(1, 2) g.add_edge(1, 4) g.add_edge(2, 3) g.add_edge(3, 4) g.add_edge(3, 5) if is_cycle(g):", "2) g.add_edge(1, 4) 
g.add_edge(2, 3) g.add_edge(3, 4) g.add_edge(3, 5) if is_cycle(g): print('Cycle') else:", "as parent of the other # and increment its rank by one def", "= Graph(6) g.add_edge(0, 1) g.add_edge(0, 4) g.add_edge(1, 2) g.add_edge(1, 4) g.add_edge(2, 3) g.add_edge(3,", "a subset class Subset: def __init__(self, parent, rank): self.parent = parent self.rank =", "parents of both vertices are the same # Then there is a cycle", "cycle in the graph def is_cycle(graph): subsets = [] for u in range(graph.V):", "parent of the smaller one # If both ranks are the same then", "the graph # If the parents of both vertices are the same #", "in the graph def is_cycle(graph): subsets = [] for u in range(graph.V): subsets.append(Subset(u,", "vertices are the same # Then there is a cycle for i in", "rank): self.parent = parent self.rank = rank # This function unite sets #", "there is a cycle for i in graph.edges: x = find(subsets, i) for", "4) g.add_edge(3, 5) if is_cycle(g): print('Cycle') else: print('Not Cycle') if __name__ == '__main__':", "def add_edge(self, u, v): self.edges[u].append(v) # The structure to represent a subset class", "self.parent = parent self.rank = rank # This function unite sets # The", "make one as parent of the other # and increment its rank by", "Check if there is an cycle in the graph def is_cycle(graph): subsets =", "structure to represent the graph class Graph: def __init__(self, vertices): self.V = vertices", "If both ranks are the same then make one as parent of the", "then make one as parent of the other # and increment its rank", "= defaultdict(list) def add_edge(self, u, v): self.edges[u].append(v) # The structure to represent a", "make the path compression if needed def find(subsets, node): if subsets[node].parent != node:", "and increment its rank by one def union(subsets, u, v): if subsets[u].rank >", "the graph class Graph: def __init__(self, vertices): self.V = vertices self.edges = defaultdict(list)", "subset class Subset: def __init__(self, 
class Subset:
    """A single union-find node.

    Holds the node's current representative (``parent``) and the rank
    used by union-by-rank to keep the trees shallow.
    """

    def __init__(self, parent, rank):
        self.parent = parent  # index of this node's representative
        self.rank = rank      # upper bound on the height of this node's tree
class Graph:
    """Graph stored as an adjacency list.

    ``V`` is the vertex count; ``edges`` maps a vertex to the list of
    vertices it points at (lists are created lazily on first access).
    """

    def __init__(self, vertices):
        self.edges = defaultdict(list)
        self.V = vertices

    def add_edge(self, u, v):
        """Record an edge from ``u`` to ``v``."""
        self.edges[u].append(v)
# Check if there is a cycle in the graph
def is_cycle(graph):
    """Return True if the graph contains a cycle, else False.

    Union-find based: an edge whose two endpoints already share a root
    closes a cycle.
    """
    subsets = [Subset(u, 0) for u in range(graph.V)]
    # Iterate over all edges of the graph.
    # If the roots of both endpoints are the same, there is a cycle.
    for i in graph.edges:
        for j in graph.edges[i]:
            # BUG FIX: the root of ``i`` must be recomputed for every edge.
            # A union() in a previous inner iteration can re-root i's set,
            # and comparing against a stale root can miss a cycle.
            x = find(subsets, i)
            y = find(subsets, j)
            if x == y:
                return True
            union(subsets, x, y)
    return False  # explicit False instead of the previous implicit None
# Find the set's parent and make the path compression if needed
def find(subsets, node):
    """Return the root of ``node``'s set, compressing the path on the way."""
    # First pass: walk up to the root.
    root = node
    while subsets[root].parent != root:
        root = subsets[root].parent
    # Second pass: point every node on the walked path straight at the root.
    while subsets[node].parent != node:
        subsets[node].parent, node = root, subsets[node].parent
    return root
# This function unite sets
# The bigger rank becomes the parent of the smaller one
# If both ranks are the same then make one as parent of the other
# and increment its rank by one
def union(subsets, u, v):
    """Merge the sets rooted at ``u`` and ``v`` using union by rank."""
    ru, rv = subsets[u].rank, subsets[v].rank
    if ru > rv:
        subsets[v].parent = u
    elif rv > ru:
        subsets[u].parent = v
    else:
        # Equal ranks: pick u as the new root and bump its rank.
        subsets[v].parent = u
        subsets[u].rank = ru + 1
[ "Bio import cleaning from decimal import Decimal from symbols import * #from pdbtools", "for i in range(len(placed)): final[i, 0] = placed[i][1][0] final[i, 1] = placed[i][1][1] final[i,", "peptides # stores a dictionary with peptide keys that map to the atoms", "sequence to the possible sequences dictionary if len(current_sequence) == length: global POSSIBLE_SEQUENCES if", "encoding['sequence'] + '.pdb' placed = [] new_nodes = remove_padding(encoding['index']) if not use_coord: D", "amino + '.pdb') cleaning.cleanATOM(d + '/amino_acids/' + amino + '.pdb', out_file= d +", "not use_coord: D = encoding['secondary'] placed.append([new_nodes[0], (0,0,0)]) placed.append([new_nodes[1], (D[0,1],0,0)]) x = (D[1,2]**2 -", "Decimal from symbols import * #from pdbtools import pdbtools as pdb d =", "i in range(len(values)): if max_value == values[i]: amino = AA[i] find_sequence_recurs(nodes, length, current_ind", "as np import prody as pd import PeptideBuilder as pb import os import", "+ str(i+1) + ' ' string += encoding['ele_to_amino'][i][0] + find_white_space(4, encoding['ele_to_amino'][i][0]) string +=", "placed = [] new_nodes = remove_padding(encoding['index']) if not use_coord: D = encoding['secondary'] placed.append([new_nodes[0],", "else: POSSIBLE_SEQUENCES[current_value] = [current_sequence] values = [] for a in AA: values.append(heuristic(current_ind,nodes, a))", "+ placed[i][0] g.write(string + '\\n') counter -= 1 return save_loc + '/' +", "(x,y,0)]) P = placed[2][1][0]**2 + placed[2][1][1]**2 for i in range(3,len(new_nodes)): x = (D[1,i]**2", "find_white_space(7, str(i + 1)) + str(i+1) + ' ' string += encoding['ele_to_amino'][i][0] +", "like? 
def decode(encoding, save_loc = d, save_name = '', find_coord = False, use_coord = False):
    """Decode an ``encoding`` dict back into a PDB file (or coordinates).

    Parameters (inferred from use -- confirm against the encoder):
      encoding:   dict with keys 'sequence', 'index', 'secondary',
                  'coordinates' and 'ele_to_amino'.
      save_loc:   directory the .pdb file is written to (default: cwd).
      save_name:  output file name; defaults to '<sequence>.pdb'.
      find_coord: when True (and use_coord is False) return the recovered
                  Nx3 coordinate array instead of writing the file.
      use_coord:  when True take coordinates directly from
                  encoding['coordinates'] instead of triangulating them
                  from the distance matrix.

    Returns the coordinate array when find_coord is set, otherwise the
    path of the written PDB file.
    """
    # Make sure the per-amino-acid atom tables are loaded.
    if len(parsed_aa.keys()) == 0:
        parse_aa()
    if save_name == '':
        save_name = encoding['sequence'] + '.pdb'
    placed = []  # list of [atom_name, (x, y, z)] entries
    new_nodes = remove_padding(encoding['index'])
    if not use_coord:
        # D is a pairwise-distance matrix; rebuild 3-D coordinates by
        # trilateration: fix atom 0 at the origin, atom 1 on the x axis,
        # atom 2 in the xy plane, then place every later atom from its
        # distances to those three anchors.
        D = encoding['secondary']
        placed.append([new_nodes[0], (0,0,0)])
        placed.append([new_nodes[1], (D[0,1],0,0)])
        x = (D[1,2]**2 - D[0,2]**2 - D[0,1]**2)/(-2 * D[0,1])
        y = (abs(D[0,2]**2 - x**2))**(0.5)
        placed.append([new_nodes[2], (x,y,0)])
        # Squared distance of atom 2 from the origin (reused below).
        P = placed[2][1][0]**2 + placed[2][1][1]**2
        for i in range(3,len(new_nodes)):
            x = (D[1,i]**2 - D[0,i]**2 - D[0,1]**2)/(-2*D[0,1])
            y = (D[2,i]**2 - D[0,i]**2 - P + (2*x*placed[2][1][0]))/(-2*placed[2][1][1])
            # abs() guards against small negative values from numeric error.
            z = (abs(D[0,i]**2 - x**2 - y**2))**(0.5)
            placed.append([new_nodes[i], (x,y,z)])
        if find_coord:
            # Return the coordinates as an array instead of writing a file.
            final = np.zeros((len(encoding['secondary'][0]),3))
            for i in range(len(placed)):
                final[i, 0] = placed[i][1][0]
                final[i, 1] = placed[i][1][1]
                final[i, 2] = placed[i][1][2]
            return final
    else:
        # Coordinates are already present in the encoding; copy them over.
        for i in range(3,len(new_nodes)):
            placed.append([new_nodes[i], (encoding['coordinates'][i][0],encoding['coordinates'][i][1],encoding['coordinates'][i][2])])
    with open(save_loc + '/' + save_name, 'w+') as g:
        counter = 0    # atoms left in the current residue
        amino_num = 0  # 1-based residue number
        for i in range(len(placed)):
            if counter == 0:
                # Starting a new residue: reset the per-residue atom count.
                counter = len(parsed_aa[encoding['ele_to_amino'][i][1]])
                amino_num += 1
            # Assemble one fixed-width PDB ATOM record field by field.
            string = 'ATOM' #+ str(i + 1) + ' '+ encoding['seq_to_atoms'][i][0]
            string += find_white_space(7, str(i + 1)) + str(i+1) + ' '
            string += encoding['ele_to_amino'][i][0] + find_white_space(4, encoding['ele_to_amino'][i][0])
            string += AA3[AA.index(encoding['ele_to_amino'][i][1])] + ' A'
            string += find_white_space(4, str(amino_num)) + str(amino_num)
            string += find_white_space(12, str(round(Decimal(placed[i][1][0]), 3))) + str(round(Decimal(placed[i][1][0]), 3))
            string += find_white_space(8, str(round(Decimal(placed[i][1][1]), 3))) + str(round(Decimal(placed[i][1][1]), 3))
            string += find_white_space(8, str(round(Decimal(placed[i][1][2]), 3))) + str(round(Decimal(placed[i][1][2]), 3))
            string += ' 1.00 0.00'
            string += find_white_space(11, placed[i][0]) + placed[i][0]
            g.write(string + '\n')
            counter -= 1
    return save_loc + '/' + save_name
# checks the rate of correctness in heuristic efficiency
def heuristic(index, node, amino_acid):
    """Score how well the rows of ``node`` starting at ``index`` match the
    atoms of ``amino_acid``: the fraction of rows whose encoded element
    equals the atom's element (first character of the PDB atom name).
    """
    expected = parsed_aa[amino_acid]
    matches = 0
    for offset, atom in enumerate(expected):
        row = index + offset
        # Out-of-range rows simply count as mismatches.
        if row < len(node) and ELEMENT_SYMBOLS[int(node[row][0]) - 1] == atom[0]:
            matches += 1
    return float(matches / len(expected))
# returns a string of whitespace specified
def find_white_space(total_space, text):
    """Return the padding needed to right-align ``text`` in ``total_space``
    columns; empty string when ``text`` is already as wide or wider.
    """
    padding = total_space - len(text)
    return ' ' * padding if padding > 0 else ''
# parses peptides and creates file structures to store these peptides
# stores a dictionary with peptide keys that map to the atoms that make it up
def parse_aa():
    """Generate a PDB file for every amino acid in AA and cache each
    one's atom names in the module-level ``parsed_aa`` dict.
    """
    if not os.path.exists(d + '/amino_acids'):
        os.mkdir(d + '/amino_acids')
    global parsed_aa
    for amino in AA:
        pdb_path = d + '/amino_acids/' + amino + '.pdb'
        # Build the peptide with all backbone angles set to 180 degrees.
        structure = pb.make_structure(amino, [180]*len(amino), [180]*len(amino))
        # writer writes structure information to files
        writer = Bio.PDB.PDBIO()
        writer.set_structure(structure)
        writer.save(pdb_path)
        cleaning.cleanATOM(pdb_path, out_file=pdb_path, ext='.pdb')
        # Re-parse the cleaned file and record the atom names it contains.
        parsed = pd.parsePDB(pdb_path)
        parsed_aa[amino] = [str(atom.getName()) for atom in parsed.iterAtoms()]
# what are nodes? (2d array)
# returns the atoms from the given nodes
def remove_padding(nodes):
    """Translate the one-hot columns of ``nodes`` back into element indices.

    Scans columns left to right; the first column whose top five rows are
    all zero marks the start of the padding and ends the scan.
    """
    # NOTE(review): assumes the first five rows of ``nodes`` one-hot encode
    # the element type -- confirm against the encoder.
    elements = []
    column = 0
    while True:
        one_hot = nodes[0:5, column]
        if sum(one_hot) == 0:
            break  # all-zero column: the rest is padding
        elements.append(ELEMENT_INDEX[one_hot.tolist().index(1.0)])
        column += 1
    return elements
# finds all possible sequences of amino acid sequences keyed to heuristic efficiency values
def find_sequence_recurs(nodes, length, current_ind, current_sequence, current_value):
    """Recursively enumerate amino-acid sequences of ``length`` residues
    that match ``nodes`` well, recording each finished sequence in the
    module-level POSSIBLE_SEQUENCES dict keyed by its accumulated
    heuristic score.
    """
    if len(parsed_aa.keys()) == 0:
        parse_aa()
    # Record a finished sequence under its accumulated score.
    if len(current_sequence) == length:
        global POSSIBLE_SEQUENCES
        if current_value in POSSIBLE_SEQUENCES:
            POSSIBLE_SEQUENCES[current_value].append(current_sequence)
        else:
            POSSIBLE_SEQUENCES[current_value] = [current_sequence]
        # BUG FIX: stop recursing once the target length is reached.
        # Previously the function fell through and kept extending the
        # sequence; those longer sequences could never be recorded
        # (the == length test never holds again), so this only wasted work.
        return
    # Score every amino acid as the next residue candidate.
    values = [heuristic(current_ind, nodes, a) for a in AA]
    max_value = max(values)
    # Only follow candidates that match well enough (> 0.8 of atoms).
    if max_value > 0.8:
        for i in range(len(values)):
            if max_value == values[i]:
                amino = AA[i]
                find_sequence_recurs(nodes, length,
                                     current_ind + len(parsed_aa[amino]),
                                     current_sequence + amino,
                                     current_value + max_value)
(x,y,z)]) if find_coord: final = np.zeros((len(encoding['secondary'][0]),3)) for i in range(len(placed)):", "' string += encoding['ele_to_amino'][i][0] + find_white_space(4, encoding['ele_to_amino'][i][0]) string += AA3[AA.index(encoding['ele_to_amino'][i][1])] + ' A'", "for a in AA: values.append(heuristic(current_ind,nodes, a)) max_value = max(values) if max_value > 0.8:", "in parsed_aa[amino_acid]: if (index+total) < len(node) and ELEMENT_SYMBOLS[int(node[index+total][0]) - 1] == atom[0]: correct", "(2*x*placed[2][1][0]))/(-2*placed[2][1][1]) z = (abs(D[0,i]**2 - x**2 - y**2))**(0.5) placed.append([new_nodes[i], (x,y,z)]) if find_coord: final", "current = 0 # gets the currrent column of the first 5 rows", "counter = 0 amino_num = 0 for i in range(len(placed)): if counter ==", "max_value > 0.8: for i in range(len(values)): if max_value == values[i]: amino =", "all possible sequences of amino acid sequences keyed to heuristic efficiency values def", "given nodes def remove_padding(nodes): atoms = [] current = 0 # gets the", "1.00 0.00' string += find_white_space(11, placed[i][0]) + placed[i][0] g.write(string + '\\n') counter -=", "what are nodes? 
# decodes information into a pdb file # what does", "values[i]: amino = AA[i] find_sequence_recurs(nodes, length, current_ind + len(parsed_aa[amino]), current_sequence + amino, current_value", "Thu Aug 1 11:41:59 2019 @author: gemsec-user \"\"\" import numpy as np import", "string of whitespace specified def find_white_space(total_space, text): return ' '*(total_space - len(text)) POSSIBLE_SEQUENCES", "== 0: parse_aa() # adds the given value and sequence to the possible", "(D[1,i]**2 - D[0,i]**2 - D[0,1]**2)/(-2*D[0,1]) y = (D[2,i]**2 - D[0,i]**2 - P +", "= 'ATOM' #+ str(i + 1) + ' '+ encoding['seq_to_atoms'][i][0] string += find_white_space(7,", "2019 @author: gemsec-user \"\"\" import numpy as np import prody as pd import", "possible sequences dictionary if len(current_sequence) == length: global POSSIBLE_SEQUENCES if current_value in POSSIBLE_SEQUENCES:", "import * #from pdbtools import pdbtools as pdb d = os.getcwd() parsed_aa =", "= 0 for atom in parsed_aa[amino_acid]: if (index+total) < len(node) and ELEMENT_SYMBOLS[int(node[index+total][0]) -", "= AA[i] find_sequence_recurs(nodes, length, current_ind + len(parsed_aa[amino]), current_sequence + amino, current_value + max_value)", "= placed[i][1][1] final[i, 2] = placed[i][1][2] return final else: for i in range(3,len(new_nodes)):", "placed.append([new_nodes[2], (x,y,0)]) P = placed[2][1][0]**2 + placed[2][1][1]**2 for i in range(3,len(new_nodes)): x =", "placed[2][1][0]**2 + placed[2][1][1]**2 for i in range(3,len(new_nodes)): x = (D[1,i]**2 - D[0,i]**2 -", "encoding['ele_to_amino'][i][0] + find_white_space(4, encoding['ele_to_amino'][i][0]) string += AA3[AA.index(encoding['ele_to_amino'][i][1])] + ' A' string += find_white_space(4,", "correctness in heuristic efficiency def heuristic(index, node, amino_acid): correct = 0 total =", "str(round(Decimal(placed[i][1][1]), 3)) string += find_white_space(8, str(round(Decimal(placed[i][1][2]), 3))) + str(round(Decimal(placed[i][1][2]), 3)) string += '", "0: # adds 
the element index of the current node column atoms.append(ELEMENT_INDEX[col.tolist().index(1.0)]) current", "find_coord: final = np.zeros((len(encoding['secondary'][0]),3)) for i in range(len(placed)): final[i, 0] = placed[i][1][0] final[i,", "import pdbtools as pdb d = os.getcwd() parsed_aa = {} # parses peptides", "0.8: for i in range(len(values)): if max_value == values[i]: amino = AA[i] find_sequence_recurs(nodes,", "for i in range(len(placed)): if counter == 0: counter = len(parsed_aa[encoding['ele_to_amino'][i][1]]) amino_num +=", "new_nodes = remove_padding(encoding['index']) if not use_coord: D = encoding['secondary'] placed.append([new_nodes[0], (0,0,0)]) placed.append([new_nodes[1], (D[0,1],0,0)])", "gemsec-user \"\"\" import numpy as np import prody as pd import PeptideBuilder as", "POSSIBLE_SEQUENCES: POSSIBLE_SEQUENCES[current_value].append(current_sequence) else: POSSIBLE_SEQUENCES[current_value] = [current_sequence] values = [] for a in AA:", "+= AA3[AA.index(encoding['ele_to_amino'][i][1])] + ' A' string += find_white_space(4, str(amino_num)) + str(amino_num) string +=", "the given nodes def remove_padding(nodes): atoms = [] current = 0 # gets", "the first 5 rows col = nodes[0:5, current] while sum(col) != 0: #", "max_value) # returns a string of whitespace specified def find_white_space(total_space, text): return '", "-*- \"\"\" Created on Thu Aug 1 11:41:59 2019 @author: gemsec-user \"\"\" import", "3)) string += ' 1.00 0.00' string += find_white_space(11, placed[i][0]) + placed[i][0] g.write(string", "# stores a dictionary with peptide keys that map to the atoms that", "nodes def remove_padding(nodes): atoms = [] current = 0 # gets the currrent", "and sequence to the possible sequences dictionary if len(current_sequence) == length: global POSSIBLE_SEQUENCES", "on Thu Aug 1 11:41:59 2019 @author: gemsec-user \"\"\" import numpy as np", "[] for atom in temp.iterAtoms(): parsed_aa[amino].append(str(atom.getName())) # what are nodes? 
(2d array) #", "string += ' 1.00 0.00' string += find_white_space(11, placed[i][0]) + placed[i][0] g.write(string +", "parse_aa(): if not os.path.exists(d + '/amino_acids'): os.mkdir(d + '/amino_acids') global parsed_aa for amino", "'/amino_acids/' + amino + '.pdb') cleaning.cleanATOM(d + '/amino_acids/' + amino + '.pdb', out_file=", "import Decimal from symbols import * #from pdbtools import pdbtools as pdb d", "= nodes[0:5, current] return atoms # checks the rate of correctness in heuristic", "in AA: values.append(heuristic(current_ind,nodes, a)) max_value = max(values) if max_value > 0.8: for i", "atom[0]: correct += 1 total += 1 return float(correct/total) # finds all possible", "amino acid sequences keyed to heuristic efficiency values def find_sequence_recurs(nodes, length, current_ind, current_sequence,", "y = (D[2,i]**2 - D[0,i]**2 - P + (2*x*placed[2][1][0]))/(-2*placed[2][1][1]) z = (abs(D[0,i]**2 -", "into a pdb file # what does encoding look like? def decode(encoding, save_loc", "+ amino + '.pdb', out_file= d + '/amino_acids/' + amino + '.pdb', ext", "as pb import os import Bio import cleaning from decimal import Decimal from", "if (index+total) < len(node) and ELEMENT_SYMBOLS[int(node[index+total][0]) - 1] == atom[0]: correct += 1", "= placed[i][1][0] final[i, 1] = placed[i][1][1] final[i, 2] = placed[i][1][2] return final else:", "if len(parsed_aa.keys()) == 0: parse_aa() if save_name == '': save_name = encoding['sequence'] +", "< len(node) and ELEMENT_SYMBOLS[int(node[index+total][0]) - 1] == atom[0]: correct += 1 total +=", "= encoding['sequence'] + '.pdb' placed = [] new_nodes = remove_padding(encoding['index']) if not use_coord:", "range(3,len(new_nodes)): placed.append([new_nodes[i], (encoding['coordinates'][i][0],encoding['coordinates'][i][1],encoding['coordinates'][i][2])]) with open(save_loc + '/' + save_name, 'w+') as g: counter", "if counter == 0: counter = len(parsed_aa[encoding['ele_to_amino'][i][1]]) amino_num += 1 string = 'ATOM'", "if 
not os.path.exists(d + '/amino_acids'): os.mkdir(d + '/amino_acids') global parsed_aa for amino in", "to heuristic efficiency values def find_sequence_recurs(nodes, length, current_ind, current_sequence, current_value): if len(parsed_aa.keys()) ==", "the currrent column of the first 5 rows col = nodes[0:5, current] while", "+ 1) + ' '+ encoding['seq_to_atoms'][i][0] string += find_white_space(7, str(i + 1)) +", "[current_sequence] values = [] for a in AA: values.append(heuristic(current_ind,nodes, a)) max_value = max(values)", "d = os.getcwd() parsed_aa = {} # parses peptides and creates file structures", "what are nodes? (2d array) # returns the atoms from the given nodes", "float(correct/total) # finds all possible sequences of amino acid sequences keyed to heuristic", "find_white_space(11, placed[i][0]) + placed[i][0] g.write(string + '\\n') counter -= 1 return save_loc +", "pdbtools as pdb d = os.getcwd() parsed_aa = {} # parses peptides and", "+ '/amino_acids'): os.mkdir(d + '/amino_acids') global parsed_aa for amino in AA: # out", "and ELEMENT_SYMBOLS[int(node[index+total][0]) - 1] == atom[0]: correct += 1 total += 1 return", "- x**2 - y**2))**(0.5) placed.append([new_nodes[i], (x,y,z)]) if find_coord: final = np.zeros((len(encoding['secondary'][0]),3)) for i", "for atom in parsed_aa[amino_acid]: if (index+total) < len(node) and ELEMENT_SYMBOLS[int(node[index+total][0]) - 1] ==", "map to the atoms that make it up def parse_aa(): if not os.path.exists(d", "placed.append([new_nodes[1], (D[0,1],0,0)]) x = (D[1,2]**2 - D[0,2]**2 - D[0,1]**2)/(-2 * D[0,1]) y =", "- D[0,1]**2)/(-2*D[0,1]) y = (D[2,i]**2 - D[0,i]**2 - P + (2*x*placed[2][1][0]))/(-2*placed[2][1][1]) z =", "range(3,len(new_nodes)): x = (D[1,i]**2 - D[0,i]**2 - D[0,1]**2)/(-2*D[0,1]) y = (D[2,i]**2 - D[0,i]**2", "Bio.PDB.PDBIO() # i is a peptide structure from amino acid i = pb.make_structure(amino,", "i in range(3,len(new_nodes)): placed.append([new_nodes[i], 
(encoding['coordinates'][i][0],encoding['coordinates'][i][1],encoding['coordinates'][i][2])]) with open(save_loc + '/' + save_name, 'w+') as", "'/amino_acids/' + amino + '.pdb', out_file= d + '/amino_acids/' + amino + '.pdb',", "col = nodes[0:5, current] while sum(col) != 0: # adds the element index", "'+ encoding['seq_to_atoms'][i][0] string += find_white_space(7, str(i + 1)) + str(i+1) + ' '", "node column atoms.append(ELEMENT_INDEX[col.tolist().index(1.0)]) current += 1 col = nodes[0:5, current] return atoms #", "final[i, 1] = placed[i][1][1] final[i, 2] = placed[i][1][2] return final else: for i", "len(parsed_aa.keys()) == 0: parse_aa() if save_name == '': save_name = encoding['sequence'] + '.pdb'", "as pd import PeptideBuilder as pb import os import Bio import cleaning from", "pdbtools import pdbtools as pdb d = os.getcwd() parsed_aa = {} # parses", "is a peptide structure from amino acid i = pb.make_structure(amino, [180]*len(amino),[180]*len(amino)) out.set_structure(i) out.save(d", "+ '/amino_acids') global parsed_aa for amino in AA: # out writes information to", "amino_num += 1 string = 'ATOM' #+ str(i + 1) + ' '+", "for i in range(3,len(new_nodes)): x = (D[1,i]**2 - D[0,i]**2 - D[0,1]**2)/(-2*D[0,1]) y =", "= (abs(D[0,2]**2 - x**2))**(0.5) placed.append([new_nodes[2], (x,y,0)]) P = placed[2][1][0]**2 + placed[2][1][1]**2 for i", "placed.append([new_nodes[i], (encoding['coordinates'][i][0],encoding['coordinates'][i][1],encoding['coordinates'][i][2])]) with open(save_loc + '/' + save_name, 'w+') as g: counter =", "x**2))**(0.5) placed.append([new_nodes[2], (x,y,0)]) P = placed[2][1][0]**2 + placed[2][1][1]**2 for i in range(3,len(new_nodes)): x", "# decodes information into a pdb file # what does encoding look like?", "global POSSIBLE_SEQUENCES if current_value in POSSIBLE_SEQUENCES: POSSIBLE_SEQUENCES[current_value].append(current_sequence) else: POSSIBLE_SEQUENCES[current_value] = [current_sequence] values =", "= [] for atom in temp.iterAtoms(): 
parsed_aa[amino].append(str(atom.getName())) # what are nodes? (2d array)", "find_white_space(4, str(amino_num)) + str(amino_num) string += find_white_space(12, str(round(Decimal(placed[i][1][0]), 3))) + str(round(Decimal(placed[i][1][0]), 3)) string", "(abs(D[0,2]**2 - x**2))**(0.5) placed.append([new_nodes[2], (x,y,0)]) P = placed[2][1][0]**2 + placed[2][1][1]**2 for i in", "placed[i][0] g.write(string + '\\n') counter -= 1 return save_loc + '/' + save_name", "amino + '.pdb', ext = '.pdb') temp = pd.parsePDB(d + '/amino_acids/' + amino", "= pb.make_structure(amino, [180]*len(amino),[180]*len(amino)) out.set_structure(i) out.save(d + '/amino_acids/' + amino + '.pdb') cleaning.cleanATOM(d +", "range(len(values)): if max_value == values[i]: amino = AA[i] find_sequence_recurs(nodes, length, current_ind + len(parsed_aa[amino]),", "returns the atoms from the given nodes def remove_padding(nodes): atoms = [] current", "= (D[1,2]**2 - D[0,2]**2 - D[0,1]**2)/(-2 * D[0,1]) y = (abs(D[0,2]**2 - x**2))**(0.5)", "3))) + str(round(Decimal(placed[i][1][2]), 3)) string += ' 1.00 0.00' string += find_white_space(11, placed[i][0])", "structure from amino acid i = pb.make_structure(amino, [180]*len(amino),[180]*len(amino)) out.set_structure(i) out.save(d + '/amino_acids/' +", "for atom in temp.iterAtoms(): parsed_aa[amino].append(str(atom.getName())) # what are nodes? (2d array) # returns", "PeptideBuilder as pb import os import Bio import cleaning from decimal import Decimal", "= pd.parsePDB(d + '/amino_acids/' + amino + \".pdb\") # maps amino acids to", "# gets the currrent column of the first 5 rows col = nodes[0:5,", "= Bio.PDB.PDBIO() # i is a peptide structure from amino acid i =", "= None # what are nodes? 
# decodes information into a pdb file", "current node column atoms.append(ELEMENT_INDEX[col.tolist().index(1.0)]) current += 1 col = nodes[0:5, current] return atoms", "D[0,1]) y = (abs(D[0,2]**2 - x**2))**(0.5) placed.append([new_nodes[2], (x,y,0)]) P = placed[2][1][0]**2 + placed[2][1][1]**2", "return atoms # checks the rate of correctness in heuristic efficiency def heuristic(index,", "to files out = Bio.PDB.PDBIO() # i is a peptide structure from amino", "str(round(Decimal(placed[i][1][0]), 3))) + str(round(Decimal(placed[i][1][0]), 3)) string += find_white_space(8, str(round(Decimal(placed[i][1][1]), 3))) + str(round(Decimal(placed[i][1][1]), 3))", "== length: global POSSIBLE_SEQUENCES if current_value in POSSIBLE_SEQUENCES: POSSIBLE_SEQUENCES[current_value].append(current_sequence) else: POSSIBLE_SEQUENCES[current_value] = [current_sequence]", "0: counter = len(parsed_aa[encoding['ele_to_amino'][i][1]]) amino_num += 1 string = 'ATOM' #+ str(i +", "import os import Bio import cleaning from decimal import Decimal from symbols import", "str(round(Decimal(placed[i][1][1]), 3))) + str(round(Decimal(placed[i][1][1]), 3)) string += find_white_space(8, str(round(Decimal(placed[i][1][2]), 3))) + str(round(Decimal(placed[i][1][2]), 3))", "in AA: # out writes information to files out = Bio.PDB.PDBIO() # i", "pb.make_structure(amino, [180]*len(amino),[180]*len(amino)) out.set_structure(i) out.save(d + '/amino_acids/' + amino + '.pdb') cleaning.cleanATOM(d + '/amino_acids/'", "does encoding look like? 
def decode(encoding, save_loc = d, save_name = '', find_coord", "index of the current node column atoms.append(ELEMENT_INDEX[col.tolist().index(1.0)]) current += 1 col = nodes[0:5,", "1 11:41:59 2019 @author: gemsec-user \"\"\" import numpy as np import prody as", "save_name == '': save_name = encoding['sequence'] + '.pdb' placed = [] new_nodes =", "writes information to files out = Bio.PDB.PDBIO() # i is a peptide structure", "return float(correct/total) # finds all possible sequences of amino acid sequences keyed to", "os.path.exists(d + '/amino_acids'): os.mkdir(d + '/amino_acids') global parsed_aa for amino in AA: #", "- 1] == atom[0]: correct += 1 total += 1 return float(correct/total) #", "save_loc = d, save_name = '', find_coord = False, use_coord = False): if", "with open(save_loc + '/' + save_name, 'w+') as g: counter = 0 amino_num", "+ '/amino_acids/' + amino + '.pdb', ext = '.pdb') temp = pd.parsePDB(d +", "* #from pdbtools import pdbtools as pdb d = os.getcwd() parsed_aa = {}", "+ '.pdb', ext = '.pdb') temp = pd.parsePDB(d + '/amino_acids/' + amino +", "+= encoding['ele_to_amino'][i][0] + find_white_space(4, encoding['ele_to_amino'][i][0]) string += AA3[AA.index(encoding['ele_to_amino'][i][1])] + ' A' string +=", "while sum(col) != 0: # adds the element index of the current node", "1 col = nodes[0:5, current] return atoms # checks the rate of correctness", "as pdb d = os.getcwd() parsed_aa = {} # parses peptides and creates", "AA: # out writes information to files out = Bio.PDB.PDBIO() # i is", "nodes? 
# decodes information into a pdb file # what does encoding look", "atom in parsed_aa[amino_acid]: if (index+total) < len(node) and ELEMENT_SYMBOLS[int(node[index+total][0]) - 1] == atom[0]:", "current_sequence, current_value): if len(parsed_aa.keys()) == 0: parse_aa() # adds the given value and", "amino = AA[i] find_sequence_recurs(nodes, length, current_ind + len(parsed_aa[amino]), current_sequence + amino, current_value +", "1 return float(correct/total) # finds all possible sequences of amino acid sequences keyed", "5 rows col = nodes[0:5, current] while sum(col) != 0: # adds the", "+ find_white_space(4, encoding['ele_to_amino'][i][0]) string += AA3[AA.index(encoding['ele_to_amino'][i][1])] + ' A' string += find_white_space(4, str(amino_num))", "0: parse_aa() if save_name == '': save_name = encoding['sequence'] + '.pdb' placed =", "their atoms parsed_aa[amino] = [] for atom in temp.iterAtoms(): parsed_aa[amino].append(str(atom.getName())) # what are", "find_sequence_recurs(nodes, length, current_ind, current_sequence, current_value): if len(parsed_aa.keys()) == 0: parse_aa() # adds the", "look like? def decode(encoding, save_loc = d, save_name = '', find_coord = False,", "{} # parses peptides and creates file structures to store these peptides #", "temp.iterAtoms(): parsed_aa[amino].append(str(atom.getName())) # what are nodes? 
(2d array) # returns the atoms from", "+ str(amino_num) string += find_white_space(12, str(round(Decimal(placed[i][1][0]), 3))) + str(round(Decimal(placed[i][1][0]), 3)) string += find_white_space(8,", "= remove_padding(encoding['index']) if not use_coord: D = encoding['secondary'] placed.append([new_nodes[0], (0,0,0)]) placed.append([new_nodes[1], (D[0,1],0,0)]) x", "keyed to heuristic efficiency values def find_sequence_recurs(nodes, length, current_ind, current_sequence, current_value): if len(parsed_aa.keys())", "+ '/amino_acids/' + amino + '.pdb') cleaning.cleanATOM(d + '/amino_acids/' + amino + '.pdb',", "+ '.pdb') cleaning.cleanATOM(d + '/amino_acids/' + amino + '.pdb', out_file= d + '/amino_acids/'", "to the possible sequences dictionary if len(current_sequence) == length: global POSSIBLE_SEQUENCES if current_value", "final[i, 2] = placed[i][1][2] return final else: for i in range(3,len(new_nodes)): placed.append([new_nodes[i], (encoding['coordinates'][i][0],encoding['coordinates'][i][1],encoding['coordinates'][i][2])])", "0 total = 0 for atom in parsed_aa[amino_acid]: if (index+total) < len(node) and", "= [] new_nodes = remove_padding(encoding['index']) if not use_coord: D = encoding['secondary'] placed.append([new_nodes[0], (0,0,0)])", "# returns the atoms from the given nodes def remove_padding(nodes): atoms = []", "return final else: for i in range(3,len(new_nodes)): placed.append([new_nodes[i], (encoding['coordinates'][i][0],encoding['coordinates'][i][1],encoding['coordinates'][i][2])]) with open(save_loc + '/'", "amino acids to their atoms parsed_aa[amino] = [] for atom in temp.iterAtoms(): parsed_aa[amino].append(str(atom.getName()))", "checks the rate of correctness in heuristic efficiency def heuristic(index, node, amino_acid): correct", "+= find_white_space(4, str(amino_num)) + str(amino_num) string += find_white_space(12, str(round(Decimal(placed[i][1][0]), 3))) + str(round(Decimal(placed[i][1][0]), 3))", "global parsed_aa for amino in AA: # out 
writes information to files out", "' 1.00 0.00' string += find_white_space(11, placed[i][0]) + placed[i][0] g.write(string + '\\n') counter", "values.append(heuristic(current_ind,nodes, a)) max_value = max(values) if max_value > 0.8: for i in range(len(values)):", "if save_name == '': save_name = encoding['sequence'] + '.pdb' placed = [] new_nodes", "import cleaning from decimal import Decimal from symbols import * #from pdbtools import", "the rate of correctness in heuristic efficiency def heuristic(index, node, amino_acid): correct =", "3))) + str(round(Decimal(placed[i][1][1]), 3)) string += find_white_space(8, str(round(Decimal(placed[i][1][2]), 3))) + str(round(Decimal(placed[i][1][2]), 3)) string", "P + (2*x*placed[2][1][0]))/(-2*placed[2][1][1]) z = (abs(D[0,i]**2 - x**2 - y**2))**(0.5) placed.append([new_nodes[i], (x,y,z)]) if", "0 for i in range(len(placed)): if counter == 0: counter = len(parsed_aa[encoding['ele_to_amino'][i][1]]) amino_num", "rate of correctness in heuristic efficiency def heuristic(index, node, amino_acid): correct = 0", "== values[i]: amino = AA[i] find_sequence_recurs(nodes, length, current_ind + len(parsed_aa[amino]), current_sequence + amino,", "# what are nodes? 
# decodes information into a pdb file # what", "# returns a string of whitespace specified def find_white_space(total_space, text): return ' '*(total_space", "length, current_ind + len(parsed_aa[amino]), current_sequence + amino, current_value + max_value) # returns a", "decode(encoding, save_loc = d, save_name = '', find_coord = False, use_coord = False):", "heuristic(index, node, amino_acid): correct = 0 total = 0 for atom in parsed_aa[amino_acid]:", "'.pdb', out_file= d + '/amino_acids/' + amino + '.pdb', ext = '.pdb') temp", "Created on Thu Aug 1 11:41:59 2019 @author: gemsec-user \"\"\" import numpy as", "!= 0: # adds the element index of the current node column atoms.append(ELEMENT_INDEX[col.tolist().index(1.0)])", "+ '.pdb' placed = [] new_nodes = remove_padding(encoding['index']) if not use_coord: D =", "amino_num = 0 for i in range(len(placed)): if counter == 0: counter =", "'/amino_acids/' + amino + '.pdb', ext = '.pdb') temp = pd.parsePDB(d + '/amino_acids/'", "= False, use_coord = False): if len(parsed_aa.keys()) == 0: parse_aa() if save_name ==", "+= 1 string = 'ATOM' #+ str(i + 1) + ' '+ encoding['seq_to_atoms'][i][0]", "python3 # -*- coding: utf-8 -*- \"\"\" Created on Thu Aug 1 11:41:59", "possible sequences of amino acid sequences keyed to heuristic efficiency values def find_sequence_recurs(nodes,", "the current node column atoms.append(ELEMENT_INDEX[col.tolist().index(1.0)]) current += 1 col = nodes[0:5, current] return", "str(amino_num) string += find_white_space(12, str(round(Decimal(placed[i][1][0]), 3))) + str(round(Decimal(placed[i][1][0]), 3)) string += find_white_space(8, str(round(Decimal(placed[i][1][1]),", "a pdb file # what does encoding look like? 
def decode(encoding, save_loc =", "for i in range(len(values)): if max_value == values[i]: amino = AA[i] find_sequence_recurs(nodes, length,", "acids to their atoms parsed_aa[amino] = [] for atom in temp.iterAtoms(): parsed_aa[amino].append(str(atom.getName())) #", "+= 1 return float(correct/total) # finds all possible sequences of amino acid sequences", "final else: for i in range(3,len(new_nodes)): placed.append([new_nodes[i], (encoding['coordinates'][i][0],encoding['coordinates'][i][1],encoding['coordinates'][i][2])]) with open(save_loc + '/' +", "+ 1)) + str(i+1) + ' ' string += encoding['ele_to_amino'][i][0] + find_white_space(4, encoding['ele_to_amino'][i][0])", "'.pdb') temp = pd.parsePDB(d + '/amino_acids/' + amino + \".pdb\") # maps amino", "in range(3,len(new_nodes)): placed.append([new_nodes[i], (encoding['coordinates'][i][0],encoding['coordinates'][i][1],encoding['coordinates'][i][2])]) with open(save_loc + '/' + save_name, 'w+') as g:", "> 0.8: for i in range(len(values)): if max_value == values[i]: amino = AA[i]", "in range(len(values)): if max_value == values[i]: amino = AA[i] find_sequence_recurs(nodes, length, current_ind +", "information into a pdb file # what does encoding look like? 
def decode(encoding,", "- D[0,i]**2 - D[0,1]**2)/(-2*D[0,1]) y = (D[2,i]**2 - D[0,i]**2 - P + (2*x*placed[2][1][0]))/(-2*placed[2][1][1])", "remove_padding(nodes): atoms = [] current = 0 # gets the currrent column of", "files out = Bio.PDB.PDBIO() # i is a peptide structure from amino acid", "if len(parsed_aa.keys()) == 0: parse_aa() # adds the given value and sequence to", "'.pdb') cleaning.cleanATOM(d + '/amino_acids/' + amino + '.pdb', out_file= d + '/amino_acids/' +", "+ amino, current_value + max_value) # returns a string of whitespace specified def", "current_value + max_value) # returns a string of whitespace specified def find_white_space(total_space, text):", "final[i, 0] = placed[i][1][0] final[i, 1] = placed[i][1][1] final[i, 2] = placed[i][1][2] return", "element index of the current node column atoms.append(ELEMENT_INDEX[col.tolist().index(1.0)]) current += 1 col =", "values def find_sequence_recurs(nodes, length, current_ind, current_sequence, current_value): if len(parsed_aa.keys()) == 0: parse_aa() #", "string += find_white_space(8, str(round(Decimal(placed[i][1][1]), 3))) + str(round(Decimal(placed[i][1][1]), 3)) string += find_white_space(8, str(round(Decimal(placed[i][1][2]), 3)))", "correct += 1 total += 1 return float(correct/total) # finds all possible sequences", "from the given nodes def remove_padding(nodes): atoms = [] current = 0 #", "- D[0,i]**2 - P + (2*x*placed[2][1][0]))/(-2*placed[2][1][1]) z = (abs(D[0,i]**2 - x**2 - y**2))**(0.5)", "# out writes information to files out = Bio.PDB.PDBIO() # i is a", "atoms from the given nodes def remove_padding(nodes): atoms = [] current = 0", "adds the element index of the current node column atoms.append(ELEMENT_INDEX[col.tolist().index(1.0)]) current += 1", "find_coord = False, use_coord = False): if len(parsed_aa.keys()) == 0: parse_aa() if save_name", "== atom[0]: correct += 1 total += 1 return float(correct/total) # finds all", "counter == 0: counter = 
len(parsed_aa[encoding['ele_to_amino'][i][1]]) amino_num += 1 string = 'ATOM' #+", "def decode(encoding, save_loc = d, save_name = '', find_coord = False, use_coord =", "+ '/amino_acids/' + amino + \".pdb\") # maps amino acids to their atoms", "not os.path.exists(d + '/amino_acids'): os.mkdir(d + '/amino_acids') global parsed_aa for amino in AA:", "+= find_white_space(7, str(i + 1)) + str(i+1) + ' ' string += encoding['ele_to_amino'][i][0]", "open(save_loc + '/' + save_name, 'w+') as g: counter = 0 amino_num =", "= '.pdb') temp = pd.parsePDB(d + '/amino_acids/' + amino + \".pdb\") # maps", "+ '/' + save_name, 'w+') as g: counter = 0 amino_num = 0", "+ placed[2][1][1]**2 for i in range(3,len(new_nodes)): x = (D[1,i]**2 - D[0,i]**2 - D[0,1]**2)/(-2*D[0,1])", "= [current_sequence] values = [] for a in AA: values.append(heuristic(current_ind,nodes, a)) max_value =", "peptide structure from amino acid i = pb.make_structure(amino, [180]*len(amino),[180]*len(amino)) out.set_structure(i) out.save(d + '/amino_acids/'", "0: parse_aa() # adds the given value and sequence to the possible sequences", "stores a dictionary with peptide keys that map to the atoms that make", "0 # gets the currrent column of the first 5 rows col =", "current_ind, current_sequence, current_value): if len(parsed_aa.keys()) == 0: parse_aa() # adds the given value", "'/amino_acids/' + amino + \".pdb\") # maps amino acids to their atoms parsed_aa[amino]", "str(i + 1) + ' '+ encoding['seq_to_atoms'][i][0] string += find_white_space(7, str(i + 1))", "-*- coding: utf-8 -*- \"\"\" Created on Thu Aug 1 11:41:59 2019 @author:", "dictionary if len(current_sequence) == length: global POSSIBLE_SEQUENCES if current_value in POSSIBLE_SEQUENCES: POSSIBLE_SEQUENCES[current_value].append(current_sequence) else:", "\"\"\" import numpy as np import prody as pd import PeptideBuilder as pb", "[180]*len(amino),[180]*len(amino)) out.set_structure(i) out.save(d + '/amino_acids/' + amino + '.pdb') cleaning.cleanATOM(d + 
'/amino_acids/' +", "the atoms that make it up def parse_aa(): if not os.path.exists(d + '/amino_acids'):", "column of the first 5 rows col = nodes[0:5, current] while sum(col) !=", "+= find_white_space(12, str(round(Decimal(placed[i][1][0]), 3))) + str(round(Decimal(placed[i][1][0]), 3)) string += find_white_space(8, str(round(Decimal(placed[i][1][1]), 3))) +", "max_value == values[i]: amino = AA[i] find_sequence_recurs(nodes, length, current_ind + len(parsed_aa[amino]), current_sequence +", "= {} # parses peptides and creates file structures to store these peptides", "+ str(round(Decimal(placed[i][1][0]), 3)) string += find_white_space(8, str(round(Decimal(placed[i][1][1]), 3))) + str(round(Decimal(placed[i][1][1]), 3)) string +=", "= nodes[0:5, current] while sum(col) != 0: # adds the element index of", "string += encoding['ele_to_amino'][i][0] + find_white_space(4, encoding['ele_to_amino'][i][0]) string += AA3[AA.index(encoding['ele_to_amino'][i][1])] + ' A' string", "numpy as np import prody as pd import PeptideBuilder as pb import os", "= (D[2,i]**2 - D[0,i]**2 - P + (2*x*placed[2][1][0]))/(-2*placed[2][1][1]) z = (abs(D[0,i]**2 - x**2", "parsed_aa for amino in AA: # out writes information to files out =", "current] while sum(col) != 0: # adds the element index of the current", "+= 1 col = nodes[0:5, current] return atoms # checks the rate of", "string += find_white_space(7, str(i + 1)) + str(i+1) + ' ' string +=", "len(text)) POSSIBLE_SEQUENCES = None # what are nodes? # decodes information into a", "out writes information to files out = Bio.PDB.PDBIO() # i is a peptide", "sequences of amino acid sequences keyed to heuristic efficiency values def find_sequence_recurs(nodes, length,", "* D[0,1]) y = (abs(D[0,2]**2 - x**2))**(0.5) placed.append([new_nodes[2], (x,y,0)]) P = placed[2][1][0]**2 +", "# what does encoding look like? 
def decode(encoding, save_loc = d, save_name =", "'ATOM' #+ str(i + 1) + ' '+ encoding['seq_to_atoms'][i][0] string += find_white_space(7, str(i", "len(parsed_aa[encoding['ele_to_amino'][i][1]]) amino_num += 1 string = 'ATOM' #+ str(i + 1) + '", "in heuristic efficiency def heuristic(index, node, amino_acid): correct = 0 total = 0", "- x**2))**(0.5) placed.append([new_nodes[2], (x,y,0)]) P = placed[2][1][0]**2 + placed[2][1][1]**2 for i in range(3,len(new_nodes)):", "def parse_aa(): if not os.path.exists(d + '/amino_acids'): os.mkdir(d + '/amino_acids') global parsed_aa for", "# maps amino acids to their atoms parsed_aa[amino] = [] for atom in", "+ save_name, 'w+') as g: counter = 0 amino_num = 0 for i", "given value and sequence to the possible sequences dictionary if len(current_sequence) == length:", "- D[0,2]**2 - D[0,1]**2)/(-2 * D[0,1]) y = (abs(D[0,2]**2 - x**2))**(0.5) placed.append([new_nodes[2], (x,y,0)])", "it up def parse_aa(): if not os.path.exists(d + '/amino_acids'): os.mkdir(d + '/amino_acids') global", "[] current = 0 # gets the currrent column of the first 5", "peptide keys that map to the atoms that make it up def parse_aa():", "sequences keyed to heuristic efficiency values def find_sequence_recurs(nodes, length, current_ind, current_sequence, current_value): if", "values = [] for a in AA: values.append(heuristic(current_ind,nodes, a)) max_value = max(values) if", "AA3[AA.index(encoding['ele_to_amino'][i][1])] + ' A' string += find_white_space(4, str(amino_num)) + str(amino_num) string += find_white_space(12,", "#+ str(i + 1) + ' '+ encoding['seq_to_atoms'][i][0] string += find_white_space(7, str(i +", "save_name = encoding['sequence'] + '.pdb' placed = [] new_nodes = remove_padding(encoding['index']) if not", "D = encoding['secondary'] placed.append([new_nodes[0], (0,0,0)]) placed.append([new_nodes[1], (D[0,1],0,0)]) x = (D[1,2]**2 - D[0,2]**2 -", "if current_value in POSSIBLE_SEQUENCES: 
POSSIBLE_SEQUENCES[current_value].append(current_sequence) else: POSSIBLE_SEQUENCES[current_value] = [current_sequence] values = [] for", "\"\"\" Created on Thu Aug 1 11:41:59 2019 @author: gemsec-user \"\"\" import numpy", "finds all possible sequences of amino acid sequences keyed to heuristic efficiency values", "amino + \".pdb\") # maps amino acids to their atoms parsed_aa[amino] = []", "placed[i][1][2] return final else: for i in range(3,len(new_nodes)): placed.append([new_nodes[i], (encoding['coordinates'][i][0],encoding['coordinates'][i][1],encoding['coordinates'][i][2])]) with open(save_loc +", "d, save_name = '', find_coord = False, use_coord = False): if len(parsed_aa.keys()) ==", "== 0: counter = len(parsed_aa[encoding['ele_to_amino'][i][1]]) amino_num += 1 string = 'ATOM' #+ str(i", "encoding['secondary'] placed.append([new_nodes[0], (0,0,0)]) placed.append([new_nodes[1], (D[0,1],0,0)]) x = (D[1,2]**2 - D[0,2]**2 - D[0,1]**2)/(-2 *", "the possible sequences dictionary if len(current_sequence) == length: global POSSIBLE_SEQUENCES if current_value in", "0] = placed[i][1][0] final[i, 1] = placed[i][1][1] final[i, 2] = placed[i][1][2] return final", "0 amino_num = 0 for i in range(len(placed)): if counter == 0: counter", "= np.zeros((len(encoding['secondary'][0]),3)) for i in range(len(placed)): final[i, 0] = placed[i][1][0] final[i, 1] =", "(2d array) # returns the atoms from the given nodes def remove_padding(nodes): atoms", "for i in range(3,len(new_nodes)): placed.append([new_nodes[i], (encoding['coordinates'][i][0],encoding['coordinates'][i][1],encoding['coordinates'][i][2])]) with open(save_loc + '/' + save_name, 'w+')", "import PeptideBuilder as pb import os import Bio import cleaning from decimal import", "amino acid i = pb.make_structure(amino, [180]*len(amino),[180]*len(amino)) out.set_structure(i) out.save(d + '/amino_acids/' + amino +", "D[0,i]**2 - D[0,1]**2)/(-2*D[0,1]) y = (D[2,i]**2 - D[0,i]**2 - P + 
(2*x*placed[2][1][0]))/(-2*placed[2][1][1]) z", "\".pdb\") # maps amino acids to their atoms parsed_aa[amino] = [] for atom", "string += find_white_space(8, str(round(Decimal(placed[i][1][2]), 3))) + str(round(Decimal(placed[i][1][2]), 3)) string += ' 1.00 0.00'", "(x,y,z)]) if find_coord: final = np.zeros((len(encoding['secondary'][0]),3)) for i in range(len(placed)): final[i, 0] =", "os import Bio import cleaning from decimal import Decimal from symbols import *", "encoding['ele_to_amino'][i][0]) string += AA3[AA.index(encoding['ele_to_amino'][i][1])] + ' A' string += find_white_space(4, str(amino_num)) + str(amino_num)", "make it up def parse_aa(): if not os.path.exists(d + '/amino_acids'): os.mkdir(d + '/amino_acids')", "# what are nodes? (2d array) # returns the atoms from the given", "col = nodes[0:5, current] return atoms # checks the rate of correctness in", "ELEMENT_SYMBOLS[int(node[index+total][0]) - 1] == atom[0]: correct += 1 total += 1 return float(correct/total)", "if not use_coord: D = encoding['secondary'] placed.append([new_nodes[0], (0,0,0)]) placed.append([new_nodes[1], (D[0,1],0,0)]) x = (D[1,2]**2", "= len(parsed_aa[encoding['ele_to_amino'][i][1]]) amino_num += 1 string = 'ATOM' #+ str(i + 1) +", "+ '/amino_acids/' + amino + '.pdb', out_file= d + '/amino_acids/' + amino +", "None # what are nodes? 
# decodes information into a pdb file #", "in range(3,len(new_nodes)): x = (D[1,i]**2 - D[0,i]**2 - D[0,1]**2)/(-2*D[0,1]) y = (D[2,i]**2 -", "A' string += find_white_space(4, str(amino_num)) + str(amino_num) string += find_white_space(12, str(round(Decimal(placed[i][1][0]), 3))) +", "1) + ' '+ encoding['seq_to_atoms'][i][0] string += find_white_space(7, str(i + 1)) + str(i+1)", "from decimal import Decimal from symbols import * #from pdbtools import pdbtools as", "of the first 5 rows col = nodes[0:5, current] while sum(col) != 0:", "information to files out = Bio.PDB.PDBIO() # i is a peptide structure from", "current_value): if len(parsed_aa.keys()) == 0: parse_aa() # adds the given value and sequence", "False): if len(parsed_aa.keys()) == 0: parse_aa() if save_name == '': save_name = encoding['sequence']", "'w+') as g: counter = 0 amino_num = 0 for i in range(len(placed)):", "symbols import * #from pdbtools import pdbtools as pdb d = os.getcwd() parsed_aa", "current += 1 col = nodes[0:5, current] return atoms # checks the rate", "whitespace specified def find_white_space(total_space, text): return ' '*(total_space - len(text)) POSSIBLE_SEQUENCES = None", "= placed[i][1][2] return final else: for i in range(3,len(new_nodes)): placed.append([new_nodes[i], (encoding['coordinates'][i][0],encoding['coordinates'][i][1],encoding['coordinates'][i][2])]) with open(save_loc", "- D[0,1]**2)/(-2 * D[0,1]) y = (abs(D[0,2]**2 - x**2))**(0.5) placed.append([new_nodes[2], (x,y,0)]) P =", "length: global POSSIBLE_SEQUENCES if current_value in POSSIBLE_SEQUENCES: POSSIBLE_SEQUENCES[current_value].append(current_sequence) else: POSSIBLE_SEQUENCES[current_value] = [current_sequence] values", "def remove_padding(nodes): atoms = [] current = 0 # gets the currrent column", "POSSIBLE_SEQUENCES[current_value].append(current_sequence) else: POSSIBLE_SEQUENCES[current_value] = [current_sequence] values = [] for a in AA: values.append(heuristic(current_ind,nodes,", "POSSIBLE_SEQUENCES if 
current_value in POSSIBLE_SEQUENCES: POSSIBLE_SEQUENCES[current_value].append(current_sequence) else: POSSIBLE_SEQUENCES[current_value] = [current_sequence] values = []", "1)) + str(i+1) + ' ' string += encoding['ele_to_amino'][i][0] + find_white_space(4, encoding['ele_to_amino'][i][0]) string", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Created on Thu Aug 1", "3)) string += find_white_space(8, str(round(Decimal(placed[i][1][1]), 3))) + str(round(Decimal(placed[i][1][1]), 3)) string += find_white_space(8, str(round(Decimal(placed[i][1][2]),", "str(amino_num)) + str(amino_num) string += find_white_space(12, str(round(Decimal(placed[i][1][0]), 3))) + str(round(Decimal(placed[i][1][0]), 3)) string +=", "= 0 for i in range(len(placed)): if counter == 0: counter = len(parsed_aa[encoding['ele_to_amino'][i][1]])", "total = 0 for atom in parsed_aa[amino_acid]: if (index+total) < len(node) and ELEMENT_SYMBOLS[int(node[index+total][0])", "len(current_sequence) == length: global POSSIBLE_SEQUENCES if current_value in POSSIBLE_SEQUENCES: POSSIBLE_SEQUENCES[current_value].append(current_sequence) else: POSSIBLE_SEQUENCES[current_value] =", "acid sequences keyed to heuristic efficiency values def find_sequence_recurs(nodes, length, current_ind, current_sequence, current_value):", "pdb d = os.getcwd() parsed_aa = {} # parses peptides and creates file", "current_value in POSSIBLE_SEQUENCES: POSSIBLE_SEQUENCES[current_value].append(current_sequence) else: POSSIBLE_SEQUENCES[current_value] = [current_sequence] values = [] for a", "placed[i][0]) + placed[i][0] g.write(string + '\\n') counter -= 1 return save_loc + '/'", "(index+total) < len(node) and ELEMENT_SYMBOLS[int(node[index+total][0]) - 1] == atom[0]: correct += 1 total", "D[0,1]**2)/(-2 * D[0,1]) y = (abs(D[0,2]**2 - x**2))**(0.5) placed.append([new_nodes[2], (x,y,0)]) P = placed[2][1][0]**2", "pd.parsePDB(d + '/amino_acids/' + amino + \".pdb\") # maps amino acids to their", "len(parsed_aa.keys()) == 0: parse_aa() 
# adds the given value and sequence to the", "amino + '.pdb', out_file= d + '/amino_acids/' + amino + '.pdb', ext =", "to store these peptides # stores a dictionary with peptide keys that map", "max(values) if max_value > 0.8: for i in range(len(values)): if max_value == values[i]:", "x = (D[1,2]**2 - D[0,2]**2 - D[0,1]**2)/(-2 * D[0,1]) y = (abs(D[0,2]**2 -", "array) # returns the atoms from the given nodes def remove_padding(nodes): atoms =", "out.set_structure(i) out.save(d + '/amino_acids/' + amino + '.pdb') cleaning.cleanATOM(d + '/amino_acids/' + amino", "cleaning from decimal import Decimal from symbols import * #from pdbtools import pdbtools", "nodes? (2d array) # returns the atoms from the given nodes def remove_padding(nodes):", "== 0: parse_aa() if save_name == '': save_name = encoding['sequence'] + '.pdb' placed", "+= find_white_space(8, str(round(Decimal(placed[i][1][2]), 3))) + str(round(Decimal(placed[i][1][2]), 3)) string += ' 1.00 0.00' string", "(D[1,2]**2 - D[0,2]**2 - D[0,1]**2)/(-2 * D[0,1]) y = (abs(D[0,2]**2 - x**2))**(0.5) placed.append([new_nodes[2],", "0.00' string += find_white_space(11, placed[i][0]) + placed[i][0] g.write(string + '\\n') counter -= 1", "efficiency values def find_sequence_recurs(nodes, length, current_ind, current_sequence, current_value): if len(parsed_aa.keys()) == 0: parse_aa()", "current_sequence + amino, current_value + max_value) # returns a string of whitespace specified", "rows col = nodes[0:5, current] while sum(col) != 0: # adds the element", "str(round(Decimal(placed[i][1][2]), 3)) string += ' 1.00 0.00' string += find_white_space(11, placed[i][0]) + placed[i][0]", "up def parse_aa(): if not os.path.exists(d + '/amino_acids'): os.mkdir(d + '/amino_acids') global parsed_aa", "of whitespace specified def find_white_space(total_space, text): return ' '*(total_space - len(text)) POSSIBLE_SEQUENCES =", "a dictionary with peptide keys that map to the atoms that make it", "y**2))**(0.5) placed.append([new_nodes[i], 
(x,y,z)]) if find_coord: final = np.zeros((len(encoding['secondary'][0]),3)) for i in range(len(placed)): final[i,", "@author: gemsec-user \"\"\" import numpy as np import prody as pd import PeptideBuilder", "i in range(len(placed)): if counter == 0: counter = len(parsed_aa[encoding['ele_to_amino'][i][1]]) amino_num += 1", "+ str(round(Decimal(placed[i][1][2]), 3)) string += ' 1.00 0.00' string += find_white_space(11, placed[i][0]) +", "current] return atoms # checks the rate of correctness in heuristic efficiency def", "= 0 amino_num = 0 for i in range(len(placed)): if counter == 0:", "dictionary with peptide keys that map to the atoms that make it up", "+ \".pdb\") # maps amino acids to their atoms parsed_aa[amino] = [] for", "# adds the given value and sequence to the possible sequences dictionary if", "# finds all possible sequences of amino acid sequences keyed to heuristic efficiency", "length, current_ind, current_sequence, current_value): if len(parsed_aa.keys()) == 0: parse_aa() # adds the given", "string = 'ATOM' #+ str(i + 1) + ' '+ encoding['seq_to_atoms'][i][0] string +=", "'/amino_acids'): os.mkdir(d + '/amino_acids') global parsed_aa for amino in AA: # out writes", "+= 1 total += 1 return float(correct/total) # finds all possible sequences of", "node, amino_acid): correct = 0 total = 0 for atom in parsed_aa[amino_acid]: if", "= d, save_name = '', find_coord = False, use_coord = False): if len(parsed_aa.keys())", "store these peptides # stores a dictionary with peptide keys that map to", "in POSSIBLE_SEQUENCES: POSSIBLE_SEQUENCES[current_value].append(current_sequence) else: POSSIBLE_SEQUENCES[current_value] = [current_sequence] values = [] for a in", "'*(total_space - len(text)) POSSIBLE_SEQUENCES = None # what are nodes? 
# decodes information", "in range(len(placed)): if counter == 0: counter = len(parsed_aa[encoding['ele_to_amino'][i][1]]) amino_num += 1 string", "+ str(round(Decimal(placed[i][1][1]), 3)) string += find_white_space(8, str(round(Decimal(placed[i][1][2]), 3))) + str(round(Decimal(placed[i][1][2]), 3)) string +=", "i is a peptide structure from amino acid i = pb.make_structure(amino, [180]*len(amino),[180]*len(amino)) out.set_structure(i)", "in range(len(placed)): final[i, 0] = placed[i][1][0] final[i, 1] = placed[i][1][1] final[i, 2] =", "atoms that make it up def parse_aa(): if not os.path.exists(d + '/amino_acids'): os.mkdir(d", "(D[2,i]**2 - D[0,i]**2 - P + (2*x*placed[2][1][0]))/(-2*placed[2][1][1]) z = (abs(D[0,i]**2 - x**2 -", "[] new_nodes = remove_padding(encoding['index']) if not use_coord: D = encoding['secondary'] placed.append([new_nodes[0], (0,0,0)]) placed.append([new_nodes[1]," ]
[ "EvalServer from summ_eval.server.helper import get_run_args args = get_run_args() server = EvalServer(args) server.start() server.join()", "def main(): from summ_eval.server import EvalServer from summ_eval.server.helper import get_run_args args = get_run_args()", "summ_eval.server import EvalServer from summ_eval.server.helper import get_run_args args = get_run_args() server = EvalServer(args)", "import EvalServer from summ_eval.server.helper import get_run_args args = get_run_args() server = EvalServer(args) server.start()", "main(): from summ_eval.server import EvalServer from summ_eval.server.helper import get_run_args args = get_run_args() server", "from summ_eval.server import EvalServer from summ_eval.server.helper import get_run_args args = get_run_args() server =" ]
[ "k8s_ver[\"gitVersion\"] assert \"cloud\" in params assert \"provider\" in params[\"cloud\"] assert params[\"cloud\"][\"provider\"] == \"cloudscale\"", "params assert \"name\" in params[\"customer\"] assert params[\"customer\"][\"name\"] == \"mytenant\" def test_missing_facts(data, tmp_path: P):", "cfg = Config(work_dir=tmp_path) target = cfg.inventory.bootstrap_target params = cluster.render_params(cfg.inventory, cluster_from_data(data)) assert \"parameters\" in", ") cluster_id, tenant_id = cluster.read_cluster_and_tenant(cfg.inventory) assert cluster_id == \"c-twilight-water-9032\" assert tenant_id == \"t-delicate-pine-3938\"", "\"t-delicate-pine-3938\" def test_read_cluster_and_tenant_missing_fact(tmp_path): inv = Inventory(work_dir=tmp_path) file = inv.params_file os.makedirs(file.parent, exist_ok=True) with open(file,", "assert \"display_name\" in target_params assert target_params[\"display_name\"] == \"My Test Cluster\" assert \"catalog_url\" in", "import Inventory from commodore.config import Config @pytest.fixture def data(): \"\"\" Setup test data", "cluster.Cluster(data[\"cluster\"], data[\"tenant\"]) def _setup_working_dir(inv: Inventory, components): for cls in components: defaults = inv.defaults_file(cls)", "cluster_from_data(data) -> cluster.Cluster: return cluster.Cluster(data[\"cluster\"], data[\"tenant\"]) def _setup_working_dir(inv: Inventory, components): for cls in", "classes\" for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"_instance\"] == \"cluster\"", "as P from textwrap import dedent from commodore import cluster from commodore.inventory import", "target_params assert target_params[\"name\"] == \"mycluster\" assert \"display_name\" in target_params assert target_params[\"display_name\"] == \"My", "classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert target != \"\"", "params = cluster.render_params(cfg.inventory, 
cluster_from_data(data)) assert \"parameters\" in params params = params[\"parameters\"] assert \"cluster\"", "classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"fooer\" assert target[\"parameters\"][\"foo\"] == \"${fooer}\" assert target[\"parameters\"][\"_instance\"] == \"fooer\"", "Test Tenant\" assert \"dist\" in target_params assert target_params[\"dist\"] == \"rancher\" assert \"facts\" in", "\"tenant_display_name\" in target_params assert target_params[\"tenant_display_name\"] == \"My Test Tenant\" assert \"dist\" in target_params", "== \"mycluster\" assert target in params target_params = params[target] assert \"name\" in target_params", "components) target = cluster.render_target(inv, \"fooer\", [\"foo\", \"bar\", \"baz\"], component=\"foo\") classes = [ \"params.cluster\",", "target = cluster.render_target( inv, \"foo-1\", [\"foo-comp\", \"bar\", \"baz\"], component=\"foo-comp\" ) classes = [", "commodore import cluster from commodore.inventory import Inventory from commodore.config import Config @pytest.fixture def", "tenant_id == \"t-delicate-pine-3938\" def test_read_cluster_and_tenant_missing_fact(tmp_path): inv = Inventory(work_dir=tmp_path) file = inv.params_file os.makedirs(file.parent, exist_ok=True)", "def _setup_working_dir(inv: Inventory, components): for cls in components: defaults = inv.defaults_file(cls) os.makedirs(defaults.parent, exist_ok=True)", "\"tenant\": tenant, } def cluster_from_data(data) -> cluster.Cluster: return cluster.Cluster(data[\"cluster\"], data[\"tenant\"]) def _setup_working_dir(inv: Inventory,", "= cluster.render_target(inv, \"foo\", [\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\",", "= [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"cluster\", [\"foo\",", "_setup_working_dir(inv, components) target = cluster.render_target(inv, \"cluster\", 
[\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\",", "assert \"minor\" in k8s_ver assert \"gitVersion\" in k8s_ver assert \"1\" == k8s_ver[\"major\"] assert", "assert target_params[\"tenant\"] == \"mytenant\" assert \"tenant_display_name\" in target_params assert target_params[\"tenant_display_name\"] == \"My Test", "exist_ok=True) component.touch() def test_render_bootstrap_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv,", "def test_missing_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def", "range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo\" assert target[\"parameters\"][\"_instance\"] == \"foo\"", "== \"foo-1\" def test_render_params(data, tmp_path: P): cfg = Config(work_dir=tmp_path) target = cfg.inventory.bootstrap_target params", "params = params[\"parameters\"] assert \"cluster\" in params assert \"name\" in params[\"cluster\"] assert params[\"cluster\"][\"name\"]", "assert target[\"parameters\"][\"foo_comp\"] == \"${foo_1}\" assert target[\"parameters\"][\"_instance\"] == \"foo-1\" def test_render_params(data, tmp_path: P): cfg", "\"displayName\": \"My Test Tenant\", } cluster = { \"id\": \"mycluster\", \"displayName\": \"My Test", "target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"fooer\" assert target[\"parameters\"][\"foo\"] == \"${fooer}\" assert target[\"parameters\"][\"_instance\"] == \"fooer\" def test_render_aliased_target_with_dash(tmp_path:", "assert target[\"parameters\"][\"_instance\"] == \"foo-1\" def test_render_params(data, tmp_path: P): cfg = Config(work_dir=tmp_path) target =", "target[\"parameters\"][\"_instance\"] == \"foo\" def test_render_aliased_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = 
Inventory(work_dir=tmp_path)", "pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_read_cluster_and_tenant(tmp_path): cfg = Config(work_dir=tmp_path) file = cfg.inventory.params_file os.makedirs(file.parent, exist_ok=True)", "\"fooer\" def test_render_aliased_target_with_dash(tmp_path: P): components = [\"foo-comp\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components)", "Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_read_cluster_and_tenant(tmp_path): cfg = Config(work_dir=tmp_path) file = cfg.inventory.params_file", "classes\" for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"fooer\"", "from commodore.inventory import Inventory from commodore.config import Config @pytest.fixture def data(): \"\"\" Setup", "cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_read_cluster_and_tenant(tmp_path): cfg = Config(work_dir=tmp_path) file", "\"cloudscale\", }, \"dynamicFacts\": { \"kubernetes_version\": { \"major\": \"1\", \"minor\": \"21\", \"gitVersion\": \"v1.21.3\", }", "test_render_aliased_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target =", "\"rancher\" assert \"facts\" in params assert params[\"facts\"] == data[\"cluster\"][\"facts\"] assert \"dynamic_facts\" in params", "[ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", ] assert target != \"\" print(target) assert len(target[\"classes\"])", "assert \"cloud\" in params assert \"provider\" in params[\"cloud\"] assert params[\"cloud\"][\"provider\"] == \"cloudscale\" assert", "cluster: name: c-twilight-water-9032 tenant: t-delicate-pine-3938\"\"\" ) ) 
cluster_id, tenant_id = cluster.read_cluster_and_tenant(cfg.inventory) assert cluster_id", "assert \"name\" in params[\"cluster\"] assert params[\"cluster\"][\"name\"] == \"mycluster\" assert target in params target_params", "\"My Test Cluster\", \"tenant\": tenant[\"id\"], \"facts\": { \"distribution\": \"rancher\", \"cloud\": \"cloudscale\", }, \"dynamicFacts\":", "\"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\", }, } return { \"cluster\": cluster, \"tenant\": tenant, } def cluster_from_data(data)", "\"global.commodore\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) == len( classes ),", "cluster_id == \"c-twilight-water-9032\" assert tenant_id == \"t-delicate-pine-3938\" def test_read_cluster_and_tenant_missing_fact(tmp_path): inv = Inventory(work_dir=tmp_path) file", "\"\"\" Unit-tests for target generation \"\"\" import os import click import pytest from", "pathlib import Path as P from textwrap import dedent from commodore import cluster", "as f: f.write( dedent( \"\"\" classes: [] parameters: {}\"\"\" ) ) with pytest.raises(KeyError):", "cluster.render_params(cfg.inventory, cluster_from_data(data)) assert \"parameters\" in params params = params[\"parameters\"] assert \"cluster\" in params", "\"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", ] assert target != \"\"", "assert \"provider\" in params[\"cloud\"] assert params[\"cloud\"][\"provider\"] == \"cloudscale\" assert \"customer\" in params assert", "os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as f: f.write( dedent( \"\"\" classes: [] parameters:", "= inv.defaults_file(cls) os.makedirs(defaults.parent, exist_ok=True) defaults.touch() component = inv.component_file(cls) os.makedirs(component.parent, exist_ok=True) component.touch() def test_render_bootstrap_target(tmp_path:", "assert target != \"\" print(target) assert len(target[\"classes\"]) == len( classes ), \"rendered target", 
"cluster.render_target(inv, \"fooer\", [\"foo\", \"bar\", \"baz\"], component=\"foo\") classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\",", "\"mytenant\" assert \"tenant_display_name\" in target_params assert target_params[\"tenant_display_name\"] == \"My Test Tenant\" assert \"dist\"", "target generation \"\"\" import os import click import pytest from pathlib import Path", "\"cluster\" def test_render_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components)", "cluster_from_data(data)) def test_empty_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"][\"cloud\"] = \"\" cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException):", "import os import click import pytest from pathlib import Path as P from", "\"tenant\": tenant[\"id\"], \"facts\": { \"distribution\": \"rancher\", \"cloud\": \"cloudscale\", }, \"dynamicFacts\": { \"kubernetes_version\": {", "\"defaults.bar\", \"global.commodore\", \"components.foo-comp\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) == len(", "assert target_params[\"tenant_display_name\"] == \"My Test Tenant\" assert \"dist\" in target_params assert target_params[\"dist\"] ==", "assert \"kubernetes_version\" in dyn_facts k8s_ver = dyn_facts[\"kubernetes_version\"] assert \"major\" in k8s_ver assert \"minor\"", "\"distribution\": \"rancher\", \"cloud\": \"cloudscale\", }, \"dynamicFacts\": { \"kubernetes_version\": { \"major\": \"1\", \"minor\": \"21\",", "= params[\"parameters\"] assert \"cluster\" in params assert \"name\" in params[\"cluster\"] assert params[\"cluster\"][\"name\"] ==", "cfg.inventory.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as f: f.write( dedent( \"\"\" parameters: cluster:", "= Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def 
test_read_cluster_and_tenant(tmp_path): cfg = Config(work_dir=tmp_path) file =", "target includes different amount of classes\" for i in range(len(classes)): assert target[\"classes\"][i] ==", "in params assert params[\"facts\"] == data[\"cluster\"][\"facts\"] assert \"dynamic_facts\" in params dyn_facts = params[\"dynamic_facts\"]", "assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"_instance\"] == \"cluster\" def test_render_target(tmp_path: P): components =", "params assert params[\"facts\"] == data[\"cluster\"][\"facts\"] assert \"dynamic_facts\" in params dyn_facts = params[\"dynamic_facts\"] assert", "\"catalog_url\" in target_params assert ( target_params[\"catalog_url\"] == \"ssh://git@git.example.com/cluster-catalogs/mycluster\" ) assert \"tenant\" in target_params", "t-delicate-pine-3938\"\"\" ) ) cluster_id, tenant_id = cluster.read_cluster_and_tenant(cfg.inventory) assert cluster_id == \"c-twilight-water-9032\" assert tenant_id", "[ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert target != \"\" print(target) assert", "def test_read_cluster_and_tenant(tmp_path): cfg = Config(work_dir=tmp_path) file = cfg.inventory.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\")", "inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"fooer\", [\"foo\", \"bar\", \"baz\"], component=\"foo\")", "\"components.foo\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) == len( classes ),", "\"\"\" parameters: cluster: name: c-twilight-water-9032 tenant: t-delicate-pine-3938\"\"\" ) ) cluster_id, tenant_id = cluster.read_cluster_and_tenant(cfg.inventory)", "target_params[\"tenant_display_name\"] == \"My Test Tenant\" assert \"dist\" in target_params assert target_params[\"dist\"] == \"rancher\"", "as f: f.write( dedent( \"\"\" parameters: cluster: name: c-twilight-water-9032 tenant: 
t-delicate-pine-3938\"\"\" ) )", "pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_empty_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"][\"cloud\"] = \"\" cfg = Config(work_dir=tmp_path)", "-> cluster.Cluster: return cluster.Cluster(data[\"cluster\"], data[\"tenant\"]) def _setup_working_dir(inv: Inventory, components): for cls in components:", "target_params assert ( target_params[\"catalog_url\"] == \"ssh://git@git.example.com/cluster-catalogs/mycluster\" ) assert \"tenant\" in target_params assert target_params[\"tenant\"]", "{ \"id\": \"mycluster\", \"displayName\": \"My Test Cluster\", \"tenant\": tenant[\"id\"], \"facts\": { \"distribution\": \"rancher\",", "\"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) == len(", "target_params = params[target] assert \"name\" in target_params assert target_params[\"name\"] == \"mycluster\" assert \"display_name\"", "params target_params = params[target] assert \"name\" in target_params assert target_params[\"name\"] == \"mycluster\" assert", "\"bar\", \"baz\"], component=\"foo\") classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert", "\"v1.21.3\", } }, \"gitRepo\": { \"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\", }, } return { \"cluster\": cluster,", "classes ), \"rendered target includes different amount of classes\" for i in range(len(classes)):", "\"facts\" in params assert params[\"facts\"] == data[\"cluster\"][\"facts\"] assert \"dynamic_facts\" in params dyn_facts =", "Cluster\", \"tenant\": tenant[\"id\"], \"facts\": { \"distribution\": \"rancher\", \"cloud\": \"cloudscale\", }, \"dynamicFacts\": { \"kubernetes_version\":", "== \"mycluster\" assert \"display_name\" in target_params assert target_params[\"display_name\"] == \"My Test Cluster\" assert", "os.makedirs(component.parent, 
exist_ok=True) component.touch() def test_render_bootstrap_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path)", "def test_empty_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"][\"cloud\"] = \"\" cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory,", "target_params assert target_params[\"display_name\"] == \"My Test Cluster\" assert \"catalog_url\" in target_params assert (", "\"\" print(target) assert len(target[\"classes\"]) == len( classes ), \"rendered target includes different amount", "from textwrap import dedent from commodore import cluster from commodore.inventory import Inventory from", "i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo-1\" assert target[\"parameters\"][\"foo_comp\"]", "classes\" for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo\"", "\"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert target", "\"name\" in params[\"customer\"] assert params[\"customer\"][\"name\"] == \"mytenant\" def test_missing_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg", "!= \"\" print(target) assert len(target[\"classes\"]) == len( classes ), \"rendered target includes different", "assert target in params target_params = params[target] assert \"name\" in target_params assert target_params[\"name\"]", "= [ \"params.cluster\", \"defaults.foo-comp\", \"defaults.bar\", \"global.commodore\", \"components.foo-comp\", ] assert target != \"\" print(target)", "\"components.foo-comp\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) == len( classes ),", "test_empty_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"][\"cloud\"] = \"\" cfg = 
Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data))", "Test Tenant\", } cluster = { \"id\": \"mycluster\", \"displayName\": \"My Test Cluster\", \"tenant\":", "= Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"cluster\", [\"foo\", \"bar\", \"baz\"]) classes =", "params dyn_facts = params[\"dynamic_facts\"] assert \"kubernetes_version\" in dyn_facts k8s_ver = dyn_facts[\"kubernetes_version\"] assert \"major\"", "= Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_empty_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"][\"cloud\"] = \"\"", "\"\" cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_read_cluster_and_tenant(tmp_path): cfg = Config(work_dir=tmp_path)", "\"mycluster\" assert target in params target_params = params[target] assert \"name\" in target_params assert", "inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target( inv, \"foo-1\", [\"foo-comp\", \"bar\", \"baz\"],", "file = cfg.inventory.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as f: f.write( dedent( \"\"\"", "assert cluster_id == \"c-twilight-water-9032\" assert tenant_id == \"t-delicate-pine-3938\" def test_read_cluster_and_tenant_missing_fact(tmp_path): inv = Inventory(work_dir=tmp_path)", "\"mycluster\", \"displayName\": \"My Test Cluster\", \"tenant\": tenant[\"id\"], \"facts\": { \"distribution\": \"rancher\", \"cloud\": \"cloudscale\",", "assert \"dynamic_facts\" in params dyn_facts = params[\"dynamic_facts\"] assert \"kubernetes_version\" in dyn_facts k8s_ver =", "target[\"parameters\"][\"foo\"] == \"${fooer}\" assert target[\"parameters\"][\"_instance\"] == \"fooer\" def 
test_render_aliased_target_with_dash(tmp_path: P): components = [\"foo-comp\",", "= inv.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as f: f.write( dedent( \"\"\" classes:", "\"rendered target includes different amount of classes\" for i in range(len(classes)): assert target[\"classes\"][i]", "assert \"v1.21.3\" == k8s_ver[\"gitVersion\"] assert \"cloud\" in params assert \"provider\" in params[\"cloud\"] assert", "in params target_params = params[target] assert \"name\" in target_params assert target_params[\"name\"] == \"mycluster\"", "test_read_cluster_and_tenant(tmp_path): cfg = Config(work_dir=tmp_path) file = cfg.inventory.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as", "== len( classes ), \"rendered target includes different amount of classes\" for i", "== classes[i] assert target[\"parameters\"][\"_instance\"] == \"cluster\" def test_render_target(tmp_path: P): components = [\"foo\", \"bar\"]", "cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_empty_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"][\"cloud\"] =", "{ \"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\", }, } return { \"cluster\": cluster, \"tenant\": tenant, } def", "assert \"tenant_display_name\" in target_params assert target_params[\"tenant_display_name\"] == \"My Test Tenant\" assert \"dist\" in", "\"gitVersion\": \"v1.21.3\", } }, \"gitRepo\": { \"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\", }, } return { \"cluster\":", "[\"foo\", \"bar\", \"baz\"], component=\"foo\") classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ]", "\"kubernetes_version\" in dyn_facts k8s_ver = dyn_facts[\"kubernetes_version\"] assert \"major\" in k8s_ver assert \"minor\" in", "\"parameters\" in params params = params[\"parameters\"] assert \"cluster\" in params 
assert \"name\" in", "target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"_instance\"] == \"cluster\" def test_render_target(tmp_path: P): components = [\"foo\",", "amount of classes\" for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"]", "in params assert \"name\" in params[\"customer\"] assert params[\"customer\"][\"name\"] == \"mytenant\" def test_missing_facts(data, tmp_path:", "in params[\"customer\"] assert params[\"customer\"][\"name\"] == \"mytenant\" def test_missing_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg =", "classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", ] assert target != \"\" print(target)", "commodore.config import Config @pytest.fixture def data(): \"\"\" Setup test data \"\"\" tenant =", "target_params[\"catalog_url\"] == \"ssh://git@git.example.com/cluster-catalogs/mycluster\" ) assert \"tenant\" in target_params assert target_params[\"tenant\"] == \"mytenant\" assert", "\"\"\" import os import click import pytest from pathlib import Path as P", "[\"foo-comp\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target( inv, \"foo-1\", [\"foo-comp\",", "components) target = cluster.render_target(inv, \"foo\", [\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\",", "params assert \"name\" in params[\"cluster\"] assert params[\"cluster\"][\"name\"] == \"mycluster\" assert target in params", "Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"foo\", [\"foo\", \"bar\", \"baz\"]) classes = [", "def test_render_aliased_target_with_dash(tmp_path: P): components = [\"foo-comp\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target", "cluster_from_data(data)) def test_read_cluster_and_tenant(tmp_path): cfg = 
Config(work_dir=tmp_path) file = cfg.inventory.params_file os.makedirs(file.parent, exist_ok=True) with open(file,", "== \"${fooer}\" assert target[\"parameters\"][\"_instance\"] == \"fooer\" def test_render_aliased_target_with_dash(tmp_path: P): components = [\"foo-comp\", \"bar\"]", "exist_ok=True) defaults.touch() component = inv.component_file(cls) os.makedirs(component.parent, exist_ok=True) component.touch() def test_render_bootstrap_target(tmp_path: P): components =", "assert \"gitVersion\" in k8s_ver assert \"1\" == k8s_ver[\"major\"] assert \"21\" == k8s_ver[\"minor\"] assert", "== k8s_ver[\"major\"] assert \"21\" == k8s_ver[\"minor\"] assert \"v1.21.3\" == k8s_ver[\"gitVersion\"] assert \"cloud\" in", "def test_render_params(data, tmp_path: P): cfg = Config(work_dir=tmp_path) target = cfg.inventory.bootstrap_target params = cluster.render_params(cfg.inventory,", "} return { \"cluster\": cluster, \"tenant\": tenant, } def cluster_from_data(data) -> cluster.Cluster: return", "in target_params assert target_params[\"dist\"] == \"rancher\" assert \"facts\" in params assert params[\"facts\"] ==", "cfg = Config(work_dir=tmp_path) file = cfg.inventory.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as f:", "= cluster.read_cluster_and_tenant(cfg.inventory) assert cluster_id == \"c-twilight-water-9032\" assert tenant_id == \"t-delicate-pine-3938\" def test_read_cluster_and_tenant_missing_fact(tmp_path): inv", "= { \"id\": \"mycluster\", \"displayName\": \"My Test Cluster\", \"tenant\": tenant[\"id\"], \"facts\": { \"distribution\":", "inv.component_file(cls) os.makedirs(component.parent, exist_ok=True) component.touch() def test_render_bootstrap_target(tmp_path: P): components = [\"foo\", \"bar\"] inv =", "\"1\", \"minor\": \"21\", \"gitVersion\": \"v1.21.3\", } }, \"gitRepo\": { \"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\", }, }", "inv = Inventory(work_dir=tmp_path) file = inv.params_file os.makedirs(file.parent, 
exist_ok=True) with open(file, \"w\") as f:", "target_params assert target_params[\"tenant_display_name\"] == \"My Test Tenant\" assert \"dist\" in target_params assert target_params[\"dist\"]", "target[\"parameters\"][\"foo_comp\"] == \"${foo_1}\" assert target[\"parameters\"][\"_instance\"] == \"foo-1\" def test_render_params(data, tmp_path: P): cfg =", "f: f.write( dedent( \"\"\" parameters: cluster: name: c-twilight-water-9032 tenant: t-delicate-pine-3938\"\"\" ) ) cluster_id,", "\"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"cluster\", [\"foo\", \"bar\", \"baz\"])", "== \"mytenant\" assert \"tenant_display_name\" in target_params assert target_params[\"tenant_display_name\"] == \"My Test Tenant\" assert", "exist_ok=True) with open(file, \"w\") as f: f.write( dedent( \"\"\" classes: [] parameters: {}\"\"\"", "for cls in components: defaults = inv.defaults_file(cls) os.makedirs(defaults.parent, exist_ok=True) defaults.touch() component = inv.component_file(cls)", "\"kubernetes_version\": { \"major\": \"1\", \"minor\": \"21\", \"gitVersion\": \"v1.21.3\", } }, \"gitRepo\": { \"url\":", "\"mytenant\", \"displayName\": \"My Test Tenant\", } cluster = { \"id\": \"mycluster\", \"displayName\": \"My", "\"customer\" in params assert \"name\" in params[\"customer\"] assert params[\"customer\"][\"name\"] == \"mytenant\" def test_missing_facts(data,", "assert tenant_id == \"t-delicate-pine-3938\" def test_read_cluster_and_tenant_missing_fact(tmp_path): inv = Inventory(work_dir=tmp_path) file = inv.params_file os.makedirs(file.parent,", "{ \"kubernetes_version\": { \"major\": \"1\", \"minor\": \"21\", \"gitVersion\": \"v1.21.3\", } }, \"gitRepo\": {", "assert target_params[\"name\"] == \"mycluster\" assert \"display_name\" in target_params assert target_params[\"display_name\"] == \"My Test", "component=\"foo-comp\" ) classes = [ \"params.cluster\", \"defaults.foo-comp\", \"defaults.bar\", 
\"global.commodore\", \"components.foo-comp\", ] assert target", "assert \"cluster\" in params assert \"name\" in params[\"cluster\"] assert params[\"cluster\"][\"name\"] == \"mycluster\" assert", "Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target( inv, \"foo-1\", [\"foo-comp\", \"bar\", \"baz\"], component=\"foo-comp\" )", "from pathlib import Path as P from textwrap import dedent from commodore import", "_setup_working_dir(inv, components) target = cluster.render_target(inv, \"foo\", [\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\",", "in k8s_ver assert \"1\" == k8s_ver[\"major\"] assert \"21\" == k8s_ver[\"minor\"] assert \"v1.21.3\" ==", "== \"t-delicate-pine-3938\" def test_read_cluster_and_tenant_missing_fact(tmp_path): inv = Inventory(work_dir=tmp_path) file = inv.params_file os.makedirs(file.parent, exist_ok=True) with", "assert target[\"parameters\"][\"_instance\"] == \"cluster\" def test_render_target(tmp_path: P): components = [\"foo\", \"bar\"] inv =", "\"${foo_1}\" assert target[\"parameters\"][\"_instance\"] == \"foo-1\" def test_render_params(data, tmp_path: P): cfg = Config(work_dir=tmp_path) target", "assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"fooer\" assert target[\"parameters\"][\"foo\"] == \"${fooer}\" assert target[\"parameters\"][\"_instance\"] == \"fooer\" def", "params[\"dynamic_facts\"] assert \"kubernetes_version\" in dyn_facts k8s_ver = dyn_facts[\"kubernetes_version\"] assert \"major\" in k8s_ver assert", "P): cfg = Config(work_dir=tmp_path) target = cfg.inventory.bootstrap_target params = cluster.render_params(cfg.inventory, cluster_from_data(data)) assert \"parameters\"", "\"bar\", \"baz\"], component=\"foo-comp\" ) classes = [ \"params.cluster\", \"defaults.foo-comp\", \"defaults.bar\", \"global.commodore\", \"components.foo-comp\", ]", "i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert 
target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"fooer\" assert target[\"parameters\"][\"foo\"]", "in k8s_ver assert \"gitVersion\" in k8s_ver assert \"1\" == k8s_ver[\"major\"] assert \"21\" ==", "test data \"\"\" tenant = { \"id\": \"mytenant\", \"displayName\": \"My Test Tenant\", }", "cluster_from_data(data)) assert \"parameters\" in params params = params[\"parameters\"] assert \"cluster\" in params assert", "import pytest from pathlib import Path as P from textwrap import dedent from", "= { \"id\": \"mytenant\", \"displayName\": \"My Test Tenant\", } cluster = { \"id\":", "defaults.touch() component = inv.component_file(cls) os.makedirs(component.parent, exist_ok=True) component.touch() def test_render_bootstrap_target(tmp_path: P): components = [\"foo\",", "= Config(work_dir=tmp_path) target = cfg.inventory.bootstrap_target params = cluster.render_params(cfg.inventory, cluster_from_data(data)) assert \"parameters\" in params", "dedent from commodore import cluster from commodore.inventory import Inventory from commodore.config import Config", "assert target_params[\"dist\"] == \"rancher\" assert \"facts\" in params assert params[\"facts\"] == data[\"cluster\"][\"facts\"] assert", "import dedent from commodore import cluster from commodore.inventory import Inventory from commodore.config import", "assert target[\"parameters\"][\"_instance\"] == \"fooer\" def test_render_aliased_target_with_dash(tmp_path: P): components = [\"foo-comp\", \"bar\"] inv =", "= [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"fooer\", [\"foo\",", "\"global.commodore\", \"components.foo\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) == len( classes", "cluster, \"tenant\": tenant, } def cluster_from_data(data) -> cluster.Cluster: return cluster.Cluster(data[\"cluster\"], data[\"tenant\"]) def _setup_working_dir(inv:", "params[\"customer\"] assert 
params[\"customer\"][\"name\"] == \"mytenant\" def test_missing_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg = Config(work_dir=tmp_path)", "assert params[\"facts\"] == data[\"cluster\"][\"facts\"] assert \"dynamic_facts\" in params dyn_facts = params[\"dynamic_facts\"] assert \"kubernetes_version\"", "assert \"facts\" in params assert params[\"facts\"] == data[\"cluster\"][\"facts\"] assert \"dynamic_facts\" in params dyn_facts", "classes[i] assert target[\"parameters\"][\"_instance\"] == \"cluster\" def test_render_target(tmp_path: P): components = [\"foo\", \"bar\"] inv", "inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"cluster\", [\"foo\", \"bar\", \"baz\"]) classes", "\"gitRepo\": { \"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\", }, } return { \"cluster\": cluster, \"tenant\": tenant, }", "for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo-1\" assert", "in dyn_facts k8s_ver = dyn_facts[\"kubernetes_version\"] assert \"major\" in k8s_ver assert \"minor\" in k8s_ver", "} cluster = { \"id\": \"mycluster\", \"displayName\": \"My Test Cluster\", \"tenant\": tenant[\"id\"], \"facts\":", "k8s_ver[\"minor\"] assert \"v1.21.3\" == k8s_ver[\"gitVersion\"] assert \"cloud\" in params assert \"provider\" in params[\"cloud\"]", "data[\"cluster\"][\"facts\"][\"cloud\"] = \"\" cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_read_cluster_and_tenant(tmp_path): cfg", "data(): \"\"\" Setup test data \"\"\" tenant = { \"id\": \"mytenant\", \"displayName\": \"My", "in target_params assert target_params[\"display_name\"] == \"My Test Cluster\" assert \"catalog_url\" in target_params assert", "def test_render_aliased_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = 
Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target", "range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"_instance\"] == \"cluster\" def test_render_target(tmp_path: P): components", "@pytest.fixture def data(): \"\"\" Setup test data \"\"\" tenant = { \"id\": \"mytenant\",", "of classes\" for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"_instance\"] ==", "commodore.inventory import Inventory from commodore.config import Config @pytest.fixture def data(): \"\"\" Setup test", "}, \"dynamicFacts\": { \"kubernetes_version\": { \"major\": \"1\", \"minor\": \"21\", \"gitVersion\": \"v1.21.3\", } },", "cluster.Cluster: return cluster.Cluster(data[\"cluster\"], data[\"tenant\"]) def _setup_working_dir(inv: Inventory, components): for cls in components: defaults", "\"baz\"], component=\"foo-comp\" ) classes = [ \"params.cluster\", \"defaults.foo-comp\", \"defaults.bar\", \"global.commodore\", \"components.foo-comp\", ] assert", "{ \"id\": \"mytenant\", \"displayName\": \"My Test Tenant\", } cluster = { \"id\": \"mycluster\",", "in k8s_ver assert \"minor\" in k8s_ver assert \"gitVersion\" in k8s_ver assert \"1\" ==", "file = inv.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as f: f.write( dedent( \"\"\"", "inv.defaults_file(cls) os.makedirs(defaults.parent, exist_ok=True) defaults.touch() component = inv.component_file(cls) os.makedirs(component.parent, exist_ok=True) component.touch() def test_render_bootstrap_target(tmp_path: P):", "Config(work_dir=tmp_path) target = cfg.inventory.bootstrap_target params = cluster.render_params(cfg.inventory, cluster_from_data(data)) assert \"parameters\" in params params", "), \"rendered target includes different amount of classes\" for i in range(len(classes)): assert", "assert params[\"cloud\"][\"provider\"] == \"cloudscale\" assert \"customer\" in params assert \"name\" in 
params[\"customer\"] assert", "classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo-1\" assert target[\"parameters\"][\"foo_comp\"] == \"${foo_1}\" assert target[\"parameters\"][\"_instance\"] == \"foo-1\"", "of classes\" for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] ==", "params[target] assert \"name\" in target_params assert target_params[\"name\"] == \"mycluster\" assert \"display_name\" in target_params", "= cfg.inventory.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as f: f.write( dedent( \"\"\" parameters:", "== \"cloudscale\" assert \"customer\" in params assert \"name\" in params[\"customer\"] assert params[\"customer\"][\"name\"] ==", "different amount of classes\" for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert", "\"defaults.bar\", \"global.commodore\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) == len( classes", "\"id\": \"mycluster\", \"displayName\": \"My Test Cluster\", \"tenant\": tenant[\"id\"], \"facts\": { \"distribution\": \"rancher\", \"cloud\":", "in target_params assert ( target_params[\"catalog_url\"] == \"ssh://git@git.example.com/cluster-catalogs/mycluster\" ) assert \"tenant\" in target_params assert", "amount of classes\" for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"_instance\"]", "in params[\"cloud\"] assert params[\"cloud\"][\"provider\"] == \"cloudscale\" assert \"customer\" in params assert \"name\" in", "k8s_ver = dyn_facts[\"kubernetes_version\"] assert \"major\" in k8s_ver assert \"minor\" in k8s_ver assert \"gitVersion\"", "dyn_facts[\"kubernetes_version\"] assert \"major\" in k8s_ver assert \"minor\" in k8s_ver assert \"gitVersion\" in k8s_ver", "k8s_ver assert \"minor\" in k8s_ver assert \"gitVersion\" in k8s_ver assert \"1\" == k8s_ver[\"major\"]", "= 
cluster.render_target(inv, \"cluster\", [\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\",", "cls in components: defaults = inv.defaults_file(cls) os.makedirs(defaults.parent, exist_ok=True) defaults.touch() component = inv.component_file(cls) os.makedirs(component.parent,", "def test_render_bootstrap_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target", "\"mycluster\" assert \"display_name\" in target_params assert target_params[\"display_name\"] == \"My Test Cluster\" assert \"catalog_url\"", "from commodore import cluster from commodore.inventory import Inventory from commodore.config import Config @pytest.fixture", "\"major\": \"1\", \"minor\": \"21\", \"gitVersion\": \"v1.21.3\", } }, \"gitRepo\": { \"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\", },", "] assert target != \"\" print(target) assert len(target[\"classes\"]) == len( classes ), \"rendered", "params[\"customer\"][\"name\"] == \"mytenant\" def test_missing_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException):", "params[\"cluster\"][\"name\"] == \"mycluster\" assert target in params target_params = params[target] assert \"name\" in", "\"ssh://git@git.example.com/cluster-catalogs/mycluster\", }, } return { \"cluster\": cluster, \"tenant\": tenant, } def cluster_from_data(data) ->", "in params assert \"provider\" in params[\"cloud\"] assert params[\"cloud\"][\"provider\"] == \"cloudscale\" assert \"customer\" in", "\"foo\" assert target[\"parameters\"][\"_instance\"] == \"foo\" def test_render_aliased_target(tmp_path: P): components = [\"foo\", \"bar\"] inv", "len(target[\"classes\"]) == len( classes ), \"rendered target includes different amount of classes\" for", "== \"fooer\" assert target[\"parameters\"][\"foo\"] == \"${fooer}\" assert 
target[\"parameters\"][\"_instance\"] == \"fooer\" def test_render_aliased_target_with_dash(tmp_path: P):", "in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"_instance\"] == \"cluster\" def test_render_target(tmp_path: P):", "= inv.component_file(cls) os.makedirs(component.parent, exist_ok=True) component.touch() def test_render_bootstrap_target(tmp_path: P): components = [\"foo\", \"bar\"] inv", "= [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", ] assert target != \"\" print(target) assert", "\"dist\" in target_params assert target_params[\"dist\"] == \"rancher\" assert \"facts\" in params assert params[\"facts\"]", "\"${fooer}\" assert target[\"parameters\"][\"_instance\"] == \"fooer\" def test_render_aliased_target_with_dash(tmp_path: P): components = [\"foo-comp\", \"bar\"] inv", "f.write( dedent( \"\"\" parameters: cluster: name: c-twilight-water-9032 tenant: t-delicate-pine-3938\"\"\" ) ) cluster_id, tenant_id", "Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"fooer\", [\"foo\", \"bar\", \"baz\"], component=\"foo\") classes =", "\"foo-1\" assert target[\"parameters\"][\"foo_comp\"] == \"${foo_1}\" assert target[\"parameters\"][\"_instance\"] == \"foo-1\" def test_render_params(data, tmp_path: P):", "k8s_ver assert \"gitVersion\" in k8s_ver assert \"1\" == k8s_ver[\"major\"] assert \"21\" == k8s_ver[\"minor\"]", "return { \"cluster\": cluster, \"tenant\": tenant, } def cluster_from_data(data) -> cluster.Cluster: return cluster.Cluster(data[\"cluster\"],", "Setup test data \"\"\" tenant = { \"id\": \"mytenant\", \"displayName\": \"My Test Tenant\",", "components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"fooer\",", "\"fooer\", [\"foo\", \"bar\", \"baz\"], component=\"foo\") classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", 
\"global.commodore\", \"components.foo\",", "\"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) ==", "cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_read_cluster_and_tenant(tmp_path): cfg = Config(work_dir=tmp_path) file = cfg.inventory.params_file os.makedirs(file.parent, exist_ok=True) with", "= cfg.inventory.bootstrap_target params = cluster.render_params(cfg.inventory, cluster_from_data(data)) assert \"parameters\" in params params = params[\"parameters\"]", "\"mytenant\" def test_missing_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data))", "[\"foo-comp\", \"bar\", \"baz\"], component=\"foo-comp\" ) classes = [ \"params.cluster\", \"defaults.foo-comp\", \"defaults.bar\", \"global.commodore\", \"components.foo-comp\",", "in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo\" assert target[\"parameters\"][\"_instance\"] ==", "cluster.read_cluster_and_tenant(cfg.inventory) assert cluster_id == \"c-twilight-water-9032\" assert tenant_id == \"t-delicate-pine-3938\" def test_read_cluster_and_tenant_missing_fact(tmp_path): inv =", "{ \"distribution\": \"rancher\", \"cloud\": \"cloudscale\", }, \"dynamicFacts\": { \"kubernetes_version\": { \"major\": \"1\", \"minor\":", "assert \"1\" == k8s_ver[\"major\"] assert \"21\" == k8s_ver[\"minor\"] assert \"v1.21.3\" == k8s_ver[\"gitVersion\"] assert", "\"minor\": \"21\", \"gitVersion\": \"v1.21.3\", } }, \"gitRepo\": { \"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\", }, } return", "\"\"\" tenant = { \"id\": \"mytenant\", \"displayName\": \"My Test Tenant\", } cluster =", "name: c-twilight-water-9032 tenant: t-delicate-pine-3938\"\"\" ) ) cluster_id, 
tenant_id = cluster.read_cluster_and_tenant(cfg.inventory) assert cluster_id ==", "[\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"foo\", [\"foo\", \"bar\",", "assert \"name\" in params[\"customer\"] assert params[\"customer\"][\"name\"] == \"mytenant\" def test_missing_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"].pop(\"cloud\")", "range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo-1\" assert target[\"parameters\"][\"foo_comp\"] == \"${foo_1}\"", "== \"${foo_1}\" assert target[\"parameters\"][\"_instance\"] == \"foo-1\" def test_render_params(data, tmp_path: P): cfg = Config(work_dir=tmp_path)", "in params params = params[\"parameters\"] assert \"cluster\" in params assert \"name\" in params[\"cluster\"]", "dyn_facts = params[\"dynamic_facts\"] assert \"kubernetes_version\" in dyn_facts k8s_ver = dyn_facts[\"kubernetes_version\"] assert \"major\" in", "\"dynamicFacts\": { \"kubernetes_version\": { \"major\": \"1\", \"minor\": \"21\", \"gitVersion\": \"v1.21.3\", } }, \"gitRepo\":", "parameters: cluster: name: c-twilight-water-9032 tenant: t-delicate-pine-3938\"\"\" ) ) cluster_id, tenant_id = cluster.read_cluster_and_tenant(cfg.inventory) assert", "range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"fooer\" assert target[\"parameters\"][\"foo\"] == \"${fooer}\"", "\"w\") as f: f.write( dedent( \"\"\" classes: [] parameters: {}\"\"\" ) ) with", "P from textwrap import dedent from commodore import cluster from commodore.inventory import Inventory", "tenant_id = cluster.read_cluster_and_tenant(cfg.inventory) assert cluster_id == \"c-twilight-water-9032\" assert tenant_id == \"t-delicate-pine-3938\" def test_read_cluster_and_tenant_missing_fact(tmp_path):", "Tenant\", } cluster = { \"id\": \"mycluster\", 
\"displayName\": \"My Test Cluster\", \"tenant\": tenant[\"id\"],", "\"defaults.foo-comp\", \"defaults.bar\", \"global.commodore\", \"components.foo-comp\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) ==", "target = cluster.render_target(inv, \"foo\", [\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\",", "cluster from commodore.inventory import Inventory from commodore.config import Config @pytest.fixture def data(): \"\"\"", "\"foo\" def test_render_aliased_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components)", "assert \"name\" in target_params assert target_params[\"name\"] == \"mycluster\" assert \"display_name\" in target_params assert", "in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"fooer\" assert target[\"parameters\"][\"foo\"] ==", "Cluster\" assert \"catalog_url\" in target_params assert ( target_params[\"catalog_url\"] == \"ssh://git@git.example.com/cluster-catalogs/mycluster\" ) assert \"tenant\"", "params[\"facts\"] == data[\"cluster\"][\"facts\"] assert \"dynamic_facts\" in params dyn_facts = params[\"dynamic_facts\"] assert \"kubernetes_version\" in", "\"gitVersion\" in k8s_ver assert \"1\" == k8s_ver[\"major\"] assert \"21\" == k8s_ver[\"minor\"] assert \"v1.21.3\"", "in params dyn_facts = params[\"dynamic_facts\"] assert \"kubernetes_version\" in dyn_facts k8s_ver = dyn_facts[\"kubernetes_version\"] assert", "P): data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_empty_facts(data, tmp_path: P):", "cluster_id, tenant_id = cluster.read_cluster_and_tenant(cfg.inventory) assert cluster_id == \"c-twilight-water-9032\" assert tenant_id == \"t-delicate-pine-3938\" def", "assert 
target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo\" assert target[\"parameters\"][\"_instance\"] == \"foo\" def test_render_aliased_target(tmp_path: P): components =", "== classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"fooer\" assert target[\"parameters\"][\"foo\"] == \"${fooer}\" assert target[\"parameters\"][\"_instance\"] ==", "in target_params assert target_params[\"tenant\"] == \"mytenant\" assert \"tenant_display_name\" in target_params assert target_params[\"tenant_display_name\"] ==", "def test_read_cluster_and_tenant_missing_fact(tmp_path): inv = Inventory(work_dir=tmp_path) file = inv.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\")", "component.touch() def test_render_bootstrap_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components)", ") assert \"tenant\" in target_params assert target_params[\"tenant\"] == \"mytenant\" assert \"tenant_display_name\" in target_params", "\"cloud\" in params assert \"provider\" in params[\"cloud\"] assert params[\"cloud\"][\"provider\"] == \"cloudscale\" assert \"customer\"", "= params[\"dynamic_facts\"] assert \"kubernetes_version\" in dyn_facts k8s_ver = dyn_facts[\"kubernetes_version\"] assert \"major\" in k8s_ver", "components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"foo\",", "assert \"21\" == k8s_ver[\"minor\"] assert \"v1.21.3\" == k8s_ver[\"gitVersion\"] assert \"cloud\" in params assert", "tmp_path: P): cfg = Config(work_dir=tmp_path) target = cfg.inventory.bootstrap_target params = cluster.render_params(cfg.inventory, cluster_from_data(data)) assert", "assert target[\"parameters\"][\"_instance\"] == \"foo\" def test_render_aliased_target(tmp_path: P): components = [\"foo\", \"bar\"] inv =", "with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, 
cluster_from_data(data)) def test_read_cluster_and_tenant(tmp_path): cfg = Config(work_dir=tmp_path) file = cfg.inventory.params_file os.makedirs(file.parent,", "cluster.render_target(inv, \"foo\", [\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\",", "Test Cluster\", \"tenant\": tenant[\"id\"], \"facts\": { \"distribution\": \"rancher\", \"cloud\": \"cloudscale\", }, \"dynamicFacts\": {", "\"foo-1\" def test_render_params(data, tmp_path: P): cfg = Config(work_dir=tmp_path) target = cfg.inventory.bootstrap_target params =", "assert \"parameters\" in params params = params[\"parameters\"] assert \"cluster\" in params assert \"name\"", "Config(work_dir=tmp_path) file = cfg.inventory.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as f: f.write( dedent(", "\"rancher\", \"cloud\": \"cloudscale\", }, \"dynamicFacts\": { \"kubernetes_version\": { \"major\": \"1\", \"minor\": \"21\", \"gitVersion\":", "_setup_working_dir(inv, components) target = cluster.render_target(inv, \"fooer\", [\"foo\", \"bar\", \"baz\"], component=\"foo\") classes = [", "== \"fooer\" def test_render_aliased_target_with_dash(tmp_path: P): components = [\"foo-comp\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv,", "Path as P from textwrap import dedent from commodore import cluster from commodore.inventory", "= params[target] assert \"name\" in target_params assert target_params[\"name\"] == \"mycluster\" assert \"display_name\" in", "Tenant\" assert \"dist\" in target_params assert target_params[\"dist\"] == \"rancher\" assert \"facts\" in params", "\"foo-1\", [\"foo-comp\", \"bar\", \"baz\"], component=\"foo-comp\" ) classes = [ \"params.cluster\", \"defaults.foo-comp\", \"defaults.bar\", \"global.commodore\",", "_setup_working_dir(inv, components) target = cluster.render_target( inv, \"foo-1\", [\"foo-comp\", \"bar\", \"baz\"], component=\"foo-comp\" ) classes", 
"import cluster from commodore.inventory import Inventory from commodore.config import Config @pytest.fixture def data():", "test_render_aliased_target_with_dash(tmp_path: P): components = [\"foo-comp\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target =", "target[\"parameters\"][\"_instance\"] == \"fooer\" def test_render_aliased_target_with_dash(tmp_path: P): components = [\"foo-comp\", \"bar\"] inv = Inventory(work_dir=tmp_path)", "[\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", ] assert target", "== \"My Test Tenant\" assert \"dist\" in target_params assert target_params[\"dist\"] == \"rancher\" assert", "params[\"cloud\"] assert params[\"cloud\"][\"provider\"] == \"cloudscale\" assert \"customer\" in params assert \"name\" in params[\"customer\"]", "\"baz\"], component=\"foo\") classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert target", "open(file, \"w\") as f: f.write( dedent( \"\"\" classes: [] parameters: {}\"\"\" ) )", "== data[\"cluster\"][\"facts\"] assert \"dynamic_facts\" in params dyn_facts = params[\"dynamic_facts\"] assert \"kubernetes_version\" in dyn_facts", "\"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"fooer\", [\"foo\", \"bar\", \"baz\"],", "components) target = cluster.render_target(inv, \"cluster\", [\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\",", "target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo-1\" assert target[\"parameters\"][\"foo_comp\"] == \"${foo_1}\" assert target[\"parameters\"][\"_instance\"]", "target != \"\" print(target) assert len(target[\"classes\"]) == len( classes ), \"rendered target includes", "= \"\" cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): 
cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_read_cluster_and_tenant(tmp_path): cfg =", "with open(file, \"w\") as f: f.write( dedent( \"\"\" classes: [] parameters: {}\"\"\" )", "target_params[\"display_name\"] == \"My Test Cluster\" assert \"catalog_url\" in target_params assert ( target_params[\"catalog_url\"] ==", "Inventory, components): for cls in components: defaults = inv.defaults_file(cls) os.makedirs(defaults.parent, exist_ok=True) defaults.touch() component", "\"cloudscale\" assert \"customer\" in params assert \"name\" in params[\"customer\"] assert params[\"customer\"][\"name\"] == \"mytenant\"", "for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"_instance\"] == \"cluster\" def", "classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo\" assert target[\"parameters\"][\"_instance\"] == \"foo\" def test_render_aliased_target(tmp_path: P): components", "params[\"cluster\"] assert params[\"cluster\"][\"name\"] == \"mycluster\" assert target in params target_params = params[target] assert", "Config @pytest.fixture def data(): \"\"\" Setup test data \"\"\" tenant = { \"id\":", "from commodore.config import Config @pytest.fixture def data(): \"\"\" Setup test data \"\"\" tenant", "len( classes ), \"rendered target includes different amount of classes\" for i in", "with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_empty_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"][\"cloud\"] = \"\" cfg =", "for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"fooer\" assert", "\"My Test Tenant\", } cluster = { \"id\": \"mycluster\", \"displayName\": \"My Test Cluster\",", "\"v1.21.3\" == k8s_ver[\"gitVersion\"] assert \"cloud\" in params assert \"provider\" in params[\"cloud\"] assert 
params[\"cloud\"][\"provider\"]", "\"1\" == k8s_ver[\"major\"] assert \"21\" == k8s_ver[\"minor\"] assert \"v1.21.3\" == k8s_ver[\"gitVersion\"] assert \"cloud\"", "k8s_ver[\"major\"] assert \"21\" == k8s_ver[\"minor\"] assert \"v1.21.3\" == k8s_ver[\"gitVersion\"] assert \"cloud\" in params", ") ) cluster_id, tenant_id = cluster.read_cluster_and_tenant(cfg.inventory) assert cluster_id == \"c-twilight-water-9032\" assert tenant_id ==", "= Config(work_dir=tmp_path) file = cfg.inventory.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as f: f.write(", "}, \"gitRepo\": { \"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\", }, } return { \"cluster\": cluster, \"tenant\": tenant,", "= [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"foo\", [\"foo\",", "\"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) ==", "\"display_name\" in target_params assert target_params[\"display_name\"] == \"My Test Cluster\" assert \"catalog_url\" in target_params", "return cluster.Cluster(data[\"cluster\"], data[\"tenant\"]) def _setup_working_dir(inv: Inventory, components): for cls in components: defaults =", "target = cfg.inventory.bootstrap_target params = cluster.render_params(cfg.inventory, cluster_from_data(data)) assert \"parameters\" in params params =", "target = cluster.render_target(inv, \"fooer\", [\"foo\", \"bar\", \"baz\"], component=\"foo\") classes = [ \"params.cluster\", \"defaults.foo\",", "for target generation \"\"\" import os import click import pytest from pathlib import", "tenant = { \"id\": \"mytenant\", \"displayName\": \"My Test Tenant\", } cluster = {", "textwrap import dedent from commodore import cluster from commodore.inventory import Inventory from commodore.config", "= Inventory(work_dir=tmp_path) file = inv.params_file os.makedirs(file.parent, 
exist_ok=True) with open(file, \"w\") as f: f.write(", "assert ( target_params[\"catalog_url\"] == \"ssh://git@git.example.com/cluster-catalogs/mycluster\" ) assert \"tenant\" in target_params assert target_params[\"tenant\"] ==", "Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"cluster\", [\"foo\", \"bar\", \"baz\"]) classes = [", "params assert \"provider\" in params[\"cloud\"] assert params[\"cloud\"][\"provider\"] == \"cloudscale\" assert \"customer\" in params", "i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"_instance\"] == \"cluster\" def test_render_target(tmp_path:", "target[\"parameters\"][\"_instance\"] == \"foo-1\" def test_render_params(data, tmp_path: P): cfg = Config(work_dir=tmp_path) target = cfg.inventory.bootstrap_target", "test_render_params(data, tmp_path: P): cfg = Config(work_dir=tmp_path) target = cfg.inventory.bootstrap_target params = cluster.render_params(cfg.inventory, cluster_from_data(data))", "classes = [ \"params.cluster\", \"defaults.foo-comp\", \"defaults.bar\", \"global.commodore\", \"components.foo-comp\", ] assert target != \"\"", "assert \"customer\" in params assert \"name\" in params[\"customer\"] assert params[\"customer\"][\"name\"] == \"mytenant\" def", "assert len(target[\"classes\"]) == len( classes ), \"rendered target includes different amount of classes\"", "target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"fooer\" assert target[\"parameters\"][\"foo\"] == \"${fooer}\" assert target[\"parameters\"][\"_instance\"]", "\"21\" == k8s_ver[\"minor\"] assert \"v1.21.3\" == k8s_ver[\"gitVersion\"] assert \"cloud\" in params assert \"provider\"", "tenant[\"id\"], \"facts\": { \"distribution\": \"rancher\", \"cloud\": \"cloudscale\", }, \"dynamicFacts\": { \"kubernetes_version\": { \"major\":", "Test Cluster\" assert \"catalog_url\" in target_params assert ( 
target_params[\"catalog_url\"] == \"ssh://git@git.example.com/cluster-catalogs/mycluster\" ) assert", "== \"mytenant\" def test_missing_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory,", "for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo\" assert", "\"minor\" in k8s_ver assert \"gitVersion\" in k8s_ver assert \"1\" == k8s_ver[\"major\"] assert \"21\"", "Inventory(work_dir=tmp_path) file = inv.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as f: f.write( dedent(", "\"facts\": { \"distribution\": \"rancher\", \"cloud\": \"cloudscale\", }, \"dynamicFacts\": { \"kubernetes_version\": { \"major\": \"1\",", "\"My Test Cluster\" assert \"catalog_url\" in target_params assert ( target_params[\"catalog_url\"] == \"ssh://git@git.example.com/cluster-catalogs/mycluster\" )", "click import pytest from pathlib import Path as P from textwrap import dedent", "open(file, \"w\") as f: f.write( dedent( \"\"\" parameters: cluster: name: c-twilight-water-9032 tenant: t-delicate-pine-3938\"\"\"", "\"name\" in params[\"cluster\"] assert params[\"cluster\"][\"name\"] == \"mycluster\" assert target in params target_params =", "== k8s_ver[\"gitVersion\"] assert \"cloud\" in params assert \"provider\" in params[\"cloud\"] assert params[\"cloud\"][\"provider\"] ==", "target_params assert target_params[\"dist\"] == \"rancher\" assert \"facts\" in params assert params[\"facts\"] == data[\"cluster\"][\"facts\"]", "import click import pytest from pathlib import Path as P from textwrap import", "in params[\"cluster\"] assert params[\"cluster\"][\"name\"] == \"mycluster\" assert target in params target_params = params[target]", "params[\"cloud\"][\"provider\"] == \"cloudscale\" assert \"customer\" in params assert \"name\" in 
params[\"customer\"] assert params[\"customer\"][\"name\"]", "tmp_path: P): data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_empty_facts(data, tmp_path:", "in params assert \"name\" in params[\"cluster\"] assert params[\"cluster\"][\"name\"] == \"mycluster\" assert target in", "generation \"\"\" import os import click import pytest from pathlib import Path as", "in components: defaults = inv.defaults_file(cls) os.makedirs(defaults.parent, exist_ok=True) defaults.touch() component = inv.component_file(cls) os.makedirs(component.parent, exist_ok=True)", "inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"foo\", [\"foo\", \"bar\", \"baz\"]) classes", "component=\"foo\") classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert target !=", "= cluster.render_target( inv, \"foo-1\", [\"foo-comp\", \"bar\", \"baz\"], component=\"foo-comp\" ) classes = [ \"params.cluster\",", "i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo\" assert target[\"parameters\"][\"_instance\"]", "} def cluster_from_data(data) -> cluster.Cluster: return cluster.Cluster(data[\"cluster\"], data[\"tenant\"]) def _setup_working_dir(inv: Inventory, components): for", "assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo-1\" assert target[\"parameters\"][\"foo_comp\"] == \"${foo_1}\" assert target[\"parameters\"][\"_instance\"] == \"foo-1\" def", "cluster = { \"id\": \"mycluster\", \"displayName\": \"My Test Cluster\", \"tenant\": tenant[\"id\"], \"facts\": {", "exist_ok=True) with open(file, \"w\") as f: f.write( dedent( \"\"\" parameters: cluster: name: c-twilight-water-9032", "os.makedirs(file.parent, exist_ok=True) with open(file, 
\"w\") as f: f.write( dedent( \"\"\" parameters: cluster: name:", "params params = params[\"parameters\"] assert \"cluster\" in params assert \"name\" in params[\"cluster\"] assert", "includes different amount of classes\" for i in range(len(classes)): assert target[\"classes\"][i] == classes[i]", "\"params.cluster\", \"defaults.foo-comp\", \"defaults.bar\", \"global.commodore\", \"components.foo-comp\", ] assert target != \"\" print(target) assert len(target[\"classes\"])", "( target_params[\"catalog_url\"] == \"ssh://git@git.example.com/cluster-catalogs/mycluster\" ) assert \"tenant\" in target_params assert target_params[\"tenant\"] == \"mytenant\"", "assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo\" assert target[\"parameters\"][\"_instance\"] == \"foo\" def", "P): data[\"cluster\"][\"facts\"][\"cloud\"] = \"\" cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_read_cluster_and_tenant(tmp_path):", "assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo-1\" assert target[\"parameters\"][\"foo_comp\"] == \"${foo_1}\" assert", "\"tenant\" in target_params assert target_params[\"tenant\"] == \"mytenant\" assert \"tenant_display_name\" in target_params assert target_params[\"tenant_display_name\"]", ") classes = [ \"params.cluster\", \"defaults.foo-comp\", \"defaults.bar\", \"global.commodore\", \"components.foo-comp\", ] assert target !=", "= Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"foo\", [\"foo\", \"bar\", \"baz\"]) classes =", "components): for cls in components: defaults = inv.defaults_file(cls) os.makedirs(defaults.parent, exist_ok=True) defaults.touch() component =", "\"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = 
cluster.render_target( inv, \"foo-1\", [\"foo-comp\", \"bar\",", "target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo\" assert target[\"parameters\"][\"_instance\"] == \"foo\" def test_render_aliased_target(tmp_path: P): components = [\"foo\",", "test_render_bootstrap_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target =", "[ \"params.cluster\", \"defaults.foo-comp\", \"defaults.bar\", \"global.commodore\", \"components.foo-comp\", ] assert target != \"\" print(target) assert", "inv.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as f: f.write( dedent( \"\"\" classes: []", "components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"cluster\",", "def cluster_from_data(data) -> cluster.Cluster: return cluster.Cluster(data[\"cluster\"], data[\"tenant\"]) def _setup_working_dir(inv: Inventory, components): for cls", "== classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo-1\" assert target[\"parameters\"][\"foo_comp\"] == \"${foo_1}\" assert target[\"parameters\"][\"_instance\"] ==", "[\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"cluster\", [\"foo\", \"bar\",", "c-twilight-water-9032 tenant: t-delicate-pine-3938\"\"\" ) ) cluster_id, tenant_id = cluster.read_cluster_and_tenant(cfg.inventory) assert cluster_id == \"c-twilight-water-9032\"", "\"dynamic_facts\" in params dyn_facts = params[\"dynamic_facts\"] assert \"kubernetes_version\" in dyn_facts k8s_ver = dyn_facts[\"kubernetes_version\"]", "defaults = inv.defaults_file(cls) os.makedirs(defaults.parent, exist_ok=True) defaults.touch() component = inv.component_file(cls) os.makedirs(component.parent, exist_ok=True) component.touch() def", "os.makedirs(defaults.parent, exist_ok=True) defaults.touch() 
component = inv.component_file(cls) os.makedirs(component.parent, exist_ok=True) component.touch() def test_render_bootstrap_target(tmp_path: P): components", "\"21\", \"gitVersion\": \"v1.21.3\", } }, \"gitRepo\": { \"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\", }, } return {", "inv, \"foo-1\", [\"foo-comp\", \"bar\", \"baz\"], component=\"foo-comp\" ) classes = [ \"params.cluster\", \"defaults.foo-comp\", \"defaults.bar\",", "in target_params assert target_params[\"tenant_display_name\"] == \"My Test Tenant\" assert \"dist\" in target_params assert", "k8s_ver assert \"1\" == k8s_ver[\"major\"] assert \"21\" == k8s_ver[\"minor\"] assert \"v1.21.3\" == k8s_ver[\"gitVersion\"]", "\"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert target !=", "== \"foo\" assert target[\"parameters\"][\"_instance\"] == \"foo\" def test_render_aliased_target(tmp_path: P): components = [\"foo\", \"bar\"]", "target = cluster.render_target(inv, \"cluster\", [\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\",", "components) target = cluster.render_target( inv, \"foo-1\", [\"foo-comp\", \"bar\", \"baz\"], component=\"foo-comp\" ) classes =", "\"global.commodore\", \"components.foo-comp\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) == len( classes", "= Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"fooer\", [\"foo\", \"bar\", \"baz\"], component=\"foo\") classes", "dedent( \"\"\" parameters: cluster: name: c-twilight-water-9032 tenant: t-delicate-pine-3938\"\"\" ) ) cluster_id, tenant_id =", "tmp_path: P): data[\"cluster\"][\"facts\"][\"cloud\"] = \"\" cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def", "\"\"\" Setup test data \"\"\" tenant = { \"id\": \"mytenant\", 
\"displayName\": \"My Test", "}, } return { \"cluster\": cluster, \"tenant\": tenant, } def cluster_from_data(data) -> cluster.Cluster:", "_setup_working_dir(inv: Inventory, components): for cls in components: defaults = inv.defaults_file(cls) os.makedirs(defaults.parent, exist_ok=True) defaults.touch()", "os import click import pytest from pathlib import Path as P from textwrap", "\"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert target != \"\" print(target) assert len(target[\"classes\"])", "cluster.render_target( inv, \"foo-1\", [\"foo-comp\", \"bar\", \"baz\"], component=\"foo-comp\" ) classes = [ \"params.cluster\", \"defaults.foo-comp\",", "params[\"parameters\"] assert \"cluster\" in params assert \"name\" in params[\"cluster\"] assert params[\"cluster\"][\"name\"] == \"mycluster\"", "target in params target_params = params[target] assert \"name\" in target_params assert target_params[\"name\"] ==", "assert \"dist\" in target_params assert target_params[\"dist\"] == \"rancher\" assert \"facts\" in params assert", "def test_render_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target", "== \"cluster\" def test_render_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv,", "P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv,", "dyn_facts k8s_ver = dyn_facts[\"kubernetes_version\"] assert \"major\" in k8s_ver assert \"minor\" in k8s_ver assert", "import Config @pytest.fixture def data(): \"\"\" Setup test data \"\"\" tenant = {", "cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_empty_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"][\"cloud\"] = \"\" cfg = Config(work_dir=tmp_path) with", "\"displayName\": \"My Test Cluster\", \"tenant\": tenant[\"id\"], 
\"facts\": { \"distribution\": \"rancher\", \"cloud\": \"cloudscale\", },", "assert \"catalog_url\" in target_params assert ( target_params[\"catalog_url\"] == \"ssh://git@git.example.com/cluster-catalogs/mycluster\" ) assert \"tenant\" in", "== \"rancher\" assert \"facts\" in params assert params[\"facts\"] == data[\"cluster\"][\"facts\"] assert \"dynamic_facts\" in", "Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_empty_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"][\"cloud\"] = \"\" cfg", "data[\"tenant\"]) def _setup_working_dir(inv: Inventory, components): for cls in components: defaults = inv.defaults_file(cls) os.makedirs(defaults.parent,", "components: defaults = inv.defaults_file(cls) os.makedirs(defaults.parent, exist_ok=True) defaults.touch() component = inv.component_file(cls) os.makedirs(component.parent, exist_ok=True) component.touch()", "test_missing_facts(data, tmp_path: P): data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_empty_facts(data,", "\"id\": \"mytenant\", \"displayName\": \"My Test Tenant\", } cluster = { \"id\": \"mycluster\", \"displayName\":", "test_read_cluster_and_tenant_missing_fact(tmp_path): inv = Inventory(work_dir=tmp_path) file = inv.params_file os.makedirs(file.parent, exist_ok=True) with open(file, \"w\") as", "\"major\" in k8s_ver assert \"minor\" in k8s_ver assert \"gitVersion\" in k8s_ver assert \"1\"", "f: f.write( dedent( \"\"\" classes: [] parameters: {}\"\"\" ) ) with pytest.raises(KeyError): cluster.read_cluster_and_tenant(inv)", "= cluster.render_target(inv, \"fooer\", [\"foo\", \"bar\", \"baz\"], component=\"foo\") classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\",", "assert params[\"customer\"][\"name\"] == \"mytenant\" def test_missing_facts(data, tmp_path: P): 
data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg = Config(work_dir=tmp_path) with", "\"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"foo\", [\"foo\", \"bar\", \"baz\"])", "\"name\" in target_params assert target_params[\"name\"] == \"mycluster\" assert \"display_name\" in target_params assert target_params[\"display_name\"]", "\"provider\" in params[\"cloud\"] assert params[\"cloud\"][\"provider\"] == \"cloudscale\" assert \"customer\" in params assert \"name\"", "== k8s_ver[\"minor\"] assert \"v1.21.3\" == k8s_ver[\"gitVersion\"] assert \"cloud\" in params assert \"provider\" in", "print(target) assert len(target[\"classes\"]) == len( classes ), \"rendered target includes different amount of", "classes\" for i in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo-1\"", "= dyn_facts[\"kubernetes_version\"] assert \"major\" in k8s_ver assert \"minor\" in k8s_ver assert \"gitVersion\" in", "assert params[\"cluster\"][\"name\"] == \"mycluster\" assert target in params target_params = params[target] assert \"name\"", "== \"foo-1\" assert target[\"parameters\"][\"foo_comp\"] == \"${foo_1}\" assert target[\"parameters\"][\"_instance\"] == \"foo-1\" def test_render_params(data, tmp_path:", "[\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(inv, \"fooer\", [\"foo\", \"bar\",", "[\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert", "data \"\"\" tenant = { \"id\": \"mytenant\", \"displayName\": \"My Test Tenant\", } cluster", "== \"c-twilight-water-9032\" assert tenant_id == \"t-delicate-pine-3938\" def test_read_cluster_and_tenant_missing_fact(tmp_path): inv = Inventory(work_dir=tmp_path) file =", "\"foo\", [\"foo\", \"bar\", \"baz\"]) classes = [ 
\"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ]", "tenant: t-delicate-pine-3938\"\"\" ) ) cluster_id, tenant_id = cluster.read_cluster_and_tenant(cfg.inventory) assert cluster_id == \"c-twilight-water-9032\" assert", "= [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", \"components.foo\", ] assert target != \"\" print(target)", "in target_params assert target_params[\"name\"] == \"mycluster\" assert \"display_name\" in target_params assert target_params[\"display_name\"] ==", "= Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target( inv, \"foo-1\", [\"foo-comp\", \"bar\", \"baz\"], component=\"foo-comp\"", "target_params[\"dist\"] == \"rancher\" assert \"facts\" in params assert params[\"facts\"] == data[\"cluster\"][\"facts\"] assert \"dynamic_facts\"", "== \"ssh://git@git.example.com/cluster-catalogs/mycluster\" ) assert \"tenant\" in target_params assert target_params[\"tenant\"] == \"mytenant\" assert \"tenant_display_name\"", "assert target[\"parameters\"][\"foo\"] == \"${fooer}\" assert target[\"parameters\"][\"_instance\"] == \"fooer\" def test_render_aliased_target_with_dash(tmp_path: P): components =", "target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo\" assert target[\"parameters\"][\"_instance\"] == \"foo\" def test_render_aliased_target(tmp_path:", "import Path as P from textwrap import dedent from commodore import cluster from", "components = [\"foo-comp\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target( inv,", "\"My Test Tenant\" assert \"dist\" in target_params assert target_params[\"dist\"] == \"rancher\" assert \"facts\"", "\"cloud\": \"cloudscale\", }, \"dynamicFacts\": { \"kubernetes_version\": { \"major\": \"1\", \"minor\": \"21\", \"gitVersion\": \"v1.21.3\",", "test_render_target(tmp_path: P): 
components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target =", "\"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", ] assert target !=", "def data(): \"\"\" Setup test data \"\"\" tenant = { \"id\": \"mytenant\", \"displayName\":", "\"c-twilight-water-9032\" assert tenant_id == \"t-delicate-pine-3938\" def test_read_cluster_and_tenant_missing_fact(tmp_path): inv = Inventory(work_dir=tmp_path) file = inv.params_file", "assert target_params[\"display_name\"] == \"My Test Cluster\" assert \"catalog_url\" in target_params assert ( target_params[\"catalog_url\"]", "\"cluster\", [\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", ] assert", "target_params assert target_params[\"tenant\"] == \"mytenant\" assert \"tenant_display_name\" in target_params assert target_params[\"tenant_display_name\"] == \"My", "P): components = [\"foo-comp\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target(", "= cluster.render_params(cfg.inventory, cluster_from_data(data)) assert \"parameters\" in params params = params[\"parameters\"] assert \"cluster\" in", "= [\"foo-comp\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv, components) target = cluster.render_target( inv, \"foo-1\",", "\"ssh://git@git.example.com/cluster-catalogs/mycluster\" ) assert \"tenant\" in target_params assert target_params[\"tenant\"] == \"mytenant\" assert \"tenant_display_name\" in", "cfg.inventory.bootstrap_target params = cluster.render_params(cfg.inventory, cluster_from_data(data)) assert \"parameters\" in params params = params[\"parameters\"] assert", "\"defaults.foo\", \"defaults.bar\", \"global.commodore\", ] assert target != \"\" print(target) assert len(target[\"classes\"]) == len(", "== \"foo\" def test_render_aliased_target(tmp_path: P): components = 
[\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path) _setup_working_dir(inv,", "\"cluster\" in params assert \"name\" in params[\"cluster\"] assert params[\"cluster\"][\"name\"] == \"mycluster\" assert target", "pytest from pathlib import Path as P from textwrap import dedent from commodore", "component = inv.component_file(cls) os.makedirs(component.parent, exist_ok=True) component.touch() def test_render_bootstrap_target(tmp_path: P): components = [\"foo\", \"bar\"]", "cluster.render_target(inv, \"cluster\", [\"foo\", \"bar\", \"baz\"]) classes = [ \"params.cluster\", \"defaults.foo\", \"defaults.bar\", \"global.commodore\", ]", "Inventory from commodore.config import Config @pytest.fixture def data(): \"\"\" Setup test data \"\"\"", "== classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo\" assert target[\"parameters\"][\"_instance\"] == \"foo\" def test_render_aliased_target(tmp_path: P):", "in range(len(classes)): assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo-1\" assert target[\"parameters\"][\"foo_comp\"] ==", "target[\"parameters\"][\"_instance\"] == \"cluster\" def test_render_target(tmp_path: P): components = [\"foo\", \"bar\"] inv = Inventory(work_dir=tmp_path)", "target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"foo-1\" assert target[\"parameters\"][\"foo_comp\"] == \"${foo_1}\" assert target[\"parameters\"][\"_instance\"] == \"foo-1\" def test_render_params(data,", "assert \"major\" in k8s_ver assert \"minor\" in k8s_ver assert \"gitVersion\" in k8s_ver assert", "\"w\") as f: f.write( dedent( \"\"\" parameters: cluster: name: c-twilight-water-9032 tenant: t-delicate-pine-3938\"\"\" )", "data[\"cluster\"][\"facts\"].pop(\"cloud\") cfg = Config(work_dir=tmp_path) with pytest.raises(click.ClickException): cluster.render_params(cfg.inventory, cluster_from_data(data)) def test_empty_facts(data, tmp_path: P): 
data[\"cluster\"][\"facts\"][\"cloud\"]", "data[\"cluster\"][\"facts\"] assert \"dynamic_facts\" in params dyn_facts = params[\"dynamic_facts\"] assert \"kubernetes_version\" in dyn_facts k8s_ver", "{ \"major\": \"1\", \"minor\": \"21\", \"gitVersion\": \"v1.21.3\", } }, \"gitRepo\": { \"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\",", "\"fooer\" assert target[\"parameters\"][\"foo\"] == \"${fooer}\" assert target[\"parameters\"][\"_instance\"] == \"fooer\" def test_render_aliased_target_with_dash(tmp_path: P): components", "with open(file, \"w\") as f: f.write( dedent( \"\"\" parameters: cluster: name: c-twilight-water-9032 tenant:", "{ \"cluster\": cluster, \"tenant\": tenant, } def cluster_from_data(data) -> cluster.Cluster: return cluster.Cluster(data[\"cluster\"], data[\"tenant\"])", "target_params[\"tenant\"] == \"mytenant\" assert \"tenant_display_name\" in target_params assert target_params[\"tenant_display_name\"] == \"My Test Tenant\"", "target_params[\"name\"] == \"mycluster\" assert \"display_name\" in target_params assert target_params[\"display_name\"] == \"My Test Cluster\"", "assert \"tenant\" in target_params assert target_params[\"tenant\"] == \"mytenant\" assert \"tenant_display_name\" in target_params assert", "} }, \"gitRepo\": { \"url\": \"ssh://git@git.example.com/cluster-catalogs/mycluster\", }, } return { \"cluster\": cluster, \"tenant\":", "assert target[\"classes\"][i] == classes[i] assert target[\"parameters\"][\"kapitan\"][\"vars\"][\"target\"] == \"fooer\" assert target[\"parameters\"][\"foo\"] == \"${fooer}\" assert", "\"cluster\": cluster, \"tenant\": tenant, } def cluster_from_data(data) -> cluster.Cluster: return cluster.Cluster(data[\"cluster\"], data[\"tenant\"]) def", "Unit-tests for target generation \"\"\" import os import click import pytest from pathlib", "== \"My Test Cluster\" assert \"catalog_url\" in target_params assert ( target_params[\"catalog_url\"] == 
\"ssh://git@git.example.com/cluster-catalogs/mycluster\"", "tenant, } def cluster_from_data(data) -> cluster.Cluster: return cluster.Cluster(data[\"cluster\"], data[\"tenant\"]) def _setup_working_dir(inv: Inventory, components):" ]
[ "gdal.GetDriverByName( format ) # metadata = driver2.GetMetadata() # if metadata.has_key(gdal.DCAP_CREATE) \\ # and", "= 5000 y_res = 5000 # target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) #", "layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False) pixel_size = 5", "related to each polygon, count # the pixels with given values that are", "# dataSource=None # layerbr=None # dataSourcebr=None # layer_rd=None # dataSource_rd=None # target_ds= None", "driver.Open(rapideye, True) layer_rd = dataSource_rd.GetLayer() shapefile = ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource = driver.Open(shapefile, True) layer", "there are more then 1 catalog for infile in glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print infile rapideye", "np.histogram(classes, bins=[0,1,20]) classes2 = classes*array resposta = np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\", int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\", int(resposta[0][1]*25))", "classification covers the # whole country. We have rural properties boundaries and other", "Rasterize gdal.RasterizeLayer(target_ds, [1], dstlayer, burn_values=[1]) array = band.ReadAsArray() #print np.histogram(array, bins=[0,1,250,300]) # Read", "file and put the results in 3 band for testing purposes # #", "that we want to verify # how much area was classified as being", "to the vector layer with features of interest. 
# # Author: leandro.biondo #", "banda_class geom=feat_rd.GetGeometryRef() #print 'spat ', layer_rd.GetSpatialRef() # print 'proj ', src_ds.GetProjection() contorno=geom.GetEnvelope() x_min", "band_num print e #sys.exit(1) continue banda_class = srcband.ReadAsArray().astype(np.float) if banda_class.size==(5000*5000): classes = banda_class", "int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature) feature.Destroy() print \"ImagemImovel: %d | %d | %d | %d\" %", "burn_values=[1]) array = band.ReadAsArray() #print np.histogram(array, bins=[0,1,250,300]) # Read as array dstlayer=None memsource.Destroy()", "other poligons that we want to verify # how much area was classified", "how much area was classified as being one of 13 distinct classes. This", "e: # for example, try GetRasterBand(10) print 'Band ( %i ) not found'", "try: srcband = src_ds.GetRasterBand(1) print srcband except RuntimeError, e: # for example, try", "separate the intended classes and the count of # each bin is added", ", caminho_feat #print verifica_f cont_loop+=1 if geom2.Intersects(geom) : c5+=1 if (verifica_f is None):", "# how much area was classified as being one of 13 distinct classes.", "feature.SetField(\"uso_cons\", int(resposta[0][1]*25)) feature.SetField(\"rvegnat\", int(resposta[0][2]*25)) feature.SetField(\"vereda\", int(resposta[0][3]*25)) feature.SetField(\"mangue\", int(resposta[0][4]*25)) feature.SetField(\"salgado\", int(resposta[0][5]*25)) feature.SetField(\"apicum\", int(resposta[0][6]*25)) feature.SetField(\"restinga\",", "bins=[0,1,20]) classes2 = classes*array resposta = np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\", int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\", int(resposta[0][1]*25)) feature.SetField(\"rvegnat\",", "geom2.Intersects(geom) : c5+=1 if (verifica_f is None): intersect = geom.Intersection(geom2) print intersect.GetArea() print", "print infile rapideye = infile driver = ogr.GetDriverByName(\"ESRI 
Shapefile\") dataSource_rd = driver.Open(rapideye, True)", "x_min = contorno[0] y_max = contorno[3] x_res = 5000 y_res = 5000 #", "classification that was automaticaly generated, this classification covers the # whole country. We", "True) layer_rd = dataSource_rd.GetLayer() shapefile = ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource = driver.Open(shapefile, True) layer =", "mask. # The mask has the same resolution as the original image (RapidEye,", "from vector relations # Purpose: Classify features of interest based on a raster", "| %d | %d\" % (c5,contard,conta,cont_loop) c5+=1 #create an image file and put", "vector layer with features of interest. # # Author: leandro.biondo # # Created:", "14 possible values). # Finally a histogram is made with bins that separate", "area was classified as being one of 13 distinct classes. This aproach gets", "the original image (RapidEye, 5 meters) with binary values, # being 1 if", "use a * as mask if there are more then 1 catalog for", "boudary option. 
The path (caminho) field will be used to open #the images", "layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False) pixel_size = 5 NoData_value = 255 contard", "# dataSource_rd=None # target_ds= None # print 'fim forcado' # break # target_ds=", "target_ds= None # print 'fim forcado' # break # target_ds= None #break layer.ResetReading()", "resposta1 = np.histogram(classes, bins=[0,1,20]) classes2 = classes*array resposta = np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\", int(resposta1[0][0]*25))", "\"GTiff\" # driver2 = gdal.GetDriverByName( format ) # metadata = driver2.GetMetadata() # if", "forcado' # break # target_ds= None #break layer.ResetReading() layer=None dataSource=None layerbr=None dataSourcebr=None layer_rd=None", "is None): intersect = geom.Intersection(geom2) print intersect.GetArea() print (intersect.GetArea()/geom2.GetArea()) if (intersect.GetArea()/geom2.GetArea())<0.5: continue conta+=1", "found' % band_num print e #sys.exit(1) continue banda_class = srcband.ReadAsArray().astype(np.float) if banda_class.size==(5000*5000): classes", "None): intersect = geom.Intersection(geom2) print intersect.GetArea() print (intersect.GetArea()/geom2.GetArea()) if (intersect.GetArea()/geom2.GetArea())<0.5: continue conta+=1 SpatialRef", "%d | %d | %d\" % (c5,contard,conta,cont_loop) c5+=1 #create an image file and", "built with the Qgis plugin #\"Image Footprint\", it is necessary to select image", "13 distinct classes. 
This aproach gets each # image boundary polygon intersection with", "% band_num print e #sys.exit(1) continue banda_class = srcband.ReadAsArray().astype(np.float) if banda_class.size==(5000*5000): classes =", "tmp=memoutdriver.Open('memData', 1) dstlayer = memsource.CreateLayer('teste', SpatialRef) target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform())", "as np from osgeo import ogr, osr import glob import os gdal.UseExceptions() #", "int(resposta[0][9]*25)) feature.SetField(\"regene\", int(resposta[0][10]*25)) feature.SetField(\"areaurb\", int(resposta[0][11]*25)) feature.SetField(\"nuvens\", int(resposta[0][12]*25)) feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature) feature.Destroy() print \"ImagemImovel:", "break # target_ds= None #break layer.ResetReading() layer=None dataSource=None layerbr=None dataSourcebr=None layer_rd=None dataSource_rd=None print", "try: src_ds = gdal.Open( caminho_img) except RuntimeError, e: print 'Unable to open INPUT'", "print 'Driver %s supports Create() method.' % format # if metadata.has_key(gdal.DCAP_CREATECOPY) \\ #", "usage classification that was automaticaly generated, this classification covers the # whole country.", "', src_ds.GetProjection() contorno=geom.GetEnvelope() x_min = contorno[0] y_max = contorno[3] x_res = 5000 y_res", "intended classes and the count of # each bin is added to the", "the Qgis plugin #\"Image Footprint\", it is necessary to select image boudary option.", "pixel_size = 5 NoData_value = 255 contard =0 c5=0 for feat_rd in layer_rd:", "# # dst_ds = driver2.Create( saida, 5000, 5000, 3, gdal.GDT_Float32, ['COMPRESS=LZW'] ) #", "values that are inside any given polygon. 
The raster files have a #", "feature.SetField(\"regene\", int(resposta[0][10]*25)) feature.SetField(\"areaurb\", int(resposta[0][11]*25)) feature.SetField(\"nuvens\", int(resposta[0][12]*25)) feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature) feature.Destroy() print \"ImagemImovel: %d", "classified as being one of 13 distinct classes. This aproach gets each #", "as a matrix by the matrix of pixel values from the image (in", "caminho_feat #print verifica_f cont_loop+=1 if geom2.Intersects(geom) : c5+=1 if (verifica_f is None): intersect", "gets each # image boundary polygon intersection with each feature of interest and", "SpatialRef = osr.SpatialReference() SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\" ) memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData') tmp=memoutdriver.Open('memData', 1) dstlayer = memsource.CreateLayer('teste',", "mask is then multiplied # as a matrix by the matrix of pixel", "int(resposta[0][3]*25)) feature.SetField(\"mangue\", int(resposta[0][4]*25)) feature.SetField(\"salgado\", int(resposta[0][5]*25)) feature.SetField(\"apicum\", int(resposta[0][6]*25)) feature.SetField(\"restinga\", int(resposta[0][7]*25)) feature.SetField(\"agua\", int(resposta[0][8]*25)) feature.SetField(\"vegremo\", int(resposta[0][9]*25))", "Name: Raster information from vector relations # Purpose: Classify features of interest based", "if metadata.has_key(gdal.DCAP_CREATECOPY) \\ # and metadata[gdal.DCAP_CREATECOPY] == 'YES': # print 'Driver %s supports", "= np.histogram(classes, bins=[0,1,20]) classes2 = classes*array resposta = np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\", int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\",", "continue conta+=1 SpatialRef = osr.SpatialReference() SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\" ) memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData') 
tmp=memoutdriver.Open('memData', 1) dstlayer", "# each bin is added to the vector layer with features of interest.", "ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False) pixel_size = 5 NoData_value = 255", "# dst_ds=None # # # if c5==10: # layer=None # dataSource=None # layerbr=None", "dataSource_rd.GetLayer() shapefile = ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource = driver.Open(shapefile, True) layer = dataSource.GetLayer() layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False)", "Licence: GNU GLP #------------------------------------------------------------------------------- #!/usr/bin/env python # import modules import gdal import numpy", "# dst_ds = driver2.Create( saida, 5000, 5000, 3, gdal.GDT_Float32, ['COMPRESS=LZW'] ) # srs", "= driver2.GetMetadata() # if metadata.has_key(gdal.DCAP_CREATE) \\ # and metadata[gdal.DCAP_CREATE] == 'YES': # print", "# # if c5==10: # layer=None # dataSource=None # layerbr=None # dataSourcebr=None #", "layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\",", "with bins that separate the intended classes and the count of # each", "feature.SetField(\"areaurb\", int(resposta[0][11]*25)) feature.SetField(\"nuvens\", int(resposta[0][12]*25)) feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature) feature.Destroy() print \"ImagemImovel: %d | %d", "print 'fim forcado' # break # target_ds= None 
#break layer.ResetReading() layer=None dataSource=None layerbr=None", "dataSourcebr=None # layer_rd=None # dataSource_rd=None # target_ds= None # print 'fim forcado' #", "# layerbr=None # dataSourcebr=None # layer_rd=None # dataSource_rd=None # target_ds= None # print", "np from osgeo import ogr, osr import glob import os gdal.UseExceptions() # #shapefilebr", "in layer_rd: caminho_img = feat_rd.GetField(\"caminho\") print caminho_img try: src_ds = gdal.Open( caminho_img) except", "catalog, This catalog can be built with the Qgis plugin #\"Image Footprint\", it", "% (contard,c5) # format = \"GTiff\" # driver2 = gdal.GetDriverByName( format ) #", "boundary polygon intersection with each feature of interest and builds a raster mask.", "contorno[3] x_res = 5000 y_res = 5000 # target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res,", "print \"ImagemImovel: %d | %d | %d | %d\" % (c5,contard,conta,cont_loop) c5+=1 #create", "5 meters) with binary values, # being 1 if the pixel is part", "leandro.biondo 2016 # Licence: GNU GLP #------------------------------------------------------------------------------- #!/usr/bin/env python # import modules import", "intersection with each feature of interest and builds a raster mask. # The", "intersect = geom.Intersection(geom2) print intersect.GetArea() print (intersect.GetArea()/geom2.GetArea()) if (intersect.GetArea()/geom2.GetArea())<0.5: continue conta+=1 SpatialRef =", "dstfeature = ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature) # print 'resultado', dstfeature.GetGeometryRef().GetEnvelope() # Rasterize gdal.RasterizeLayer(target_ds, [1],", "that was automaticaly generated, this classification covers the # whole country. 
We have", "RuntimeError, e: # for example, try GetRasterBand(10) print 'Band ( %i ) not", "SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\" ) memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData') tmp=memoutdriver.Open('memData', 1) dstlayer = memsource.CreateLayer('teste', SpatialRef) target_ds =", "ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False)", "dst_ds.GetRasterBand(1).WriteArray(classes) # dst_ds.GetRasterBand(2).WriteArray(array) # dst_ds.GetRasterBand(3).WriteArray(classes2) # dst_ds=None # # # if c5==10: #", "(in this case 14 possible values). # Finally a histogram is made with", "mask if there are more then 1 catalog for infile in glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print", "is part of the intersection and 0 if it is not. This mask", "# target_ds= None # print 'fim forcado' # break # target_ds= None #break", "int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\", int(resposta[0][1]*25)) feature.SetField(\"rvegnat\", int(resposta[0][2]*25)) feature.SetField(\"vereda\", int(resposta[0][3]*25)) feature.SetField(\"mangue\", int(resposta[0][4]*25)) feature.SetField(\"salgado\", int(resposta[0][5]*25)) feature.SetField(\"apicum\", int(resposta[0][6]*25))", "'YES': # print 'Driver %s supports Create() method.' 
% format # if metadata.has_key(gdal.DCAP_CREATECOPY)", "# Copyright: (c) leandro.biondo 2016 # Licence: GNU GLP #------------------------------------------------------------------------------- #!/usr/bin/env python #", "images with classified pixels, you can use a * as mask if there", "rural properties boundaries and other poligons that we want to verify # how", "ogr, osr import glob import os gdal.UseExceptions() # #shapefilebr = \"C:/biondo/buff_nasc.shp\" #driver =", "necessary to select image boudary option. The path (caminho) field will be used", "Create() method.' % format # if metadata.has_key(gdal.DCAP_CREATECOPY) \\ # and metadata[gdal.DCAP_CREATECOPY] == 'YES':", "except RuntimeError, e: print 'Unable to open INPUT' print e #break continue try:", "histogram is made with bins that separate the intended classes and the count", "saida, 5000, 5000, 3, gdal.GDT_Float32, ['COMPRESS=LZW'] ) # srs = osr.SpatialReference() # dst_ds.SetProjection(src_ds.GetProjection())", "shapefile = ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource = driver.Open(shapefile, True) layer = dataSource.GetLayer() layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"uso_cons\",", "# saida = \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" % (contard,c5) # format = \"GTiff\" # driver2 =", "('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource = driver.Open(shapefile, True) layer = dataSource.GetLayer() layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\",", "SpatialRef) target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection()) band = target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value)", "= ogr.GetDriverByName(\"ESRI 
Shapefile\") dataSource_rd = driver.Open(rapideye, True) layer_rd = dataSource_rd.GetLayer() shapefile = ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp')", "c5=0 for feat_rd in layer_rd: caminho_img = feat_rd.GetField(\"caminho\") print caminho_img try: src_ds =", "feat_rd in layer_rd: caminho_img = feat_rd.GetField(\"caminho\") print caminho_img try: src_ds = gdal.Open( caminho_img)", "layer.CreateField(ogr.FieldDefn(\"rvegnat\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\",", "metadata.has_key(gdal.DCAP_CREATE) \\ # and metadata[gdal.DCAP_CREATE] == 'YES': # print 'Driver %s supports Create()", "# band = target_ds.GetRasterBand(1) # band.SetNoDataValue(NoData_value) # contard=contard+1 conta=0 cont_loop=0 for feature in", "image file and put the results in 3 band for testing purposes #", "['COMPRESS=LZW'] ) # srs = osr.SpatialReference() # dst_ds.SetProjection(src_ds.GetProjection()) # dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) # # dst_ds.GetRasterBand(1).WriteArray(classes)", "import os gdal.UseExceptions() # #shapefilebr = \"C:/biondo/buff_nasc.shp\" #driver = ogr.GetDriverByName(\"ESRI Shapefile\") #dataSourcebr =", "if c5==10: # layer=None # dataSource=None # layerbr=None # dataSourcebr=None # layer_rd=None #", "This aproach gets each # image boundary polygon intersection with each feature of", "Copyright: (c) leandro.biondo 2016 # Licence: GNU GLP #------------------------------------------------------------------------------- #!/usr/bin/env python # import", "whole country. 
We have rural properties boundaries and other poligons that we want", "classified pixels, you can use a * as mask if there are more", "was classified as being one of 13 distinct classes. This aproach gets each", "not. This mask is then multiplied # as a matrix by the matrix", "made with bins that separate the intended classes and the count of #", "# whole country. We have rural properties boundaries and other poligons that we", "contorno[0] y_max = contorno[3] x_res = 5000 y_res = 5000 # target_ds =", "# dst_ds.GetRasterBand(1).WriteArray(classes) # dst_ds.GetRasterBand(2).WriteArray(array) # dst_ds.GetRasterBand(3).WriteArray(classes2) # dst_ds=None # # # if c5==10:", "for testing purposes # # saida = \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" % (contard,c5) # format =", "in layer: geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\") #print 'feat' , caminho_feat #print verifica_f cont_loop+=1 if geom2.Intersects(geom)", "# layer=None # dataSource=None # layerbr=None # dataSourcebr=None # layer_rd=None # dataSource_rd=None #", "dataSource_rd = driver.Open(rapideye, True) layer_rd = dataSource_rd.GetLayer() shapefile = ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource = driver.Open(shapefile,", "y_res = 5000 # target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) # target_ds.SetGeoTransform(src_ds.GetGeoTransform()) #", "each feature of interest and builds a raster mask. 
# The mask has", "int(resposta[0][2]*25)) feature.SetField(\"vereda\", int(resposta[0][3]*25)) feature.SetField(\"mangue\", int(resposta[0][4]*25)) feature.SetField(\"salgado\", int(resposta[0][5]*25)) feature.SetField(\"apicum\", int(resposta[0][6]*25)) feature.SetField(\"restinga\", int(resposta[0][7]*25)) feature.SetField(\"agua\", int(resposta[0][8]*25))", "open #the images with classified pixels, you can use a * as mask", "dataSource=None # layerbr=None # dataSourcebr=None # layer_rd=None # dataSource_rd=None # target_ds= None #", "that are inside any given polygon. The raster files have a # land", "( %i ) not found' % band_num print e #sys.exit(1) continue banda_class =", "the count of # each bin is added to the vector layer with", "dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) # # dst_ds.GetRasterBand(1).WriteArray(classes) # dst_ds.GetRasterBand(2).WriteArray(array) # dst_ds.GetRasterBand(3).WriteArray(classes2) # dst_ds=None # # #", "osr import glob import os gdal.UseExceptions() # #shapefilebr = \"C:/biondo/buff_nasc.shp\" #driver = ogr.GetDriverByName(\"ESRI", "you can use a * as mask if there are more then 1", "as mask if there are more then 1 catalog for infile in glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'):", "int(resposta[0][11]*25)) feature.SetField(\"nuvens\", int(resposta[0][12]*25)) feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature) feature.Destroy() print \"ImagemImovel: %d | %d |", "images related to each polygon, count # the pixels with given values that", "features of interest based on a raster with pixels that have classification values.", "given the vector layer with the catalog, This catalog can be built with", "from the image (in this case 14 possible values). # Finally a histogram", "of interest and builds a raster mask. 
# The mask has the same", "# Rasterize gdal.RasterizeLayer(target_ds, [1], dstlayer, burn_values=[1]) array = band.ReadAsArray() #print np.histogram(array, bins=[0,1,250,300]) #", "[1], dstlayer, burn_values=[1]) array = band.ReadAsArray() #print np.histogram(array, bins=[0,1,250,300]) # Read as array", "catalog for infile in glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print infile rapideye = infile driver = ogr.GetDriverByName(\"ESRI", "a raster mask. # The mask has the same resolution as the original", "'spat ', layer_rd.GetSpatialRef() # print 'proj ', src_ds.GetProjection() contorno=geom.GetEnvelope() x_min = contorno[0] y_max", "mask has the same resolution as the original image (RapidEye, 5 meters) with", "image (RapidEye, 5 meters) with binary values, # being 1 if the pixel", "* as mask if there are more then 1 catalog for infile in", "= src_ds.GetRasterBand(1) print srcband except RuntimeError, e: # for example, try GetRasterBand(10) print", "automaticaly generated, this classification covers the # whole country. We have rural properties", "layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False) pixel_size = 5 NoData_value = 255 contard =0 c5=0 for feat_rd", "image boudary option. The path (caminho) field will be used to open #the", "field will be used to open #the images with classified pixels, you can", "layer_rd.GetSpatialRef() # print 'proj ', src_ds.GetProjection() contorno=geom.GetEnvelope() x_min = contorno[0] y_max = contorno[3]", "feature of interest and builds a raster mask. 
# The mask has the", "1 if the pixel is part of the intersection and 0 if it", "each # image boundary polygon intersection with each feature of interest and builds", "= ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource = driver.Open(shapefile, True) layer = dataSource.GetLayer() layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False)", "given values that are inside any given polygon. The raster files have a", "features of interest. # # Author: leandro.biondo # # Created: 05/10/2016 # Copyright:", "the pixel is part of the intersection and 0 if it is not.", "#sys.exit(1) continue banda_class = srcband.ReadAsArray().astype(np.float) if banda_class.size==(5000*5000): classes = banda_class geom=feat_rd.GetGeometryRef() #print 'spat", "#print 'spat ', layer_rd.GetSpatialRef() # print 'proj ', src_ds.GetProjection() contorno=geom.GetEnvelope() x_min = contorno[0]", "with the catalog, This catalog can be built with the Qgis plugin #\"Image", "boundaries and other poligons that we want to verify # how much area", "verify # how much area was classified as being one of 13 distinct", "and builds a raster mask. # The mask has the same resolution as", "= dataSource.GetLayer() layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\",", "# the pixels with given values that are inside any given polygon. 
The", "verifica_f=feature.GetField(\"foraLi\") #print 'feat' , caminho_feat #print verifica_f cont_loop+=1 if geom2.Intersects(geom) : c5+=1 if", "resposta = np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\", int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\", int(resposta[0][1]*25)) feature.SetField(\"rvegnat\", int(resposta[0][2]*25)) feature.SetField(\"vereda\", int(resposta[0][3]*25)) feature.SetField(\"mangue\",", "to select image boudary option. The path (caminho) field will be used to", "= osr.SpatialReference() # dst_ds.SetProjection(src_ds.GetProjection()) # dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) # # dst_ds.GetRasterBand(1).WriteArray(classes) # dst_ds.GetRasterBand(2).WriteArray(array) # dst_ds.GetRasterBand(3).WriteArray(classes2)", "dst_ds.SetProjection(src_ds.GetProjection()) # dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) # # dst_ds.GetRasterBand(1).WriteArray(classes) # dst_ds.GetRasterBand(2).WriteArray(array) # dst_ds.GetRasterBand(3).WriteArray(classes2) # dst_ds=None #", "of interest based on a raster with pixels that have classification values. 
#", "band.SetNoDataValue(NoData_value) # contard=contard+1 conta=0 cont_loop=0 for feature in layer: geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\") #print 'feat'", "Classify features of interest based on a raster with pixels that have classification", "gdal.UseExceptions() # #shapefilebr = \"C:/biondo/buff_nasc.shp\" #driver = ogr.GetDriverByName(\"ESRI Shapefile\") #dataSourcebr = driver.Open(shapefilebr, True)", "target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) # target_ds.SetGeoTransform(src_ds.GetGeoTransform()) # target_ds.SetProjection(src_ds.GetProjection()) # band =", "# if metadata.has_key(gdal.DCAP_CREATE) \\ # and metadata[gdal.DCAP_CREATE] == 'YES': # print 'Driver %s", "1 catalog for infile in glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print infile rapideye = infile driver =", "if geom2.Intersects(geom) : c5+=1 if (verifica_f is None): intersect = geom.Intersection(geom2) print intersect.GetArea()", "it is necessary to select image boudary option. The path (caminho) field will", "= ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature) # print 'resultado', dstfeature.GetGeometryRef().GetEnvelope() # Rasterize gdal.RasterizeLayer(target_ds, [1], dstlayer,", "is made with bins that separate the intended classes and the count of", "interest. 
# # Author: leandro.biondo # # Created: 05/10/2016 # Copyright: (c) leandro.biondo", "if the pixel is part of the intersection and 0 if it is", "#print tabela resposta1 = np.histogram(classes, bins=[0,1,20]) classes2 = classes*array resposta = np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20])", "driver2 = gdal.GetDriverByName( format ) # metadata = driver2.GetMetadata() # if metadata.has_key(gdal.DCAP_CREATE) \\", "True) #layerbr = dataSourcebr.GetLayer() #Here should be given the vector layer with the", "with binary values, # being 1 if the pixel is part of the", "dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature) # print 'resultado', dstfeature.GetGeometryRef().GetEnvelope() # Rasterize gdal.RasterizeLayer(target_ds, [1], dstlayer, burn_values=[1]) array", "feature.SetField(\"restinga\", int(resposta[0][7]*25)) feature.SetField(\"agua\", int(resposta[0][8]*25)) feature.SetField(\"vegremo\", int(resposta[0][9]*25)) feature.SetField(\"regene\", int(resposta[0][10]*25)) feature.SetField(\"areaurb\", int(resposta[0][11]*25)) feature.SetField(\"nuvens\", int(resposta[0][12]*25)) feature.SetField(\"foraLi\",", "caminho_img) except RuntimeError, e: print 'Unable to open INPUT' print e #break continue", "caminho_img = feat_rd.GetField(\"caminho\") print caminho_img try: src_ds = gdal.Open( caminho_img) except RuntimeError, e:", "feature.SetField(\"agua\", int(resposta[0][8]*25)) feature.SetField(\"vegremo\", int(resposta[0][9]*25)) feature.SetField(\"regene\", int(resposta[0][10]*25)) feature.SetField(\"areaurb\", int(resposta[0][11]*25)) feature.SetField(\"nuvens\", int(resposta[0][12]*25)) feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature)", "print 'Driver %s supports CreateCopy() method.' % format # # dst_ds = driver2.Create(", "classification values. 
# Having a catalog in a vector layer with adresses of", "a * as mask if there are more then 1 catalog for infile", "open INPUT' print e #break continue try: srcband = src_ds.GetRasterBand(1) print srcband except", "print (intersect.GetArea()/geom2.GetArea()) if (intersect.GetArea()/geom2.GetArea())<0.5: continue conta+=1 SpatialRef = osr.SpatialReference() SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\" ) memoutdriver=ogr.GetDriverByName('MEMORY')", "= srcband.ReadAsArray().astype(np.float) if banda_class.size==(5000*5000): classes = banda_class geom=feat_rd.GetGeometryRef() #print 'spat ', layer_rd.GetSpatialRef() #", "(intersect.GetArea()/geom2.GetArea()) if (intersect.GetArea()/geom2.GetArea())<0.5: continue conta+=1 SpatialRef = osr.SpatialReference() SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\" ) memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData')", "have classification values. # Having a catalog in a vector layer with adresses", "plugin #\"Image Footprint\", it is necessary to select image boudary option. The path", "srcband.ReadAsArray() #print tabela resposta1 = np.histogram(classes, bins=[0,1,20]) classes2 = classes*array resposta = np.histogram(classes2,", "= np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\", int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\", int(resposta[0][1]*25)) feature.SetField(\"rvegnat\", int(resposta[0][2]*25)) feature.SetField(\"vereda\", int(resposta[0][3]*25)) feature.SetField(\"mangue\", int(resposta[0][4]*25))", "pixel is part of the intersection and 0 if it is not. 
This", "= geom.Intersection(geom2) print intersect.GetArea() print (intersect.GetArea()/geom2.GetArea()) if (intersect.GetArea()/geom2.GetArea())<0.5: continue conta+=1 SpatialRef = osr.SpatialReference()", "c5+=1 if (verifica_f is None): intersect = geom.Intersection(geom2) print intersect.GetArea() print (intersect.GetArea()/geom2.GetArea()) if", "# dst_ds.GetRasterBand(3).WriteArray(classes2) # dst_ds=None # # # if c5==10: # layer=None # dataSource=None", "if banda_class.size==(5000*5000): classes = banda_class geom=feat_rd.GetGeometryRef() #print 'spat ', layer_rd.GetSpatialRef() # print 'proj", "(c5,contard,conta,cont_loop) c5+=1 #create an image file and put the results in 3 band", "int(resposta[0][4]*25)) feature.SetField(\"salgado\", int(resposta[0][5]*25)) feature.SetField(\"apicum\", int(resposta[0][6]*25)) feature.SetField(\"restinga\", int(resposta[0][7]*25)) feature.SetField(\"agua\", int(resposta[0][8]*25)) feature.SetField(\"vegremo\", int(resposta[0][9]*25)) feature.SetField(\"regene\", int(resposta[0][10]*25))", "int(resposta[0][10]*25)) feature.SetField(\"areaurb\", int(resposta[0][11]*25)) feature.SetField(\"nuvens\", int(resposta[0][12]*25)) feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature) feature.Destroy() print \"ImagemImovel: %d |", "files have a # land usage classification that was automaticaly generated, this classification", "Shapefile\") dataSource_rd = driver.Open(rapideye, True) layer_rd = dataSource_rd.GetLayer() shapefile = ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource =", "# The mask has the same resolution as the original image (RapidEye, 5", "Finally a histogram is made with bins that separate the intended classes and", "raster files have a # land usage classification that was automaticaly generated, this", "target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection()) band = target_ds.GetRasterBand(1) 
band.SetNoDataValue(NoData_value) dstfeature = ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature) # print", "are inside any given polygon. The raster files have a # land usage", "layer.SetFeature(feature) feature.Destroy() print \"ImagemImovel: %d | %d | %d | %d\" % (c5,contard,conta,cont_loop)", "layer with features of interest. # # Author: leandro.biondo # # Created: 05/10/2016", "layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False) pixel_size = 5 NoData_value = 255 contard =0 c5=0", "layerbr=None # dataSourcebr=None # layer_rd=None # dataSource_rd=None # target_ds= None # print 'fim", "and 0 if it is not. This mask is then multiplied # as", "values. # Having a catalog in a vector layer with adresses of images", "verifica_f cont_loop+=1 if geom2.Intersects(geom) : c5+=1 if (verifica_f is None): intersect = geom.Intersection(geom2)", "format = \"GTiff\" # driver2 = gdal.GetDriverByName( format ) # metadata = driver2.GetMetadata()", "ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False)", "layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\",", "osr.SpatialReference() # 
dst_ds.SetProjection(src_ds.GetProjection()) # dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) # # dst_ds.GetRasterBand(1).WriteArray(classes) # dst_ds.GetRasterBand(2).WriteArray(array) # dst_ds.GetRasterBand(3).WriteArray(classes2) #", "binary values, # being 1 if the pixel is part of the intersection", "classes = banda_class geom=feat_rd.GetGeometryRef() #print 'spat ', layer_rd.GetSpatialRef() # print 'proj ', src_ds.GetProjection()", "# driver2 = gdal.GetDriverByName( format ) # metadata = driver2.GetMetadata() # if metadata.has_key(gdal.DCAP_CREATE)", "# and metadata[gdal.DCAP_CREATE] == 'YES': # print 'Driver %s supports Create() method.' %", "', layer_rd.GetSpatialRef() # print 'proj ', src_ds.GetProjection() contorno=geom.GetEnvelope() x_min = contorno[0] y_max =", "of images related to each polygon, count # the pixels with given values", "= 5 NoData_value = 255 contard =0 c5=0 for feat_rd in layer_rd: caminho_img", "5 NoData_value = 255 contard =0 c5=0 for feat_rd in layer_rd: caminho_img =", "cont_loop=0 for feature in layer: geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\") #print 'feat' , caminho_feat #print verifica_f", "import gdal import numpy as np from osgeo import ogr, osr import glob", "c5==10: # layer=None # dataSource=None # layerbr=None # dataSourcebr=None # layer_rd=None # dataSource_rd=None", "values). # Finally a histogram is made with bins that separate the intended", "except RuntimeError, e: # for example, try GetRasterBand(10) print 'Band ( %i )", "part of the intersection and 0 if it is not. This mask is", "%d\" % (c5,contard,conta,cont_loop) c5+=1 #create an image file and put the results in", "generated, this classification covers the # whole country. 
We have rural properties boundaries", "for infile in glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print infile rapideye = infile driver = ogr.GetDriverByName(\"ESRI Shapefile\")", "values, # being 1 if the pixel is part of the intersection and", "ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False)", "feat_rd.GetField(\"caminho\") print caminho_img try: src_ds = gdal.Open( caminho_img) except RuntimeError, e: print 'Unable", "#create an image file and put the results in 3 band for testing", "#print verifica_f cont_loop+=1 if geom2.Intersects(geom) : c5+=1 if (verifica_f is None): intersect =", "'Band ( %i ) not found' % band_num print e #sys.exit(1) continue banda_class", "band for testing purposes # # saida = \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" % (contard,c5) # format", "Raster information from vector relations # Purpose: Classify features of interest based on", "vector layer with the catalog, This catalog can be built with the Qgis", "relations # Purpose: Classify features of interest based on a raster with pixels", "for example, try GetRasterBand(10) print 'Band ( %i ) not found' % band_num", "#print 'feat' , caminho_feat #print verifica_f cont_loop+=1 if geom2.Intersects(geom) : c5+=1 if (verifica_f", ") not found' % band_num print e #sys.exit(1) continue banda_class = srcband.ReadAsArray().astype(np.float) if", "# format = \"GTiff\" # driver2 = gdal.GetDriverByName( format ) # metadata =", "poligons that we want to verify # how much area was classified as", "the matrix of pixel values from the image (in this case 14 possible", "in 
glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print infile rapideye = infile driver = ogr.GetDriverByName(\"ESRI Shapefile\") dataSource_rd =", "geom.Intersection(geom2) print intersect.GetArea() print (intersect.GetArea()/geom2.GetArea()) if (intersect.GetArea()/geom2.GetArea())<0.5: continue conta+=1 SpatialRef = osr.SpatialReference() SpatialRef.SetWellKnownGeogCS(", "polygon intersection with each feature of interest and builds a raster mask. #", "'proj ', src_ds.GetProjection() contorno=geom.GetEnvelope() x_min = contorno[0] y_max = contorno[3] x_res = 5000", "each bin is added to the vector layer with features of interest. #", "255 contard =0 c5=0 for feat_rd in layer_rd: caminho_img = feat_rd.GetField(\"caminho\") print caminho_img", "x_res, y_res, gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection()) band = target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) dstfeature = ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect)", "3 band for testing purposes # # saida = \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" % (contard,c5) #", "for feat_rd in layer_rd: caminho_img = feat_rd.GetField(\"caminho\") print caminho_img try: src_ds = gdal.Open(", "select image boudary option. 
The path (caminho) field will be used to open", "srs = osr.SpatialReference() # dst_ds.SetProjection(src_ds.GetProjection()) # dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) # # dst_ds.GetRasterBand(1).WriteArray(classes) # dst_ds.GetRasterBand(2).WriteArray(array) #", "layer=None # dataSource=None # layerbr=None # dataSourcebr=None # layer_rd=None # dataSource_rd=None # target_ds=", "print 'Unable to open INPUT' print e #break continue try: srcband = src_ds.GetRasterBand(1)", "Read as array dstlayer=None memsource.Destroy() #tabela = srcband.ReadAsArray() #print tabela resposta1 = np.histogram(classes,", "driver2.GetMetadata() # if metadata.has_key(gdal.DCAP_CREATE) \\ # and metadata[gdal.DCAP_CREATE] == 'YES': # print 'Driver", "with each feature of interest and builds a raster mask. # The mask", "# dst_ds.SetProjection(src_ds.GetProjection()) # dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) # # dst_ds.GetRasterBand(1).WriteArray(classes) # dst_ds.GetRasterBand(2).WriteArray(array) # dst_ds.GetRasterBand(3).WriteArray(classes2) # dst_ds=None", "=0 c5=0 for feat_rd in layer_rd: caminho_img = feat_rd.GetField(\"caminho\") print caminho_img try: src_ds", "the results in 3 band for testing purposes # # saida = \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\"", "numpy as np from osgeo import ogr, osr import glob import os gdal.UseExceptions()", "# # Created: 05/10/2016 # Copyright: (c) leandro.biondo 2016 # Licence: GNU GLP", "dstlayer.CreateFeature(dstfeature) # print 'resultado', dstfeature.GetGeometryRef().GetEnvelope() # Rasterize gdal.RasterizeLayer(target_ds, [1], dstlayer, burn_values=[1]) array =", "pixel values from the image (in this case 14 possible values). 
# Finally", "np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\", int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\", int(resposta[0][1]*25)) feature.SetField(\"rvegnat\", int(resposta[0][2]*25)) feature.SetField(\"vereda\", int(resposta[0][3]*25)) feature.SetField(\"mangue\", int(resposta[0][4]*25)) feature.SetField(\"salgado\",", ") # srs = osr.SpatialReference() # dst_ds.SetProjection(src_ds.GetProjection()) # dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) # # dst_ds.GetRasterBand(1).WriteArray(classes) #", "given polygon. The raster files have a # land usage classification that was", "layer_rd: caminho_img = feat_rd.GetField(\"caminho\") print caminho_img try: src_ds = gdal.Open( caminho_img) except RuntimeError,", "country. We have rural properties boundaries and other poligons that we want to", "ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False) pixel_size = 5 NoData_value = 255 contard =0 c5=0 for", "interest and builds a raster mask. 
# The mask has the same resolution", "#print np.histogram(array, bins=[0,1,250,300]) # Read as array dstlayer=None memsource.Destroy() #tabela = srcband.ReadAsArray() #print", "# if metadata.has_key(gdal.DCAP_CREATECOPY) \\ # and metadata[gdal.DCAP_CREATECOPY] == 'YES': # print 'Driver %s", "int(resposta[0][5]*25)) feature.SetField(\"apicum\", int(resposta[0][6]*25)) feature.SetField(\"restinga\", int(resposta[0][7]*25)) feature.SetField(\"agua\", int(resposta[0][8]*25)) feature.SetField(\"vegremo\", int(resposta[0][9]*25)) feature.SetField(\"regene\", int(resposta[0][10]*25)) feature.SetField(\"areaurb\", int(resposta[0][11]*25))", "= infile driver = ogr.GetDriverByName(\"ESRI Shapefile\") dataSource_rd = driver.Open(rapideye, True) layer_rd = dataSource_rd.GetLayer()", "The path (caminho) field will be used to open #the images with classified", "print srcband except RuntimeError, e: # for example, try GetRasterBand(10) print 'Band (", "conta=0 cont_loop=0 for feature in layer: geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\") #print 'feat' , caminho_feat #print", "dstlayer, burn_values=[1]) array = band.ReadAsArray() #print np.histogram(array, bins=[0,1,250,300]) # Read as array dstlayer=None", "continue try: srcband = src_ds.GetRasterBand(1) print srcband except RuntimeError, e: # for example,", "dataSourcebr.GetLayer() #Here should be given the vector layer with the catalog, This catalog", "y_res, gdal.GDT_Byte) # target_ds.SetGeoTransform(src_ds.GetGeoTransform()) # target_ds.SetProjection(src_ds.GetProjection()) # band = target_ds.GetRasterBand(1) # band.SetNoDataValue(NoData_value) #", "want to verify # how much area was classified as being one of", "= contorno[3] x_res = 5000 y_res = 5000 # target_ds = gdal.GetDriverByName('MEM').Create('', x_res,", "a raster with pixels that have classification values. 
# Having a catalog in", "band.ReadAsArray() #print np.histogram(array, bins=[0,1,250,300]) # Read as array dstlayer=None memsource.Destroy() #tabela = srcband.ReadAsArray()", "can be built with the Qgis plugin #\"Image Footprint\", it is necessary to", "it is not. This mask is then multiplied # as a matrix by", "adresses of images related to each polygon, count # the pixels with given", "driver.Open(shapefile, True) layer = dataSource.GetLayer() layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False)", "a vector layer with adresses of images related to each polygon, count #", "properties boundaries and other poligons that we want to verify # how much", "'Driver %s supports CreateCopy() method.' % format # # dst_ds = driver2.Create( saida,", "# if c5==10: # layer=None # dataSource=None # layerbr=None # dataSourcebr=None # layer_rd=None", "feature.Destroy() print \"ImagemImovel: %d | %d | %d | %d\" % (c5,contard,conta,cont_loop) c5+=1", "contorno=geom.GetEnvelope() x_min = contorno[0] y_max = contorno[3] x_res = 5000 y_res = 5000", "feature.SetField(\"apicum\", int(resposta[0][6]*25)) feature.SetField(\"restinga\", int(resposta[0][7]*25)) feature.SetField(\"agua\", int(resposta[0][8]*25)) feature.SetField(\"vegremo\", int(resposta[0][9]*25)) feature.SetField(\"regene\", int(resposta[0][10]*25)) feature.SetField(\"areaurb\", int(resposta[0][11]*25)) feature.SetField(\"nuvens\",", "rapideye = infile driver = ogr.GetDriverByName(\"ESRI Shapefile\") dataSource_rd = driver.Open(rapideye, True) layer_rd =", "as being one of 13 distinct classes. 
This aproach gets each # image", "geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\") #print 'feat' , caminho_feat #print verifica_f cont_loop+=1 if geom2.Intersects(geom) : c5+=1", "# # # if c5==10: # layer=None # dataSource=None # layerbr=None # dataSourcebr=None", "(c) leandro.biondo 2016 # Licence: GNU GLP #------------------------------------------------------------------------------- #!/usr/bin/env python # import modules", "if (verifica_f is None): intersect = geom.Intersection(geom2) print intersect.GetArea() print (intersect.GetArea()/geom2.GetArea()) if (intersect.GetArea()/geom2.GetArea())<0.5:", "possible values). # Finally a histogram is made with bins that separate the", "classes2 = classes*array resposta = np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\", int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\", int(resposta[0][1]*25)) feature.SetField(\"rvegnat\", int(resposta[0][2]*25))", "05/10/2016 # Copyright: (c) leandro.biondo 2016 # Licence: GNU GLP #------------------------------------------------------------------------------- #!/usr/bin/env python", "layer_rd=None # dataSource_rd=None # target_ds= None # print 'fim forcado' # break #", "and metadata[gdal.DCAP_CREATECOPY] == 'YES': # print 'Driver %s supports CreateCopy() method.' 
% format", "target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection()) band = target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) dstfeature", "= \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" % (contard,c5) # format = \"GTiff\" # driver2 = gdal.GetDriverByName( format", "infile rapideye = infile driver = ogr.GetDriverByName(\"ESRI Shapefile\") dataSource_rd = driver.Open(rapideye, True) layer_rd", "import numpy as np from osgeo import ogr, osr import glob import os", "of # each bin is added to the vector layer with features of", "added to the vector layer with features of interest. # # Author: leandro.biondo", "more then 1 catalog for infile in glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print infile rapideye = infile", "print 'proj ', src_ds.GetProjection() contorno=geom.GetEnvelope() x_min = contorno[0] y_max = contorno[3] x_res =", "pixels, you can use a * as mask if there are more then", "# and metadata[gdal.DCAP_CREATECOPY] == 'YES': # print 'Driver %s supports CreateCopy() method.' 
%", "(verifica_f is None): intersect = geom.Intersection(geom2) print intersect.GetArea() print (intersect.GetArea()/geom2.GetArea()) if (intersect.GetArea()/geom2.GetArea())<0.5: continue", "GetRasterBand(10) print 'Band ( %i ) not found' % band_num print e #sys.exit(1)", "# Purpose: Classify features of interest based on a raster with pixels that", "target_ds.SetProjection(src_ds.GetProjection()) # band = target_ds.GetRasterBand(1) # band.SetNoDataValue(NoData_value) # contard=contard+1 conta=0 cont_loop=0 for feature", "ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False) pixel_size = 5 NoData_value", "memsource.CreateLayer('teste', SpatialRef) target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection()) band = target_ds.GetRasterBand(1)", "banda_class.size==(5000*5000): classes = banda_class geom=feat_rd.GetGeometryRef() #print 'spat ', layer_rd.GetSpatialRef() # print 'proj ',", "the same resolution as the original image (RapidEye, 5 meters) with binary values,", "we want to verify # how much area was classified as being one", "of interest. 
# # Author: leandro.biondo # # Created: 05/10/2016 # Copyright: (c)", "e #sys.exit(1) continue banda_class = srcband.ReadAsArray().astype(np.float) if banda_class.size==(5000*5000): classes = banda_class geom=feat_rd.GetGeometryRef() #print", "2016 # Licence: GNU GLP #------------------------------------------------------------------------------- #!/usr/bin/env python # import modules import gdal", "= driver.Open(shapefilebr, True) #layerbr = dataSourcebr.GetLayer() #Here should be given the vector layer", "metadata[gdal.DCAP_CREATE] == 'YES': # print 'Driver %s supports Create() method.' % format #", "this classification covers the # whole country. We have rural properties boundaries and", "cont_loop+=1 if geom2.Intersects(geom) : c5+=1 if (verifica_f is None): intersect = geom.Intersection(geom2) print", "#the images with classified pixels, you can use a * as mask if", "the intended classes and the count of # each bin is added to", "and put the results in 3 band for testing purposes # # saida", "raster mask. # The mask has the same resolution as the original image", "is not. This mask is then multiplied # as a matrix by the", "= band.ReadAsArray() #print np.histogram(array, bins=[0,1,250,300]) # Read as array dstlayer=None memsource.Destroy() #tabela =", "the image (in this case 14 possible values). 
# Finally a histogram is", "= gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection()) band = target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) dstfeature =", "%d | %d | %d | %d\" % (c5,contard,conta,cont_loop) c5+=1 #create an image", "have a # land usage classification that was automaticaly generated, this classification covers", "driver.Open(shapefilebr, True) #layerbr = dataSourcebr.GetLayer() #Here should be given the vector layer with", "y_res, gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection()) band = target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) dstfeature = ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature)", "distinct classes. This aproach gets each # image boundary polygon intersection with each", "gdal.Open( caminho_img) except RuntimeError, e: print 'Unable to open INPUT' print e #break", "ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature) # print 'resultado', dstfeature.GetGeometryRef().GetEnvelope() # Rasterize gdal.RasterizeLayer(target_ds, [1], dstlayer, burn_values=[1])", "%d | %d\" % (c5,contard,conta,cont_loop) c5+=1 #create an image file and put the", "conta+=1 SpatialRef = osr.SpatialReference() SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\" ) memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData') tmp=memoutdriver.Open('memData', 1) dstlayer =", "caminho_img try: src_ds = gdal.Open( caminho_img) except RuntimeError, e: print 'Unable to open", "will be used to open #the images with classified pixels, you can use", "with adresses of images related to each polygon, count # the pixels with", "the catalog, This catalog can be built with the Qgis plugin #\"Image Footprint\",", "ogr.OFTInteger),False) 
layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False)", "with given values that are inside any given polygon. The raster files have", "purposes # # saida = \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" % (contard,c5) # format = \"GTiff\" #", "We have rural properties boundaries and other poligons that we want to verify", "of the intersection and 0 if it is not. This mask is then", "modules import gdal import numpy as np from osgeo import ogr, osr import", "catalog can be built with the Qgis plugin #\"Image Footprint\", it is necessary", "put the results in 3 band for testing purposes # # saida =", "# for example, try GetRasterBand(10) print 'Band ( %i ) not found' %", "% (c5,contard,conta,cont_loop) c5+=1 #create an image file and put the results in 3", "dataSource_rd=None # target_ds= None # print 'fim forcado' # break # target_ds= None", "meters) with binary values, # being 1 if the pixel is part of", "print e #break continue try: srcband = src_ds.GetRasterBand(1) print srcband except RuntimeError, e:", "#------------------------------------------------------------------------------- #!/usr/bin/env python # import modules import gdal import numpy as np from", "polygon, count # the pixels with given values that are inside any given", "layer_rd = dataSource_rd.GetLayer() shapefile = ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource = driver.Open(shapefile, True) layer = dataSource.GetLayer()", "%s supports CreateCopy() method.' 
% format # # dst_ds = driver2.Create( saida, 5000,", "# # Author: leandro.biondo # # Created: 05/10/2016 # Copyright: (c) leandro.biondo 2016", "target_ds.GetRasterBand(1) # band.SetNoDataValue(NoData_value) # contard=contard+1 conta=0 cont_loop=0 for feature in layer: geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\")", "<filename>raster_info_car.py #------------------------------------------------------------------------------- # Name: Raster information from vector relations # Purpose: Classify features", "| %d | %d | %d\" % (c5,contard,conta,cont_loop) c5+=1 #create an image file", "any given polygon. The raster files have a # land usage classification that", "layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\",", "image boundary polygon intersection with each feature of interest and builds a raster", "array = band.ReadAsArray() #print np.histogram(array, bins=[0,1,250,300]) # Read as array dstlayer=None memsource.Destroy() #tabela", "format # if metadata.has_key(gdal.DCAP_CREATECOPY) \\ # and metadata[gdal.DCAP_CREATECOPY] == 'YES': # print 'Driver", "src_ds.GetProjection() contorno=geom.GetEnvelope() x_min = contorno[0] y_max = contorno[3] x_res = 5000 y_res =", "= driver2.Create( saida, 5000, 5000, 3, gdal.GDT_Float32, ['COMPRESS=LZW'] ) # srs = osr.SpatialReference()", "# # dst_ds.GetRasterBand(1).WriteArray(classes) # dst_ds.GetRasterBand(2).WriteArray(array) # dst_ds.GetRasterBand(3).WriteArray(classes2) # dst_ds=None # # # if", "not found' % band_num print e #sys.exit(1) continue banda_class = srcband.ReadAsArray().astype(np.float) if 
banda_class.size==(5000*5000):", "dst_ds.GetRasterBand(3).WriteArray(classes2) # dst_ds=None # # # if c5==10: # layer=None # dataSource=None #", "layer = dataSource.GetLayer() layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False)", "print 'resultado', dstfeature.GetGeometryRef().GetEnvelope() # Rasterize gdal.RasterizeLayer(target_ds, [1], dstlayer, burn_values=[1]) array = band.ReadAsArray() #print", "can use a * as mask if there are more then 1 catalog", "int(resposta[0][1]*25)) feature.SetField(\"rvegnat\", int(resposta[0][2]*25)) feature.SetField(\"vereda\", int(resposta[0][3]*25)) feature.SetField(\"mangue\", int(resposta[0][4]*25)) feature.SetField(\"salgado\", int(resposta[0][5]*25)) feature.SetField(\"apicum\", int(resposta[0][6]*25)) feature.SetField(\"restinga\", int(resposta[0][7]*25))", "#\"Image Footprint\", it is necessary to select image boudary option. 
The path (caminho)", "target_ds.SetProjection(src_ds.GetProjection()) band = target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) dstfeature = ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature) # print 'resultado',", "\"ImagemImovel: %d | %d | %d | %d\" % (c5,contard,conta,cont_loop) c5+=1 #create an", "testing purposes # # saida = \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" % (contard,c5) # format = \"GTiff\"", "contard=contard+1 conta=0 cont_loop=0 for feature in layer: geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\") #print 'feat' , caminho_feat", "glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print infile rapideye = infile driver = ogr.GetDriverByName(\"ESRI Shapefile\") dataSource_rd = driver.Open(rapideye,", "(intersect.GetArea()/geom2.GetArea())<0.5: continue conta+=1 SpatialRef = osr.SpatialReference() SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\" ) memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData') tmp=memoutdriver.Open('memData', 1)", "layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\",", "RuntimeError, e: print 'Unable to open INPUT' print e #break continue try: srcband", "= gdal.GetDriverByName( format ) # metadata = driver2.GetMetadata() # if metadata.has_key(gdal.DCAP_CREATE) \\ #", "= dataSource_rd.GetLayer() shapefile = ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource = driver.Open(shapefile, True) layer = dataSource.GetLayer() 
layer.CreateField(ogr.FieldDefn(\"indef\",", "%s supports Create() method.' % format # if metadata.has_key(gdal.DCAP_CREATECOPY) \\ # and metadata[gdal.DCAP_CREATECOPY]", "is then multiplied # as a matrix by the matrix of pixel values", "method.' % format # if metadata.has_key(gdal.DCAP_CREATECOPY) \\ # and metadata[gdal.DCAP_CREATECOPY] == 'YES': #", "the intersection and 0 if it is not. This mask is then multiplied", "This catalog can be built with the Qgis plugin #\"Image Footprint\", it is", "= 255 contard =0 c5=0 for feat_rd in layer_rd: caminho_img = feat_rd.GetField(\"caminho\") print", "'resultado', dstfeature.GetGeometryRef().GetEnvelope() # Rasterize gdal.RasterizeLayer(target_ds, [1], dstlayer, burn_values=[1]) array = band.ReadAsArray() #print np.histogram(array,", "used to open #the images with classified pixels, you can use a *", "multiplied # as a matrix by the matrix of pixel values from the", "This mask is then multiplied # as a matrix by the matrix of", "# band.SetNoDataValue(NoData_value) # contard=contard+1 conta=0 cont_loop=0 for feature in layer: geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\") #print", "format # # dst_ds = driver2.Create( saida, 5000, 5000, 3, gdal.GDT_Float32, ['COMPRESS=LZW'] )", "gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection()) band = target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) dstfeature = ogr.Feature(dstlayer.GetLayerDefn())", "'fim forcado' # break # target_ds= None #break layer.ResetReading() layer=None dataSource=None layerbr=None dataSourcebr=None", "'Unable to open INPUT' print e #break continue try: srcband = src_ds.GetRasterBand(1) print", "int(resposta[0][7]*25)) feature.SetField(\"agua\", int(resposta[0][8]*25)) feature.SetField(\"vegremo\", int(resposta[0][9]*25)) feature.SetField(\"regene\", int(resposta[0][10]*25)) feature.SetField(\"areaurb\", 
int(resposta[0][11]*25)) feature.SetField(\"nuvens\", int(resposta[0][12]*25)) feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25))", "= classes*array resposta = np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\", int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\", int(resposta[0][1]*25)) feature.SetField(\"rvegnat\", int(resposta[0][2]*25)) feature.SetField(\"vereda\",", "classes and the count of # each bin is added to the vector", "band = target_ds.GetRasterBand(1) # band.SetNoDataValue(NoData_value) # contard=contard+1 conta=0 cont_loop=0 for feature in layer:", "\"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" % (contard,c5) # format = \"GTiff\" # driver2 = gdal.GetDriverByName( format )", "% format # if metadata.has_key(gdal.DCAP_CREATECOPY) \\ # and metadata[gdal.DCAP_CREATECOPY] == 'YES': # print", "results in 3 band for testing purposes # # saida = \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" %", "values from the image (in this case 14 possible values). # Finally a", "based on a raster with pixels that have classification values. 
# Having a", "be used to open #the images with classified pixels, you can use a", ") memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData') tmp=memoutdriver.Open('memData', 1) dstlayer = memsource.CreateLayer('teste', SpatialRef) target_ds = gdal.GetDriverByName('MEM').Create('', x_res,", "e #break continue try: srcband = src_ds.GetRasterBand(1) print srcband except RuntimeError, e: #", "# Author: leandro.biondo # # Created: 05/10/2016 # Copyright: (c) leandro.biondo 2016 #", "e: print 'Unable to open INPUT' print e #break continue try: srcband =", "GNU GLP #------------------------------------------------------------------------------- #!/usr/bin/env python # import modules import gdal import numpy as", "= gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) # target_ds.SetGeoTransform(src_ds.GetGeoTransform()) # target_ds.SetProjection(src_ds.GetProjection()) # band = target_ds.GetRasterBand(1)", "layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\",", "to open INPUT' print e #break continue try: srcband = src_ds.GetRasterBand(1) print srcband", "# #shapefilebr = \"C:/biondo/buff_nasc.shp\" #driver = ogr.GetDriverByName(\"ESRI Shapefile\") #dataSourcebr = driver.Open(shapefilebr, True) #layerbr", "Having a catalog in a vector layer with adresses of images related to", "that have classification values. 
# Having a catalog in a vector layer with", "land usage classification that was automaticaly generated, this classification covers the # whole", "#Here should be given the vector layer with the catalog, This catalog can", "| %d\" % (c5,contard,conta,cont_loop) c5+=1 #create an image file and put the results", "(RapidEye, 5 meters) with binary values, # being 1 if the pixel is", "raster with pixels that have classification values. # Having a catalog in a", "print caminho_img try: src_ds = gdal.Open( caminho_img) except RuntimeError, e: print 'Unable to", "# break # target_ds= None #break layer.ResetReading() layer=None dataSource=None layerbr=None dataSourcebr=None layer_rd=None dataSource_rd=None", "layer with the catalog, This catalog can be built with the Qgis plugin", "ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False) pixel_size =", "classes. This aproach gets each # image boundary polygon intersection with each feature", "Created: 05/10/2016 # Copyright: (c) leandro.biondo 2016 # Licence: GNU GLP #------------------------------------------------------------------------------- #!/usr/bin/env", "#shapefilebr = \"C:/biondo/buff_nasc.shp\" #driver = ogr.GetDriverByName(\"ESRI Shapefile\") #dataSourcebr = driver.Open(shapefilebr, True) #layerbr =", "import modules import gdal import numpy as np from osgeo import ogr, osr", "The raster files have a # land usage classification that was automaticaly generated,", "CreateCopy() method.' 
% format # # dst_ds = driver2.Create( saida, 5000, 5000, 3,", "as array dstlayer=None memsource.Destroy() #tabela = srcband.ReadAsArray() #print tabela resposta1 = np.histogram(classes, bins=[0,1,20])", "\\ # and metadata[gdal.DCAP_CREATECOPY] == 'YES': # print 'Driver %s supports CreateCopy() method.'", "banda_class = srcband.ReadAsArray().astype(np.float) if banda_class.size==(5000*5000): classes = banda_class geom=feat_rd.GetGeometryRef() #print 'spat ', layer_rd.GetSpatialRef()", "(contard,c5) # format = \"GTiff\" # driver2 = gdal.GetDriverByName( format ) # metadata", "continue banda_class = srcband.ReadAsArray().astype(np.float) if banda_class.size==(5000*5000): classes = banda_class geom=feat_rd.GetGeometryRef() #print 'spat ',", "interest based on a raster with pixels that have classification values. # Having", "this case 14 possible values). # Finally a histogram is made with bins", "a matrix by the matrix of pixel values from the image (in this", "# Having a catalog in a vector layer with adresses of images related", "covers the # whole country. 
We have rural properties boundaries and other poligons", "and the count of # each bin is added to the vector layer", "be given the vector layer with the catalog, This catalog can be built", "= target_ds.GetRasterBand(1) # band.SetNoDataValue(NoData_value) # contard=contard+1 conta=0 cont_loop=0 for feature in layer: geom2=feature.GetGeometryRef()", "glob import os gdal.UseExceptions() # #shapefilebr = \"C:/biondo/buff_nasc.shp\" #driver = ogr.GetDriverByName(\"ESRI Shapefile\") #dataSourcebr", "infile driver = ogr.GetDriverByName(\"ESRI Shapefile\") dataSource_rd = driver.Open(rapideye, True) layer_rd = dataSource_rd.GetLayer() shapefile", "ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False)", "a catalog in a vector layer with adresses of images related to each", "if metadata.has_key(gdal.DCAP_CREATE) \\ # and metadata[gdal.DCAP_CREATE] == 'YES': # print 'Driver %s supports", "the vector layer with the catalog, This catalog can be built with the", "pixels with given values that are inside any given polygon. The raster files", "# Name: Raster information from vector relations # Purpose: Classify features of interest", "be built with the Qgis plugin #\"Image Footprint\", it is necessary to select", "for feature in layer: geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\") #print 'feat' , caminho_feat #print verifica_f cont_loop+=1", "# metadata = driver2.GetMetadata() # if metadata.has_key(gdal.DCAP_CREATE) \\ # and metadata[gdal.DCAP_CREATE] == 'YES':", "of pixel values from the image (in this case 14 possible values). 
#", "layer with adresses of images related to each polygon, count # the pixels", "catalog in a vector layer with adresses of images related to each polygon,", "5000, 3, gdal.GDT_Float32, ['COMPRESS=LZW'] ) # srs = osr.SpatialReference() # dst_ds.SetProjection(src_ds.GetProjection()) # dst_ds.SetGeoTransform(src_ds.GetGeoTransform())", "import glob import os gdal.UseExceptions() # #shapefilebr = \"C:/biondo/buff_nasc.shp\" #driver = ogr.GetDriverByName(\"ESRI Shapefile\")", "if (intersect.GetArea()/geom2.GetArea())<0.5: continue conta+=1 SpatialRef = osr.SpatialReference() SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\" ) memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData') tmp=memoutdriver.Open('memData',", "dst_ds=None # # # if c5==10: # layer=None # dataSource=None # layerbr=None #", "information from vector relations # Purpose: Classify features of interest based on a", "= 5000 # target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) # target_ds.SetGeoTransform(src_ds.GetGeoTransform()) # target_ds.SetProjection(src_ds.GetProjection())", "dataSource = driver.Open(shapefile, True) layer = dataSource.GetLayer() layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\", ogr.OFTInteger),False)", "was automaticaly generated, this classification covers the # whole country. We have rural", "polygon. 
The raster files have a # land usage classification that was automaticaly", "5000 y_res = 5000 # target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) # target_ds.SetGeoTransform(src_ds.GetGeoTransform())", "then 1 catalog for infile in glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print infile rapideye = infile driver", "y_max = contorno[3] x_res = 5000 y_res = 5000 # target_ds = gdal.GetDriverByName('MEM').Create('',", "= memsource.CreateLayer('teste', SpatialRef) target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection()) band =", ") # metadata = driver2.GetMetadata() # if metadata.has_key(gdal.DCAP_CREATE) \\ # and metadata[gdal.DCAP_CREATE] ==", "resolution as the original image (RapidEye, 5 meters) with binary values, # being", "Footprint\", it is necessary to select image boudary option. The path (caminho) field", "the vector layer with features of interest. # # Author: leandro.biondo # #", "gdal.GDT_Byte) # target_ds.SetGeoTransform(src_ds.GetGeoTransform()) # target_ds.SetProjection(src_ds.GetProjection()) # band = target_ds.GetRasterBand(1) # band.SetNoDataValue(NoData_value) # contard=contard+1", "NoData_value = 255 contard =0 c5=0 for feat_rd in layer_rd: caminho_img = feat_rd.GetField(\"caminho\")", "have rural properties boundaries and other poligons that we want to verify #", "image (in this case 14 possible values). # Finally a histogram is made", "gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) # target_ds.SetGeoTransform(src_ds.GetGeoTransform()) # target_ds.SetProjection(src_ds.GetProjection()) # band = target_ds.GetRasterBand(1) #", "method.' 
% format # # dst_ds = driver2.Create( saida, 5000, 5000, 3, gdal.GDT_Float32,", "leandro.biondo # # Created: 05/10/2016 # Copyright: (c) leandro.biondo 2016 # Licence: GNU", "1) dstlayer = memsource.CreateLayer('teste', SpatialRef) target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection())", "example, try GetRasterBand(10) print 'Band ( %i ) not found' % band_num print", "with the Qgis plugin #\"Image Footprint\", it is necessary to select image boudary", "and metadata[gdal.DCAP_CREATE] == 'YES': # print 'Driver %s supports Create() method.' % format", "5000 # target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) # target_ds.SetGeoTransform(src_ds.GetGeoTransform()) # target_ds.SetProjection(src_ds.GetProjection()) #", "feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature) feature.Destroy() print \"ImagemImovel: %d | %d | %d | %d\"", "dstlayer=None memsource.Destroy() #tabela = srcband.ReadAsArray() #print tabela resposta1 = np.histogram(classes, bins=[0,1,20]) classes2 =", "# print 'proj ', src_ds.GetProjection() contorno=geom.GetEnvelope() x_min = contorno[0] y_max = contorno[3] x_res", "# dst_ds.GetRasterBand(2).WriteArray(array) # dst_ds.GetRasterBand(3).WriteArray(classes2) # dst_ds=None # # # if c5==10: # layer=None", "# target_ds.SetProjection(src_ds.GetProjection()) # band = target_ds.GetRasterBand(1) # band.SetNoDataValue(NoData_value) # contard=contard+1 conta=0 cont_loop=0 for", "#!/usr/bin/env python # import modules import gdal import numpy as np from osgeo", "# Read as array dstlayer=None memsource.Destroy() #tabela = srcband.ReadAsArray() #print tabela resposta1 =", "print e #sys.exit(1) continue banda_class = srcband.ReadAsArray().astype(np.float) if banda_class.size==(5000*5000): classes = banda_class geom=feat_rd.GetGeometryRef()", 
"gdal.RasterizeLayer(target_ds, [1], dstlayer, burn_values=[1]) array = band.ReadAsArray() #print np.histogram(array, bins=[0,1,250,300]) # Read as", "int(resposta[0][12]*25)) feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature) feature.Destroy() print \"ImagemImovel: %d | %d | %d |", "supports CreateCopy() method.' % format # # dst_ds = driver2.Create( saida, 5000, 5000,", "= ogr.GetDriverByName(\"ESRI Shapefile\") #dataSourcebr = driver.Open(shapefilebr, True) #layerbr = dataSourcebr.GetLayer() #Here should be", "= \"C:/biondo/buff_nasc.shp\" #driver = ogr.GetDriverByName(\"ESRI Shapefile\") #dataSourcebr = driver.Open(shapefilebr, True) #layerbr = dataSourcebr.GetLayer()", "the # whole country. We have rural properties boundaries and other poligons that", "srcband = src_ds.GetRasterBand(1) print srcband except RuntimeError, e: # for example, try GetRasterBand(10)", "= srcband.ReadAsArray() #print tabela resposta1 = np.histogram(classes, bins=[0,1,20]) classes2 = classes*array resposta =", "intersection and 0 if it is not. 
This mask is then multiplied #", "path (caminho) field will be used to open #the images with classified pixels,", "\"EPSG:4674\" ) memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData') tmp=memoutdriver.Open('memData', 1) dstlayer = memsource.CreateLayer('teste', SpatialRef) target_ds = gdal.GetDriverByName('MEM').Create('',", "vector relations # Purpose: Classify features of interest based on a raster with", "in a vector layer with adresses of images related to each polygon, count", "with classified pixels, you can use a * as mask if there are", "# print 'fim forcado' # break # target_ds= None #break layer.ResetReading() layer=None dataSource=None", "has the same resolution as the original image (RapidEye, 5 meters) with binary", "to open #the images with classified pixels, you can use a * as", "infile in glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print infile rapideye = infile driver = ogr.GetDriverByName(\"ESRI Shapefile\") dataSource_rd", "#break continue try: srcband = src_ds.GetRasterBand(1) print srcband except RuntimeError, e: # for", "print intersect.GetArea() print (intersect.GetArea()/geom2.GetArea()) if (intersect.GetArea()/geom2.GetArea())<0.5: continue conta+=1 SpatialRef = osr.SpatialReference() SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\"", "layer: geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\") #print 'feat' , caminho_feat #print verifica_f cont_loop+=1 if geom2.Intersects(geom) :", "a # land usage classification that was automaticaly generated, this classification covers the", "= osr.SpatialReference() SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\" ) memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData') tmp=memoutdriver.Open('memData', 1) dstlayer = memsource.CreateLayer('teste', SpatialRef)", "#layerbr = dataSourcebr.GetLayer() #Here should be given the vector layer with the catalog,", "metadata = 
driver2.GetMetadata() # if metadata.has_key(gdal.DCAP_CREATE) \\ # and metadata[gdal.DCAP_CREATE] == 'YES': #", "layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\",", "os gdal.UseExceptions() # #shapefilebr = \"C:/biondo/buff_nasc.shp\" #driver = ogr.GetDriverByName(\"ESRI Shapefile\") #dataSourcebr = driver.Open(shapefilebr,", "= feat_rd.GetField(\"caminho\") print caminho_img try: src_ds = gdal.Open( caminho_img) except RuntimeError, e: print", "contard =0 c5=0 for feat_rd in layer_rd: caminho_img = feat_rd.GetField(\"caminho\") print caminho_img try:", "gdal import numpy as np from osgeo import ogr, osr import glob import", "srcband except RuntimeError, e: # for example, try GetRasterBand(10) print 'Band ( %i", "== 'YES': # print 'Driver %s supports Create() method.' % format # if", "% format # # dst_ds = driver2.Create( saida, 5000, 5000, 3, gdal.GDT_Float32, ['COMPRESS=LZW']", "target_ds.SetGeoTransform(src_ds.GetGeoTransform()) # target_ds.SetProjection(src_ds.GetProjection()) # band = target_ds.GetRasterBand(1) # band.SetNoDataValue(NoData_value) # contard=contard+1 conta=0 cont_loop=0", "layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False) pixel_size = 5 NoData_value =", "osgeo import ogr, osr import glob import os gdal.UseExceptions() # #shapefilebr = \"C:/biondo/buff_nasc.shp\"", "being one of 13 distinct classes. 
This aproach gets each # image boundary", "5000, 5000, 3, gdal.GDT_Float32, ['COMPRESS=LZW'] ) # srs = osr.SpatialReference() # dst_ds.SetProjection(src_ds.GetProjection()) #", "is added to the vector layer with features of interest. # # Author:", "ogr.GetDriverByName(\"ESRI Shapefile\") dataSource_rd = driver.Open(rapideye, True) layer_rd = dataSource_rd.GetLayer() shapefile = ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource", "== 'YES': # print 'Driver %s supports CreateCopy() method.' % format # #", "# layer_rd=None # dataSource_rd=None # target_ds= None # print 'fim forcado' # break", "ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False)", "x_res, y_res, gdal.GDT_Byte) # target_ds.SetGeoTransform(src_ds.GetGeoTransform()) # target_ds.SetProjection(src_ds.GetProjection()) # band = target_ds.GetRasterBand(1) # band.SetNoDataValue(NoData_value)", "Shapefile\") #dataSourcebr = driver.Open(shapefilebr, True) #layerbr = dataSourcebr.GetLayer() #Here should be given the", "#------------------------------------------------------------------------------- # Name: Raster information from vector relations # Purpose: Classify features of", "3, gdal.GDT_Float32, ['COMPRESS=LZW'] ) # srs = osr.SpatialReference() # dst_ds.SetProjection(src_ds.GetProjection()) # dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) #", "band = target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) dstfeature = ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature) # print 'resultado', dstfeature.GetGeometryRef().GetEnvelope()", 
"ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False)", "matrix of pixel values from the image (in this case 14 possible values).", "inside any given polygon. The raster files have a # land usage classification", "count # the pixels with given values that are inside any given polygon.", "pixels that have classification values. # Having a catalog in a vector layer", "#tabela = srcband.ReadAsArray() #print tabela resposta1 = np.histogram(classes, bins=[0,1,20]) classes2 = classes*array resposta", "# contard=contard+1 conta=0 cont_loop=0 for feature in layer: geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\") #print 'feat' ,", "if it is not. 
This mask is then multiplied # as a matrix", "memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData') tmp=memoutdriver.Open('memData', 1) dstlayer = memsource.CreateLayer('teste', SpatialRef) target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res,", "original image (RapidEye, 5 meters) with binary values, # being 1 if the", "dst_ds.GetRasterBand(2).WriteArray(array) # dst_ds.GetRasterBand(3).WriteArray(classes2) # dst_ds=None # # # if c5==10: # layer=None #", "int(resposta[0][8]*25)) feature.SetField(\"vegremo\", int(resposta[0][9]*25)) feature.SetField(\"regene\", int(resposta[0][10]*25)) feature.SetField(\"areaurb\", int(resposta[0][11]*25)) feature.SetField(\"nuvens\", int(resposta[0][12]*25)) feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature) feature.Destroy()", "being 1 if the pixel is part of the intersection and 0 if", "and other poligons that we want to verify # how much area was", "by the matrix of pixel values from the image (in this case 14", "# target_ds.SetGeoTransform(src_ds.GetGeoTransform()) # target_ds.SetProjection(src_ds.GetProjection()) # band = target_ds.GetRasterBand(1) # band.SetNoDataValue(NoData_value) # contard=contard+1 conta=0", "dstfeature.GetGeometryRef().GetEnvelope() # Rasterize gdal.RasterizeLayer(target_ds, [1], dstlayer, burn_values=[1]) array = band.ReadAsArray() #print np.histogram(array, bins=[0,1,250,300])", "memsource.Destroy() #tabela = srcband.ReadAsArray() #print tabela resposta1 = np.histogram(classes, bins=[0,1,20]) classes2 = classes*array", "# print 'Driver %s supports Create() method.' % format # if metadata.has_key(gdal.DCAP_CREATECOPY) \\", "of 13 distinct classes. 
This aproach gets each # image boundary polygon intersection", "that separate the intended classes and the count of # each bin is", "each polygon, count # the pixels with given values that are inside any", "target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) dstfeature = ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature) # print 'resultado', dstfeature.GetGeometryRef().GetEnvelope() # Rasterize", "= driver.Open(rapideye, True) layer_rd = dataSource_rd.GetLayer() shapefile = ('/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp') dataSource = driver.Open(shapefile, True)", "= driver.Open(shapefile, True) layer = dataSource.GetLayer() layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\",", "True) layer = dataSource.GetLayer() layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\",", "ogr.GetDriverByName(\"ESRI Shapefile\") #dataSourcebr = driver.Open(shapefilebr, True) #layerbr = dataSourcebr.GetLayer() #Here should be given", "layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\",", "ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\", 
ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"apicum\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"restinga\", ogr.OFTInteger),False)", "feature.SetField(\"salgado\", int(resposta[0][5]*25)) feature.SetField(\"apicum\", int(resposta[0][6]*25)) feature.SetField(\"restinga\", int(resposta[0][7]*25)) feature.SetField(\"agua\", int(resposta[0][8]*25)) feature.SetField(\"vegremo\", int(resposta[0][9]*25)) feature.SetField(\"regene\", int(resposta[0][10]*25)) feature.SetField(\"areaurb\",", "driver2.Create( saida, 5000, 5000, 3, gdal.GDT_Float32, ['COMPRESS=LZW'] ) # srs = osr.SpatialReference() #", "= gdal.Open( caminho_img) except RuntimeError, e: print 'Unable to open INPUT' print e", "dstlayer = memsource.CreateLayer('teste', SpatialRef) target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection()) band", ": c5+=1 if (verifica_f is None): intersect = geom.Intersection(geom2) print intersect.GetArea() print (intersect.GetArea()/geom2.GetArea())", "in 3 band for testing purposes # # saida = \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" % (contard,c5)", "INPUT' print e #break continue try: srcband = src_ds.GetRasterBand(1) print srcband except RuntimeError,", "'YES': # print 'Driver %s supports CreateCopy() method.' % format # # dst_ds", "# land usage classification that was automaticaly generated, this classification covers the #", "bins=[0,1,250,300]) # Read as array dstlayer=None memsource.Destroy() #tabela = srcband.ReadAsArray() #print tabela resposta1", "on a raster with pixels that have classification values. 
# Having a catalog", "python # import modules import gdal import numpy as np from osgeo import", "# dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) # # dst_ds.GetRasterBand(1).WriteArray(classes) # dst_ds.GetRasterBand(2).WriteArray(array) # dst_ds.GetRasterBand(3).WriteArray(classes2) # dst_ds=None # #", "then multiplied # as a matrix by the matrix of pixel values from", "= contorno[0] y_max = contorno[3] x_res = 5000 y_res = 5000 # target_ds", "'feat' , caminho_feat #print verifica_f cont_loop+=1 if geom2.Intersects(geom) : c5+=1 if (verifica_f is", "# print 'resultado', dstfeature.GetGeometryRef().GetEnvelope() # Rasterize gdal.RasterizeLayer(target_ds, [1], dstlayer, burn_values=[1]) array = band.ReadAsArray()", "bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\", int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\", int(resposta[0][1]*25)) feature.SetField(\"rvegnat\", int(resposta[0][2]*25)) feature.SetField(\"vereda\", int(resposta[0][3]*25)) feature.SetField(\"mangue\", int(resposta[0][4]*25)) feature.SetField(\"salgado\", int(resposta[0][5]*25))", "# Created: 05/10/2016 # Copyright: (c) leandro.biondo 2016 # Licence: GNU GLP #-------------------------------------------------------------------------------", "feature.SetField(\"indef\", int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\", int(resposta[0][1]*25)) feature.SetField(\"rvegnat\", int(resposta[0][2]*25)) feature.SetField(\"vereda\", int(resposta[0][3]*25)) feature.SetField(\"mangue\", int(resposta[0][4]*25)) feature.SetField(\"salgado\", int(resposta[0][5]*25)) feature.SetField(\"apicum\",", "%i ) not found' % band_num print e #sys.exit(1) continue banda_class = srcband.ReadAsArray().astype(np.float)", "int(resposta[0][6]*25)) feature.SetField(\"restinga\", int(resposta[0][7]*25)) feature.SetField(\"agua\", int(resposta[0][8]*25)) feature.SetField(\"vegremo\", int(resposta[0][9]*25)) feature.SetField(\"regene\", int(resposta[0][10]*25)) feature.SetField(\"areaurb\", 
int(resposta[0][11]*25)) feature.SetField(\"nuvens\", int(resposta[0][12]*25))", "\\ # and metadata[gdal.DCAP_CREATE] == 'YES': # print 'Driver %s supports Create() method.'", "# # saida = \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" % (contard,c5) # format = \"GTiff\" # driver2", "with features of interest. # # Author: leandro.biondo # # Created: 05/10/2016 #", "from osgeo import ogr, osr import glob import os gdal.UseExceptions() # #shapefilebr =", "# import modules import gdal import numpy as np from osgeo import ogr,", "are more then 1 catalog for infile in glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print infile rapideye =", "Qgis plugin #\"Image Footprint\", it is necessary to select image boudary option. The", "gdal.GDT_Float32, ['COMPRESS=LZW'] ) # srs = osr.SpatialReference() # dst_ds.SetProjection(src_ds.GetProjection()) # dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) # #", "memsource=memoutdriver.CreateDataSource('memData') tmp=memoutdriver.Open('memData', 1) dstlayer = memsource.CreateLayer('teste', SpatialRef) target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte)", "# print 'Driver %s supports CreateCopy() method.' % format # # dst_ds =", "ogr.OFTInteger),False) pixel_size = 5 NoData_value = 255 contard =0 c5=0 for feat_rd in", "x_res = 5000 y_res = 5000 # target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte)", "dst_ds = driver2.Create( saida, 5000, 5000, 3, gdal.GDT_Float32, ['COMPRESS=LZW'] ) # srs =", "to verify # how much area was classified as being one of 13", "metadata[gdal.DCAP_CREATECOPY] == 'YES': # print 'Driver %s supports CreateCopy() method.' 
% format #", "dataSource.GetLayer() layer.CreateField(ogr.FieldDefn(\"indef\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"uso_cons\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"rvegnat\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vereda\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"mangue\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"salgado\", ogr.OFTInteger),False)", "matrix by the matrix of pixel values from the image (in this case", "# target_ds= None #break layer.ResetReading() layer=None dataSource=None layerbr=None dataSourcebr=None layer_rd=None dataSource_rd=None print 'fim'", "aproach gets each # image boundary polygon intersection with each feature of interest", "'Driver %s supports Create() method.' % format # if metadata.has_key(gdal.DCAP_CREATECOPY) \\ # and", "print 'Band ( %i ) not found' % band_num print e #sys.exit(1) continue", "# srs = osr.SpatialReference() # dst_ds.SetProjection(src_ds.GetProjection()) # dst_ds.SetGeoTransform(src_ds.GetGeoTransform()) # # dst_ds.GetRasterBand(1).WriteArray(classes) # dst_ds.GetRasterBand(2).WriteArray(array)", "= banda_class geom=feat_rd.GetGeometryRef() #print 'spat ', layer_rd.GetSpatialRef() # print 'proj ', src_ds.GetProjection() contorno=geom.GetEnvelope()", "one of 13 distinct classes. This aproach gets each # image boundary polygon", "bin is added to the vector layer with features of interest. # #", "a histogram is made with bins that separate the intended classes and the", "GLP #------------------------------------------------------------------------------- #!/usr/bin/env python # import modules import gdal import numpy as np", "option. The path (caminho) field will be used to open #the images with", "# target_ds = gdal.GetDriverByName('MEM').Create('', x_res, y_res, gdal.GDT_Byte) # target_ds.SetGeoTransform(src_ds.GetGeoTransform()) # target_ds.SetProjection(src_ds.GetProjection()) # band", "builds a raster mask. 
# The mask has the same resolution as the", "classes*array resposta = np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\", int(resposta1[0][0]*25)) feature.SetField(\"uso_cons\", int(resposta[0][1]*25)) feature.SetField(\"rvegnat\", int(resposta[0][2]*25)) feature.SetField(\"vereda\", int(resposta[0][3]*25))", "0 if it is not. This mask is then multiplied # as a", "should be given the vector layer with the catalog, This catalog can be", "geom=feat_rd.GetGeometryRef() #print 'spat ', layer_rd.GetSpatialRef() # print 'proj ', src_ds.GetProjection() contorno=geom.GetEnvelope() x_min =", "#driver = ogr.GetDriverByName(\"ESRI Shapefile\") #dataSourcebr = driver.Open(shapefilebr, True) #layerbr = dataSourcebr.GetLayer() #Here should", "# Finally a histogram is made with bins that separate the intended classes", "bins that separate the intended classes and the count of # each bin", "\"C:/biondo/buff_nasc.shp\" #driver = ogr.GetDriverByName(\"ESRI Shapefile\") #dataSourcebr = driver.Open(shapefilebr, True) #layerbr = dataSourcebr.GetLayer() #Here", "feature.SetField(\"vegremo\", int(resposta[0][9]*25)) feature.SetField(\"regene\", int(resposta[0][10]*25)) feature.SetField(\"areaurb\", int(resposta[0][11]*25)) feature.SetField(\"nuvens\", int(resposta[0][12]*25)) feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature) feature.Destroy() print", "layer.CreateField(ogr.FieldDefn(\"agua\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"vegremo\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"regene\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"areaurb\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False) pixel_size", "src_ds.GetRasterBand(1) print srcband except RuntimeError, e: # for example, try GetRasterBand(10) print 'Band", "as the original image (RapidEye, 
5 meters) with binary values, # being 1", "case 14 possible values). # Finally a histogram is made with bins that", "band.SetNoDataValue(NoData_value) dstfeature = ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature) # print 'resultado', dstfeature.GetGeometryRef().GetEnvelope() # Rasterize gdal.RasterizeLayer(target_ds,", "# Licence: GNU GLP #------------------------------------------------------------------------------- #!/usr/bin/env python # import modules import gdal import", "(caminho) field will be used to open #the images with classified pixels, you", "= dataSourcebr.GetLayer() #Here should be given the vector layer with the catalog, This", "with pixels that have classification values. # Having a catalog in a vector", "Purpose: Classify features of interest based on a raster with pixels that have", "is necessary to select image boudary option. The path (caminho) field will be", "intersect.GetArea() print (intersect.GetArea()/geom2.GetArea()) if (intersect.GetArea()/geom2.GetArea())<0.5: continue conta+=1 SpatialRef = osr.SpatialReference() SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\" )", "osr.SpatialReference() SpatialRef.SetWellKnownGeogCS( \"EPSG:4674\" ) memoutdriver=ogr.GetDriverByName('MEMORY') memsource=memoutdriver.CreateDataSource('memData') tmp=memoutdriver.Open('memData', 1) dstlayer = memsource.CreateLayer('teste', SpatialRef) target_ds", "feature.SetField(\"vereda\", int(resposta[0][3]*25)) feature.SetField(\"mangue\", int(resposta[0][4]*25)) feature.SetField(\"salgado\", int(resposta[0][5]*25)) feature.SetField(\"apicum\", int(resposta[0][6]*25)) feature.SetField(\"restinga\", int(resposta[0][7]*25)) feature.SetField(\"agua\", int(resposta[0][8]*25)) feature.SetField(\"vegremo\",", "array dstlayer=None memsource.Destroy() #tabela = srcband.ReadAsArray() #print tabela resposta1 = np.histogram(classes, bins=[0,1,20]) classes2", "format ) # metadata = driver2.GetMetadata() # if 
metadata.has_key(gdal.DCAP_CREATE) \\ # and metadata[gdal.DCAP_CREATE]", "metadata.has_key(gdal.DCAP_CREATECOPY) \\ # and metadata[gdal.DCAP_CREATECOPY] == 'YES': # print 'Driver %s supports CreateCopy()", "import ogr, osr import glob import os gdal.UseExceptions() # #shapefilebr = \"C:/biondo/buff_nasc.shp\" #driver", "feature.SetField(\"mangue\", int(resposta[0][4]*25)) feature.SetField(\"salgado\", int(resposta[0][5]*25)) feature.SetField(\"apicum\", int(resposta[0][6]*25)) feature.SetField(\"restinga\", int(resposta[0][7]*25)) feature.SetField(\"agua\", int(resposta[0][8]*25)) feature.SetField(\"vegremo\", int(resposta[0][9]*25)) feature.SetField(\"regene\",", "# dataSourcebr=None # layer_rd=None # dataSource_rd=None # target_ds= None # print 'fim forcado'", "count of # each bin is added to the vector layer with features", "try GetRasterBand(10) print 'Band ( %i ) not found' % band_num print e", "# image boundary polygon intersection with each feature of interest and builds a", "src_ds = gdal.Open( caminho_img) except RuntimeError, e: print 'Unable to open INPUT' print", "ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"nuvens\", ogr.OFTInteger),False) layer.CreateField(ogr.FieldDefn(\"foraLi\", ogr.OFTInteger),False) pixel_size = 5 NoData_value = 255 contard =0", "np.histogram(array, bins=[0,1,250,300]) # Read as array dstlayer=None memsource.Destroy() #tabela = srcband.ReadAsArray() #print tabela", "feature.SetField(\"rvegnat\", int(resposta[0][2]*25)) feature.SetField(\"vereda\", int(resposta[0][3]*25)) feature.SetField(\"mangue\", int(resposta[0][4]*25)) feature.SetField(\"salgado\", int(resposta[0][5]*25)) feature.SetField(\"apicum\", int(resposta[0][6]*25)) feature.SetField(\"restinga\", int(resposta[0][7]*25)) feature.SetField(\"agua\",", "# as a matrix by the matrix of pixel values from the image", "= \"GTiff\" # driver2 = gdal.GetDriverByName( format ) # metadata = driver2.GetMetadata() #", "c5+=1 #create an image file and put the results in 3 
band for", "feature.SetField(\"nuvens\", int(resposta[0][12]*25)) feature.SetField(\"foraLi\", int((resposta[0][0]-resposta1[0][0])*25)) layer.SetFeature(feature) feature.Destroy() print \"ImagemImovel: %d | %d | %d", "an image file and put the results in 3 band for testing purposes", "to each polygon, count # the pixels with given values that are inside", "feature in layer: geom2=feature.GetGeometryRef() verifica_f=feature.GetField(\"foraLi\") #print 'feat' , caminho_feat #print verifica_f cont_loop+=1 if", "supports Create() method.' % format # if metadata.has_key(gdal.DCAP_CREATECOPY) \\ # and metadata[gdal.DCAP_CREATECOPY] ==", "None # print 'fim forcado' # break # target_ds= None #break layer.ResetReading() layer=None", "the pixels with given values that are inside any given polygon. The raster", "#dataSourcebr = driver.Open(shapefilebr, True) #layerbr = dataSourcebr.GetLayer() #Here should be given the vector", "srcband.ReadAsArray().astype(np.float) if banda_class.size==(5000*5000): classes = banda_class geom=feat_rd.GetGeometryRef() #print 'spat ', layer_rd.GetSpatialRef() # print", "if there are more then 1 catalog for infile in glob.glob(r'/home/gecad/CAR/Demandas/Nascentes/aaa_nascentes_catalogo.shp'): print infile", "gdal.GDT_Byte) target_ds.SetGeoTransform(src_ds.GetGeoTransform()) target_ds.SetProjection(src_ds.GetProjection()) band = target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) dstfeature = ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature) #", "same resolution as the original image (RapidEye, 5 meters) with binary values, #", "= target_ds.GetRasterBand(1) band.SetNoDataValue(NoData_value) dstfeature = ogr.Feature(dstlayer.GetLayerDefn()) dstfeature.SetGeometry(intersect) dstlayer.CreateFeature(dstfeature) # print 'resultado', dstfeature.GetGeometryRef().GetEnvelope() #", "vector layer with adresses of images related to each polygon, count # the", "much area was classified as 
being one of 13 distinct classes. This aproach", "saida = \"/home/gecad/CAR/Demandas/Nascentes/img_testes/img%d%d.tif\" % (contard,c5) # format = \"GTiff\" # driver2 = gdal.GetDriverByName(", "driver = ogr.GetDriverByName(\"ESRI Shapefile\") dataSource_rd = driver.Open(rapideye, True) layer_rd = dataSource_rd.GetLayer() shapefile =", "The mask has the same resolution as the original image (RapidEye, 5 meters)", "Author: leandro.biondo # # Created: 05/10/2016 # Copyright: (c) leandro.biondo 2016 # Licence:", "# being 1 if the pixel is part of the intersection and 0", "tabela resposta1 = np.histogram(classes, bins=[0,1,20]) classes2 = classes*array resposta = np.histogram(classes2, bins=[0,1,2,3,4,5,6,7,8,9,10,11,12,20]) feature.SetField(\"indef\"," ]
[ "self.year = movie_year self.poster_image_url = poster_image self.trailer_youtube_url = trailer_youtube self.rating = movie_rating def", "about movies.''' def __init__(self, movie_title, movie_year, poster_image, trailer_youtube, movie_rating): self.title = movie_title self.year", "information about movies.''' def __init__(self, movie_title, movie_year, poster_image, trailer_youtube, movie_rating): self.title = movie_title", "Movie(): '''This is a class for storing information about movies.''' def __init__(self, movie_title,", "movie_title, movie_year, poster_image, trailer_youtube, movie_rating): self.title = movie_title self.year = movie_year self.poster_image_url =", "poster_image, trailer_youtube, movie_rating): self.title = movie_title self.year = movie_year self.poster_image_url = poster_image self.trailer_youtube_url", "= poster_image self.trailer_youtube_url = trailer_youtube self.rating = movie_rating def show_trailer(self): '''This method opens", "trailer_youtube self.rating = movie_rating def show_trailer(self): '''This method opens a youtube url.''' webbrowser.open(self.trailer_youtube_url)", "self.trailer_youtube_url = trailer_youtube self.rating = movie_rating def show_trailer(self): '''This method opens a youtube", "poster_image self.trailer_youtube_url = trailer_youtube self.rating = movie_rating def show_trailer(self): '''This method opens a", "webbrowser class Movie(): '''This is a class for storing information about movies.''' def", "class for storing information about movies.''' def __init__(self, movie_title, movie_year, poster_image, trailer_youtube, movie_rating):", "self.poster_image_url = poster_image self.trailer_youtube_url = trailer_youtube self.rating = movie_rating def show_trailer(self): '''This method", "a class for storing information about movies.''' def __init__(self, movie_title, movie_year, poster_image, trailer_youtube,", "= movie_title self.year = movie_year self.poster_image_url = poster_image self.trailer_youtube_url = 
trailer_youtube self.rating =", "movie_rating): self.title = movie_title self.year = movie_year self.poster_image_url = poster_image self.trailer_youtube_url = trailer_youtube", "class Movie(): '''This is a class for storing information about movies.''' def __init__(self,", "trailer_youtube, movie_rating): self.title = movie_title self.year = movie_year self.poster_image_url = poster_image self.trailer_youtube_url =", "'''This is a class for storing information about movies.''' def __init__(self, movie_title, movie_year,", "storing information about movies.''' def __init__(self, movie_title, movie_year, poster_image, trailer_youtube, movie_rating): self.title =", "= movie_year self.poster_image_url = poster_image self.trailer_youtube_url = trailer_youtube self.rating = movie_rating def show_trailer(self):", "movies.''' def __init__(self, movie_title, movie_year, poster_image, trailer_youtube, movie_rating): self.title = movie_title self.year =", "__init__(self, movie_title, movie_year, poster_image, trailer_youtube, movie_rating): self.title = movie_title self.year = movie_year self.poster_image_url", "movie_year self.poster_image_url = poster_image self.trailer_youtube_url = trailer_youtube self.rating = movie_rating def show_trailer(self): '''This", "for storing information about movies.''' def __init__(self, movie_title, movie_year, poster_image, trailer_youtube, movie_rating): self.title", "= trailer_youtube self.rating = movie_rating def show_trailer(self): '''This method opens a youtube url.'''", "def __init__(self, movie_title, movie_year, poster_image, trailer_youtube, movie_rating): self.title = movie_title self.year = movie_year", "movie_year, poster_image, trailer_youtube, movie_rating): self.title = movie_title self.year = movie_year self.poster_image_url = poster_image", "movie_title self.year = movie_year self.poster_image_url = poster_image self.trailer_youtube_url = trailer_youtube self.rating = movie_rating", "is a class for storing information 
about movies.''' def __init__(self, movie_title, movie_year, poster_image,", "import webbrowser class Movie(): '''This is a class for storing information about movies.'''", "self.title = movie_title self.year = movie_year self.poster_image_url = poster_image self.trailer_youtube_url = trailer_youtube self.rating" ]
[ "reading molecular files \"\"\" import numpy as np import matplotlib.pyplot as plt def", "= np.genfromtxt(fname=file_location, skip_header=2, dtype=\"unicode\") symbols = xyz_file[:, 0] coords = xyz_file[:, 1:] coords", "or \"HETATM\" in l[0:6]: sym.append(l[76:79].strip()) c2 = [float(x) for x in l[30:55].split()] c.append(c2)", "Functions for reading molecular files \"\"\" import numpy as np import matplotlib.pyplot as", "l[0:6] or \"HETATM\" in l[0:6]: sym.append(l[76:79].strip()) c2 = [float(x) for x in l[30:55].split()]", "<gh_stars>0 \"\"\" Functions for reading molecular files \"\"\" import numpy as np import", "f.readlines() c = [] sym = [] for l in data: if \"ATOM\"", "= xyz_file[:, 0] coords = xyz_file[:, 1:] coords = coords.astype(np.float) return symbols, coords", "plt def read_pdb(f_loc: str)->tuple[list[str], np.ndarray]: # This function reads in a pdb file", "c.append(c2) coords = np.array(c) return sym, coords def read_xyz(file_location): #Open an xyz file", "function reads in a pdb file and returns the atom names and coordinates.", "\"\"\" import numpy as np import matplotlib.pyplot as plt def read_pdb(f_loc: str)->tuple[list[str], np.ndarray]:", "\"HETATM\" in l[0:6]: sym.append(l[76:79].strip()) c2 = [float(x) for x in l[30:55].split()] c.append(c2) coords", "\"\"\" Functions for reading molecular files \"\"\" import numpy as np import matplotlib.pyplot", "in a pdb file and returns the atom names and coordinates. 
with open(f_loc)", "coords = np.array(c) return sym, coords def read_xyz(file_location): #Open an xyz file and", "and coordinates xyz_file = np.genfromtxt(fname=file_location, skip_header=2, dtype=\"unicode\") symbols = xyz_file[:, 0] coords =", "c2 = [float(x) for x in l[30:55].split()] c.append(c2) coords = np.array(c) return sym,", "def read_pdb(f_loc: str)->tuple[list[str], np.ndarray]: # This function reads in a pdb file and", "[] for l in data: if \"ATOM\" in l[0:6] or \"HETATM\" in l[0:6]:", "molecular files \"\"\" import numpy as np import matplotlib.pyplot as plt def read_pdb(f_loc:", "symbols and coordinates xyz_file = np.genfromtxt(fname=file_location, skip_header=2, dtype=\"unicode\") symbols = xyz_file[:, 0] coords", "file and return symbols and coordinates xyz_file = np.genfromtxt(fname=file_location, skip_header=2, dtype=\"unicode\") symbols =", "#Open an xyz file and return symbols and coordinates xyz_file = np.genfromtxt(fname=file_location, skip_header=2,", "c = [] sym = [] for l in data: if \"ATOM\" in", "dtype=\"unicode\") symbols = xyz_file[:, 0] coords = xyz_file[:, 1:] coords = coords.astype(np.float) return", "return sym, coords def read_xyz(file_location): #Open an xyz file and return symbols and", "in l[0:6] or \"HETATM\" in l[0:6]: sym.append(l[76:79].strip()) c2 = [float(x) for x in", "a pdb file and returns the atom names and coordinates. 
with open(f_loc) as", "coordinates xyz_file = np.genfromtxt(fname=file_location, skip_header=2, dtype=\"unicode\") symbols = xyz_file[:, 0] coords = xyz_file[:,", "f: data = f.readlines() c = [] sym = [] for l in", "as np import matplotlib.pyplot as plt def read_pdb(f_loc: str)->tuple[list[str], np.ndarray]: # This function", "return symbols and coordinates xyz_file = np.genfromtxt(fname=file_location, skip_header=2, dtype=\"unicode\") symbols = xyz_file[:, 0]", "data = f.readlines() c = [] sym = [] for l in data:", "skip_header=2, dtype=\"unicode\") symbols = xyz_file[:, 0] coords = xyz_file[:, 1:] coords = coords.astype(np.float)", "l[0:6]: sym.append(l[76:79].strip()) c2 = [float(x) for x in l[30:55].split()] c.append(c2) coords = np.array(c)", "import numpy as np import matplotlib.pyplot as plt def read_pdb(f_loc: str)->tuple[list[str], np.ndarray]: #", "file and returns the atom names and coordinates. with open(f_loc) as f: data", "str)->tuple[list[str], np.ndarray]: # This function reads in a pdb file and returns the", "numpy as np import matplotlib.pyplot as plt def read_pdb(f_loc: str)->tuple[list[str], np.ndarray]: # This", "with open(f_loc) as f: data = f.readlines() c = [] sym = []", "l[30:55].split()] c.append(c2) coords = np.array(c) return sym, coords def read_xyz(file_location): #Open an xyz", "matplotlib.pyplot as plt def read_pdb(f_loc: str)->tuple[list[str], np.ndarray]: # This function reads in a", "and coordinates. 
with open(f_loc) as f: data = f.readlines() c = [] sym", "= [] for l in data: if \"ATOM\" in l[0:6] or \"HETATM\" in", "coords def read_xyz(file_location): #Open an xyz file and return symbols and coordinates xyz_file", "x in l[30:55].split()] c.append(c2) coords = np.array(c) return sym, coords def read_xyz(file_location): #Open", "def read_xyz(file_location): #Open an xyz file and return symbols and coordinates xyz_file =", "[] sym = [] for l in data: if \"ATOM\" in l[0:6] or", "sym = [] for l in data: if \"ATOM\" in l[0:6] or \"HETATM\"", "np.genfromtxt(fname=file_location, skip_header=2, dtype=\"unicode\") symbols = xyz_file[:, 0] coords = xyz_file[:, 1:] coords =", "if \"ATOM\" in l[0:6] or \"HETATM\" in l[0:6]: sym.append(l[76:79].strip()) c2 = [float(x) for", "an xyz file and return symbols and coordinates xyz_file = np.genfromtxt(fname=file_location, skip_header=2, dtype=\"unicode\")", "open(f_loc) as f: data = f.readlines() c = [] sym = [] for", "files \"\"\" import numpy as np import matplotlib.pyplot as plt def read_pdb(f_loc: str)->tuple[list[str],", "np.array(c) return sym, coords def read_xyz(file_location): #Open an xyz file and return symbols", "np.ndarray]: # This function reads in a pdb file and returns the atom", "for x in l[30:55].split()] c.append(c2) coords = np.array(c) return sym, coords def read_xyz(file_location):", "read_pdb(f_loc: str)->tuple[list[str], np.ndarray]: # This function reads in a pdb file and returns", "This function reads in a pdb file and returns the atom names and", "= [float(x) for x in l[30:55].split()] c.append(c2) coords = np.array(c) return sym, coords", "import matplotlib.pyplot as plt def read_pdb(f_loc: str)->tuple[list[str], np.ndarray]: # This function reads in", "in l[0:6]: sym.append(l[76:79].strip()) c2 = [float(x) for x in l[30:55].split()] c.append(c2) coords =", "in l[30:55].split()] c.append(c2) coords = np.array(c) return sym, coords def read_xyz(file_location): #Open an", "names and coordinates. 
with open(f_loc) as f: data = f.readlines() c = []", "and return symbols and coordinates xyz_file = np.genfromtxt(fname=file_location, skip_header=2, dtype=\"unicode\") symbols = xyz_file[:,", "in data: if \"ATOM\" in l[0:6] or \"HETATM\" in l[0:6]: sym.append(l[76:79].strip()) c2 =", "data: if \"ATOM\" in l[0:6] or \"HETATM\" in l[0:6]: sym.append(l[76:79].strip()) c2 = [float(x)", "as f: data = f.readlines() c = [] sym = [] for l", "coordinates. with open(f_loc) as f: data = f.readlines() c = [] sym =", "sym.append(l[76:79].strip()) c2 = [float(x) for x in l[30:55].split()] c.append(c2) coords = np.array(c) return", "= f.readlines() c = [] sym = [] for l in data: if", "np import matplotlib.pyplot as plt def read_pdb(f_loc: str)->tuple[list[str], np.ndarray]: # This function reads", "= np.array(c) return sym, coords def read_xyz(file_location): #Open an xyz file and return", "for reading molecular files \"\"\" import numpy as np import matplotlib.pyplot as plt", "xyz_file = np.genfromtxt(fname=file_location, skip_header=2, dtype=\"unicode\") symbols = xyz_file[:, 0] coords = xyz_file[:, 1:]", "# This function reads in a pdb file and returns the atom names", "\"ATOM\" in l[0:6] or \"HETATM\" in l[0:6]: sym.append(l[76:79].strip()) c2 = [float(x) for x", "symbols = xyz_file[:, 0] coords = xyz_file[:, 1:] coords = coords.astype(np.float) return symbols,", "reads in a pdb file and returns the atom names and coordinates. with", "and returns the atom names and coordinates. with open(f_loc) as f: data =", "[float(x) for x in l[30:55].split()] c.append(c2) coords = np.array(c) return sym, coords def", "for l in data: if \"ATOM\" in l[0:6] or \"HETATM\" in l[0:6]: sym.append(l[76:79].strip())", "read_xyz(file_location): #Open an xyz file and return symbols and coordinates xyz_file = np.genfromtxt(fname=file_location,", "pdb file and returns the atom names and coordinates. with open(f_loc) as f:", "the atom names and coordinates. 
with open(f_loc) as f: data = f.readlines() c", "sym, coords def read_xyz(file_location): #Open an xyz file and return symbols and coordinates", "l in data: if \"ATOM\" in l[0:6] or \"HETATM\" in l[0:6]: sym.append(l[76:79].strip()) c2", "= [] sym = [] for l in data: if \"ATOM\" in l[0:6]", "as plt def read_pdb(f_loc: str)->tuple[list[str], np.ndarray]: # This function reads in a pdb", "atom names and coordinates. with open(f_loc) as f: data = f.readlines() c =", "returns the atom names and coordinates. with open(f_loc) as f: data = f.readlines()", "xyz file and return symbols and coordinates xyz_file = np.genfromtxt(fname=file_location, skip_header=2, dtype=\"unicode\") symbols" ]
[ "1610612766, # Old name. } \"\"\" Play-by-play data has an EventMsgType field. This", "# Old name. 'NOH': 1610612740, # Old name. 'CHI': 1610612741, 'DAL': 1610612742, 'DEN':", "'ATL' 1610612738, # 'BOS' 1610612739, # 'CLE' 1610612740, # 'NOP' 1610612741, # 'CHI'", "} \"\"\" Team IDs. (Thank you nba-api). \"\"\" team_ids = [ 1610612737, #", "of (EventMsgType, SubType). We're going to make a lookup table of enum to", "you nba-api). \"\"\" team_ids = [ 1610612737, # 'ATL' 1610612738, # 'BOS' 1610612739,", "1610612762, # 'UTA' 1610612763, # 'MEM' 1610612764, # 'WAS' 1610612765, # 'DET' 1610612766,", "an enum. There is also the EventMsgActionField, which is a complex enum of", "Team IDs. (Thank you nba-api). \"\"\" team_ids = [ 1610612737, # 'ATL' 1610612738,", "[ {'id': 1, 'string': 'FIELD_GOAL_MADE'}, {'id': 2, 'string': 'FIELD_GOAL_MISSED'}, {'id': 3, 'string': 'FREE_THROW'},", "'SUBSTITUTION'}, {'id': 9, 'string': 'TIMEOUT'}, {'id': 10, 'string': 'JUMP_BALL'}, {'id': 11, 'string': 'EJECTION'},", "table of enum to value, then a lookup table for the (EventMsgType, EventMsgActionType)", "'MEM' 1610612764, # 'WAS' 1610612765, # 'DET' 1610612766, # 'CHA' ] \"\"\" Mapping", "\"\"\" \"\"\" List of seasons. \"\"\" season_list = [ '1996-97', '1997-98', '1998-99', '1999-00',", "team abbrev to id. \"\"\" team_abbrev_mapping = { 'ATL': 1610612737, 'BOS': 1610612738, 'CLE':", "# 'OKC' 1610612761, # 'TOR' 1610612762, # 'UTA' 1610612763, # 'MEM' 1610612764, #", "# 'SAC' 1610612759, # 'SAS' 1610612760, # 'OKC' 1610612761, # 'TOR' 1610612762, #", "'IND' 1610612755, # 'PHI' 1610612756, # 'PHX' 1610612757, # 'POR' 1610612758, # 'SAC'", "'HOU': 1610612745, 'LAC': 1610612746, 'LAL': 1610612747, 'MIA': 1610612748, 'MIL': 1610612749, 'MIN': 1610612750, 'BKN':", "'FIELD_GOAL_MISSED'}, {'id': 3, 'string': 'FREE_THROW'}, {'id': 4, 'string': 'REBOUND'}, {'id': 5, 'string': 'TURNOVER'},", "deflate, br', 'Accept-Language': 'en-US,en;q=0.9', } \"\"\" Team IDs. (Thank you nba-api). 
\"\"\" team_ids", "1610612740, # Old name. 'CHI': 1610612741, 'DAL': 1610612742, 'DEN': 1610612743, 'GSW': 1610612744, 'HOU':", "'2005-06', '2006-07', '2007-08', '2008-09', '2009-10', '2010-11', '2011-12', '2012-13', '2013-14', '2014-15', '2015-16', '2016-17', '2017-18',", "'en-US,en;q=0.9', } \"\"\" Team IDs. (Thank you nba-api). \"\"\" team_ids = [ 1610612737,", "# 'NOP' 1610612741, # 'CHI' 1610612742, # 'DAL' 1610612743, # 'DEN' 1610612744, #", "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36' ), 'x-nba-stats-origin': 'stats', 'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Mode': 'cors',", "Gecko) Chrome/79.0.3945.130' #'Safari/537.36' 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like", "'CHI': 1610612741, 'DAL': 1610612742, 'DEN': 1610612743, 'GSW': 1610612744, 'HOU': 1610612745, 'LAC': 1610612746, 'LAL':", "1610612754, # 'IND' 1610612755, # 'PHI' 1610612756, # 'PHX' 1610612757, # 'POR' 1610612758,", "'MIA' 1610612749, # 'MIL' 1610612750, # 'MIN' 1610612751, # 'BKN' 1610612752, # 'NYK'", "'PHI' 1610612756, # 'PHX' 1610612757, # 'POR' 1610612758, # 'SAC' 1610612759, # 'SAS'", "application. \"\"\" \"\"\" List of seasons. \"\"\" season_list = [ '1996-97', '1997-98', '1998-99',", "'string': 'TURNOVER'}, {'id': 6, 'string': 'FOUL'}, {'id': 7, 'string': 'VIOLATION'}, {'id': 8, 'string':", "'ORL' 1610612754, # 'IND' 1610612755, # 'PHI' 1610612756, # 'PHX' 1610612757, # 'POR'", "which is a complex enum of (EventMsgType, SubType). We're going to make a", "1610612765, 'CHA': 1610612766, 'CHH': 1610612766, # Old name. 
} \"\"\" Play-by-play data has", "1, 'string': 'FIELD_GOAL_MADE'}, {'id': 2, 'string': 'FIELD_GOAL_MISSED'}, {'id': 3, 'string': 'FREE_THROW'}, {'id': 4,", "'1998-99', '1999-00', '2000-01', '2001-02', '2002-03', '2003-04', '2004-05', '2005-06', '2006-07', '2007-08', '2008-09', '2009-10', '2010-11',", "'LAC': 1610612746, 'LAL': 1610612747, 'MIA': 1610612748, 'MIL': 1610612749, 'MIN': 1610612750, 'BKN': 1610612751, 'NJN':", "'NYK' 1610612753, # 'ORL' 1610612754, # 'IND' 1610612755, # 'PHI' 1610612756, # 'PHX'", "1610612760, 'SEA': 1610612760, 'TOR': 1610612761, 'UTA': 1610612762, 'VAN': 1610612763, # Old name. 'MEM':", "1610612764, 'DET': 1610612765, 'CHA': 1610612766, 'CHH': 1610612766, # Old name. } \"\"\" Play-by-play", "'2000-01', '2001-02', '2002-03', '2003-04', '2004-05', '2005-06', '2006-07', '2007-08', '2008-09', '2009-10', '2010-11', '2011-12', '2012-13',", "'Accept': 'application/json, text/plain, */*', 'x-nba-stats-token': 'true', 'User-Agent': ( #'Mozilla/5.0 (Macintosh; Intel Mac OS", "'MIL': 1610612749, 'MIN': 1610612750, 'BKN': 1610612751, 'NJN': 1610612751, # Old name. 'NYK': 1610612752,", "'2004-05', '2005-06', '2006-07', '2007-08', '2008-09', '2009-10', '2010-11', '2011-12', '2012-13', '2013-14', '2014-15', '2015-16', '2016-17',", "'Accept-Encoding': 'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9', } \"\"\" Team IDs. (Thank you nba-api).", "8, 'string': 'SUBSTITUTION'}, {'id': 9, 'string': 'TIMEOUT'}, {'id': 10, 'string': 'JUMP_BALL'}, {'id': 11,", "{'id': 6, 'string': 'FOUL'}, {'id': 7, 'string': 'VIOLATION'}, {'id': 8, 'string': 'SUBSTITUTION'}, {'id':", "a complex enum of (EventMsgType, SubType). We're going to make a lookup table", "'PHX' 1610612757, # 'POR' 1610612758, # 'SAC' 1610612759, # 'SAS' 1610612760, # 'OKC'", "# 'TOR' 1610612762, # 'UTA' 1610612763, # 'MEM' 1610612764, # 'WAS' 1610612765, #", "Mapping from team abbrev to id. 
\"\"\" team_abbrev_mapping = { 'ATL': 1610612737, 'BOS':", "headers = { 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*', 'x-nba-stats-token': 'true', 'User-Agent': (", "1610612753, 'IND': 1610612754, 'PHI': 1610612755, 'PHX': 1610612756, 'POR': 1610612757, 'SAC': 1610612758, 'SAS': 1610612759,", "} \"\"\" Play-by-play data has an EventMsgType field. This is an enum. There", "nba-api). \"\"\" team_ids = [ 1610612737, # 'ATL' 1610612738, # 'BOS' 1610612739, #", "1610612751, # Old name. 'NYK': 1610612752, 'ORL': 1610612753, 'IND': 1610612754, 'PHI': 1610612755, 'PHX':", "1610612737, # 'ATL' 1610612738, # 'BOS' 1610612739, # 'CLE' 1610612740, # 'NOP' 1610612741,", "1610612740, # 'NOP' 1610612741, # 'CHI' 1610612742, # 'DAL' 1610612743, # 'DEN' 1610612744,", "1610612754, 'PHI': 1610612755, 'PHX': 1610612756, 'POR': 1610612757, 'SAC': 1610612758, 'SAS': 1610612759, 'OKC': 1610612760,", "'LAL' 1610612748, # 'MIA' 1610612749, # 'MIL' 1610612750, # 'MIN' 1610612751, # 'BKN'", "the (EventMsgType, EventMsgActionType) pair. \"\"\" event_message_types = [ {'id': 1, 'string': 'FIELD_GOAL_MADE'}, {'id':", "\"\"\" Team IDs. (Thank you nba-api). \"\"\" team_ids = [ 1610612737, # 'ATL'", "{'id': 5, 'string': 'TURNOVER'}, {'id': 6, 'string': 'FOUL'}, {'id': 7, 'string': 'VIOLATION'}, {'id':", "'1996-97', '1997-98', '1998-99', '1999-00', '2000-01', '2001-02', '2002-03', '2003-04', '2004-05', '2005-06', '2006-07', '2007-08', '2008-09',", "'DEN': 1610612743, 'GSW': 1610612744, 'HOU': 1610612745, 'LAC': 1610612746, 'LAL': 1610612747, 'MIA': 1610612748, 'MIL':", "{'id': 7, 'string': 'VIOLATION'}, {'id': 8, 'string': 'SUBSTITUTION'}, {'id': 9, 'string': 'TIMEOUT'}, {'id':", "10_14_6) ' #'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130' #'Safari/537.36' 'Mozilla/5.0 (Macintosh; Intel Mac OS", "# 'CHA' ] \"\"\" Mapping from team abbrev to id. 
\"\"\" team_abbrev_mapping =", "1610612746, # 'LAC' 1610612747, # 'LAL' 1610612748, # 'MIA' 1610612749, # 'MIL' 1610612750,", "'string': 'JUMP_BALL'}, {'id': 11, 'string': 'EJECTION'}, {'id': 12, 'string': 'PERIOD_BEGIN'}, {'id': 13, 'string':", "\"\"\" Mapping from team abbrev to id. \"\"\" team_abbrev_mapping = { 'ATL': 1610612737,", "'stats', 'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Mode': 'cors', 'Referer': 'https://stats.nba.com/', 'Accept-Encoding': 'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9',", "'LAC' 1610612747, # 'LAL' 1610612748, # 'MIA' 1610612749, # 'MIL' 1610612750, # 'MIN'", "# 'LAL' 1610612748, # 'MIA' 1610612749, # 'MIL' 1610612750, # 'MIN' 1610612751, #", "'ATL': 1610612737, 'BOS': 1610612738, 'CLE': 1610612739, 'NOP': 1610612740, 'NOK': 1610612740, # Old name.", "1610612737, 'BOS': 1610612738, 'CLE': 1610612739, 'NOP': 1610612740, 'NOK': 1610612740, # Old name. 'NOH':", "= { 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*', 'x-nba-stats-token': 'true', 'User-Agent': ( #'Mozilla/5.0", "'UTA' 1610612763, # 'MEM' 1610612764, # 'WAS' 1610612765, # 'DET' 1610612766, # 'CHA'", "{'id': 1, 'string': 'FIELD_GOAL_MADE'}, {'id': 2, 'string': 'FIELD_GOAL_MISSED'}, {'id': 3, 'string': 'FREE_THROW'}, {'id':", "X 10_14_6) ' #'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130' #'Safari/537.36' 'Mozilla/5.0 (Macintosh; Intel Mac", "'Sec-Fetch-Mode': 'cors', 'Referer': 'https://stats.nba.com/', 'Accept-Encoding': 'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9', } \"\"\" Team", "1610612748, # 'MIA' 1610612749, # 'MIL' 1610612750, # 'MIN' 1610612751, # 'BKN' 1610612752,", "{ 'ATL': 1610612737, 'BOS': 1610612738, 'CLE': 1610612739, 'NOP': 1610612740, 'NOK': 1610612740, # Old", "'BOS' 1610612739, # 'CLE' 1610612740, # 'NOP' 1610612741, # 'CHI' 1610612742, # 'DAL'", "'string': 'TIMEOUT'}, {'id': 10, 'string': 'JUMP_BALL'}, {'id': 11, 'string': 'EJECTION'}, {'id': 12, 'string':", "'string': 'EJECTION'}, {'id': 
12, 'string': 'PERIOD_BEGIN'}, {'id': 13, 'string': 'PERIOD_END'}, {'id': 18, 'string':", "Intel Mac OS X 10_14_6) ' #'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130' #'Safari/537.36' 'Mozilla/5.0", "'PHX': 1610612756, 'POR': 1610612757, 'SAC': 1610612758, 'SAS': 1610612759, 'OKC': 1610612760, 'SEA': 1610612760, 'TOR':", "Old name. 'NOH': 1610612740, # Old name. 'CHI': 1610612741, 'DAL': 1610612742, 'DEN': 1610612743,", "'BKN': 1610612751, 'NJN': 1610612751, # Old name. 'NYK': 1610612752, 'ORL': 1610612753, 'IND': 1610612754,", "3, 'string': 'FREE_THROW'}, {'id': 4, 'string': 'REBOUND'}, {'id': 5, 'string': 'TURNOVER'}, {'id': 6,", "'DET': 1610612765, 'CHA': 1610612766, 'CHH': 1610612766, # Old name. } \"\"\" Play-by-play data", "(Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36' ),", "1610612756, 'POR': 1610612757, 'SAC': 1610612758, 'SAS': 1610612759, 'OKC': 1610612760, 'SEA': 1610612760, 'TOR': 1610612761,", "{'id': 10, 'string': 'JUMP_BALL'}, {'id': 11, 'string': 'EJECTION'}, {'id': 12, 'string': 'PERIOD_BEGIN'}, {'id':", "\"\"\" Constants used in the application. \"\"\" \"\"\" List of seasons. \"\"\" season_list", "1610612745, 'LAC': 1610612746, 'LAL': 1610612747, 'MIA': 1610612748, 'MIL': 1610612749, 'MIN': 1610612750, 'BKN': 1610612751,", "'EJECTION'}, {'id': 12, 'string': 'PERIOD_BEGIN'}, {'id': 13, 'string': 'PERIOD_END'}, {'id': 18, 'string': 'UNKNOWN'}", "'TOR': 1610612761, 'UTA': 1610612762, 'VAN': 1610612763, # Old name. 'MEM': 1610612763, 'WAS': 1610612764,", "*/*', 'x-nba-stats-token': 'true', 'User-Agent': ( #'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) '", "value, then a lookup table for the (EventMsgType, EventMsgActionType) pair. 
\"\"\" event_message_types =", "' #'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130' #'Safari/537.36' 'Mozilla/5.0 (Macintosh; Intel Mac OS X", "# 'NYK' 1610612753, # 'ORL' 1610612754, # 'IND' 1610612755, # 'PHI' 1610612756, #", "There is also the EventMsgActionField, which is a complex enum of (EventMsgType, SubType).", "text/plain, */*', 'x-nba-stats-token': 'true', 'User-Agent': ( #'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6)", "'NOH': 1610612740, # Old name. 'CHI': 1610612741, 'DAL': 1610612742, 'DEN': 1610612743, 'GSW': 1610612744,", "'2015-16', '2016-17', '2017-18', '2018-19', '2019-20', '2020-21', '2021-22' ] \"\"\" Headers. \"\"\" headers =", "EventMsgType field. This is an enum. There is also the EventMsgActionField, which is", "# 'MIL' 1610612750, # 'MIN' 1610612751, # 'BKN' 1610612752, # 'NYK' 1610612753, #", "'1999-00', '2000-01', '2001-02', '2002-03', '2003-04', '2004-05', '2005-06', '2006-07', '2007-08', '2008-09', '2009-10', '2010-11', '2011-12',", "an EventMsgType field. This is an enum. There is also the EventMsgActionField, which", "'x-nba-stats-origin': 'stats', 'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Mode': 'cors', 'Referer': 'https://stats.nba.com/', 'Accept-Encoding': 'gzip, deflate, br', 'Accept-Language':", "'WAS' 1610612765, # 'DET' 1610612766, # 'CHA' ] \"\"\" Mapping from team abbrev", "lookup table for the (EventMsgType, EventMsgActionType) pair. \"\"\" event_message_types = [ {'id': 1,", "1610612764, # 'WAS' 1610612765, # 'DET' 1610612766, # 'CHA' ] \"\"\" Mapping from", "# 'CLE' 1610612740, # 'NOP' 1610612741, # 'CHI' 1610612742, # 'DAL' 1610612743, #", "name. 'CHI': 1610612741, 'DAL': 1610612742, 'DEN': 1610612743, 'GSW': 1610612744, 'HOU': 1610612745, 'LAC': 1610612746,", "'2019-20', '2020-21', '2021-22' ] \"\"\" Headers. 
\"\"\" headers = { 'Connection': 'keep-alive', 'Accept':", "'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Mode': 'cors', 'Referer': 'https://stats.nba.com/', 'Accept-Encoding': 'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9', }", "] \"\"\" Headers. \"\"\" headers = { 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*',", "4, 'string': 'REBOUND'}, {'id': 5, 'string': 'TURNOVER'}, {'id': 6, 'string': 'FOUL'}, {'id': 7,", "'2011-12', '2012-13', '2013-14', '2014-15', '2015-16', '2016-17', '2017-18', '2018-19', '2019-20', '2020-21', '2021-22' ] \"\"\"", "1610612756, # 'PHX' 1610612757, # 'POR' 1610612758, # 'SAC' 1610612759, # 'SAS' 1610612760,", "# 'ATL' 1610612738, # 'BOS' 1610612739, # 'CLE' 1610612740, # 'NOP' 1610612741, #", "1610612741, 'DAL': 1610612742, 'DEN': 1610612743, 'GSW': 1610612744, 'HOU': 1610612745, 'LAC': 1610612746, 'LAL': 1610612747,", "event_message_types = [ {'id': 1, 'string': 'FIELD_GOAL_MADE'}, {'id': 2, 'string': 'FIELD_GOAL_MISSED'}, {'id': 3,", "1610612759, 'OKC': 1610612760, 'SEA': 1610612760, 'TOR': 1610612761, 'UTA': 1610612762, 'VAN': 1610612763, # Old", "name. 'NOH': 1610612740, # Old name. 'CHI': 1610612741, 'DAL': 1610612742, 'DEN': 1610612743, 'GSW':", "'application/json, text/plain, */*', 'x-nba-stats-token': 'true', 'User-Agent': ( #'Mozilla/5.0 (Macintosh; Intel Mac OS X", "1610612765, # 'DET' 1610612766, # 'CHA' ] \"\"\" Mapping from team abbrev to", "\"\"\" event_message_types = [ {'id': 1, 'string': 'FIELD_GOAL_MADE'}, {'id': 2, 'string': 'FIELD_GOAL_MISSED'}, {'id':", "'MIN' 1610612751, # 'BKN' 1610612752, # 'NYK' 1610612753, # 'ORL' 1610612754, # 'IND'", "'string': 'SUBSTITUTION'}, {'id': 9, 'string': 'TIMEOUT'}, {'id': 10, 'string': 'JUMP_BALL'}, {'id': 11, 'string':", "# Old name. 
'MEM': 1610612763, 'WAS': 1610612764, 'DET': 1610612765, 'CHA': 1610612766, 'CHH': 1610612766,", "'https://stats.nba.com/', 'Accept-Encoding': 'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9', } \"\"\" Team IDs. (Thank you", "1610612766, # 'CHA' ] \"\"\" Mapping from team abbrev to id. \"\"\" team_abbrev_mapping", "{'id': 2, 'string': 'FIELD_GOAL_MISSED'}, {'id': 3, 'string': 'FREE_THROW'}, {'id': 4, 'string': 'REBOUND'}, {'id':", "'IND': 1610612754, 'PHI': 1610612755, 'PHX': 1610612756, 'POR': 1610612757, 'SAC': 1610612758, 'SAS': 1610612759, 'OKC':", "1610612759, # 'SAS' 1610612760, # 'OKC' 1610612761, # 'TOR' 1610612762, # 'UTA' 1610612763,", "(Thank you nba-api). \"\"\" team_ids = [ 1610612737, # 'ATL' 1610612738, # 'BOS'", "= { 'ATL': 1610612737, 'BOS': 1610612738, 'CLE': 1610612739, 'NOP': 1610612740, 'NOK': 1610612740, #", "to value, then a lookup table for the (EventMsgType, EventMsgActionType) pair. \"\"\" event_message_types", "'true', 'User-Agent': ( #'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) ' #'AppleWebKit/537.36 (KHTML,", "'FOUL'}, {'id': 7, 'string': 'VIOLATION'}, {'id': 8, 'string': 'SUBSTITUTION'}, {'id': 9, 'string': 'TIMEOUT'},", "\"\"\" season_list = [ '1996-97', '1997-98', '1998-99', '1999-00', '2000-01', '2001-02', '2002-03', '2003-04', '2004-05',", "'2014-15', '2015-16', '2016-17', '2017-18', '2018-19', '2019-20', '2020-21', '2021-22' ] \"\"\" Headers. \"\"\" headers", "seasons. \"\"\" season_list = [ '1996-97', '1997-98', '1998-99', '1999-00', '2000-01', '2001-02', '2002-03', '2003-04',", "5, 'string': 'TURNOVER'}, {'id': 6, 'string': 'FOUL'}, {'id': 7, 'string': 'VIOLATION'}, {'id': 8,", "IDs. (Thank you nba-api). 
\"\"\" team_ids = [ 1610612737, # 'ATL' 1610612738, #", "1610612751, # 'BKN' 1610612752, # 'NYK' 1610612753, # 'ORL' 1610612754, # 'IND' 1610612755,", "6, 'string': 'FOUL'}, {'id': 7, 'string': 'VIOLATION'}, {'id': 8, 'string': 'SUBSTITUTION'}, {'id': 9,", "'TIMEOUT'}, {'id': 10, 'string': 'JUMP_BALL'}, {'id': 11, 'string': 'EJECTION'}, {'id': 12, 'string': 'PERIOD_BEGIN'},", "team_ids = [ 1610612737, # 'ATL' 1610612738, # 'BOS' 1610612739, # 'CLE' 1610612740,", "1610612763, # Old name. 'MEM': 1610612763, 'WAS': 1610612764, 'DET': 1610612765, 'CHA': 1610612766, 'CHH':", "is also the EventMsgActionField, which is a complex enum of (EventMsgType, SubType). We're", "'x-nba-stats-token': 'true', 'User-Agent': ( #'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) ' #'AppleWebKit/537.36", "List of seasons. \"\"\" season_list = [ '1996-97', '1997-98', '1998-99', '1999-00', '2000-01', '2001-02',", "# 'MIA' 1610612749, # 'MIL' 1610612750, # 'MIN' 1610612751, # 'BKN' 1610612752, #", "1610612747, 'MIA': 1610612748, 'MIL': 1610612749, 'MIN': 1610612750, 'BKN': 1610612751, 'NJN': 1610612751, # Old", "Chrome/79.0.3945.130' #'Safari/537.36' 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko)", "'BOS': 1610612738, 'CLE': 1610612739, 'NOP': 1610612740, 'NOK': 1610612740, # Old name. 'NOH': 1610612740,", "'cors', 'Referer': 'https://stats.nba.com/', 'Accept-Encoding': 'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9', } \"\"\" Team IDs.", "1610612744, # 'GSW' 1610612745, # 'HOU' 1610612746, # 'LAC' 1610612747, # 'LAL' 1610612748,", "Old name. 'MEM': 1610612763, 'WAS': 1610612764, 'DET': 1610612765, 'CHA': 1610612766, 'CHH': 1610612766, #", "EventMsgActionType) pair. 
\"\"\" event_message_types = [ {'id': 1, 'string': 'FIELD_GOAL_MADE'}, {'id': 2, 'string':", "Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36' ), 'x-nba-stats-origin': 'stats',", "like Gecko) Chrome/79.0.3945.130' #'Safari/537.36' 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML,", "<gh_stars>100-1000 \"\"\" Constants used in the application. \"\"\" \"\"\" List of seasons. \"\"\"", "# 'MIN' 1610612751, # 'BKN' 1610612752, # 'NYK' 1610612753, # 'ORL' 1610612754, #", "'TOR' 1610612762, # 'UTA' 1610612763, # 'MEM' 1610612764, # 'WAS' 1610612765, # 'DET'", "'2017-18', '2018-19', '2019-20', '2020-21', '2021-22' ] \"\"\" Headers. \"\"\" headers = { 'Connection':", "'CHI' 1610612742, # 'DAL' 1610612743, # 'DEN' 1610612744, # 'GSW' 1610612745, # 'HOU'", "'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*', 'x-nba-stats-token': 'true', 'User-Agent': ( #'Mozilla/5.0 (Macintosh; Intel", "'GSW' 1610612745, # 'HOU' 1610612746, # 'LAC' 1610612747, # 'LAL' 1610612748, # 'MIA'", "'MIA': 1610612748, 'MIL': 1610612749, 'MIN': 1610612750, 'BKN': 1610612751, 'NJN': 1610612751, # Old name.", "to id. \"\"\" team_abbrev_mapping = { 'ATL': 1610612737, 'BOS': 1610612738, 'CLE': 1610612739, 'NOP':", "a lookup table of enum to value, then a lookup table for the", "'2002-03', '2003-04', '2004-05', '2005-06', '2006-07', '2007-08', '2008-09', '2009-10', '2010-11', '2011-12', '2012-13', '2013-14', '2014-15',", "1610612761, 'UTA': 1610612762, 'VAN': 1610612763, # Old name. 'MEM': 1610612763, 'WAS': 1610612764, 'DET':", "'MIL' 1610612750, # 'MIN' 1610612751, # 'BKN' 1610612752, # 'NYK' 1610612753, # 'ORL'", "# 'UTA' 1610612763, # 'MEM' 1610612764, # 'WAS' 1610612765, # 'DET' 1610612766, #", "name. 
'NYK': 1610612752, 'ORL': 1610612753, 'IND': 1610612754, 'PHI': 1610612755, 'PHX': 1610612756, 'POR': 1610612757,", "1610612739, # 'CLE' 1610612740, # 'NOP' 1610612741, # 'CHI' 1610612742, # 'DAL' 1610612743,", "# 'DAL' 1610612743, # 'DEN' 1610612744, # 'GSW' 1610612745, # 'HOU' 1610612746, #", "'CLE': 1610612739, 'NOP': 1610612740, 'NOK': 1610612740, # Old name. 'NOH': 1610612740, # Old", "from team abbrev to id. \"\"\" team_abbrev_mapping = { 'ATL': 1610612737, 'BOS': 1610612738,", "1610612743, 'GSW': 1610612744, 'HOU': 1610612745, 'LAC': 1610612746, 'LAL': 1610612747, 'MIA': 1610612748, 'MIL': 1610612749,", "1610612748, 'MIL': 1610612749, 'MIN': 1610612750, 'BKN': 1610612751, 'NJN': 1610612751, # Old name. 'NYK':", "'2001-02', '2002-03', '2003-04', '2004-05', '2005-06', '2006-07', '2007-08', '2008-09', '2009-10', '2010-11', '2011-12', '2012-13', '2013-14',", "'ORL': 1610612753, 'IND': 1610612754, 'PHI': 1610612755, 'PHX': 1610612756, 'POR': 1610612757, 'SAC': 1610612758, 'SAS':", "11, 'string': 'EJECTION'}, {'id': 12, 'string': 'PERIOD_BEGIN'}, {'id': 13, 'string': 'PERIOD_END'}, {'id': 18,", "SubType). We're going to make a lookup table of enum to value, then", "'2021-22' ] \"\"\" Headers. \"\"\" headers = { 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain,", "9, 'string': 'TIMEOUT'}, {'id': 10, 'string': 'JUMP_BALL'}, {'id': 11, 'string': 'EJECTION'}, {'id': 12,", "used in the application. \"\"\" \"\"\" List of seasons. \"\"\" season_list = [", "# Old name. 'NYK': 1610612752, 'ORL': 1610612753, 'IND': 1610612754, 'PHI': 1610612755, 'PHX': 1610612756,", "'DAL' 1610612743, # 'DEN' 1610612744, # 'GSW' 1610612745, # 'HOU' 1610612746, # 'LAC'", "of seasons. \"\"\" season_list = [ '1996-97', '1997-98', '1998-99', '1999-00', '2000-01', '2001-02', '2002-03',", "'OKC' 1610612761, # 'TOR' 1610612762, # 'UTA' 1610612763, # 'MEM' 1610612764, # 'WAS'", "1610612763, 'WAS': 1610612764, 'DET': 1610612765, 'CHA': 1610612766, 'CHH': 1610612766, # Old name. 
}", "'CLE' 1610612740, # 'NOP' 1610612741, # 'CHI' 1610612742, # 'DAL' 1610612743, # 'DEN'", "#'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130' #'Safari/537.36' 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7)", "'2013-14', '2014-15', '2015-16', '2016-17', '2017-18', '2018-19', '2019-20', '2020-21', '2021-22' ] \"\"\" Headers. \"\"\"", "'NOK': 1610612740, # Old name. 'NOH': 1610612740, # Old name. 'CHI': 1610612741, 'DAL':", "1610612752, 'ORL': 1610612753, 'IND': 1610612754, 'PHI': 1610612755, 'PHX': 1610612756, 'POR': 1610612757, 'SAC': 1610612758,", "name. } \"\"\" Play-by-play data has an EventMsgType field. This is an enum.", "1610612751, 'NJN': 1610612751, # Old name. 'NYK': 1610612752, 'ORL': 1610612753, 'IND': 1610612754, 'PHI':", "(KHTML, like Gecko) Chrome/79.0.3945.130' #'Safari/537.36' 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36", "# 'PHI' 1610612756, # 'PHX' 1610612757, # 'POR' 1610612758, # 'SAC' 1610612759, #", "# Old name. 'CHI': 1610612741, 'DAL': 1610612742, 'DEN': 1610612743, 'GSW': 1610612744, 'HOU': 1610612745,", "'LAL': 1610612747, 'MIA': 1610612748, 'MIL': 1610612749, 'MIN': 1610612750, 'BKN': 1610612751, 'NJN': 1610612751, #", "OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36' ), 'x-nba-stats-origin': 'stats', 'Sec-Fetch-Site':", "'SAS': 1610612759, 'OKC': 1610612760, 'SEA': 1610612760, 'TOR': 1610612761, 'UTA': 1610612762, 'VAN': 1610612763, #", "Old name. 'NYK': 1610612752, 'ORL': 1610612753, 'IND': 1610612754, 'PHI': 1610612755, 'PHX': 1610612756, 'POR':", "'POR': 1610612757, 'SAC': 1610612758, 'SAS': 1610612759, 'OKC': 1610612760, 'SEA': 1610612760, 'TOR': 1610612761, 'UTA':", "'MIN': 1610612750, 'BKN': 1610612751, 'NJN': 1610612751, # Old name. 
'NYK': 1610612752, 'ORL': 1610612753,", "'REBOUND'}, {'id': 5, 'string': 'TURNOVER'}, {'id': 6, 'string': 'FOUL'}, {'id': 7, 'string': 'VIOLATION'},", "1610612757, 'SAC': 1610612758, 'SAS': 1610612759, 'OKC': 1610612760, 'SEA': 1610612760, 'TOR': 1610612761, 'UTA': 1610612762,", "Old name. 'CHI': 1610612741, 'DAL': 1610612742, 'DEN': 1610612743, 'GSW': 1610612744, 'HOU': 1610612745, 'LAC':", "'TURNOVER'}, {'id': 6, 'string': 'FOUL'}, {'id': 7, 'string': 'VIOLATION'}, {'id': 8, 'string': 'SUBSTITUTION'},", "'2003-04', '2004-05', '2005-06', '2006-07', '2007-08', '2008-09', '2009-10', '2010-11', '2011-12', '2012-13', '2013-14', '2014-15', '2015-16',", "= [ '1996-97', '1997-98', '1998-99', '1999-00', '2000-01', '2001-02', '2002-03', '2003-04', '2004-05', '2005-06', '2006-07',", "'string': 'FOUL'}, {'id': 7, 'string': 'VIOLATION'}, {'id': 8, 'string': 'SUBSTITUTION'}, {'id': 9, 'string':", "lookup table of enum to value, then a lookup table for the (EventMsgType,", "to make a lookup table of enum to value, then a lookup table", "1610612757, # 'POR' 1610612758, # 'SAC' 1610612759, # 'SAS' 1610612760, # 'OKC' 1610612761,", "'same-origin', 'Sec-Fetch-Mode': 'cors', 'Referer': 'https://stats.nba.com/', 'Accept-Encoding': 'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9', } \"\"\"", "# 'WAS' 1610612765, # 'DET' 1610612766, # 'CHA' ] \"\"\" Mapping from team", "10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36' ), 'x-nba-stats-origin': 'stats', 'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Mode':", "a lookup table for the (EventMsgType, EventMsgActionType) pair. 
\"\"\" event_message_types = [ {'id':", "X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36' ), 'x-nba-stats-origin': 'stats', 'Sec-Fetch-Site': 'same-origin',", "'string': 'VIOLATION'}, {'id': 8, 'string': 'SUBSTITUTION'}, {'id': 9, 'string': 'TIMEOUT'}, {'id': 10, 'string':", "\"\"\" team_ids = [ 1610612737, # 'ATL' 1610612738, # 'BOS' 1610612739, # 'CLE'", "\"\"\" Headers. \"\"\" headers = { 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*', 'x-nba-stats-token':", "'CHA': 1610612766, 'CHH': 1610612766, # Old name. } \"\"\" Play-by-play data has an", "'string': 'FIELD_GOAL_MISSED'}, {'id': 3, 'string': 'FREE_THROW'}, {'id': 4, 'string': 'REBOUND'}, {'id': 5, 'string':", "# 'GSW' 1610612745, # 'HOU' 1610612746, # 'LAC' 1610612747, # 'LAL' 1610612748, #", "'NJN': 1610612751, # Old name. 'NYK': 1610612752, 'ORL': 1610612753, 'IND': 1610612754, 'PHI': 1610612755,", "'2020-21', '2021-22' ] \"\"\" Headers. \"\"\" headers = { 'Connection': 'keep-alive', 'Accept': 'application/json,", "1610612760, 'TOR': 1610612761, 'UTA': 1610612762, 'VAN': 1610612763, # Old name. 'MEM': 1610612763, 'WAS':", "'CHA' ] \"\"\" Mapping from team abbrev to id. \"\"\" team_abbrev_mapping = {", "'PHI': 1610612755, 'PHX': 1610612756, 'POR': 1610612757, 'SAC': 1610612758, 'SAS': 1610612759, 'OKC': 1610612760, 'SEA':", "1610612744, 'HOU': 1610612745, 'LAC': 1610612746, 'LAL': 1610612747, 'MIA': 1610612748, 'MIL': 1610612749, 'MIN': 1610612750,", "# 'IND' 1610612755, # 'PHI' 1610612756, # 'PHX' 1610612757, # 'POR' 1610612758, #", "'2010-11', '2011-12', '2012-13', '2013-14', '2014-15', '2015-16', '2016-17', '2017-18', '2018-19', '2019-20', '2020-21', '2021-22' ]", "'SAC' 1610612759, # 'SAS' 1610612760, # 'OKC' 1610612761, # 'TOR' 1610612762, # 'UTA'", "1610612749, 'MIN': 1610612750, 'BKN': 1610612751, 'NJN': 1610612751, # Old name. 'NYK': 1610612752, 'ORL':", "the EventMsgActionField, which is a complex enum of (EventMsgType, SubType). 
We're going to", "br', 'Accept-Language': 'en-US,en;q=0.9', } \"\"\" Team IDs. (Thank you nba-api). \"\"\" team_ids =", "Gecko) Chrome/91.0.4472.114 Safari/537.36' ), 'x-nba-stats-origin': 'stats', 'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Mode': 'cors', 'Referer': 'https://stats.nba.com/', 'Accept-Encoding':", "= [ {'id': 1, 'string': 'FIELD_GOAL_MADE'}, {'id': 2, 'string': 'FIELD_GOAL_MISSED'}, {'id': 3, 'string':", "1610612761, # 'TOR' 1610612762, # 'UTA' 1610612763, # 'MEM' 1610612764, # 'WAS' 1610612765,", "'DEN' 1610612744, # 'GSW' 1610612745, # 'HOU' 1610612746, # 'LAC' 1610612747, # 'LAL'", "\"\"\" headers = { 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*', 'x-nba-stats-token': 'true', 'User-Agent':", "[ 1610612737, # 'ATL' 1610612738, # 'BOS' 1610612739, # 'CLE' 1610612740, # 'NOP'", "'SAS' 1610612760, # 'OKC' 1610612761, # 'TOR' 1610612762, # 'UTA' 1610612763, # 'MEM'", "id. \"\"\" team_abbrev_mapping = { 'ATL': 1610612737, 'BOS': 1610612738, 'CLE': 1610612739, 'NOP': 1610612740,", "'WAS': 1610612764, 'DET': 1610612765, 'CHA': 1610612766, 'CHH': 1610612766, # Old name. } \"\"\"", "Chrome/91.0.4472.114 Safari/537.36' ), 'x-nba-stats-origin': 'stats', 'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Mode': 'cors', 'Referer': 'https://stats.nba.com/', 'Accept-Encoding': 'gzip,", "'2016-17', '2017-18', '2018-19', '2019-20', '2020-21', '2021-22' ] \"\"\" Headers. \"\"\" headers = {", "'UTA': 1610612762, 'VAN': 1610612763, # Old name. 'MEM': 1610612763, 'WAS': 1610612764, 'DET': 1610612765,", "enum of (EventMsgType, SubType). We're going to make a lookup table of enum", "table for the (EventMsgType, EventMsgActionType) pair. \"\"\" event_message_types = [ {'id': 1, 'string':", "EventMsgActionField, which is a complex enum of (EventMsgType, SubType). 
We're going to make", "{'id': 8, 'string': 'SUBSTITUTION'}, {'id': 9, 'string': 'TIMEOUT'}, {'id': 10, 'string': 'JUMP_BALL'}, {'id':", "# 'ORL' 1610612754, # 'IND' 1610612755, # 'PHI' 1610612756, # 'PHX' 1610612757, #", "'GSW': 1610612744, 'HOU': 1610612745, 'LAC': 1610612746, 'LAL': 1610612747, 'MIA': 1610612748, 'MIL': 1610612749, 'MIN':", "'NOP': 1610612740, 'NOK': 1610612740, # Old name. 'NOH': 1610612740, # Old name. 'CHI':", "] \"\"\" Mapping from team abbrev to id. \"\"\" team_abbrev_mapping = { 'ATL':", "'2008-09', '2009-10', '2010-11', '2011-12', '2012-13', '2013-14', '2014-15', '2015-16', '2016-17', '2017-18', '2018-19', '2019-20', '2020-21',", "in the application. \"\"\" \"\"\" List of seasons. \"\"\" season_list = [ '1996-97',", "'DAL': 1610612742, 'DEN': 1610612743, 'GSW': 1610612744, 'HOU': 1610612745, 'LAC': 1610612746, 'LAL': 1610612747, 'MIA':", "1610612752, # 'NYK' 1610612753, # 'ORL' 1610612754, # 'IND' 1610612755, # 'PHI' 1610612756,", "1610612758, 'SAS': 1610612759, 'OKC': 1610612760, 'SEA': 1610612760, 'TOR': 1610612761, 'UTA': 1610612762, 'VAN': 1610612763,", "is a complex enum of (EventMsgType, SubType). We're going to make a lookup", "1610612750, 'BKN': 1610612751, 'NJN': 1610612751, # Old name. 'NYK': 1610612752, 'ORL': 1610612753, 'IND':", "Headers. 
\"\"\" headers = { 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*', 'x-nba-stats-token': 'true',", "'1997-98', '1998-99', '1999-00', '2000-01', '2001-02', '2002-03', '2003-04', '2004-05', '2005-06', '2006-07', '2007-08', '2008-09', '2009-10',", "make a lookup table of enum to value, then a lookup table for", "Mac OS X 10_14_6) ' #'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130' #'Safari/537.36' 'Mozilla/5.0 (Macintosh;", "of enum to value, then a lookup table for the (EventMsgType, EventMsgActionType) pair.", "1610612738, # 'BOS' 1610612739, # 'CLE' 1610612740, # 'NOP' 1610612741, # 'CHI' 1610612742,", "{'id': 4, 'string': 'REBOUND'}, {'id': 5, 'string': 'TURNOVER'}, {'id': 6, 'string': 'FOUL'}, {'id':", "# 'LAC' 1610612747, # 'LAL' 1610612748, # 'MIA' 1610612749, # 'MIL' 1610612750, #", "'2018-19', '2019-20', '2020-21', '2021-22' ] \"\"\" Headers. \"\"\" headers = { 'Connection': 'keep-alive',", "\"\"\" Play-by-play data has an EventMsgType field. This is an enum. There is", "field. This is an enum. There is also the EventMsgActionField, which is a", "'string': 'REBOUND'}, {'id': 5, 'string': 'TURNOVER'}, {'id': 6, 'string': 'FOUL'}, {'id': 7, 'string':", "2, 'string': 'FIELD_GOAL_MISSED'}, {'id': 3, 'string': 'FREE_THROW'}, {'id': 4, 'string': 'REBOUND'}, {'id': 5,", "7, 'string': 'VIOLATION'}, {'id': 8, 'string': 'SUBSTITUTION'}, {'id': 9, 'string': 'TIMEOUT'}, {'id': 10,", "(Macintosh; Intel Mac OS X 10_14_6) ' #'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130' #'Safari/537.36'", "'NYK': 1610612752, 'ORL': 1610612753, 'IND': 1610612754, 'PHI': 1610612755, 'PHX': 1610612756, 'POR': 1610612757, 'SAC':", "# 'BKN' 1610612752, # 'NYK' 1610612753, # 'ORL' 1610612754, # 'IND' 1610612755, #", "is an enum. 
There is also the EventMsgActionField, which is a complex enum", "1610612741, # 'CHI' 1610612742, # 'DAL' 1610612743, # 'DEN' 1610612744, # 'GSW' 1610612745,", "We're going to make a lookup table of enum to value, then a", "'string': 'FIELD_GOAL_MADE'}, {'id': 2, 'string': 'FIELD_GOAL_MISSED'}, {'id': 3, 'string': 'FREE_THROW'}, {'id': 4, 'string':", "'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9', } \"\"\" Team IDs. (Thank you nba-api). \"\"\"", "[ '1996-97', '1997-98', '1998-99', '1999-00', '2000-01', '2001-02', '2002-03', '2003-04', '2004-05', '2005-06', '2006-07', '2007-08',", "(EventMsgType, EventMsgActionType) pair. \"\"\" event_message_types = [ {'id': 1, 'string': 'FIELD_GOAL_MADE'}, {'id': 2,", "# 'DEN' 1610612744, # 'GSW' 1610612745, # 'HOU' 1610612746, # 'LAC' 1610612747, #", "1610612740, 'NOK': 1610612740, # Old name. 'NOH': 1610612740, # Old name. 'CHI': 1610612741,", "1610612760, # 'OKC' 1610612761, # 'TOR' 1610612762, # 'UTA' 1610612763, # 'MEM' 1610612764,", "'MEM': 1610612763, 'WAS': 1610612764, 'DET': 1610612765, 'CHA': 1610612766, 'CHH': 1610612766, # Old name.", "1610612753, # 'ORL' 1610612754, # 'IND' 1610612755, # 'PHI' 1610612756, # 'PHX' 1610612757,", "'2009-10', '2010-11', '2011-12', '2012-13', '2013-14', '2014-15', '2015-16', '2016-17', '2017-18', '2018-19', '2019-20', '2020-21', '2021-22'", "abbrev to id. \"\"\" team_abbrev_mapping = { 'ATL': 1610612737, 'BOS': 1610612738, 'CLE': 1610612739,", "'User-Agent': ( #'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) ' #'AppleWebKit/537.36 (KHTML, like", "also the EventMsgActionField, which is a complex enum of (EventMsgType, SubType). We're going", "1610612738, 'CLE': 1610612739, 'NOP': 1610612740, 'NOK': 1610612740, # Old name. 'NOH': 1610612740, #", "1610612747, # 'LAL' 1610612748, # 'MIA' 1610612749, # 'MIL' 1610612750, # 'MIN' 1610612751,", "1610612755, 'PHX': 1610612756, 'POR': 1610612757, 'SAC': 1610612758, 'SAS': 1610612759, 'OKC': 1610612760, 'SEA': 1610612760,", "enum. 
There is also the EventMsgActionField, which is a complex enum of (EventMsgType,", "the application. \"\"\" \"\"\" List of seasons. \"\"\" season_list = [ '1996-97', '1997-98',", "'HOU' 1610612746, # 'LAC' 1610612747, # 'LAL' 1610612748, # 'MIA' 1610612749, # 'MIL'", "{ 'Connection': 'keep-alive', 'Accept': 'application/json, text/plain, */*', 'x-nba-stats-token': 'true', 'User-Agent': ( #'Mozilla/5.0 (Macintosh;", "'2007-08', '2008-09', '2009-10', '2010-11', '2011-12', '2012-13', '2013-14', '2014-15', '2015-16', '2016-17', '2017-18', '2018-19', '2019-20',", "1610612755, # 'PHI' 1610612756, # 'PHX' 1610612757, # 'POR' 1610612758, # 'SAC' 1610612759,", "Safari/537.36' ), 'x-nba-stats-origin': 'stats', 'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Mode': 'cors', 'Referer': 'https://stats.nba.com/', 'Accept-Encoding': 'gzip, deflate,", "1610612763, # 'MEM' 1610612764, # 'WAS' 1610612765, # 'DET' 1610612766, # 'CHA' ]", "'POR' 1610612758, # 'SAC' 1610612759, # 'SAS' 1610612760, # 'OKC' 1610612761, # 'TOR'", "'string': 'FREE_THROW'}, {'id': 4, 'string': 'REBOUND'}, {'id': 5, 'string': 'TURNOVER'}, {'id': 6, 'string':", "Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36' ), 'x-nba-stats-origin':", "OS X 10_14_6) ' #'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130' #'Safari/537.36' 'Mozilla/5.0 (Macintosh; Intel", "# 'PHX' 1610612757, # 'POR' 1610612758, # 'SAC' 1610612759, # 'SAS' 1610612760, #", "'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36'", "1610612746, 'LAL': 1610612747, 'MIA': 1610612748, 'MIL': 1610612749, 'MIN': 1610612750, 'BKN': 1610612751, 'NJN': 1610612751,", "# 'SAS' 1610612760, # 'OKC' 1610612761, # 'TOR' 1610612762, # 'UTA' 1610612763, #", "# 'BOS' 1610612739, # 'CLE' 1610612740, # 'NOP' 1610612741, # 'CHI' 1610612742, #", "(KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36' ), 'x-nba-stats-origin': 'stats', 
'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Mode': 'cors', 'Referer':", "1610612742, # 'DAL' 1610612743, # 'DEN' 1610612744, # 'GSW' 1610612745, # 'HOU' 1610612746,", "'BKN' 1610612752, # 'NYK' 1610612753, # 'ORL' 1610612754, # 'IND' 1610612755, # 'PHI'", "{'id': 12, 'string': 'PERIOD_BEGIN'}, {'id': 13, 'string': 'PERIOD_END'}, {'id': 18, 'string': 'UNKNOWN'} ]", "pair. \"\"\" event_message_types = [ {'id': 1, 'string': 'FIELD_GOAL_MADE'}, {'id': 2, 'string': 'FIELD_GOAL_MISSED'},", "\"\"\" team_abbrev_mapping = { 'ATL': 1610612737, 'BOS': 1610612738, 'CLE': 1610612739, 'NOP': 1610612740, 'NOK':", "'DET' 1610612766, # 'CHA' ] \"\"\" Mapping from team abbrev to id. \"\"\"", "season_list = [ '1996-97', '1997-98', '1998-99', '1999-00', '2000-01', '2001-02', '2002-03', '2003-04', '2004-05', '2005-06',", "going to make a lookup table of enum to value, then a lookup", "#'Safari/537.36' 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114", "1610612749, # 'MIL' 1610612750, # 'MIN' 1610612751, # 'BKN' 1610612752, # 'NYK' 1610612753,", "'VIOLATION'}, {'id': 8, 'string': 'SUBSTITUTION'}, {'id': 9, 'string': 'TIMEOUT'}, {'id': 10, 'string': 'JUMP_BALL'},", "\"\"\" List of seasons. \"\"\" season_list = [ '1996-97', '1997-98', '1998-99', '1999-00', '2000-01',", "# Old name. } \"\"\" Play-by-play data has an EventMsgType field. This is", "like Gecko) Chrome/91.0.4472.114 Safari/537.36' ), 'x-nba-stats-origin': 'stats', 'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Mode': 'cors', 'Referer': 'https://stats.nba.com/',", "1610612739, 'NOP': 1610612740, 'NOK': 1610612740, # Old name. 'NOH': 1610612740, # Old name.", "1610612743, # 'DEN' 1610612744, # 'GSW' 1610612745, # 'HOU' 1610612746, # 'LAC' 1610612747,", "for the (EventMsgType, EventMsgActionType) pair. 
\"\"\" event_message_types = [ {'id': 1, 'string': 'FIELD_GOAL_MADE'},", "), 'x-nba-stats-origin': 'stats', 'Sec-Fetch-Site': 'same-origin', 'Sec-Fetch-Mode': 'cors', 'Referer': 'https://stats.nba.com/', 'Accept-Encoding': 'gzip, deflate, br',", "Constants used in the application. \"\"\" \"\"\" List of seasons. \"\"\" season_list =", "1610612745, # 'HOU' 1610612746, # 'LAC' 1610612747, # 'LAL' 1610612748, # 'MIA' 1610612749,", "'JUMP_BALL'}, {'id': 11, 'string': 'EJECTION'}, {'id': 12, 'string': 'PERIOD_BEGIN'}, {'id': 13, 'string': 'PERIOD_END'},", "# 'DET' 1610612766, # 'CHA' ] \"\"\" Mapping from team abbrev to id.", "1610612742, 'DEN': 1610612743, 'GSW': 1610612744, 'HOU': 1610612745, 'LAC': 1610612746, 'LAL': 1610612747, 'MIA': 1610612748,", "'VAN': 1610612763, # Old name. 'MEM': 1610612763, 'WAS': 1610612764, 'DET': 1610612765, 'CHA': 1610612766,", "This is an enum. There is also the EventMsgActionField, which is a complex", "'CHH': 1610612766, # Old name. } \"\"\" Play-by-play data has an EventMsgType field.", "(EventMsgType, SubType). We're going to make a lookup table of enum to value,", "'2012-13', '2013-14', '2014-15', '2015-16', '2016-17', '2017-18', '2018-19', '2019-20', '2020-21', '2021-22' ] \"\"\" Headers.", "# 'POR' 1610612758, # 'SAC' 1610612759, # 'SAS' 1610612760, # 'OKC' 1610612761, #", "Play-by-play data has an EventMsgType field. This is an enum. There is also", "#'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) ' #'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130'", "name. 
'MEM': 1610612763, 'WAS': 1610612764, 'DET': 1610612765, 'CHA': 1610612766, 'CHH': 1610612766, # Old", "'2006-07', '2007-08', '2008-09', '2009-10', '2010-11', '2011-12', '2012-13', '2013-14', '2014-15', '2015-16', '2016-17', '2017-18', '2018-19',", "'FREE_THROW'}, {'id': 4, 'string': 'REBOUND'}, {'id': 5, 'string': 'TURNOVER'}, {'id': 6, 'string': 'FOUL'},", "{'id': 3, 'string': 'FREE_THROW'}, {'id': 4, 'string': 'REBOUND'}, {'id': 5, 'string': 'TURNOVER'}, {'id':", "team_abbrev_mapping = { 'ATL': 1610612737, 'BOS': 1610612738, 'CLE': 1610612739, 'NOP': 1610612740, 'NOK': 1610612740,", "{'id': 9, 'string': 'TIMEOUT'}, {'id': 10, 'string': 'JUMP_BALL'}, {'id': 11, 'string': 'EJECTION'}, {'id':", "then a lookup table for the (EventMsgType, EventMsgActionType) pair. \"\"\" event_message_types = [", "'Referer': 'https://stats.nba.com/', 'Accept-Encoding': 'gzip, deflate, br', 'Accept-Language': 'en-US,en;q=0.9', } \"\"\" Team IDs. (Thank", "complex enum of (EventMsgType, SubType). We're going to make a lookup table of", "( #'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) ' #'AppleWebKit/537.36 (KHTML, like Gecko)", "'NOP' 1610612741, # 'CHI' 1610612742, # 'DAL' 1610612743, # 'DEN' 1610612744, # 'GSW'", "# 'HOU' 1610612746, # 'LAC' 1610612747, # 'LAL' 1610612748, # 'MIA' 1610612749, #", "= [ 1610612737, # 'ATL' 1610612738, # 'BOS' 1610612739, # 'CLE' 1610612740, #", "enum to value, then a lookup table for the (EventMsgType, EventMsgActionType) pair. \"\"\"", "Old name. } \"\"\" Play-by-play data has an EventMsgType field. This is an", "1610612750, # 'MIN' 1610612751, # 'BKN' 1610612752, # 'NYK' 1610612753, # 'ORL' 1610612754,", "1610612740, # Old name. 'NOH': 1610612740, # Old name. 'CHI': 1610612741, 'DAL': 1610612742,", "'SAC': 1610612758, 'SAS': 1610612759, 'OKC': 1610612760, 'SEA': 1610612760, 'TOR': 1610612761, 'UTA': 1610612762, 'VAN':", "1610612762, 'VAN': 1610612763, # Old name. 
'MEM': 1610612763, 'WAS': 1610612764, 'DET': 1610612765, 'CHA':", "'keep-alive', 'Accept': 'application/json, text/plain, */*', 'x-nba-stats-token': 'true', 'User-Agent': ( #'Mozilla/5.0 (Macintosh; Intel Mac", "data has an EventMsgType field. This is an enum. There is also the", "10, 'string': 'JUMP_BALL'}, {'id': 11, 'string': 'EJECTION'}, {'id': 12, 'string': 'PERIOD_BEGIN'}, {'id': 13,", "{'id': 11, 'string': 'EJECTION'}, {'id': 12, 'string': 'PERIOD_BEGIN'}, {'id': 13, 'string': 'PERIOD_END'}, {'id':", "'FIELD_GOAL_MADE'}, {'id': 2, 'string': 'FIELD_GOAL_MISSED'}, {'id': 3, 'string': 'FREE_THROW'}, {'id': 4, 'string': 'REBOUND'},", "# 'CHI' 1610612742, # 'DAL' 1610612743, # 'DEN' 1610612744, # 'GSW' 1610612745, #", "has an EventMsgType field. This is an enum. There is also the EventMsgActionField,", "# 'MEM' 1610612764, # 'WAS' 1610612765, # 'DET' 1610612766, # 'CHA' ] \"\"\"", "'SEA': 1610612760, 'TOR': 1610612761, 'UTA': 1610612762, 'VAN': 1610612763, # Old name. 'MEM': 1610612763,", "1610612766, 'CHH': 1610612766, # Old name. } \"\"\" Play-by-play data has an EventMsgType", "'Accept-Language': 'en-US,en;q=0.9', } \"\"\" Team IDs. (Thank you nba-api). \"\"\" team_ids = [", "'OKC': 1610612760, 'SEA': 1610612760, 'TOR': 1610612761, 'UTA': 1610612762, 'VAN': 1610612763, # Old name.", "1610612758, # 'SAC' 1610612759, # 'SAS' 1610612760, # 'OKC' 1610612761, # 'TOR' 1610612762," ]
[ "kokoropy.controller import Crud_Controller from ..models._all import Page class Page_Controller(Crud_Controller): __model__ = Page Page_Controller.publish_route()", "from kokoropy.controller import Crud_Controller from ..models._all import Page class Page_Controller(Crud_Controller): __model__ = Page" ]
[ "2 + l # This binary search always returns the lower bound on", "# one from the index it finds which is going to be the", "next natural number after the targer number) # Difficulty: Medium def searchRange(nums: List[int],", "first number larger than the target, and then subtracts # one from the", "half the difference of the left # and right pointers offset by the", "# In each iteration set the midpoint to half the difference of the", "indices if lowerIndex < len(nums) and nums[lowerIndex] == target: return [lowerIndex, upperIndex] else:", "equals our # target (because our binary search will return the next largest", "1 # If we didn't go out of bounds in our search and", "number and one # for the successor of the target (the next natural", "midpoint to half the difference of the left # and right pointers offset", "<= and not just != means it can also # catch cases when", "of mid, # and if the number is greater than or equal to", "searches, one for the target number and one # for the successor of", "the first and last indices of a target number # Solution: Run two", "if the current number is less than target it shifts left to the", "number after the targer number) # Difficulty: Medium def searchRange(nums: List[int], target: int)", "is an empty array, as l = 0 and r = -1 in", "larger than the target, and then subtracts # one from the index it", "exist) we can return the indices if lowerIndex < len(nums) and nums[lowerIndex] ==", "to half the difference of the left # and right pointers offset by", "number if nums[mid] < target: l = mid + 1 else: r =", "current number is less than target it shifts left to the next number", "is greater than or equal to the target it shifts right to the", "which is going to be the rightmost target upperIndex = lower(nums, target +", "This finds the index of the first number larger than the target, and", "returns the lower bound on a number because # if the current number", "(because our binary search will return the next largest number 
if it didn't", "of the lowest target lowerIndex = lower(nums, target) # This finds the index", "be the rightmost target upperIndex = lower(nums, target + 1) - 1 #", "and if the number is greater than or equal to the target it", "- l) // 2 + l # This binary search always returns the", "mid = (r - l) // 2 + l # This binary search", "of a target number # Solution: Run two (lower bounded) binary searches, one", "the lower bound on a number because # if the current number is", "return l # This simply finds the index of the lowest target lowerIndex", "1 else: r = mid - 1 return l # This simply finds", "# and if the number is greater than or equal to the target", "next largest number if it didn't exist) we can return the indices if", "of bounds in our search and if the number at the lowerIndex actually", "it can also # catch cases when the input is an empty array,", "didn't go out of bounds in our search and if the number at", "return the indices if lowerIndex < len(nums) and nums[lowerIndex] == target: return [lowerIndex,", "l) // 2 + l # This binary search always returns the lower", "than target it shifts left to the next number to the right of", "number is less than target it shifts left to the next number to", "search always returns the lower bound on a number because # if the", "mid, # and if the number is greater than or equal to the", "while statement to be <= and not just != means it can also", "a number because # if the current number is less than target it", "r = 0, len(nums) - 1 # Note: setting this while statement to", "setting this while statement to be <= and not just != means it", "+ 1) - 1 # If we didn't go out of bounds in", "into the leftmost number if nums[mid] < target: l = mid + 1", "< target: l = mid + 1 else: r = mid - 1", "for the successor of the target (the next natural number after the targer", "and r = -1 in that case while l <= r: # In", "simply finds the index of the lowest target lowerIndex = lower(nums, target) #", "1 # Note: setting this 
while statement to be <= and not just", "the target number and one # for the successor of the target (the", "can return the indices if lowerIndex < len(nums) and nums[lowerIndex] == target: return", "binary searches, one for the target number and one # for the successor", "lower bound on a number because # if the current number is less", "empty array, as l = 0 and r = -1 in that case", "# This finds the index of the first number larger than the target,", "!= means it can also # catch cases when the input is an", "number is greater than or equal to the target it shifts right to", "one from the index it finds which is going to be the rightmost", "than the target, and then subtracts # one from the index it finds", "+ l # This binary search always returns the lower bound on a", "the left pointer mid = (r - l) // 2 + l #", "Given a sorted array, find the first and last indices of a target", "# This binary search always returns the lower bound on a number because", "target: l = mid + 1 else: r = mid - 1 return", "our binary search will return the next largest number if it didn't exist)", "successor of the target (the next natural number after the targer number) #", "(r - l) // 2 + l # This binary search always returns", "# this ensures that if numbers are duplicated the search will always narrow", "number larger than the target, and then subtracts # one from the index", "the left # and right pointers offset by the left pointer mid =", "= mid + 1 else: r = mid - 1 return l #", "the next largest number if it didn't exist) we can return the indices", "the target it shifts right to the number to the left of mid", "in that case while l <= r: # In each iteration set the", "bound on a number because # if the current number is less than", "Note: setting this while statement to be <= and not just != means", "is going to be the rightmost target upperIndex = lower(nums, target + 1)", "numbers are duplicated the search will always narrow into the leftmost number if", "binary search 
will return the next largest number if it didn't exist) we", "and then subtracts # one from the index it finds which is going", "= 0 and r = -1 in that case while l <= r:", "This simply finds the index of the lowest target lowerIndex = lower(nums, target)", "the midpoint to half the difference of the left # and right pointers", "r = mid - 1 return l # This simply finds the index", "search and if the number at the lowerIndex actually equals our # target", "number because # if the current number is less than target it shifts", "shifts left to the next number to the right of mid, # and", "1) - 1 # If we didn't go out of bounds in our", "that if numbers are duplicated the search will always narrow into the leftmost", "it shifts left to the next number to the right of mid, #", "search will always narrow into the leftmost number if nums[mid] < target: l", "a sorted array, find the first and last indices of a target number", "each iteration set the midpoint to half the difference of the left #", "in our search and if the number at the lowerIndex actually equals our", "lower(nums, target) # This finds the index of the first number larger than", "the current number is less than target it shifts left to the next", "= -1 in that case while l <= r: # In each iteration", "one for the target number and one # for the successor of the", "just != means it can also # catch cases when the input is", "= 0, len(nums) - 1 # Note: setting this while statement to be", "pointers offset by the left pointer mid = (r - l) // 2", "then subtracts # one from the index it finds which is going to", "Solution: Run two (lower bounded) binary searches, one for the target number and", "Medium def searchRange(nums: List[int], target: int) -> List[int]: def lowerBin(nums, target): l, r", "if the number at the lowerIndex actually equals our # target (because our", "and one # for the successor of the target (the next natural number", "# for the successor of the target (the next natural number after the", 
"a target number # Solution: Run two (lower bounded) binary searches, one for", "the target (the next natural number after the targer number) # Difficulty: Medium", "lowerBin(nums, target): l, r = 0, len(nums) - 1 # Note: setting this", "rightmost target upperIndex = lower(nums, target + 1) - 1 # If we", "the targer number) # Difficulty: Medium def searchRange(nums: List[int], target: int) -> List[int]:", "- 1 # Note: setting this while statement to be <= and not", "if nums[mid] < target: l = mid + 1 else: r = mid", "finds the index of the lowest target lowerIndex = lower(nums, target) # This", "l <= r: # In each iteration set the midpoint to half the", "number) # Difficulty: Medium def searchRange(nums: List[int], target: int) -> List[int]: def lowerBin(nums,", "to the left of mid # this ensures that if numbers are duplicated", "+ 1 else: r = mid - 1 return l # This simply", "as l = 0 and r = -1 in that case while l", "= (r - l) // 2 + l # This binary search always", "search will return the next largest number if it didn't exist) we can", "left pointer mid = (r - l) // 2 + l # This", "catch cases when the input is an empty array, as l = 0", "number # Solution: Run two (lower bounded) binary searches, one for the target", "case while l <= r: # In each iteration set the midpoint to", "find the first and last indices of a target number # Solution: Run", "one # for the successor of the target (the next natural number after", "<= r: # In each iteration set the midpoint to half the difference", "greater than or equal to the target it shifts right to the number", "the difference of the left # and right pointers offset by the left", "return the next largest number if it didn't exist) we can return the", "cases when the input is an empty array, as l = 0 and", "-1 in that case while l <= r: # In each iteration set", "or equal to the target it shifts right to the number to the", "If we didn't go out of bounds in our search and if the", "number at the lowerIndex actually 
equals our # target (because our binary search", "if lowerIndex < len(nums) and nums[lowerIndex] == target: return [lowerIndex, upperIndex] else: return", "when the input is an empty array, as l = 0 and r", "actually equals our # target (because our binary search will return the next", "the rightmost target upperIndex = lower(nums, target + 1) - 1 # If", "is less than target it shifts left to the next number to the", "to the next number to the right of mid, # and if the", "the number at the lowerIndex actually equals our # target (because our binary", "# Difficulty: Medium def searchRange(nums: List[int], target: int) -> List[int]: def lowerBin(nums, target):", "In each iteration set the midpoint to half the difference of the left", "Run two (lower bounded) binary searches, one for the target number and one", "searchRange(nums: List[int], target: int) -> List[int]: def lowerBin(nums, target): l, r = 0,", "# Note: setting this while statement to be <= and not just !=", "statement to be <= and not just != means it can also #", "(lower bounded) binary searches, one for the target number and one # for", "the input is an empty array, as l = 0 and r =", "l = 0 and r = -1 in that case while l <=", "indices of a target number # Solution: Run two (lower bounded) binary searches,", "the left of mid # this ensures that if numbers are duplicated the", "go out of bounds in our search and if the number at the", "lowerIndex = lower(nums, target) # This finds the index of the first number", "left of mid # this ensures that if numbers are duplicated the search", "last indices of a target number # Solution: Run two (lower bounded) binary", "for the target number and one # for the successor of the target", "difference of the left # and right pointers offset by the left pointer", "< len(nums) and nums[lowerIndex] == target: return [lowerIndex, upperIndex] else: return [-1, -1]", "List[int]: def lowerBin(nums, target): l, r = 0, len(nums) - 1 # Note:", "target it shifts left to the 
next number to the right of mid,", "the leftmost number if nums[mid] < target: l = mid + 1 else:", "and not just != means it can also # catch cases when the", "lowest target lowerIndex = lower(nums, target) # This finds the index of the", "target, and then subtracts # one from the index it finds which is", "equal to the target it shifts right to the number to the left", "the successor of the target (the next natural number after the targer number)", "target): l, r = 0, len(nums) - 1 # Note: setting this while", "number to the left of mid # this ensures that if numbers are", "array, find the first and last indices of a target number # Solution:", "two (lower bounded) binary searches, one for the target number and one #", "to the number to the left of mid # this ensures that if", "the lowerIndex actually equals our # target (because our binary search will return", "index of the lowest target lowerIndex = lower(nums, target) # This finds the", "Question: Given a sorted array, find the first and last indices of a", "lowerIndex actually equals our # target (because our binary search will return the", "target: int) -> List[int]: def lowerBin(nums, target): l, r = 0, len(nums) -", "offset by the left pointer mid = (r - l) // 2 +", "the number to the left of mid # this ensures that if numbers", "duplicated the search will always narrow into the leftmost number if nums[mid] <", "mid - 1 return l # This simply finds the index of the", "and if the number at the lowerIndex actually equals our # target (because", "target lowerIndex = lower(nums, target) # This finds the index of the first", "the lowest target lowerIndex = lower(nums, target) # This finds the index of", "Difficulty: Medium def searchRange(nums: List[int], target: int) -> List[int]: def lowerBin(nums, target): l,", "# if the current number is less than target it shifts left to", "= mid - 1 return l # This simply finds the index of", "bounded) binary searches, one for the target number and one # for the", "the 
number is greater than or equal to the target it shifts right", "are duplicated the search will always narrow into the leftmost number if nums[mid]", "subtracts # one from the index it finds which is going to be", "pointer mid = (r - l) // 2 + l # This binary", "- 1 # If we didn't go out of bounds in our search", "# catch cases when the input is an empty array, as l =", "nums[mid] < target: l = mid + 1 else: r = mid -", "the index of the lowest target lowerIndex = lower(nums, target) # This finds", "the right of mid, # and if the number is greater than or", "mid # this ensures that if numbers are duplicated the search will always", "an empty array, as l = 0 and r = -1 in that", "of the left # and right pointers offset by the left pointer mid", "of mid # this ensures that if numbers are duplicated the search will", "if the number is greater than or equal to the target it shifts", "the next number to the right of mid, # and if the number", "largest number if it didn't exist) we can return the indices if lowerIndex", "to be <= and not just != means it can also # catch", "the target, and then subtracts # one from the index it finds which", "the index it finds which is going to be the rightmost target upperIndex", "targer number) # Difficulty: Medium def searchRange(nums: List[int], target: int) -> List[int]: def", "1 return l # This simply finds the index of the lowest target", "of the first number larger than the target, and then subtracts # one", "upperIndex = lower(nums, target + 1) - 1 # If we didn't go", "= lower(nums, target) # This finds the index of the first number larger", "0 and r = -1 in that case while l <= r: #", "will always narrow into the leftmost number if nums[mid] < target: l =", "target it shifts right to the number to the left of mid #", "if numbers are duplicated the search will always narrow into the leftmost number", "target (the next natural number after the targer number) # Difficulty: Medium def", "because # if the current number is less 
than target it shifts left", "mid + 1 else: r = mid - 1 return l # This", "target number # Solution: Run two (lower bounded) binary searches, one for the", "# This simply finds the index of the lowest target lowerIndex = lower(nums,", "lowerIndex < len(nums) and nums[lowerIndex] == target: return [lowerIndex, upperIndex] else: return [-1,", "# Question: Given a sorted array, find the first and last indices of", "shifts right to the number to the left of mid # this ensures", "the index of the first number larger than the target, and then subtracts", "left to the next number to the right of mid, # and if", "natural number after the targer number) # Difficulty: Medium def searchRange(nums: List[int], target:", "# If we didn't go out of bounds in our search and if", "we didn't go out of bounds in our search and if the number", "our # target (because our binary search will return the next largest number", "(the next natural number after the targer number) # Difficulty: Medium def searchRange(nums:", "def lowerBin(nums, target): l, r = 0, len(nums) - 1 # Note: setting", "means it can also # catch cases when the input is an empty", "target (because our binary search will return the next largest number if it", "List[int], target: int) -> List[int]: def lowerBin(nums, target): l, r = 0, len(nums)", "index of the first number larger than the target, and then subtracts #", "it didn't exist) we can return the indices if lowerIndex < len(nums) and", "array, as l = 0 and r = -1 in that case while", "will return the next largest number if it didn't exist) we can return", "r = -1 in that case while l <= r: # In each", "and right pointers offset by the left pointer mid = (r - l)", "right to the number to the left of mid # this ensures that", "input is an empty array, as l = 0 and r = -1", "<reponame>fakecoinbase/sweetpandslashAlgorithms # Question: Given a sorted array, find the first and last indices", "also # catch cases when the input is an empty array, as l", "bounds in our 
search and if the number at the lowerIndex actually equals", "of the target (the next natural number after the targer number) # Difficulty:", "narrow into the leftmost number if nums[mid] < target: l = mid +", "right pointers offset by the left pointer mid = (r - l) //", "index it finds which is going to be the rightmost target upperIndex =", "be <= and not just != means it can also # catch cases", "len(nums) - 1 # Note: setting this while statement to be <= and", "out of bounds in our search and if the number at the lowerIndex", "-> List[int]: def lowerBin(nums, target): l, r = 0, len(nums) - 1 #", "- 1 return l # This simply finds the index of the lowest", "# Solution: Run two (lower bounded) binary searches, one for the target number", "l # This binary search always returns the lower bound on a number", "ensures that if numbers are duplicated the search will always narrow into the", "def searchRange(nums: List[int], target: int) -> List[int]: def lowerBin(nums, target): l, r =", "lower(nums, target + 1) - 1 # If we didn't go out of", "it shifts right to the number to the left of mid # this", "not just != means it can also # catch cases when the input", "l # This simply finds the index of the lowest target lowerIndex =", "target number and one # for the successor of the target (the next", "// 2 + l # This binary search always returns the lower bound", "always narrow into the leftmost number if nums[mid] < target: l = mid", "target) # This finds the index of the first number larger than the", "number if it didn't exist) we can return the indices if lowerIndex <", "next number to the right of mid, # and if the number is", "than or equal to the target it shifts right to the number to", "to the target it shifts right to the number to the left of", "# and right pointers offset by the left pointer mid = (r -", "by the left pointer mid = (r - l) // 2 + l", "int) -> List[int]: def lowerBin(nums, target): l, r = 0, len(nums) - 1", "target upperIndex = lower(nums, target + 
1) - 1 # If we didn't", "target + 1) - 1 # If we didn't go out of bounds", "first and last indices of a target number # Solution: Run two (lower", "our search and if the number at the lowerIndex actually equals our #", "it finds which is going to be the rightmost target upperIndex = lower(nums,", "going to be the rightmost target upperIndex = lower(nums, target + 1) -", "else: r = mid - 1 return l # This simply finds the", "sorted array, find the first and last indices of a target number #", "while l <= r: # In each iteration set the midpoint to half", "from the index it finds which is going to be the rightmost target", "and last indices of a target number # Solution: Run two (lower bounded)", "to be the rightmost target upperIndex = lower(nums, target + 1) - 1", "always returns the lower bound on a number because # if the current", "the first number larger than the target, and then subtracts # one from", "to the right of mid, # and if the number is greater than", "didn't exist) we can return the indices if lowerIndex < len(nums) and nums[lowerIndex]", "left # and right pointers offset by the left pointer mid = (r", "set the midpoint to half the difference of the left # and right", "right of mid, # and if the number is greater than or equal", "this ensures that if numbers are duplicated the search will always narrow into", "leftmost number if nums[mid] < target: l = mid + 1 else: r", "l = mid + 1 else: r = mid - 1 return l", "on a number because # if the current number is less than target", "binary search always returns the lower bound on a number because # if", "can also # catch cases when the input is an empty array, as", "less than target it shifts left to the next number to the right", "finds which is going to be the rightmost target upperIndex = lower(nums, target", "iteration set the midpoint to half the difference of the left # and", "the search will always narrow into the leftmost number if nums[mid] < target:", "This binary search always returns the lower 
bound on a number because #", "at the lowerIndex actually equals our # target (because our binary search will", "if it didn't exist) we can return the indices if lowerIndex < len(nums)", "the indices if lowerIndex < len(nums) and nums[lowerIndex] == target: return [lowerIndex, upperIndex]", "number to the right of mid, # and if the number is greater", "this while statement to be <= and not just != means it can", "# target (because our binary search will return the next largest number if", "l, r = 0, len(nums) - 1 # Note: setting this while statement", "r: # In each iteration set the midpoint to half the difference of", "= lower(nums, target + 1) - 1 # If we didn't go out", "finds the index of the first number larger than the target, and then", "0, len(nums) - 1 # Note: setting this while statement to be <=", "after the targer number) # Difficulty: Medium def searchRange(nums: List[int], target: int) ->", "that case while l <= r: # In each iteration set the midpoint", "we can return the indices if lowerIndex < len(nums) and nums[lowerIndex] == target:" ]
[ "as F import numpy as np from torch.nn.utils.rnn import pad_sequence from torch.nn.functional import", "-np.random.rand(5,3) logits3 = -np.random.rand(7,3) logits4 = -np.random.rand(5,3) # logits1 = torch.Tensor(logits1) logits2 =", "reduction='none')( log_softmax(student_logits, dim=-1), softmax(teacher_logits, dim=-1)) print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='batchmean')( # log_softmax(student_logits, dim=-1),", "Variable import torch.nn as nn # import torch.nn.functional as F import numpy as", "kd_loss = nn.KLDivLoss(reduction='batchmean')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1)) # print(kd_loss) # kd_loss", "log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1)) # print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='sum')( # log_softmax(student_logits,", "torch.Tensor(logits1) logits2 = torch.Tensor(logits2) logits3 = torch.Tensor(logits3) logits4 = torch.Tensor(logits4) teacher_logits = pad_sequence([logits1,", "torch.nn.functional as F import numpy as np from torch.nn.utils.rnn import pad_sequence from torch.nn.functional", "= torch.Tensor(logits1) logits2 = torch.Tensor(logits2) logits3 = torch.Tensor(logits3) logits4 = torch.Tensor(logits4) teacher_logits =", "= pad_sequence([logits3, logits2]) kd_loss = nn.KLDivLoss(reduce=False, reduction='none')( log_softmax(student_logits, dim=-1), softmax(teacher_logits, dim=-1)) print(kd_loss) #", "as nn # import torch.nn.functional as F import numpy as np from torch.nn.utils.rnn", "dim=-1), # softmax(teacher_logits, dim=-1)) # print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='sum')( # log_softmax(student_logits, dim=-1),", "# logits1 = torch.Tensor(logits1) logits2 = torch.Tensor(logits2) logits3 = torch.Tensor(logits3) logits4 = torch.Tensor(logits4)", "logits2]) kd_loss = nn.KLDivLoss(reduce=False, reduction='none')( log_softmax(student_logits, dim=-1), softmax(teacher_logits, dim=-1)) print(kd_loss) # kd_loss =", "= -np.random.rand(5,3) # logits1 = 
torch.Tensor(logits1) logits2 = torch.Tensor(logits2) logits3 = torch.Tensor(logits3) logits4", "kd_loss = nn.KLDivLoss(reduce=False, reduction='none')( log_softmax(student_logits, dim=-1), softmax(teacher_logits, dim=-1)) print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='batchmean')(", "from torch.nn.functional import log_softmax if __name__=='__main__': logits1 = -np.random.rand(7,3) logits2 = -np.random.rand(5,3) logits3", "pad_sequence([logits3, logits2]) kd_loss = nn.KLDivLoss(reduce=False, reduction='none')( log_softmax(student_logits, dim=-1), softmax(teacher_logits, dim=-1)) print(kd_loss) # kd_loss", "torch.nn.functional import log_softmax if __name__=='__main__': logits1 = -np.random.rand(7,3) logits2 = -np.random.rand(5,3) logits3 =", "softmax(teacher_logits, dim=-1)) print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='batchmean')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1))", "= nn.KLDivLoss(reduce=False, reduction='none')( log_softmax(student_logits, dim=-1), softmax(teacher_logits, dim=-1)) print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='batchmean')( #", "torch.Tensor(logits3) logits4 = torch.Tensor(logits4) teacher_logits = pad_sequence([logits1, logits2]) student_logits = pad_sequence([logits3, logits2]) kd_loss", "-np.random.rand(5,3) # logits1 = torch.Tensor(logits1) logits2 = torch.Tensor(logits2) logits3 = torch.Tensor(logits3) logits4 =", "torch.Tensor(logits4) teacher_logits = pad_sequence([logits1, logits2]) student_logits = pad_sequence([logits3, logits2]) kd_loss = nn.KLDivLoss(reduce=False, reduction='none')(", "import numpy as np from torch.nn.utils.rnn import pad_sequence from torch.nn.functional import softmax from", "= -np.random.rand(5,3) logits3 = -np.random.rand(7,3) logits4 = -np.random.rand(5,3) # logits1 = torch.Tensor(logits1) logits2", "pad_sequence from torch.nn.functional import softmax from torch.nn.functional import log_softmax if __name__=='__main__': logits1 =", 
"nn.KLDivLoss(reduction='batchmean')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1)) # print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='sum')(", "teacher_logits = pad_sequence([logits1, logits2]) student_logits = pad_sequence([logits3, logits2]) kd_loss = nn.KLDivLoss(reduce=False, reduction='none')( log_softmax(student_logits,", "softmax from torch.nn.functional import log_softmax if __name__=='__main__': logits1 = -np.random.rand(7,3) logits2 = -np.random.rand(5,3)", "# log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1)) # print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='sum')( #", "dim=-1)) print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='batchmean')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1)) #", "log_softmax if __name__=='__main__': logits1 = -np.random.rand(7,3) logits2 = -np.random.rand(5,3) logits3 = -np.random.rand(7,3) logits4", "= nn.KLDivLoss(reduction='batchmean')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1)) # print(kd_loss) # kd_loss =", "logits4 = -np.random.rand(5,3) # logits1 = torch.Tensor(logits1) logits2 = torch.Tensor(logits2) logits3 = torch.Tensor(logits3)", "logits2]) student_logits = pad_sequence([logits3, logits2]) kd_loss = nn.KLDivLoss(reduce=False, reduction='none')( log_softmax(student_logits, dim=-1), softmax(teacher_logits, dim=-1))", "F import numpy as np from torch.nn.utils.rnn import pad_sequence from torch.nn.functional import softmax", "= -np.random.rand(7,3) logits2 = -np.random.rand(5,3) logits3 = -np.random.rand(7,3) logits4 = -np.random.rand(5,3) # logits1", "import Variable import torch.nn as nn # import torch.nn.functional as F import numpy", "__name__=='__main__': logits1 = -np.random.rand(7,3) logits2 = -np.random.rand(5,3) logits3 = -np.random.rand(7,3) logits4 = -np.random.rand(5,3)", "-np.random.rand(7,3) logits4 = -np.random.rand(5,3) # logits1 = torch.Tensor(logits1) logits2 = torch.Tensor(logits2) 
logits3 =", "logits3 = -np.random.rand(7,3) logits4 = -np.random.rand(5,3) # logits1 = torch.Tensor(logits1) logits2 = torch.Tensor(logits2)", "pad_sequence([logits1, logits2]) student_logits = pad_sequence([logits3, logits2]) kd_loss = nn.KLDivLoss(reduce=False, reduction='none')( log_softmax(student_logits, dim=-1), softmax(teacher_logits,", "import torch # from torch.autograd import Variable import torch.nn as nn # import", "print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='sum')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1)) # print(kd_loss)", "torch.autograd import Variable import torch.nn as nn # import torch.nn.functional as F import", "= torch.Tensor(logits3) logits4 = torch.Tensor(logits4) teacher_logits = pad_sequence([logits1, logits2]) student_logits = pad_sequence([logits3, logits2])", "if __name__=='__main__': logits1 = -np.random.rand(7,3) logits2 = -np.random.rand(5,3) logits3 = -np.random.rand(7,3) logits4 =", "logits2 = -np.random.rand(5,3) logits3 = -np.random.rand(7,3) logits4 = -np.random.rand(5,3) # logits1 = torch.Tensor(logits1)", "= -np.random.rand(7,3) logits4 = -np.random.rand(5,3) # logits1 = torch.Tensor(logits1) logits2 = torch.Tensor(logits2) logits3", "torch.Tensor(logits2) logits3 = torch.Tensor(logits3) logits4 = torch.Tensor(logits4) teacher_logits = pad_sequence([logits1, logits2]) student_logits =", "import pad_sequence from torch.nn.functional import softmax from torch.nn.functional import log_softmax if __name__=='__main__': logits1", "# from torch.autograd import Variable import torch.nn as nn # import torch.nn.functional as", "# kd_loss = nn.KLDivLoss(reduction='batchmean')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1)) # print(kd_loss) #", "torch # from torch.autograd import Variable import torch.nn as nn # import torch.nn.functional", "as np from torch.nn.utils.rnn import pad_sequence from torch.nn.functional import softmax from torch.nn.functional import", "torch.nn 
as nn # import torch.nn.functional as F import numpy as np from", "dim=-1), softmax(teacher_logits, dim=-1)) print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='batchmean')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits,", "# import torch.nn.functional as F import numpy as np from torch.nn.utils.rnn import pad_sequence", "logits4 = torch.Tensor(logits4) teacher_logits = pad_sequence([logits1, logits2]) student_logits = pad_sequence([logits3, logits2]) kd_loss =", "softmax(teacher_logits, dim=-1)) # print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='sum')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits,", "logits3 = torch.Tensor(logits3) logits4 = torch.Tensor(logits4) teacher_logits = pad_sequence([logits1, logits2]) student_logits = pad_sequence([logits3,", "logits2 = torch.Tensor(logits2) logits3 = torch.Tensor(logits3) logits4 = torch.Tensor(logits4) teacher_logits = pad_sequence([logits1, logits2])", "from torch.autograd import Variable import torch.nn as nn # import torch.nn.functional as F", "from torch.nn.functional import softmax from torch.nn.functional import log_softmax if __name__=='__main__': logits1 = -np.random.rand(7,3)", "dim=-1)) # print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='sum')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1))", "logits1 = torch.Tensor(logits1) logits2 = torch.Tensor(logits2) logits3 = torch.Tensor(logits3) logits4 = torch.Tensor(logits4) teacher_logits", "logits1 = -np.random.rand(7,3) logits2 = -np.random.rand(5,3) logits3 = -np.random.rand(7,3) logits4 = -np.random.rand(5,3) #", "# print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='sum')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1)) #", "-np.random.rand(7,3) logits2 = -np.random.rand(5,3) logits3 = -np.random.rand(7,3) logits4 = -np.random.rand(5,3) # logits1 =", "nn.KLDivLoss(reduce=False, reduction='none')( log_softmax(student_logits, dim=-1), softmax(teacher_logits, dim=-1)) 
print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='batchmean')( # log_softmax(student_logits,", "= torch.Tensor(logits2) logits3 = torch.Tensor(logits3) logits4 = torch.Tensor(logits4) teacher_logits = pad_sequence([logits1, logits2]) student_logits", "<gh_stars>1-10 import torch # from torch.autograd import Variable import torch.nn as nn #", "import log_softmax if __name__=='__main__': logits1 = -np.random.rand(7,3) logits2 = -np.random.rand(5,3) logits3 = -np.random.rand(7,3)", "torch.nn.utils.rnn import pad_sequence from torch.nn.functional import softmax from torch.nn.functional import log_softmax if __name__=='__main__':", "torch.nn.functional import softmax from torch.nn.functional import log_softmax if __name__=='__main__': logits1 = -np.random.rand(7,3) logits2", "import torch.nn as nn # import torch.nn.functional as F import numpy as np", "import torch.nn.functional as F import numpy as np from torch.nn.utils.rnn import pad_sequence from", "= pad_sequence([logits1, logits2]) student_logits = pad_sequence([logits3, logits2]) kd_loss = nn.KLDivLoss(reduce=False, reduction='none')( log_softmax(student_logits, dim=-1),", "np from torch.nn.utils.rnn import pad_sequence from torch.nn.functional import softmax from torch.nn.functional import log_softmax", "numpy as np from torch.nn.utils.rnn import pad_sequence from torch.nn.functional import softmax from torch.nn.functional", "print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='batchmean')( # log_softmax(student_logits, dim=-1), # softmax(teacher_logits, dim=-1)) # print(kd_loss)", "nn # import torch.nn.functional as F import numpy as np from torch.nn.utils.rnn import", "log_softmax(student_logits, dim=-1), softmax(teacher_logits, dim=-1)) print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='batchmean')( # log_softmax(student_logits, dim=-1), #", "= torch.Tensor(logits4) teacher_logits = pad_sequence([logits1, logits2]) student_logits = pad_sequence([logits3, logits2]) kd_loss = nn.KLDivLoss(reduce=False,", "from 
torch.nn.utils.rnn import pad_sequence from torch.nn.functional import softmax from torch.nn.functional import log_softmax if", "# softmax(teacher_logits, dim=-1)) # print(kd_loss) # kd_loss = nn.KLDivLoss(reduction='sum')( # log_softmax(student_logits, dim=-1), #", "import softmax from torch.nn.functional import log_softmax if __name__=='__main__': logits1 = -np.random.rand(7,3) logits2 =", "student_logits = pad_sequence([logits3, logits2]) kd_loss = nn.KLDivLoss(reduce=False, reduction='none')( log_softmax(student_logits, dim=-1), softmax(teacher_logits, dim=-1)) print(kd_loss)" ]
[ "= hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_success(self): hcl_res = hcl2.loads(\"\"\" resource", "resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_success(self): hcl_res = hcl2.loads(\"\"\"", "hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_success(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\"", "class TestLBDeletionProtection(unittest.TestCase): def test_failure(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name =", "hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name = \"test-lb-tf\" internal = false load_balancer_type =", "scan_result) def test_failure_missing_attribute(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name = \"test-lb-tf\"", "= \"network\" subnets = aws_subnet.public.*.id } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf)", "TestLBDeletionProtection(unittest.TestCase): def test_failure(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name = \"test-lb-tf\"", "\"test_success\" { name = \"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets =", "\"aws_lb\" \"test_failed\" { name = \"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets", "name = \"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id }", "name = \"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection", "from checkov.common.models.enums import CheckResult class 
TestLBDeletionProtection(unittest.TestCase): def test_failure(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\"", "resource_conf = hcl_res['resource'][0]['aws_lb']['test_success'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.PASSED, scan_result) if __name__ == '__main__': unittest.main()", "= false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed']", "def test_success(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_success\" { name = \"test-lb-tf\" internal", "= false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = false } \"\"\")", "false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = false } \"\"\") resource_conf", "aws_subnet.public.*.id enable_deletion_protection = true } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_success'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.PASSED,", "hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_failure_missing_attribute(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\"", "= check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_failure_missing_attribute(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" {", "= \"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection =", "internal = false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = false }", "= true } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_success'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.PASSED, 
scan_result) if", "scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_failure_missing_attribute(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\"", "} \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_failure_missing_attribute(self): hcl_res", "import check from checkov.common.models.enums import CheckResult class TestLBDeletionProtection(unittest.TestCase): def test_failure(self): hcl_res = hcl2.loads(\"\"\"", "hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name = \"test-lb-tf\" internal = false", "= check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_success(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_success\" {", "\"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = false } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result", "check from checkov.common.models.enums import CheckResult class TestLBDeletionProtection(unittest.TestCase): def test_failure(self): hcl_res = hcl2.loads(\"\"\" resource", "resource \"aws_lb\" \"test_success\" { name = \"test-lb-tf\" internal = false load_balancer_type = \"network\"", "{ name = \"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id", "= aws_subnet.public.*.id enable_deletion_protection = true } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_success'] scan_result = check.scan_resource_conf(conf=resource_conf)", "enable_deletion_protection = false } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result)", "= false } \"\"\") resource_conf = 
hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def", "internal = false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id } \"\"\") resource_conf =", "test_failure_missing_attribute(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name = \"test-lb-tf\" internal =", "<gh_stars>1-10 import unittest import hcl2 from checkov.terraform.checks.resource.aws.LBDeletionProtection import check from checkov.common.models.enums import CheckResult", "= \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = false } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed']", "aws_subnet.public.*.id enable_deletion_protection = false } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED,", "\"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id } \"\"\") resource_conf", "unittest import hcl2 from checkov.terraform.checks.resource.aws.LBDeletionProtection import check from checkov.common.models.enums import CheckResult class TestLBDeletionProtection(unittest.TestCase):", "import CheckResult class TestLBDeletionProtection(unittest.TestCase): def test_failure(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" {", "load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = false } \"\"\") resource_conf =", "resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_failure_missing_attribute(self): hcl_res = hcl2.loads(\"\"\"", "} \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) 
self.assertEqual(CheckResult.FAILED, scan_result) def test_success(self): hcl_res", "resource \"aws_lb\" \"test_failed\" { name = \"test-lb-tf\" internal = false load_balancer_type = \"network\"", "= aws_subnet.public.*.id enable_deletion_protection = false } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf)", "= aws_subnet.public.*.id } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def", "hcl2 from checkov.terraform.checks.resource.aws.LBDeletionProtection import check from checkov.common.models.enums import CheckResult class TestLBDeletionProtection(unittest.TestCase): def test_failure(self):", "checkov.common.models.enums import CheckResult class TestLBDeletionProtection(unittest.TestCase): def test_failure(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\"", "\"network\" subnets = aws_subnet.public.*.id } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED,", "check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_success(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_success\" { name", "scan_result) def test_success(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_success\" { name = \"test-lb-tf\"", "self.assertEqual(CheckResult.FAILED, scan_result) def test_failure_missing_attribute(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name =", "\"test_failed\" { name = \"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets =", "internal = false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = true }", "aws_subnet.public.*.id } \"\"\") resource_conf = 
hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_success(self):", "self.assertEqual(CheckResult.FAILED, scan_result) def test_success(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_success\" { name =", "test_failure(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name = \"test-lb-tf\" internal =", "checkov.terraform.checks.resource.aws.LBDeletionProtection import check from checkov.common.models.enums import CheckResult class TestLBDeletionProtection(unittest.TestCase): def test_failure(self): hcl_res =", "true } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_success'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.PASSED, scan_result) if __name__", "\"aws_lb\" \"test_success\" { name = \"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets", "} \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_success'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.PASSED, scan_result) if __name__ ==", "load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = true } \"\"\") resource_conf =", "= hcl2.loads(\"\"\" resource \"aws_lb\" \"test_success\" { name = \"test-lb-tf\" internal = false load_balancer_type", "false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = true } \"\"\") resource_conf", "enable_deletion_protection = true } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_success'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.PASSED, scan_result)", "\"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_success(self): 
hcl_res =", "import unittest import hcl2 from checkov.terraform.checks.resource.aws.LBDeletionProtection import check from checkov.common.models.enums import CheckResult class", "subnets = aws_subnet.public.*.id enable_deletion_protection = false } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result =", "= \"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id } \"\"\")", "\"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = true } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_success'] scan_result", "CheckResult class TestLBDeletionProtection(unittest.TestCase): def test_failure(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name", "= false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = true } \"\"\")", "= hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name = \"test-lb-tf\" internal = false load_balancer_type", "false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result", "subnets = aws_subnet.public.*.id } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result)", "def test_failure(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name = \"test-lb-tf\" internal", "\"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = true", "= \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = true } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_success']", "subnets = aws_subnet.public.*.id enable_deletion_protection = true } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_success'] scan_result =", "from 
checkov.terraform.checks.resource.aws.LBDeletionProtection import check from checkov.common.models.enums import CheckResult class TestLBDeletionProtection(unittest.TestCase): def test_failure(self): hcl_res", "\"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_failure_missing_attribute(self): hcl_res =", "= hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_failure_missing_attribute(self): hcl_res = hcl2.loads(\"\"\" resource", "hcl2.loads(\"\"\" resource \"aws_lb\" \"test_success\" { name = \"test-lb-tf\" internal = false load_balancer_type =", "def test_failure_missing_attribute(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name = \"test-lb-tf\" internal", "load_balancer_type = \"network\" subnets = aws_subnet.public.*.id } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result =", "import hcl2 from checkov.terraform.checks.resource.aws.LBDeletionProtection import check from checkov.common.models.enums import CheckResult class TestLBDeletionProtection(unittest.TestCase): def", "test_success(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_success\" { name = \"test-lb-tf\" internal =", "\"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_success'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.PASSED, scan_result) if __name__ == '__main__':", "check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_failure_missing_attribute(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_failed\" { name", "\"test-lb-tf\" internal = false load_balancer_type = \"network\" subnets = aws_subnet.public.*.id enable_deletion_protection = false", "scan_result 
= check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_success(self): hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_success\"", "false } \"\"\") resource_conf = hcl_res['resource'][0]['aws_lb']['test_failed'] scan_result = check.scan_resource_conf(conf=resource_conf) self.assertEqual(CheckResult.FAILED, scan_result) def test_failure_missing_attribute(self):", "hcl_res = hcl2.loads(\"\"\" resource \"aws_lb\" \"test_success\" { name = \"test-lb-tf\" internal = false" ]
[ "('filmfestival', '0030_reward'), ] operations = [ migrations.AddField( model_name='film', name='stills', field=models.ForeignKey(blank=True, to='material.Album', null=True), ),", "-*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations", "unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('material', '0002_auto_20170327_0215'),", "[ ('material', '0002_auto_20170327_0215'), ('filmfestival', '0030_reward'), ] operations = [ migrations.AddField( model_name='film', name='stills', field=models.ForeignKey(blank=True,", "migrations class Migration(migrations.Migration): dependencies = [ ('material', '0002_auto_20170327_0215'), ('filmfestival', '0030_reward'), ] operations =", "coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class", "class Migration(migrations.Migration): dependencies = [ ('material', '0002_auto_20170327_0215'), ('filmfestival', '0030_reward'), ] operations = [", "<gh_stars>1-10 # -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import", "models, migrations class Migration(migrations.Migration): dependencies = [ ('material', '0002_auto_20170327_0215'), ('filmfestival', '0030_reward'), ] operations", "import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('material',", "-*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies", "('material', '0002_auto_20170327_0215'), ('filmfestival', '0030_reward'), ] operations = [ migrations.AddField( model_name='film', name='stills', field=models.ForeignKey(blank=True, to='material.Album',", "from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies =", "'0030_reward'), ] operations = [ migrations.AddField( 
model_name='film', name='stills', field=models.ForeignKey(blank=True, to='material.Album', null=True), ), ]", "__future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [", "django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('material', '0002_auto_20170327_0215'), ('filmfestival', '0030_reward'),", "Migration(migrations.Migration): dependencies = [ ('material', '0002_auto_20170327_0215'), ('filmfestival', '0030_reward'), ] operations = [ migrations.AddField(", "from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('material', '0002_auto_20170327_0215'), ('filmfestival',", "'0002_auto_20170327_0215'), ('filmfestival', '0030_reward'), ] operations = [ migrations.AddField( model_name='film', name='stills', field=models.ForeignKey(blank=True, to='material.Album', null=True),", "= [ ('material', '0002_auto_20170327_0215'), ('filmfestival', '0030_reward'), ] operations = [ migrations.AddField( model_name='film', name='stills',", "import models, migrations class Migration(migrations.Migration): dependencies = [ ('material', '0002_auto_20170327_0215'), ('filmfestival', '0030_reward'), ]", "utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration):", "# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models,", "dependencies = [ ('material', '0002_auto_20170327_0215'), ('filmfestival', '0030_reward'), ] operations = [ migrations.AddField( model_name='film'," ]
[ "Args(parser): parser.add_argument( '--instance', '-i', help='Cloud SQL instance ID.') def Filter(self, tool_context, args): if", "for managing SSL certificates of Cloud SQL instances.\"\"\" from googlecloudsdk.calliope import base from", "2013 Google Inc. All Rights Reserved. \"\"\"Provide commands for managing SSL certificates of", "class SslCerts(base.Group): \"\"\"Provide commands for managing SSL certificates of Cloud SQL instances. Provide", "and getting information about certificates. \"\"\" @staticmethod def Args(parser): parser.add_argument( '--instance', '-i', help='Cloud", "SQL instances.\"\"\" from googlecloudsdk.calliope import base from googlecloudsdk.calliope import exceptions class SslCerts(base.Group): \"\"\"Provide", "base from googlecloudsdk.calliope import exceptions class SslCerts(base.Group): \"\"\"Provide commands for managing SSL certificates", "Inc. All Rights Reserved. \"\"\"Provide commands for managing SSL certificates of Cloud SQL", "SSL certificates of Cloud SQL instances.\"\"\" from googlecloudsdk.calliope import base from googlecloudsdk.calliope import", "SSL certificates of Cloud SQL instances, including creating, deleting, listing, and getting information", "exceptions class SslCerts(base.Group): \"\"\"Provide commands for managing SSL certificates of Cloud SQL instances.", "managing SSL certificates of Cloud SQL instances.\"\"\" from googlecloudsdk.calliope import base from googlecloudsdk.calliope", "for managing SSL certificates of Cloud SQL instances, including creating, deleting, listing, and", "from googlecloudsdk.calliope import exceptions class SslCerts(base.Group): \"\"\"Provide commands for managing SSL certificates of", "for managing SSL certificates of Cloud SQL instances. Provide commands for managing SSL", "ID.') def Filter(self, tool_context, args): if not args.instance: raise exceptions.ToolException('argument --instance/-i is required')", "Cloud SQL instances. 
Provide commands for managing SSL certificates of Cloud SQL instances,", "SQL instance ID.') def Filter(self, tool_context, args): if not args.instance: raise exceptions.ToolException('argument --instance/-i", "of Cloud SQL instances, including creating, deleting, listing, and getting information about certificates.", "SslCerts(base.Group): \"\"\"Provide commands for managing SSL certificates of Cloud SQL instances. Provide commands", "of Cloud SQL instances.\"\"\" from googlecloudsdk.calliope import base from googlecloudsdk.calliope import exceptions class", "of Cloud SQL instances. Provide commands for managing SSL certificates of Cloud SQL", "instances. Provide commands for managing SSL certificates of Cloud SQL instances, including creating,", "Cloud SQL instances, including creating, deleting, listing, and getting information about certificates. \"\"\"", "def Args(parser): parser.add_argument( '--instance', '-i', help='Cloud SQL instance ID.') def Filter(self, tool_context, args):", "parser.add_argument( '--instance', '-i', help='Cloud SQL instance ID.') def Filter(self, tool_context, args): if not", "import exceptions class SslCerts(base.Group): \"\"\"Provide commands for managing SSL certificates of Cloud SQL", "help='Cloud SQL instance ID.') def Filter(self, tool_context, args): if not args.instance: raise exceptions.ToolException('argument", "All Rights Reserved. \"\"\"Provide commands for managing SSL certificates of Cloud SQL instances.\"\"\"", "certificates of Cloud SQL instances.\"\"\" from googlecloudsdk.calliope import base from googlecloudsdk.calliope import exceptions", "import base from googlecloudsdk.calliope import exceptions class SslCerts(base.Group): \"\"\"Provide commands for managing SSL", "\"\"\"Provide commands for managing SSL certificates of Cloud SQL instances. 
Provide commands for", "\"\"\"Provide commands for managing SSL certificates of Cloud SQL instances.\"\"\" from googlecloudsdk.calliope import", "commands for managing SSL certificates of Cloud SQL instances, including creating, deleting, listing,", "commands for managing SSL certificates of Cloud SQL instances. Provide commands for managing", "deleting, listing, and getting information about certificates. \"\"\" @staticmethod def Args(parser): parser.add_argument( '--instance',", "\"\"\" @staticmethod def Args(parser): parser.add_argument( '--instance', '-i', help='Cloud SQL instance ID.') def Filter(self,", "googlecloudsdk.calliope import exceptions class SslCerts(base.Group): \"\"\"Provide commands for managing SSL certificates of Cloud", "Provide commands for managing SSL certificates of Cloud SQL instances, including creating, deleting,", "information about certificates. \"\"\" @staticmethod def Args(parser): parser.add_argument( '--instance', '-i', help='Cloud SQL instance", "listing, and getting information about certificates. \"\"\" @staticmethod def Args(parser): parser.add_argument( '--instance', '-i',", "Reserved. \"\"\"Provide commands for managing SSL certificates of Cloud SQL instances.\"\"\" from googlecloudsdk.calliope", "SQL instances, including creating, deleting, listing, and getting information about certificates. \"\"\" @staticmethod", "certificates of Cloud SQL instances, including creating, deleting, listing, and getting information about", "instances, including creating, deleting, listing, and getting information about certificates. \"\"\" @staticmethod def", "instance ID.') def Filter(self, tool_context, args): if not args.instance: raise exceptions.ToolException('argument --instance/-i is", "# Copyright 2013 Google Inc. All Rights Reserved. 
\"\"\"Provide commands for managing SSL", "'--instance', '-i', help='Cloud SQL instance ID.') def Filter(self, tool_context, args): if not args.instance:", "commands for managing SSL certificates of Cloud SQL instances.\"\"\" from googlecloudsdk.calliope import base", "@staticmethod def Args(parser): parser.add_argument( '--instance', '-i', help='Cloud SQL instance ID.') def Filter(self, tool_context,", "googlecloudsdk.calliope import base from googlecloudsdk.calliope import exceptions class SslCerts(base.Group): \"\"\"Provide commands for managing", "including creating, deleting, listing, and getting information about certificates. \"\"\" @staticmethod def Args(parser):", "Copyright 2013 Google Inc. All Rights Reserved. \"\"\"Provide commands for managing SSL certificates", "certificates. \"\"\" @staticmethod def Args(parser): parser.add_argument( '--instance', '-i', help='Cloud SQL instance ID.') def", "managing SSL certificates of Cloud SQL instances, including creating, deleting, listing, and getting", "Cloud SQL instances.\"\"\" from googlecloudsdk.calliope import base from googlecloudsdk.calliope import exceptions class SslCerts(base.Group):", "managing SSL certificates of Cloud SQL instances. Provide commands for managing SSL certificates", "instances.\"\"\" from googlecloudsdk.calliope import base from googlecloudsdk.calliope import exceptions class SslCerts(base.Group): \"\"\"Provide commands", "Google Inc. All Rights Reserved. \"\"\"Provide commands for managing SSL certificates of Cloud", "Rights Reserved. \"\"\"Provide commands for managing SSL certificates of Cloud SQL instances.\"\"\" from", "certificates of Cloud SQL instances. Provide commands for managing SSL certificates of Cloud", "SQL instances. Provide commands for managing SSL certificates of Cloud SQL instances, including", "creating, deleting, listing, and getting information about certificates. 
\"\"\" @staticmethod def Args(parser): parser.add_argument(", "getting information about certificates. \"\"\" @staticmethod def Args(parser): parser.add_argument( '--instance', '-i', help='Cloud SQL", "about certificates. \"\"\" @staticmethod def Args(parser): parser.add_argument( '--instance', '-i', help='Cloud SQL instance ID.')", "from googlecloudsdk.calliope import base from googlecloudsdk.calliope import exceptions class SslCerts(base.Group): \"\"\"Provide commands for", "SSL certificates of Cloud SQL instances. Provide commands for managing SSL certificates of", "'-i', help='Cloud SQL instance ID.') def Filter(self, tool_context, args): if not args.instance: raise" ]
[ "EFS from .utils import J, setup @click.group() @click.option(\"--profile_name\", \"-p\", default=None) @click.pass_context def efs(ctx,", "setup @click.group() @click.option(\"--profile_name\", \"-p\", default=None) @click.pass_context def efs(ctx, profile_name): setup(ctx, profile_name) @efs.command() @click.pass_context", "click from layerslib import efs as EFS from .utils import J, setup @click.group()", "J, setup @click.group() @click.option(\"--profile_name\", \"-p\", default=None) @click.pass_context def efs(ctx, profile_name): setup(ctx, profile_name) @efs.command()", "import click from layerslib import efs as EFS from .utils import J, setup", "as EFS from .utils import J, setup @click.group() @click.option(\"--profile_name\", \"-p\", default=None) @click.pass_context def", "@click.group() @click.option(\"--profile_name\", \"-p\", default=None) @click.pass_context def efs(ctx, profile_name): setup(ctx, profile_name) @efs.command() @click.pass_context def", "@click.pass_context def efs(ctx, profile_name): setup(ctx, profile_name) @efs.command() @click.pass_context def efs_list(ctx): data = EFS.get_filesystem()", "from layerslib import efs as EFS from .utils import J, setup @click.group() @click.option(\"--profile_name\",", "<reponame>hdknr/py-layers import click from layerslib import efs as EFS from .utils import J,", "import J, setup @click.group() @click.option(\"--profile_name\", \"-p\", default=None) @click.pass_context def efs(ctx, profile_name): setup(ctx, profile_name)", "from .utils import J, setup @click.group() @click.option(\"--profile_name\", \"-p\", default=None) @click.pass_context def efs(ctx, profile_name):", "layerslib import efs as EFS from .utils import J, setup @click.group() @click.option(\"--profile_name\", \"-p\",", "@click.option(\"--profile_name\", \"-p\", default=None) @click.pass_context def efs(ctx, profile_name): setup(ctx, profile_name) @efs.command() @click.pass_context def efs_list(ctx):", "\"-p\", default=None) 
@click.pass_context def efs(ctx, profile_name): setup(ctx, profile_name) @efs.command() @click.pass_context def efs_list(ctx): data", ".utils import J, setup @click.group() @click.option(\"--profile_name\", \"-p\", default=None) @click.pass_context def efs(ctx, profile_name): setup(ctx,", "default=None) @click.pass_context def efs(ctx, profile_name): setup(ctx, profile_name) @efs.command() @click.pass_context def efs_list(ctx): data =", "import efs as EFS from .utils import J, setup @click.group() @click.option(\"--profile_name\", \"-p\", default=None)", "efs as EFS from .utils import J, setup @click.group() @click.option(\"--profile_name\", \"-p\", default=None) @click.pass_context", "def efs(ctx, profile_name): setup(ctx, profile_name) @efs.command() @click.pass_context def efs_list(ctx): data = EFS.get_filesystem() click.echo(J(data))" ]
[ "profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # テスト該当プロファイルのMFA", "for k in credentials_lists: # GIVEN: CredentialsTuple の name だけを抽出する credentials_name_list.append(k.name) for in_both", "prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # ~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する def test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意:", "にも Config にも存在する プロファイルのリストかどうか\"\"\" # GIVEN: Profile に Credentials の値も合わせた ProfileTuple のリストを取得する profile_name_list", "profile に、credentials の値を合体させたリストを取得する def test_get_perfect_profile_list(profile_obj_list, credentials_lists, perfect_profile_list): \"\"\"テスト: 取得したリストは、Credential にも Config にも存在する プロファイルのリストかどうか\"\"\"", "data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list) # THEN: there is some roles related to the profile if", "profile prompts.prompt_for_asking_aws_account_id(profile) out, err = capsys.readouterr() assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in", "utf-8 -*- from random import randint from set_aws_mfa.data.data_manager import ProfileTuple from set_aws_mfa.helper import", "THEN: The file is not exist assert not is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する", "profile data_manager.writing_aws_account_to_the_file(profile, aws_account_id) # WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA から該当 profile の aws account id を検索した場合", "id を検索した場合 retrieved_aws_account_id = data_manager.get_aws_account_id(profile) # THEN: int の aws account id が取得できている", "account id を取得したとする aws_account_id = 12345 data_manager.create_aws_account_id_file() # WHEN: check the existence of", "data_manager.get_mfa_arn(profile) # THEN: assert data_manager.AWS_IAM_ARN_HEAD_PART assert 
data_manager.AWS_IAM_ARN_MFA_PART assert profile.name in mfa_arn def test_get_role_for_a_base_profile(profile_which_has_role,", "= data_manager.get_aws_account_id(profile) # THEN: assert type(aws_account_id) == int # テスト ユーザー入力の AWSアカウントID が", "GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA is changed to fake path # GIVEN: Create fake AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file()", "int assert not is_int # テスト ユーザー入力の AWSアカウントID が int の場合、True が返される def", "GIVEN: CredentialsTuple の name だけを抽出する credentials_name_list.append(k.name) for in_both in perfect_profile_list: assert isinstance(in_both, ProfileTuple)", "が存在しない場合、作成する def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake", "account id を検索した場合 retrieved_aws_account_id = data_manager.get_aws_account_id(profile) # THEN: int の aws account id", "not an int assert not is_int # テスト ユーザー入力の AWSアカウントID が int の場合、True", "capsys, monkeypatch): # GIVEN: Create fake AWS_ACCOUNT_FOR_data_manager # GIVEN: No info for profile", "capsys.readouterr() # THEN: prompt usable profile name for p in perfect_profile_list: if p.aws_secret_access_key", "Try to prepare AWS_ACCOUNT_FOR_SET_AWS_MFA and it is created data_manager.prepare_aws_account_id_file() # WHEN: Check the", "= perfect_profile_list[0] # GIVEN: 下記aws account id を取得したとする aws_account_id = 12345 data_manager.create_aws_account_id_file() #", "profile = perfect_profile_list[0] # WHEN: call the function aws_account_id = data_manager.get_aws_account_id(profile) # THEN:", "of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is not exist assert", "がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\" # GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN:", "the existence of info for the given profile 
data_manager.get_aws_account_id(profile) # THEN: Prompt message", "ではない場合、を Mock user_input_not_int = \"hogehoge\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda", "AWSアカウントID が int じゃない場合、False が返される def test_user_input_is_not_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock", "is changed to fake path # GIVEN: Create fake AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file() # GIVEN:", "included in stdout assert \") \" + p.name in out.strip() def test_get_selected_profile(perfect_profile_list, monkeypatch):", "######################## # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、False を返す def test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): # GIVEN: the path of", "# GIVEN: Profile に Credentials の値も合わせた ProfileTuple のリストを取得する profile_name_list = [] credentials_name_list =", "そのファイルが既に存在していた場合、書き込みをせずに raise raise # THEN: ask to input aws account id for the", "= \"hogehoge\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int) #", "THEN: assert type(aws_account_id) == int # テスト ユーザー入力の AWSアカウントID が int じゃない場合、False が返される", "random import randint from set_aws_mfa.data.data_manager import ProfileTuple from set_aws_mfa.helper import helper from set_aws_mfa", "in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # ~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する def test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意: ~/.aws_accounts_for_set_aws_mfa", "!= 0: assert role_for_the_profile_list[0].source_profile == profile_which_has_role.name def test_get_profile_instance_for_user_input(perfect_profile_list): # GIVEN: validated input num", "perfect profile list # GIVEN: Mock user input user_input = 2 monkeypatch.setattr(BUILTIN_INPUTS, lambda", "テスト ~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに 該当ProfileのAWSアカウントID の入力を求める def 
test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list, capsys): # GIVEN a", "AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is exist assert is_the_file_exists #", "profile = perfect_profile_list[0] # WHEN create a new aws account file if not", "# GIVEN: a valid profile which can switch role # WHEN: Check a", "prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # ~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する def test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意: ~/.aws_accounts_for_set_aws_mfa がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する", "# WHEN create a new aws account file if not validate.check_aws_accounts_for_set_aws_mfa_existence(): data_manager.create_aws_account_id_file() else:", "profile を指定する profile = perfect_profile_list[0] # GIVEN: 下記aws account id を取得したとする aws_account_id =", "from random import randint from set_aws_mfa.data.data_manager import ProfileTuple from set_aws_mfa.helper import helper from", "の値を合体させたリストを取得する def test_get_perfect_profile_list(profile_obj_list, credentials_lists, perfect_profile_list): \"\"\"テスト: 取得したリストは、Credential にも Config にも存在する プロファイルのリストかどうか\"\"\" # GIVEN:", "profile which can switch role # WHEN: Check a role related to a", "role_for_the_profile_list = data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list) # THEN: there is some roles related to the", "monkeypatch): # GIVEN: Create fake AWS_ACCOUNT_FOR_data_manager # GIVEN: No info for profile exists", "# Get aws account info ######################## # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、False を返す def test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files):", "# WHEN: Check a role related to a given profile role_for_the_profile_list = 
data_manager.get_role_list_for_a_profile(profile_which_has_role,", "ProfileTuple に aws_secret_access_key がセットされているならば if in_both.aws_secret_access_key is not None: # THEN: credentials にも", "in out.strip() def test_get_selected_profile(perfect_profile_list, monkeypatch): # GIVEN: perfect profile list # GIVEN: Mock", "in_both.name in credentials_name_list assert in_both.name in profile_name_list def test_prompt_displays_profile_name(capsys, perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" # GIVEN:", "out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # ~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する def test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意: ~/.aws_accounts_for_set_aws_mfa がローカルにない場合、", "Given: ProfileTuple の name だけを抽出する profile_name_list.append(i.name) for k in credentials_lists: # GIVEN: CredentialsTuple", "the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # GIVEN: the path of", "\"\"\"テスト: 取得したリストは、Credential にも Config にも存在する プロファイルのリストかどうか\"\"\" # GIVEN: Profile に Credentials の値も合わせた ProfileTuple", "def test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list): # GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA is changed to fake path #", "helper from set_aws_mfa import validate from set_aws_mfa.data import data_manager from set_aws_mfa.helper.helper import IntObject", "file is exist assert is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに 該当ProfileのAWSアカウントID の入力を求める def test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files,", "capsys): # GIVEN a Profile profile = perfect_profile_list[0] # WHEN create a new", "def test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意: ~/.aws_accounts_for_set_aws_mfa がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\" # GIVEN: a ProfileTuple profile", "validate from set_aws_mfa.data 
import data_manager from set_aws_mfa.helper.helper import IntObject from set_aws_mfa import prompts", "profile instance for the input number profile_instance = data_manager.get_specified_profile( perfect_profile_list, validated_input) # THEN:", "prompts.prompt_user_selection(perfect_profile_list) out, err = capsys.readouterr() # THEN: prompt usable profile name for p", "check the existence of info for the given profile data_manager.writing_aws_account_to_the_file(profile, aws_account_id) # WHEN:", "GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN: call the function mfa_arn =", "perfect_profile_list # WHEN: execute prompt_user_selection() prompts.prompt_user_selection(perfect_profile_list) out, err = capsys.readouterr() # THEN: prompt", "path # GIVEN: Create fake AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file() # GIVEN: 対象 profile を指定する profile", "を検索した場合 retrieved_aws_account_id = data_manager.get_aws_account_id(profile) # THEN: int の aws account id が取得できている assert", "下記aws account id を取得したとする aws_account_id = 12345 data_manager.create_aws_account_id_file() # WHEN: check the existence", "= data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list) # THEN: there is some roles related to the profile", "から該当 profile の aws account id を検索した場合 retrieved_aws_account_id = data_manager.get_aws_account_id(profile) # THEN: int", "テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced", "in perfect_profile_list: assert isinstance(in_both, ProfileTuple) # WHEN: ProfileTuple に aws_secret_access_key がセットされているならば if in_both.aws_secret_access_key", "info ######################## # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、False を返す def test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): # GIVEN: the path", "ask to input aws account id for 
the profile prompts.prompt_for_asking_aws_account_id(profile) out, err =", "It's not an int assert is_int # ~/.aws_accounts_for_set_aws_mfa に ユーザー入力の AWSアカウントIDを 記入する def", "to ask for input aws account id for the profile out, err =", "プロファイルのリストかどうか\"\"\" # GIVEN: Profile に Credentials の値も合わせた ProfileTuple のリストを取得する profile_name_list = [] credentials_name_list", "Mock user_input_not_int = \"hogehoge\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _:", "int # テスト ユーザー入力の AWSアカウントID が int じゃない場合、False が返される def test_user_input_is_not_int(monkeypatch): # GIVEN:", "stdout assert \") \" + p.name in out.strip() def test_get_selected_profile(perfect_profile_list, monkeypatch): # GIVEN:", "perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" # GIVEN: get perfect_profile_list # WHEN: execute prompt_user_selection() prompts.prompt_user_selection(perfect_profile_list) out, err", "account id が取得できている assert type(retrieved_aws_account_id) is int # テスト ~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files,", "in_both in perfect_profile_list: assert isinstance(in_both, ProfileTuple) # WHEN: ProfileTuple に aws_secret_access_key がセットされているならば if", "delete_fake_aws_account_files, perfect_profile_list, capsys): # GIVEN a Profile profile = perfect_profile_list[0] # WHEN create", "set_aws_mfa import validate from set_aws_mfa.data import data_manager from set_aws_mfa.helper.helper import IntObject from set_aws_mfa", "= [] credentials_name_list = [] for i in profile_obj_list: # Given: ProfileTuple の", "_: aws_account_id_int) # WHEN: check the existence of info for the given profile", "data_manager.create_aws_account_id_file() # WHEN: check the existence of info for the given profile data_manager.writing_aws_account_to_the_file(profile,", "for in_both in perfect_profile_list: assert isinstance(in_both, ProfileTuple) # WHEN: ProfileTuple に 
aws_secret_access_key がセットされているならば", "perfect_profile_list[0] # WHEN create a new aws account file if not validate.check_aws_accounts_for_set_aws_mfa_existence(): data_manager.create_aws_account_id_file()", "raise # THEN: ask to input aws account id for the profile prompts.prompt_for_asking_aws_account_id(profile)", "validated_input = randint(1, len(perfect_profile_list)) # WHEN: get profile instance for the input number", "にも config にも、その profile が存在している assert in_both.name in credentials_name_list assert in_both.name in profile_name_list", "p.name in out.strip() def test_get_selected_profile(perfect_profile_list, monkeypatch): # GIVEN: perfect profile list # GIVEN:", "switch role # WHEN: Check a role related to a given profile role_for_the_profile_list", "perfect_profile_list[0] # WHEN: call the function mfa_arn = data_manager.get_mfa_arn(profile) # THEN: assert data_manager.AWS_IAM_ARN_HEAD_PART", "import ProfileTuple from set_aws_mfa.helper import helper from set_aws_mfa import validate from set_aws_mfa.data import", "input num validated_input = randint(1, len(perfect_profile_list)) # WHEN: get profile instance for the", "not exist assert not is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files): #", "to the profile if len(role_for_the_profile_list) != 0: assert role_for_the_profile_list[0].source_profile == profile_which_has_role.name def test_get_profile_instance_for_user_input(perfect_profile_list):", "が存在しない場合、False を返す def test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake", "with fake path # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence()", "Get aws account info ######################## # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、False を返す def 
test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): #", "AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file() # GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] # GIVEN: 下記aws", "= data_manager.get_mfa_arn(profile) # THEN: assert data_manager.AWS_IAM_ARN_HEAD_PART assert data_manager.AWS_IAM_ARN_MFA_PART assert profile.name in mfa_arn def", "WHEN create a new aws account file if not validate.check_aws_accounts_for_set_aws_mfa_existence(): data_manager.create_aws_account_id_file() else: #", "num validated_input = randint(1, len(perfect_profile_list)) # WHEN: get profile instance for the input", "def test_prompt_displays_profile_name(capsys, perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" # GIVEN: get perfect_profile_list # WHEN: execute prompt_user_selection() prompts.prompt_user_selection(perfect_profile_list)", "が返される def test_user_input_is_not_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"hogehoge\" #", "is called profile = data_manager.get_selected_profile() assert profile == perfect_profile_list[user_input - 1] ######################## #", "# GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA is changed to fake path # GIVEN: Create fake AWS_ACCOUNT_FOR_SET_AWS_MFA", "capsys.readouterr() assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip()", "delete_fake_aws_account_files, perfect_profile_list): # GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA is changed to fake path # GIVEN: Create", "credentials 両方にいる profile に、credentials の値を合体させたリストを取得する def test_get_perfect_profile_list(profile_obj_list, credentials_lists, perfect_profile_list): \"\"\"テスト: 取得したリストは、Credential にも Config", "fake AWS_ACCOUNT_FOR_data_manager # GIVEN: No info for profile exists in fake AWS_ACCOUNT_FOR_SET_AWS_MFA #", "# WHEN: execute prompt_user_selection() 
prompts.prompt_user_selection(perfect_profile_list) out, err = capsys.readouterr() # THEN: prompt usable", "# THEN: assert data_manager.AWS_IAM_ARN_HEAD_PART assert data_manager.AWS_IAM_ARN_MFA_PART assert profile.name in mfa_arn def test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list):", "assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # テスト該当プロファイルのMFA ARN を取得する def", "# WHEN: call the function aws_account_id = data_manager.get_aws_account_id(profile) # THEN: assert type(aws_account_id) ==", "Get profiles ######################## # 1. config, credentials 両方にいる profile に、credentials の値を合体させたリストを取得する def test_get_perfect_profile_list(profile_obj_list,", "に aws_secret_access_key がセットされているならば if in_both.aws_secret_access_key is not None: # THEN: credentials にも config", "id が取得できている assert type(retrieved_aws_account_id) is int # テスト ~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list,", "= \"12345\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: aws_account_id_int) #", "list # GIVEN: Mock user input user_input = 2 monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input)", "not None: # THEN: credentials にも config にも、その profile が存在している assert in_both.name in", "GIVEN: validated input num validated_input = randint(1, len(perfect_profile_list)) # WHEN: get profile instance", "credentials_name_list.append(k.name) for in_both in perfect_profile_list: assert isinstance(in_both, ProfileTuple) # WHEN: ProfileTuple に aws_secret_access_key", "user_input_not_int) # WHEN: Validate the input is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's", "# WHEN: check the existence of info for the given profile 
data_manager.get_aws_account_id(profile) #", "\"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" # GIVEN: get perfect_profile_list # WHEN: execute prompt_user_selection() prompts.prompt_user_selection(perfect_profile_list) out, err =", "validate.check_aws_accounts_for_set_aws_mfa_existence(): data_manager.create_aws_account_id_file() else: # そのファイルが既に存在していた場合、書き込みをせずに raise raise # THEN: ask to input aws", "assert not is_int # テスト ユーザー入力の AWSアカウントID が int の場合、True が返される def test_user_input_is_int(monkeypatch):", "given profile data_manager.get_aws_account_id(profile) # THEN: Prompt message to ask for input aws account", "WHEN: get profile instance for the input number profile_instance = data_manager.get_specified_profile( perfect_profile_list, validated_input)", "is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is exist assert is_the_file_exists # テスト", "# テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA", "out.rstrip() # ~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する def test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意: ~/.aws_accounts_for_set_aws_mfa がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\" #", "の場合、True が返される def test_user_input_is_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"12345\"", "# THEN: The file is not exist assert not is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa", "integer ではない場合、を Mock user_input_not_int = \"12345\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS,", "# GIVEN: No info for profile exists in fake AWS_ACCOUNT_FOR_SET_AWS_MFA # GIVEN: 対象", "function is called profile = data_manager.get_selected_profile() assert profile == perfect_profile_list[user_input - 1] ########################", "ARN を取得する def 
test_get_mfa_arn(perfect_profile_list): # GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN:", "role related to a given profile role_for_the_profile_list = data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list) # THEN: there", "Mock aws_account_id_int = \"12345\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _:", "data_manager.get_aws_account_id(profile) # THEN: Prompt message to ask for input aws account id for", "replaced with fake path # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists =", "not is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files): # GIVEN: the path", "GIVEN: Create fake AWS_ACCOUNT_FOR_data_manager # GIVEN: No info for profile exists in fake", "to fake path # GIVEN: Create fake AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file() # GIVEN: 対象 profile", "にも存在する プロファイルのリストかどうか\"\"\" # GIVEN: Profile に Credentials の値も合わせた ProfileTuple のリストを取得する profile_name_list = []", "exist # WHEN: Try to prepare AWS_ACCOUNT_FOR_SET_AWS_MFA and it is created data_manager.prepare_aws_account_id_file() #", "profile_obj_list: # Given: ProfileTuple の name だけを抽出する profile_name_list.append(i.name) for k in credentials_lists: #", "~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する def test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意: ~/.aws_accounts_for_set_aws_mfa がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\" # GIVEN: a", "ask for input aws account id for the profile out, err = capsys.readouterr()", "CredentialsTuple の name だけを抽出する credentials_name_list.append(k.name) for in_both in perfect_profile_list: assert isinstance(in_both, ProfileTuple) #", "create a new aws account file if not validate.check_aws_accounts_for_set_aws_mfa_existence(): 
data_manager.create_aws_account_id_file() else: # そのファイルが既に存在していた場合、書き込みをせずに", "the input number profile_instance = data_manager.get_specified_profile( perfect_profile_list, validated_input) # THEN: assert isinstance(profile_instance, ProfileTuple)", "int の aws account id が取得できている assert type(retrieved_aws_account_id) is int # テスト ~/.aws_accounts_for_data_manager", "input aws account id for the profile out, err = capsys.readouterr() print(out.rstrip()) assert", "given profile role_for_the_profile_list = data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list) # THEN: there is some roles related", "assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # ~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する def test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意: ~/.aws_accounts_for_set_aws_mfa がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、", "を指定する profile = perfect_profile_list[0] # GIVEN: 下記aws account id を取得したとする aws_account_id = 12345", "the existence of info for the given profile data_manager.writing_aws_account_to_the_file(profile, aws_account_id) # WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA", "import validate from set_aws_mfa.data import data_manager from set_aws_mfa.helper.helper import IntObject from set_aws_mfa import", "None: # \") profile_name\" is included in stdout assert \") \" + p.name", "の値も合わせた ProfileTuple のリストを取得する profile_name_list = [] credentials_name_list = [] for i in profile_obj_list:", "path # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA is not exist # WHEN: Try", "set_aws_mfa.data import data_manager from set_aws_mfa.helper.helper import IntObject from set_aws_mfa import prompts from tests.conftest", "roles related to the profile if len(role_for_the_profile_list) != 0: assert role_for_the_profile_list[0].source_profile == profile_which_has_role.name", "data_manager from set_aws_mfa.helper.helper import IntObject from set_aws_mfa import prompts from tests.conftest import 
BUILTIN_INPUTS", "GIVEN: perfect profile list # GIVEN: Mock user input user_input = 2 monkeypatch.setattr(BUILTIN_INPUTS,", "assert in_both.name in credentials_name_list assert in_both.name in profile_name_list def test_prompt_displays_profile_name(capsys, perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" #", "# THEN: credentials にも config にも、その profile が存在している assert in_both.name in credentials_name_list assert", "prompts.prompt_for_asking_aws_account_id(profile) out, err = capsys.readouterr() assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip()", "call the function aws_account_id = data_manager.get_aws_account_id(profile) # THEN: assert type(aws_account_id) == int #", "there is some roles related to the profile if len(role_for_the_profile_list) != 0: assert", "assert is_int # ~/.aws_accounts_for_set_aws_mfa に ユーザー入力の AWSアカウントIDを 記入する def test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list): #", "BUILTIN_INPUTS ######################## # Get profiles ######################## # 1. 
config, credentials 両方にいる profile に、credentials", "delete_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # GIVEN:", "を取得したとする aws_account_id = 12345 data_manager.create_aws_account_id_file() # WHEN: check the existence of info for", "THEN: It's not an int assert is_int # ~/.aws_accounts_for_set_aws_mfa に ユーザー入力の AWSアカウントIDを 記入する", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- from random import randint from set_aws_mfa.data.data_manager", "out, err = capsys.readouterr() # THEN: prompt usable profile name for p in", "is included in stdout assert \") \" + p.name in out.strip() def test_get_selected_profile(perfect_profile_list,", "# WHEN: Try to prepare AWS_ACCOUNT_FOR_SET_AWS_MFA and it is created data_manager.prepare_aws_account_id_file() # WHEN:", "WHEN: Validate the input is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an", "WHEN: call the function mfa_arn = data_manager.get_mfa_arn(profile) # THEN: assert data_manager.AWS_IAM_ARN_HEAD_PART assert data_manager.AWS_IAM_ARN_MFA_PART", "is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int assert is_int #", "function mfa_arn = data_manager.get_mfa_arn(profile) # THEN: assert data_manager.AWS_IAM_ARN_HEAD_PART assert data_manager.AWS_IAM_ARN_MFA_PART assert profile.name in", "~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに 該当ProfileのAWSアカウントID の入力を求める def test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list, capsys): # GIVEN a Profile", "data_manager.get_aws_account_id(profile) # THEN: assert type(aws_account_id) == int # テスト ユーザー入力の AWSアカウントID が int", "def test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path #", "1. 
config, credentials 両方にいる profile に、credentials の値を合体させたリストを取得する def test_get_perfect_profile_list(profile_obj_list, credentials_lists, perfect_profile_list): \"\"\"テスト: 取得したリストは、Credential", "the path of AWS_ACCOUNT_FOR_SET_AWS_MFA is not exist # WHEN: Try to prepare AWS_ACCOUNT_FOR_SET_AWS_MFA", "exists in fake AWS_ACCOUNT_FOR_SET_AWS_MFA # GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] #", "WHEN: ProfileTuple に aws_secret_access_key がセットされているならば if in_both.aws_secret_access_key is not None: # THEN: credentials", "user_input = 2 monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input) # WHEN: this function is called", "tests.conftest import BUILTIN_INPUTS ######################## # Get profiles ######################## # 1. config, credentials 両方にいる", "GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA is not exist # WHEN: Try to prepare", "WHEN: Try to prepare AWS_ACCOUNT_FOR_SET_AWS_MFA and it is created data_manager.prepare_aws_account_id_file() # WHEN: Check", "test_user_input_is_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"12345\" # GIVEN: Mock", "= helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int assert not is_int #", "validated input num validated_input = randint(1, len(perfect_profile_list)) # WHEN: get profile instance for", "# THEN: assert type(aws_account_id) == int # テスト ユーザー入力の AWSアカウントID が int じゃない場合、False", "input is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int assert not", "対象 profile を指定する profile = perfect_profile_list[0] # GIVEN: 下記aws account id を取得したとする aws_account_id", "# WHEN: call the function mfa_arn = data_manager.get_mfa_arn(profile) # THEN: assert data_manager.AWS_IAM_ARN_HEAD_PART assert", "GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"hogehoge\" # GIVEN: Mock user input", "string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: 
user_input_not_int) # WHEN: Validate the input is_int = helper.is_input_int_loop(IntObject(),", "is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is not exist assert not is_the_file_exists", "this function is called profile = data_manager.get_selected_profile() assert profile == perfect_profile_list[user_input - 1]", "== profile_which_has_role.name def test_get_profile_instance_for_user_input(perfect_profile_list): # GIVEN: validated input num validated_input = randint(1, len(perfect_profile_list))", "profile_name\" is included in stdout assert \") \" + p.name in out.strip() def", "user_input_not_int = \"hogehoge\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int)", "# THEN: It's not an int assert not is_int # テスト ユーザー入力の AWSアカウントID", "# GIVEN: 下記aws account id を取得したとする aws_account_id = 12345 data_manager.create_aws_account_id_file() # WHEN: check", "profile = perfect_profile_list[0] # GIVEN: ユーザーインプットが integer ではない場合、を Mock aws_account_id_int = \"12345\" #", "perfect_profile_list, create_fake_aws_account_files, delete_fake_aws_account_files, capsys, monkeypatch): # GIVEN: Create fake AWS_ACCOUNT_FOR_data_manager # GIVEN: No", "valid profile which can switch role # WHEN: Check a role related to", "はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list, create_fake_aws_account_files, delete_fake_aws_account_files, capsys, monkeypatch): # GIVEN: Create fake AWS_ACCOUNT_FOR_data_manager", "raise raise # THEN: ask to input aws account id for the profile", "def test_user_input_is_not_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"hogehoge\" # GIVEN:", "existence of info for the given profile data_manager.get_aws_account_id(profile) # THEN: Prompt message to", "data_manager.AWS_IAM_ARN_MFA_PART assert profile.name in mfa_arn def 
test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\" # GIVEN: a valid", "of AWS_ACCOUNT_FOR_SET_AWS_MFA is not exist # WHEN: Try to prepare AWS_ACCOUNT_FOR_SET_AWS_MFA and it", "THEN: there is some roles related to the profile if len(role_for_the_profile_list) != 0:", "assert not is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files): # GIVEN: the", "= capsys.readouterr() # THEN: prompt usable profile name for p in perfect_profile_list: if", "Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int) # WHEN: Validate the input", "in out.rstrip() # ~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する def test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意: ~/.aws_accounts_for_set_aws_mfa がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\"", "helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int assert not is_int # テスト", "id for the profile out, err = capsys.readouterr() print(out.rstrip()) assert profile.name in out.rstrip()", "assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() #", "def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path", "# WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA から該当 profile の aws account id を検索した場合 retrieved_aws_account_id = data_manager.get_aws_account_id(profile)", "integer ではない場合、を Mock aws_account_id_int = \"12345\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS,", "set_aws_mfa.helper import helper from set_aws_mfa import validate from set_aws_mfa.data 
import data_manager from set_aws_mfa.helper.helper", "# \") profile_name\" is included in stdout assert \") \" + p.name in", "the function mfa_arn = data_manager.get_mfa_arn(profile) # THEN: assert data_manager.AWS_IAM_ARN_HEAD_PART assert data_manager.AWS_IAM_ARN_MFA_PART assert profile.name", "usable profile name for p in perfect_profile_list: if p.aws_secret_access_key is not None: #", "fake path # GIVEN: Create fake AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file() # GIVEN: 対象 profile を指定する", "for i in profile_obj_list: # Given: ProfileTuple の name だけを抽出する profile_name_list.append(i.name) for k", "monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input) # WHEN: this function is called profile = data_manager.get_selected_profile()", "# THEN: int の aws account id が取得できている assert type(retrieved_aws_account_id) is int #", "assert type(retrieved_aws_account_id) is int # テスト ~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list, create_fake_aws_account_files, delete_fake_aws_account_files,", "is created data_manager.prepare_aws_account_id_file() # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence()", "in perfect_profile_list: if p.aws_secret_access_key is not None: # \") profile_name\" is included in", "in_both.aws_secret_access_key is not None: # THEN: credentials にも config にも、その profile が存在している assert", "が存在している assert in_both.name in credentials_name_list assert in_both.name in profile_name_list def test_prompt_displays_profile_name(capsys, perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\"", "GIVEN: Mock user input user_input = 2 monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input) # WHEN:", "credentials_lists, perfect_profile_list): \"\"\"テスト: 取得したリストは、Credential にも Config にも存在する プロファイルのリストかどうか\"\"\" # GIVEN: Profile に 
Credentials", "\") \" + p.name in out.strip() def test_get_selected_profile(perfect_profile_list, monkeypatch): # GIVEN: perfect profile", "profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\" # GIVEN: a valid profile which can switch role # WHEN:", "# WHEN: this function is called profile = data_manager.get_selected_profile() assert profile == perfect_profile_list[user_input", "path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA", "def test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list, create_fake_aws_account_files, delete_fake_aws_account_files, capsys, monkeypatch): # GIVEN: Create fake AWS_ACCOUNT_FOR_data_manager #", "profile list # GIVEN: Mock user input user_input = 2 monkeypatch.setattr(BUILTIN_INPUTS, lambda _:", "validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is not exist assert not is_the_file_exists # テスト", "profile out, err = capsys.readouterr() print(out.rstrip()) assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in", "with fake path # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA is not exist #", "に、credentials の値を合体させたリストを取得する def test_get_perfect_profile_list(profile_obj_list, credentials_lists, perfect_profile_list): \"\"\"テスト: 取得したリストは、Credential にも Config にも存在する プロファイルのリストかどうか\"\"\" #", "path # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN:", "######################## # Get profiles ######################## # 1. 
config, credentials 両方にいる profile に、credentials の値を合体させたリストを取得する", "profile_name_list = [] credentials_name_list = [] for i in profile_obj_list: # Given: ProfileTuple", "aws_account_id = data_manager.get_aws_account_id(profile) # THEN: assert type(aws_account_id) == int # テスト ユーザー入力の AWSアカウントID", "# GIVEN: Create fake AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file() # GIVEN: 対象 profile を指定する profile =", "GIVEN a Profile profile = perfect_profile_list[0] # WHEN create a new aws account", "a given profile role_for_the_profile_list = data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list) # THEN: there is some roles", "is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files): # GIVEN: the path of", "config, credentials 両方にいる profile に、credentials の値を合体させたリストを取得する def test_get_perfect_profile_list(profile_obj_list, credentials_lists, perfect_profile_list): \"\"\"テスト: 取得したリストは、Credential にも", "input user_input = 2 monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input) # WHEN: this function is", "the given profile data_manager.get_aws_account_id(profile) # THEN: Prompt message to ask for input aws", "# Get profiles ######################## # 1. 
config, credentials 両方にいる profile に、credentials の値を合体させたリストを取得する def", "test_user_input_is_not_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"hogehoge\" # GIVEN: Mock", "にも、その profile が存在している assert in_both.name in credentials_name_list assert in_both.name in profile_name_list def test_prompt_displays_profile_name(capsys,", "exist assert not is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files): # GIVEN:", "= validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is exist assert is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa", "aws account info ######################## # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、False を返す def test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): # GIVEN:", "not validate.check_aws_accounts_for_set_aws_mfa_existence(): data_manager.create_aws_account_id_file() else: # そのファイルが既に存在していた場合、書き込みをせずに raise raise # THEN: ask to input", "GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int) # WHEN: Validate the", "prompts from tests.conftest import BUILTIN_INPUTS ######################## # Get profiles ######################## # 1. 
config,", "data_manager.AWS_IAM_ARN_HEAD_PART assert data_manager.AWS_IAM_ARN_MFA_PART assert profile.name in mfa_arn def test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\" # GIVEN:", "data_manager.get_selected_profile() assert profile == perfect_profile_list[user_input - 1] ######################## # Get aws account info", "input aws account id for the profile prompts.prompt_for_asking_aws_account_id(profile) out, err = capsys.readouterr() assert", "WHEN: call the function aws_account_id = data_manager.get_aws_account_id(profile) # THEN: assert type(aws_account_id) == int", "credentials にも config にも、その profile が存在している assert in_both.name in credentials_name_list assert in_both.name in", "profile == perfect_profile_list[user_input - 1] ######################## # Get aws account info ######################## #", "def test_user_input_is_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"12345\" # GIVEN:", "is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int assert not is_int", "It's not an int assert not is_int # テスト ユーザー入力の AWSアカウントID が int", "test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\" # GIVEN: a valid profile which can switch role #", "account info ######################## # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、False を返す def test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): # GIVEN: the", "string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: aws_account_id_int) # WHEN: check the existence of info for", "# WHEN: check the existence of info for the given profile data_manager.writing_aws_account_to_the_file(profile, aws_account_id)", "# WHEN: ProfileTuple に aws_secret_access_key がセットされているならば if in_both.aws_secret_access_key is not None: # THEN:", "# 1. 
config, credentials 両方にいる profile に、credentials の値を合体させたリストを取得する def test_get_perfect_profile_list(profile_obj_list, credentials_lists, perfect_profile_list): \"\"\"テスト:", "ではない場合、を Mock aws_account_id_int = \"12345\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda", "for the input number profile_instance = data_manager.get_specified_profile( perfect_profile_list, validated_input) # THEN: assert isinstance(profile_instance,", "out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # テスト該当プロファイルのMFA ARN を取得する", "assert data_manager.AWS_IAM_ARN_MFA_PART assert profile.name in mfa_arn def test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\" # GIVEN: a", "# GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"12345\" # GIVEN: Mock user", "mfa_arn def test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\" # GIVEN: a valid profile which can switch", "テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\" # GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN: call", "Config にも存在する プロファイルのリストかどうか\"\"\" # GIVEN: Profile に Credentials の値も合わせた ProfileTuple のリストを取得する profile_name_list =", "が int じゃない場合、False が返される def test_user_input_is_not_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int", "for p in perfect_profile_list: if p.aws_secret_access_key is not None: # \") profile_name\" is", "assert type(aws_account_id) == int # テスト ユーザー入力の AWSアカウントID が int じゃない場合、False が返される def", "~/.aws_accounts_for_set_aws_mfa が存在しない場合、False を返す def test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with", "execute prompt_user_selection() prompts.prompt_user_selection(perfect_profile_list) out, err = 
capsys.readouterr() # THEN: prompt usable profile name", "# GIVEN a Profile profile = perfect_profile_list[0] # WHEN create a new aws", "ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"12345\" # GIVEN: Mock user input string", "= data_manager.get_aws_account_id(profile) # THEN: int の aws account id が取得できている assert type(retrieved_aws_account_id) is", "だけを抽出する profile_name_list.append(i.name) for k in credentials_lists: # GIVEN: CredentialsTuple の name だけを抽出する credentials_name_list.append(k.name)", "# テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、False を返す def test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA", "from set_aws_mfa.data import data_manager from set_aws_mfa.helper.helper import IntObject from set_aws_mfa import prompts from", "\"\"\" # GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN: call the function", "THEN: assert data_manager.AWS_IAM_ARN_HEAD_PART assert data_manager.AWS_IAM_ARN_MFA_PART assert profile.name in mfa_arn def test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\"", "called profile = data_manager.get_selected_profile() assert profile == perfect_profile_list[user_input - 1] ######################## # Get", "profile if len(role_for_the_profile_list) != 0: assert role_for_the_profile_list[0].source_profile == profile_which_has_role.name def test_get_profile_instance_for_user_input(perfect_profile_list): # GIVEN:", "assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # テスト該当プロファイルのMFA ARN を取得する def test_get_mfa_arn(perfect_profile_list): # GIVEN: a", "The file is not exist assert not is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する def", "GIVEN: Profile に Credentials の値も合わせた ProfileTuple のリストを取得する profile_name_list = [] credentials_name_list = []", "が返される def test_user_input_is_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock 
user_input_not_int = \"12345\" #", "ProfileTuple profile = perfect_profile_list[0] # WHEN: call the function aws_account_id = data_manager.get_aws_account_id(profile) #", "ユーザー入力の AWSアカウントID が int の場合、True が返される def test_user_input_is_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を", "GIVEN: No info for profile exists in fake AWS_ACCOUNT_FOR_SET_AWS_MFA # GIVEN: 対象 profile", "to input aws account id for the profile prompts.prompt_for_asking_aws_account_id(profile) out, err = capsys.readouterr()", "GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] # GIVEN: 下記aws account id を取得したとする", "it is created data_manager.prepare_aws_account_id_file() # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists =", "Mock user input user_input = 2 monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input) # WHEN: this", "for the profile out, err = capsys.readouterr() print(out.rstrip()) assert profile.name in out.rstrip() assert", "not an int assert is_int # ~/.aws_accounts_for_set_aws_mfa に ユーザー入力の AWSアカウントIDを 記入する def test_writing_aws_account_to_the_file(set_fake_aws_account_files,", "GIVEN: 下記aws account id を取得したとする aws_account_id = 12345 data_manager.create_aws_account_id_file() # WHEN: check the", "profile_obj_list) # THEN: there is some roles related to the profile if len(role_for_the_profile_list)", "assert isinstance(in_both, ProfileTuple) # WHEN: ProfileTuple に aws_secret_access_key がセットされているならば if in_both.aws_secret_access_key is not", "aws_secret_access_key がセットされているならば if in_both.aws_secret_access_key is not None: # THEN: credentials にも config にも、その", "THEN: credentials にも config にも、その profile が存在している assert in_both.name in credentials_name_list assert in_both.name", "ProfileTuple from set_aws_mfa.helper import helper from set_aws_mfa import validate from set_aws_mfa.data import data_manager", "ProfileTuple のリストを取得する profile_name_list = [] credentials_name_list = [] for i in profile_obj_list: #", "= data_manager.get_selected_profile() 
assert profile == perfect_profile_list[user_input - 1] ######################## # Get aws account", "Check a role related to a given profile role_for_the_profile_list = data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list) #", "\"\"\"該当プロフィールと紐づくロールを返す\"\"\" # GIVEN: a valid profile which can switch role # WHEN: Check", "int の場合、True が返される def test_user_input_is_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int =", "# THEN: The file is exist assert is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに 該当ProfileのAWSアカウントID", "for the profile prompts.prompt_for_asking_aws_account_id(profile) out, err = capsys.readouterr() assert profile.name in out.rstrip() assert", "だけを抽出する credentials_name_list.append(k.name) for in_both in perfect_profile_list: assert isinstance(in_both, ProfileTuple) # WHEN: ProfileTuple に", "# GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN: call the function mfa_arn", "+ p.name in out.strip() def test_get_selected_profile(perfect_profile_list, monkeypatch): # GIVEN: perfect profile list #", "path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # WHEN: Check the existence of", "input is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int assert is_int", "test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list): # GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA is changed to fake path # GIVEN:", "err = capsys.readouterr() # THEN: prompt usable profile name for p in perfect_profile_list:", "profile name for p in perfect_profile_list: if p.aws_secret_access_key is not None: # \")", "GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN: call the function aws_account_id =", "the input is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int assert", "of 
AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA", "Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is", "a valid profile which can switch role # WHEN: Check a role related", "ユーザーインプットが integer ではない場合、を Mock aws_account_id_int = \"12345\" # GIVEN: Mock user input string", "profile data_manager.get_aws_account_id(profile) # THEN: Prompt message to ask for input aws account id", "config にも、その profile が存在している assert in_both.name in credentials_name_list assert in_both.name in profile_name_list def", "# GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] # GIVEN: ユーザーインプットが integer ではない場合、を", "Profile profile = perfect_profile_list[0] # WHEN create a new aws account file if", "= [] for i in profile_obj_list: # Given: ProfileTuple の name だけを抽出する profile_name_list.append(i.name)", "aws_account_id) # WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA から該当 profile の aws account id を検索した場合 retrieved_aws_account_id =", "がセットされているならば if in_both.aws_secret_access_key is not None: # THEN: credentials にも config にも、その profile", "in credentials_name_list assert in_both.name in profile_name_list def test_prompt_displays_profile_name(capsys, perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" # GIVEN: get", "assert in_both.name in profile_name_list def test_prompt_displays_profile_name(capsys, perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" # GIVEN: get perfect_profile_list #", "Mock user_input_not_int = \"12345\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _:", "= perfect_profile_list[0] # WHEN: call the function mfa_arn = data_manager.get_mfa_arn(profile) # THEN: assert", "and it is created data_manager.prepare_aws_account_id_file() # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists", "a Profile profile = perfect_profile_list[0] # WHEN create a new aws 
account file", "~/.aws_accounts_for_set_aws_mfa がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\" # GIVEN: a ProfileTuple profile = perfect_profile_list[0] #", "if not validate.check_aws_accounts_for_set_aws_mfa_existence(): data_manager.create_aws_account_id_file() else: # そのファイルが既に存在していた場合、書き込みをせずに raise raise # THEN: ask to", "import randint from set_aws_mfa.data.data_manager import ProfileTuple from set_aws_mfa.helper import helper from set_aws_mfa import", "existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is not exist", "perfect_profile_list, capsys): # GIVEN a Profile profile = perfect_profile_list[0] # WHEN create a", "GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] # GIVEN: ユーザーインプットが integer ではない場合、を Mock", "# テスト該当プロファイルのMFA ARN を取得する def test_get_mfa_arn(perfect_profile_list): # GIVEN: a ProfileTuple profile = perfect_profile_list[0]", "def test_get_profile_instance_for_user_input(perfect_profile_list): # GIVEN: validated input num validated_input = randint(1, len(perfect_profile_list)) # WHEN:", "def test_get_mfa_arn(perfect_profile_list): # GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN: call the", "credentials_lists: # GIVEN: CredentialsTuple の name だけを抽出する credentials_name_list.append(k.name) for in_both in perfect_profile_list: assert", "perfect_profile_list[0] # GIVEN: ユーザーインプットが integer ではない場合、を Mock aws_account_id_int = \"12345\" # GIVEN: Mock", "test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list, create_fake_aws_account_files, delete_fake_aws_account_files, capsys, monkeypatch): # GIVEN: Create fake AWS_ACCOUNT_FOR_data_manager # GIVEN:", "out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # テスト該当プロファイルのMFA ARN を取得する def test_get_mfa_arn(perfect_profile_list): # GIVEN:", "helper.is_input_int_loop(IntObject(), 
data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int assert is_int # ~/.aws_accounts_for_set_aws_mfa に", "assert data_manager.AWS_IAM_ARN_HEAD_PART assert data_manager.AWS_IAM_ARN_MFA_PART assert profile.name in mfa_arn def test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\" #", "is not None: # THEN: credentials にも config にも、その profile が存在している assert in_both.name", "in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # ~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する", "= \"12345\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int) #", "Create fake AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file() # GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] #", "import prompts from tests.conftest import BUILTIN_INPUTS ######################## # Get profiles ######################## # 1.", "= perfect_profile_list[0] # WHEN create a new aws account file if not validate.check_aws_accounts_for_set_aws_mfa_existence():", "AWS_ACCOUNT_FOR_SET_AWS_MFA is not exist # WHEN: Try to prepare AWS_ACCOUNT_FOR_SET_AWS_MFA and it is", "\"\"\"注意: ~/.aws_accounts_for_set_aws_mfa がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\" # GIVEN: a ProfileTuple profile = perfect_profile_list[0]", "delete_fake_aws_account_files, capsys, monkeypatch): # GIVEN: Create fake AWS_ACCOUNT_FOR_data_manager # GIVEN: No info for", "profile を指定する profile = perfect_profile_list[0] # GIVEN: ユーザーインプットが integer ではない場合、を Mock aws_account_id_int =", "# GIVEN: validated input num validated_input = randint(1, len(perfect_profile_list)) # WHEN: get profile", "a ProfileTuple profile = perfect_profile_list[0] # WHEN: call the function mfa_arn = data_manager.get_mfa_arn(profile)", "info for the given profile 
data_manager.get_aws_account_id(profile) # THEN: Prompt message to ask for", "err = capsys.readouterr() assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER", "not None: # \") profile_name\" is included in stdout assert \") \" +", "AWS_ACCOUNT_FOR_SET_AWS_MFA is changed to fake path # GIVEN: Create fake AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file() #", "# GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] # GIVEN: 下記aws account id", "の aws account id を検索した場合 retrieved_aws_account_id = data_manager.get_aws_account_id(profile) # THEN: int の aws", "GIVEN: ユーザーインプットが integer ではない場合、を Mock aws_account_id_int = \"12345\" # GIVEN: Mock user input", "# THEN: ask to input aws account id for the profile prompts.prompt_for_asking_aws_account_id(profile) out,", "~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with", "THEN: ask to input aws account id for the profile prompts.prompt_for_asking_aws_account_id(profile) out, err", "aws account id を検索した場合 retrieved_aws_account_id = data_manager.get_aws_account_id(profile) # THEN: int の aws account", "is int # テスト ~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list, create_fake_aws_account_files, delete_fake_aws_account_files, capsys, monkeypatch):", "call the function mfa_arn = data_manager.get_mfa_arn(profile) # THEN: assert data_manager.AWS_IAM_ARN_HEAD_PART assert data_manager.AWS_IAM_ARN_MFA_PART assert", "out.strip() def test_get_selected_profile(perfect_profile_list, monkeypatch): # GIVEN: perfect profile list # GIVEN: Mock user", "account id for the profile 
prompts.prompt_for_asking_aws_account_id(profile) out, err = capsys.readouterr() assert profile.name in", "上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\" # GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN: call the", "# GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN: call the function aws_account_id", "p in perfect_profile_list: if p.aws_secret_access_key is not None: # \") profile_name\" is included", "WHEN: this function is called profile = data_manager.get_selected_profile() assert profile == perfect_profile_list[user_input -", "# GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # WHEN: Check", "の aws account id が取得できている assert type(retrieved_aws_account_id) is int # テスト ~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める", "# テスト ~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list, create_fake_aws_account_files, delete_fake_aws_account_files, capsys, monkeypatch): # GIVEN:", "# GIVEN: CredentialsTuple の name だけを抽出する credentials_name_list.append(k.name) for in_both in perfect_profile_list: assert isinstance(in_both,", "= perfect_profile_list[0] # GIVEN: ユーザーインプットが integer ではない場合、を Mock aws_account_id_int = \"12345\" # GIVEN:", "print(out.rstrip()) assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip()", "profile_which_has_role.name def test_get_profile_instance_for_user_input(perfect_profile_list): # GIVEN: validated input num validated_input = randint(1, len(perfect_profile_list)) #", "# THEN: prompt usable profile name for p in perfect_profile_list: if p.aws_secret_access_key is", "user_input) # WHEN: this function is called profile = data_manager.get_selected_profile() assert profile ==", "id for the profile 
prompts.prompt_for_asking_aws_account_id(profile) out, err = capsys.readouterr() assert profile.name in out.rstrip()", "info for the given profile data_manager.writing_aws_account_to_the_file(profile, aws_account_id) # WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA から該当 profile の", "profile_name_list.append(i.name) for k in credentials_lists: # GIVEN: CredentialsTuple の name だけを抽出する credentials_name_list.append(k.name) for", "から該当ProfileのAWSアカウントIDを取得する def test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意: ~/.aws_accounts_for_set_aws_mfa がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\" # GIVEN: a ProfileTuple", "input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int) # WHEN: Validate the input is_int =", "for the given profile data_manager.get_aws_account_id(profile) # THEN: Prompt message to ask for input", "in fake AWS_ACCOUNT_FOR_SET_AWS_MFA # GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] # GIVEN:", "in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # テスト該当プロファイルのMFA ARN", "role # WHEN: Check a role related to a given profile role_for_the_profile_list =", "from set_aws_mfa.helper.helper import IntObject from set_aws_mfa import prompts from tests.conftest import BUILTIN_INPUTS ########################", "# テスト ユーザー入力の AWSアカウントID が int じゃない場合、False が返される def test_user_input_is_not_int(monkeypatch): # GIVEN: ユーザーインプットが", "in profile_obj_list: # Given: ProfileTuple の name だけを抽出する profile_name_list.append(i.name) for k in credentials_lists:", "existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is exist assert", "== perfect_profile_list[user_input - 1] ######################## # Get aws account info ######################## # テスト", "prompt_user_selection() 
prompts.prompt_user_selection(perfect_profile_list) out, err = capsys.readouterr() # THEN: prompt usable profile name for", "perfect_profile_list): \"\"\"テスト: 取得したリストは、Credential にも Config にも存在する プロファイルのリストかどうか\"\"\" # GIVEN: Profile に Credentials の値も合わせた", "# GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"hogehoge\" # GIVEN: Mock user", "GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # GIVEN: the path", "prepare AWS_ACCOUNT_FOR_SET_AWS_MFA and it is created data_manager.prepare_aws_account_id_file() # WHEN: Check the existence of", "test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list, capsys): # GIVEN a Profile profile = perfect_profile_list[0] # WHEN", "AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is not exist assert not", "test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path #", "user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int) # WHEN: Validate the input is_int", "# -*- coding: utf-8 -*- from random import randint from set_aws_mfa.data.data_manager import ProfileTuple", "0: assert role_for_the_profile_list[0].source_profile == profile_which_has_role.name def test_get_profile_instance_for_user_input(perfect_profile_list): # GIVEN: validated input num validated_input", "replaced with fake path # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA is not exist", "AWS_ACCOUNT_FOR_data_manager # GIVEN: No info for profile exists in fake AWS_ACCOUNT_FOR_SET_AWS_MFA # GIVEN:", "id を取得したとする aws_account_id = 12345 data_manager.create_aws_account_id_file() # WHEN: check the existence of info", "WHEN: check the existence of info for the given profile data_manager.get_aws_account_id(profile) # THEN:", "not exist # WHEN: Try to prepare 
AWS_ACCOUNT_FOR_SET_AWS_MFA and it is created data_manager.prepare_aws_account_id_file()", "# GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA is not exist # WHEN: Try to", "AWS_ACCOUNT_FOR_SET_AWS_MFA # GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] # GIVEN: ユーザーインプットが integer", "file is not exist assert not is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files,", "in_both.name in profile_name_list def test_prompt_displays_profile_name(capsys, perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" # GIVEN: get perfect_profile_list # WHEN:", "p.aws_secret_access_key is not None: # \") profile_name\" is included in stdout assert \")", "# WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The", "The file is exist assert is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに 該当ProfileのAWSアカウントID の入力を求める def", "the given profile data_manager.writing_aws_account_to_the_file(profile, aws_account_id) # WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA から該当 profile の aws account", "is not None: # \") profile_name\" is included in stdout assert \") \"", "# GIVEN: perfect profile list # GIVEN: Mock user input user_input = 2", "test_get_selected_profile(perfect_profile_list, monkeypatch): # GIVEN: perfect profile list # GIVEN: Mock user input user_input", "retrieved_aws_account_id = data_manager.get_aws_account_id(profile) # THEN: int の aws account id が取得できている assert type(retrieved_aws_account_id)", "data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int assert is_int # ~/.aws_accounts_for_set_aws_mfa に ユーザー入力の", "set_aws_mfa.data.data_manager import ProfileTuple from set_aws_mfa.helper import helper from set_aws_mfa import validate from set_aws_mfa.data", "\") profile_name\" is included in stdout assert \") \" + p.name in out.strip()", "profile = perfect_profile_list[0] 
# GIVEN: 下記aws account id を取得したとする aws_account_id = 12345 data_manager.create_aws_account_id_file()", "aws account id for the profile out, err = capsys.readouterr() print(out.rstrip()) assert profile.name", "randint(1, len(perfect_profile_list)) # WHEN: get profile instance for the input number profile_instance =", "test_get_mfa_arn(perfect_profile_list): # GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN: call the function", "AWSアカウントID が int の場合、True が返される def test_user_input_is_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock", "profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # ~/.aws_accounts_for_set_aws_mfa", "the profile if len(role_for_the_profile_list) != 0: assert role_for_the_profile_list[0].source_profile == profile_which_has_role.name def test_get_profile_instance_for_user_input(perfect_profile_list): #", "= capsys.readouterr() print(out.rstrip()) assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER", "perfect_profile_list: if p.aws_secret_access_key is not None: # \") profile_name\" is included in stdout", "the function aws_account_id = data_manager.get_aws_account_id(profile) # THEN: assert type(aws_account_id) == int # テスト", "else: # そのファイルが既に存在していた場合、書き込みをせずに raise raise # THEN: ask to input aws account id", "capsys.readouterr() print(out.rstrip()) assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in", "の入力を求める def test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list, capsys): # GIVEN a Profile profile = perfect_profile_list[0]", "err = capsys.readouterr() print(out.rstrip()) assert 
profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert", "in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # テスト該当プロファイルのMFA ARN を取得する def test_get_mfa_arn(perfect_profile_list): #", "fake AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file() # GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] # GIVEN:", "# Given: ProfileTuple の name だけを抽出する profile_name_list.append(i.name) for k in credentials_lists: # GIVEN:", "for the given profile data_manager.writing_aws_account_to_the_file(profile, aws_account_id) # WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA から該当 profile の aws", "取得したリストは、Credential にも Config にも存在する プロファイルのリストかどうか\"\"\" # GIVEN: Profile に Credentials の値も合わせた ProfileTuple のリストを取得する", "= 2 monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input) # WHEN: this function is called profile", "lambda _: user_input_not_int) # WHEN: Validate the input is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) #", "is some roles related to the profile if len(role_for_the_profile_list) != 0: assert role_for_the_profile_list[0].source_profile", "the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is not", "THEN: Prompt message to ask for input aws account id for the profile", "ProfileTuple の name だけを抽出する profile_name_list.append(i.name) for k in credentials_lists: # GIVEN: CredentialsTuple の", "# GIVEN: Mock user input user_input = 2 monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input) #", "を取得する def test_get_mfa_arn(perfect_profile_list): # GIVEN: a ProfileTuple profile = perfect_profile_list[0] # WHEN: call", "if in_both.aws_secret_access_key is not None: # THEN: credentials にも config にも、その profile が存在している", "= capsys.readouterr() assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in 
out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in", "aws account file if not validate.check_aws_accounts_for_set_aws_mfa_existence(): data_manager.create_aws_account_id_file() else: # そのファイルが既に存在していた場合、書き込みをせずに raise raise #", "テスト該当プロファイルのMFA ARN を取得する def test_get_mfa_arn(perfect_profile_list): # GIVEN: a ProfileTuple profile = perfect_profile_list[0] #", "######################## # Get aws account info ######################## # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、False を返す def", "テスト ユーザー入力の AWSアカウントID が int の場合、True が返される def test_user_input_is_int(monkeypatch): # GIVEN: ユーザーインプットが integer", "to prepare AWS_ACCOUNT_FOR_SET_AWS_MFA and it is created data_manager.prepare_aws_account_id_file() # WHEN: Check the existence", "name for p in perfect_profile_list: if p.aws_secret_access_key is not None: # \") profile_name\"", "data_manager.writing_aws_account_to_the_file(profile, aws_account_id) # WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA から該当 profile の aws account id を検索した場合 retrieved_aws_account_id", "account id for the profile out, err = capsys.readouterr() print(out.rstrip()) assert profile.name in", "# WHEN: Validate the input is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not", "import data_manager from set_aws_mfa.helper.helper import IntObject from set_aws_mfa import prompts from tests.conftest import", "# テスト ~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに 該当ProfileのAWSアカウントID の入力を求める def test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list, capsys): # GIVEN", "perfect_profile_list[0] # GIVEN: 下記aws account id を取得したとする aws_account_id = 12345 data_manager.create_aws_account_id_file() # WHEN:", "credentials_name_list assert in_both.name in profile_name_list def test_prompt_displays_profile_name(capsys, perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" # GIVEN: get perfect_profile_list", "exist 
assert is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに 該当ProfileのAWSアカウントID の入力を求める def test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list,", "を返す def test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path", "is exist assert is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに 該当ProfileのAWSアカウントID の入力を求める def test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files,", "set_aws_mfa import prompts from tests.conftest import BUILTIN_INPUTS ######################## # Get profiles ######################## #", "# ~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する def test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意: ~/.aws_accounts_for_set_aws_mfa がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\" # GIVEN:", "data_manager.prepare_aws_account_id_file() # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN:", "fake AWS_ACCOUNT_FOR_SET_AWS_MFA # GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] # GIVEN: ユーザーインプットが", "assert role_for_the_profile_list[0].source_profile == profile_which_has_role.name def test_get_profile_instance_for_user_input(perfect_profile_list): # GIVEN: validated input num validated_input =", "Profile に Credentials の値も合わせた ProfileTuple のリストを取得する profile_name_list = [] credentials_name_list = [] for", "_: user_input) # WHEN: this function is called profile = data_manager.get_selected_profile() assert profile", "profile が存在している assert in_both.name in credentials_name_list assert in_both.name in profile_name_list def test_prompt_displays_profile_name(capsys, perfect_profile_list):", "from set_aws_mfa import prompts from tests.conftest 
import BUILTIN_INPUTS ######################## # Get profiles ########################", "profile role_for_the_profile_list = data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list) # THEN: there is some roles related to", "an int assert is_int # ~/.aws_accounts_for_set_aws_mfa に ユーザー入力の AWSアカウントIDを 記入する def test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files,", "AWS_ACCOUNT_FOR_SET_AWS_MFA から該当 profile の aws account id を検索した場合 retrieved_aws_account_id = data_manager.get_aws_account_id(profile) # THEN:", "from set_aws_mfa import validate from set_aws_mfa.data import data_manager from set_aws_mfa.helper.helper import IntObject from", "file if not validate.check_aws_accounts_for_set_aws_mfa_existence(): data_manager.create_aws_account_id_file() else: # そのファイルが既に存在していた場合、書き込みをせずに raise raise # THEN: ask", "テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、False を返す def test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced", "data_manager.create_aws_account_id_file() else: # そのファイルが既に存在していた場合、書き込みをせずに raise raise # THEN: ask to input aws account", "input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: aws_account_id_int) # WHEN: check the existence of info", "create_fake_aws_account_files, delete_fake_aws_account_files, capsys, monkeypatch): # GIVEN: Create fake AWS_ACCOUNT_FOR_data_manager # GIVEN: No info", "is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに 該当ProfileのAWSアカウントID の入力を求める def test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list, capsys): #", "に Credentials の値も合わせた ProfileTuple のリストを取得する profile_name_list = [] credentials_name_list = [] for i", "is_int # ~/.aws_accounts_for_set_aws_mfa に ユーザー入力の AWSアカウントIDを 記入する def test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files, 
perfect_profile_list): # GIVEN:", "######################## # 1. config, credentials 両方にいる profile に、credentials の値を合体させたリストを取得する def test_get_perfect_profile_list(profile_obj_list, credentials_lists, perfect_profile_list):", "the profile out, err = capsys.readouterr() print(out.rstrip()) assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE", "import helper from set_aws_mfa import validate from set_aws_mfa.data import data_manager from set_aws_mfa.helper.helper import", "aws_account_id_int = \"12345\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: aws_account_id_int)", "prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # テスト該当プロファイルのMFA ARN を取得する def test_get_mfa_arn(perfect_profile_list):", "のリストを取得する profile_name_list = [] credentials_name_list = [] for i in profile_obj_list: # Given:", "len(role_for_the_profile_list) != 0: assert role_for_the_profile_list[0].source_profile == profile_which_has_role.name def test_get_profile_instance_for_user_input(perfect_profile_list): # GIVEN: validated input", "fake path # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() #", "# WHEN: get profile instance for the input number profile_instance = data_manager.get_specified_profile( perfect_profile_list,", "THEN: prompt usable profile name for p in perfect_profile_list: if p.aws_secret_access_key is not", "ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"hogehoge\" # GIVEN: Mock user input string", "-*- from random import randint from set_aws_mfa.data.data_manager import ProfileTuple from set_aws_mfa.helper import helper", "if p.aws_secret_access_key is not None: # \") profile_name\" is included in stdout assert", "role_for_the_profile_list[0].source_profile == profile_which_has_role.name def 
test_get_profile_instance_for_user_input(perfect_profile_list): # GIVEN: validated input num validated_input = randint(1,", "assert is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに 該当ProfileのAWSアカウントID の入力を求める def test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list, capsys):", "fake path # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA is not exist # WHEN:", "account file if not validate.check_aws_accounts_for_set_aws_mfa_existence(): data_manager.create_aws_account_id_file() else: # そのファイルが既に存在していた場合、書き込みをせずに raise raise # THEN:", "~/.aws_accounts_for_set_aws_mfa に ユーザー入力の AWSアカウントIDを 記入する def test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list): # GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA is", "def test_get_selected_profile(perfect_profile_list, monkeypatch): # GIVEN: perfect profile list # GIVEN: Mock user input", "_: user_input_not_int) # WHEN: Validate the input is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN:", "related to a given profile role_for_the_profile_list = data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list) # THEN: there is", "aws account id が取得できている assert type(retrieved_aws_account_id) is int # テスト ~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def", "test_prompt_displays_profile_name(capsys, perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" # GIVEN: get perfect_profile_list # WHEN: execute prompt_user_selection() prompts.prompt_user_selection(perfect_profile_list) out,", "作成後、ユーザーに 該当ProfileのAWSアカウントID の入力を求める def test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list, capsys): # GIVEN a Profile profile", "== int # テスト ユーザー入力の AWSアカウントID が int じゃない場合、False が返される def 
test_user_input_is_not_int(monkeypatch): #", "profile = data_manager.get_selected_profile() assert profile == perfect_profile_list[user_input - 1] ######################## # Get aws", "perfect_profile_list): # GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA is changed to fake path # GIVEN: Create fake", "given profile data_manager.writing_aws_account_to_the_file(profile, aws_account_id) # WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA から該当 profile の aws account id", "= randint(1, len(perfect_profile_list)) # WHEN: get profile instance for the input number profile_instance", "# THEN: there is some roles related to the profile if len(role_for_the_profile_list) !=", "created data_manager.prepare_aws_account_id_file() # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() #", "coding: utf-8 -*- from random import randint from set_aws_mfa.data.data_manager import ProfileTuple from set_aws_mfa.helper", "info for profile exists in fake AWS_ACCOUNT_FOR_SET_AWS_MFA # GIVEN: 対象 profile を指定する profile", "mfa_arn = data_manager.get_mfa_arn(profile) # THEN: assert data_manager.AWS_IAM_ARN_HEAD_PART assert data_manager.AWS_IAM_ARN_MFA_PART assert profile.name in mfa_arn", "if len(role_for_the_profile_list) != 0: assert role_for_the_profile_list[0].source_profile == profile_which_has_role.name def test_get_profile_instance_for_user_input(perfect_profile_list): # GIVEN: validated", "1] ######################## # Get aws account info ######################## # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、False を返す", "IntObject from set_aws_mfa import prompts from tests.conftest import BUILTIN_INPUTS ######################## # Get profiles", "of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is exist assert is_the_file_exists", "integer ではない場合、を Mock user_input_not_int = \"hogehoge\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS,", 
"ProfileTuple profile = perfect_profile_list[0] # WHEN: call the function mfa_arn = data_manager.get_mfa_arn(profile) #", "# GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # GIVEN: the", "テスト ユーザー入力の AWSアカウントID が int じゃない場合、False が返される def test_user_input_is_not_int(monkeypatch): # GIVEN: ユーザーインプットが integer", "# ~/.aws_accounts_for_set_aws_mfa に ユーザー入力の AWSアカウントIDを 記入する def test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list): # GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA", "[] for i in profile_obj_list: # Given: ProfileTuple の name だけを抽出する profile_name_list.append(i.name) for", "assert profile == perfect_profile_list[user_input - 1] ######################## # Get aws account info ########################", "GIVEN: Create fake AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file() # GIVEN: 対象 profile を指定する profile = perfect_profile_list[0]", "to a given profile role_for_the_profile_list = data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list) # THEN: there is some", "get perfect_profile_list # WHEN: execute prompt_user_selection() prompts.prompt_user_selection(perfect_profile_list) out, err = capsys.readouterr() # THEN:", "perfect_profile_list: assert isinstance(in_both, ProfileTuple) # WHEN: ProfileTuple に aws_secret_access_key がセットされているならば if in_both.aws_secret_access_key is", "# GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int) # WHEN: Validate", "a role related to a given profile role_for_the_profile_list = data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list) # THEN:", "in credentials_lists: # GIVEN: CredentialsTuple の name だけを抽出する credentials_name_list.append(k.name) for in_both in perfect_profile_list:", "が取得できている assert type(retrieved_aws_account_id) is int # テスト ~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def 
test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list, create_fake_aws_account_files,", "python3 # -*- coding: utf-8 -*- from random import randint from set_aws_mfa.data.data_manager import", "test_no_aws_accounts_for_set_aws_mfa_returns_false(set_fake_aws_account_files): # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # WHEN:", "WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file", "in out.rstrip() # テスト該当プロファイルのMFA ARN を取得する def test_get_mfa_arn(perfect_profile_list): # GIVEN: a ProfileTuple profile", "int # テスト ~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list, create_fake_aws_account_files, delete_fake_aws_account_files, capsys, monkeypatch): #", "of info for the given profile data_manager.writing_aws_account_to_the_file(profile, aws_account_id) # WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA から該当 profile", "- 1] ######################## # Get aws account info ######################## # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、False", "out, err = capsys.readouterr() assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert", "int じゃない場合、False が返される def test_user_input_is_not_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int =", "# テスト ユーザー入力の AWSアカウントID が int の場合、True が返される def test_user_input_is_int(monkeypatch): # GIVEN: ユーザーインプットが", "int assert is_int # ~/.aws_accounts_for_set_aws_mfa に ユーザー入力の AWSアカウントIDを 記入する def test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list):", "profile の aws account id を検索した場合 retrieved_aws_account_id = data_manager.get_aws_account_id(profile) # THEN: int の", "which can switch role # 
WHEN: Check a role related to a given", "GIVEN: get perfect_profile_list # WHEN: execute prompt_user_selection() prompts.prompt_user_selection(perfect_profile_list) out, err = capsys.readouterr() #", "can switch role # WHEN: Check a role related to a given profile", "function aws_account_id = data_manager.get_aws_account_id(profile) # THEN: assert type(aws_account_id) == int # テスト ユーザー入力の", "data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int assert not is_int # テスト ユーザー入力の", "of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA is", "GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # WHEN: Check the", "Validate the input is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int", "\"hogehoge\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int) # WHEN:", "None: # THEN: credentials にも config にも、その profile が存在している assert in_both.name in credentials_name_list", "に ユーザー入力の AWSアカウントIDを 記入する def test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list): # GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA is changed", "# そのファイルが既に存在していた場合、書き込みをせずに raise raise # THEN: ask to input aws account id for", "aws_account_id = 12345 data_manager.create_aws_account_id_file() # WHEN: check the existence of info for the", "some roles related to the profile if len(role_for_the_profile_list) != 0: assert role_for_the_profile_list[0].source_profile ==", "THEN: It's not an int assert not is_int # テスト ユーザー入力の AWSアカウントID が", "test_get_profile_instance_for_user_input(perfect_profile_list): # GIVEN: validated input num validated_input = randint(1, len(perfect_profile_list)) # WHEN: get", "a new aws account file if not validate.check_aws_accounts_for_set_aws_mfa_existence(): data_manager.create_aws_account_id_file() else: # 
そのファイルが既に存在していた場合、書き込みをせずに raise", "# GIVEN: Create fake AWS_ACCOUNT_FOR_data_manager # GIVEN: No info for profile exists in", "credentials_name_list = [] for i in profile_obj_list: # Given: ProfileTuple の name だけを抽出する", "def test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\" # GIVEN: a valid profile which can switch role", "of info for the given profile data_manager.get_aws_account_id(profile) # THEN: Prompt message to ask", "ではない場合、を Mock user_input_not_int = \"12345\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda", "for profile exists in fake AWS_ACCOUNT_FOR_SET_AWS_MFA # GIVEN: 対象 profile を指定する profile =", "prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # テスト該当プロファイルのMFA ARN を取得する def test_get_mfa_arn(perfect_profile_list): # GIVEN: a ProfileTuple", "validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is exist assert is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに", "data_manager.create_aws_account_id_file() # GIVEN: 対象 profile を指定する profile = perfect_profile_list[0] # GIVEN: 下記aws account", "2 monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input) # WHEN: this function is called profile =", "out, err = capsys.readouterr() print(out.rstrip()) assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip()", "from set_aws_mfa.helper import helper from set_aws_mfa import validate from set_aws_mfa.data import data_manager from", "~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list, create_fake_aws_account_files, delete_fake_aws_account_files, capsys, monkeypatch): # GIVEN: Create fake", "じゃない場合、False が返される def test_user_input_is_not_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"hogehoge\"", "# GIVEN: 
ユーザーインプットが integer ではない場合、を Mock aws_account_id_int = \"12345\" # GIVEN: Mock user", "profile.name in mfa_arn def test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\" # GIVEN: a valid profile which", "profile exists in fake AWS_ACCOUNT_FOR_SET_AWS_MFA # GIVEN: 対象 profile を指定する profile = perfect_profile_list[0]", "user input user_input = 2 monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input) # WHEN: this function", "lambda _: aws_account_id_int) # WHEN: check the existence of info for the given", "from set_aws_mfa.data.data_manager import ProfileTuple from set_aws_mfa.helper import helper from set_aws_mfa import validate from", "perfect_profile_list[0] # WHEN: call the function aws_account_id = data_manager.get_aws_account_id(profile) # THEN: assert type(aws_account_id)", "a ProfileTuple profile = perfect_profile_list[0] # WHEN: call the function aws_account_id = data_manager.get_aws_account_id(profile)", "monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int) # WHEN: Validate the input is_int = helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE)", "existence of info for the given profile data_manager.writing_aws_account_to_the_file(profile, aws_account_id) # WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA から該当", "AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # GIVEN: the path of AWS_ACCOUNT_FOR_SET_AWS_MFA is not", "対象 profile を指定する profile = perfect_profile_list[0] # GIVEN: ユーザーインプットが integer ではない場合、を Mock aws_account_id_int", "= perfect_profile_list[0] # WHEN: call the function aws_account_id = data_manager.get_aws_account_id(profile) # THEN: assert", "テスト ~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list, create_fake_aws_account_files, delete_fake_aws_account_files, capsys, monkeypatch): # GIVEN: Create", "WHEN: execute 
prompt_user_selection() prompts.prompt_user_selection(perfect_profile_list) out, err = capsys.readouterr() # THEN: prompt usable profile", "\"12345\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: aws_account_id_int) # WHEN:", "Credentials の値も合わせた ProfileTuple のリストを取得する profile_name_list = [] credentials_name_list = [] for i in", "# GIVEN: get perfect_profile_list # WHEN: execute prompt_user_selection() prompts.prompt_user_selection(perfect_profile_list) out, err = capsys.readouterr()", "import BUILTIN_INPUTS ######################## # Get profiles ######################## # 1. config, credentials 両方にいる profile", "の name だけを抽出する profile_name_list.append(i.name) for k in credentials_lists: # GIVEN: CredentialsTuple の name", "# THEN: It's not an int assert is_int # ~/.aws_accounts_for_set_aws_mfa に ユーザー入力の AWSアカウントIDを", "from tests.conftest import BUILTIN_INPUTS ######################## # Get profiles ######################## # 1. config, credentials", "randint from set_aws_mfa.data.data_manager import ProfileTuple from set_aws_mfa.helper import helper from set_aws_mfa import validate", "new aws account file if not validate.check_aws_accounts_for_set_aws_mfa_existence(): data_manager.create_aws_account_id_file() else: # そのファイルが既に存在していた場合、書き込みをせずに raise raise", "ユーザー入力の AWSアカウントIDを 記入する def test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list): # GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA is changed to", "user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: aws_account_id_int) # WHEN: check the existence of", "i in profile_obj_list: # Given: ProfileTuple の name だけを抽出する profile_name_list.append(i.name) for k in", "GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int = \"12345\" # GIVEN: Mock user input", "ProfileTuple) # WHEN: ProfileTuple に aws_secret_access_key がセットされているならば if in_both.aws_secret_access_key is not None: #", "Mock user input string 
monkeypatch.setattr(BUILTIN_INPUTS, lambda _: aws_account_id_int) # WHEN: check the existence", "assert profile.name in mfa_arn def test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\" # GIVEN: a valid profile", "def test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list, capsys): # GIVEN a Profile profile = perfect_profile_list[0] #", "が int の場合、True が返される def test_user_input_is_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を Mock user_input_not_int", "= 12345 data_manager.create_aws_account_id_file() # WHEN: check the existence of info for the given", "in stdout assert \") \" + p.name in out.strip() def test_get_selected_profile(perfect_profile_list, monkeypatch): #", "THEN: int の aws account id が取得できている assert type(retrieved_aws_account_id) is int # テスト", "monkeypatch.setattr(BUILTIN_INPUTS, lambda _: aws_account_id_int) # WHEN: check the existence of info for the", "in mfa_arn def test_get_role_for_a_base_profile(profile_which_has_role, profile_obj_list): \"\"\"該当プロフィールと紐づくロールを返す\"\"\" # GIVEN: a valid profile which can", "def test_get_perfect_profile_list(profile_obj_list, credentials_lists, perfect_profile_list): \"\"\"テスト: 取得したリストは、Credential にも Config にも存在する プロファイルのリストかどうか\"\"\" # GIVEN: Profile", "WHEN: check the existence of info for the given profile data_manager.writing_aws_account_to_the_file(profile, aws_account_id) #", "WHEN: Check a role related to a given profile role_for_the_profile_list = data_manager.get_role_list_for_a_profile(profile_which_has_role, profile_obj_list)", "monkeypatch): # GIVEN: perfect profile list # GIVEN: Mock user input user_input =", "THEN: The file is exist assert is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa 作成後、ユーザーに 該当ProfileのAWSアカウントID の入力を求める", "test_get_perfect_profile_list(profile_obj_list, credentials_lists, perfect_profile_list): \"\"\"テスト: 取得したリストは、Credential にも Config 
にも存在する プロファイルのリストかどうか\"\"\" # GIVEN: Profile に", "を指定する profile = perfect_profile_list[0] # GIVEN: ユーザーインプットが integer ではない場合、を Mock aws_account_id_int = \"12345\"", "GIVEN: a valid profile which can switch role # WHEN: Check a role", "instance for the input number profile_instance = data_manager.get_specified_profile( perfect_profile_list, validated_input) # THEN: assert", "for input aws account id for the profile out, err = capsys.readouterr() print(out.rstrip())", "= helper.is_input_int_loop(IntObject(), data_manager.ASKING_AWS_ACCOUNT_ID_INPUT_MESSAGE) # THEN: It's not an int assert is_int # ~/.aws_accounts_for_set_aws_mfa", "ユーザー入力の AWSアカウントID が int じゃない場合、False が返される def test_user_input_is_not_int(monkeypatch): # GIVEN: ユーザーインプットが integer ではない場合、を", "prompt usable profile name for p in perfect_profile_list: if p.aws_secret_access_key is not None:", "name だけを抽出する profile_name_list.append(i.name) for k in credentials_lists: # GIVEN: CredentialsTuple の name だけを抽出する", "type(retrieved_aws_account_id) is int # テスト ~/.aws_accounts_for_data_manager はするが、該当ProfileのAWSアカウントIDが存在しない場合にユーザーに入力を求める def test_no_aws_account_id_for_given_profile_prompts_msg(set_fake_aws_account_files, perfect_profile_list, create_fake_aws_account_files, delete_fake_aws_account_files, capsys,", "related to the profile if len(role_for_the_profile_list) != 0: assert role_for_the_profile_list[0].source_profile == profile_which_has_role.name def", "the path of AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # WHEN: Check the existence", "# THEN: Prompt message to ask for input aws account id for the", "path of AWS_ACCOUNT_FOR_SET_AWS_MFA is not exist # WHEN: Try to prepare AWS_ACCOUNT_FOR_SET_AWS_MFA and", "WHEN: AWS_ACCOUNT_FOR_SET_AWS_MFA から該当 profile の aws account id を検索した場合 retrieved_aws_account_id = data_manager.get_aws_account_id(profile) #", "not is_int # テスト ユーザー入力の AWSアカウントID が int の場合、True が返される def test_user_input_is_int(monkeypatch): #", "user_input_not_int = \"12345\" # GIVEN: 
Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int)", "Prompt message to ask for input aws account id for the profile out,", "[] credentials_name_list = [] for i in profile_obj_list: # Given: ProfileTuple の name", "\" + p.name in out.strip() def test_get_selected_profile(perfect_profile_list, monkeypatch): # GIVEN: perfect profile list", "test_get_aws_account_id_for_the_profile(perfect_profile_list): \"\"\"注意: ~/.aws_accounts_for_set_aws_mfa がローカルにない場合、 テスト対象のツール使用時には該当ファイルがない場合は生成、入力がなされるが、 上記生成を行う前にこのテストは実施した際はテストに失敗する \"\"\" # GIVEN: a ProfileTuple profile =", "AWSアカウントIDを 記入する def test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list): # GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA is changed to fake", "is not exist assert not is_the_file_exists # テスト ~/.aws_accounts_for_set_aws_mfa が存在しない場合、作成する def test_create_aws_accounts_for_set_aws_mfa(set_fake_aws_account_files, delete_fake_aws_account_files):", "記入する def test_writing_aws_account_to_the_file(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list): # GIVEN: AWS_ACCOUNT_FOR_SET_AWS_MFA is changed to fake path", "aws_account_id_int) # WHEN: check the existence of info for the given profile data_manager.get_aws_account_id(profile)", "両方にいる profile に、credentials の値を合体させたリストを取得する def test_get_perfect_profile_list(profile_obj_list, credentials_lists, perfect_profile_list): \"\"\"テスト: 取得したリストは、Credential にも Config にも存在する", "profiles ######################## # 1. 
config, credentials 両方にいる profile に、credentials の値を合体させたリストを取得する def test_get_perfect_profile_list(profile_obj_list, credentials_lists,", "is not exist # WHEN: Try to prepare AWS_ACCOUNT_FOR_SET_AWS_MFA and it is created", "GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: aws_account_id_int) # WHEN: check the", "check the existence of info for the given profile data_manager.get_aws_account_id(profile) # THEN: Prompt", "get profile instance for the input number profile_instance = data_manager.get_specified_profile( perfect_profile_list, validated_input) #", "perfect_profile_list[user_input - 1] ######################## # Get aws account info ######################## # テスト ~/.aws_accounts_for_set_aws_mfa", "の name だけを抽出する credentials_name_list.append(k.name) for in_both in perfect_profile_list: assert isinstance(in_both, ProfileTuple) # WHEN:", "in profile_name_list def test_prompt_displays_profile_name(capsys, perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" # GIVEN: get perfect_profile_list # WHEN: execute", "data_manager.get_aws_account_id(profile) # THEN: int の aws account id が取得できている assert type(retrieved_aws_account_id) is int", "set_aws_mfa.helper.helper import IntObject from set_aws_mfa import prompts from tests.conftest import BUILTIN_INPUTS ######################## #", "isinstance(in_both, ProfileTuple) # WHEN: ProfileTuple に aws_secret_access_key がセットされているならば if in_both.aws_secret_access_key is not None:", "type(aws_account_id) == int # テスト ユーザー入力の AWSアカウントID が int じゃない場合、False が返される def test_user_input_is_not_int(monkeypatch):", "lambda _: user_input) # WHEN: this function is called profile = data_manager.get_selected_profile() assert", "changed to fake path # GIVEN: Create fake AWS_ACCOUNT_FOR_SET_AWS_MFA data_manager.create_aws_account_id_file() # GIVEN: 対象", "= validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is not exist assert not is_the_file_exists #", "12345 
data_manager.create_aws_account_id_file() # WHEN: check the existence of info for the given profile", "the profile prompts.prompt_for_asking_aws_account_id(profile) out, err = capsys.readouterr() assert profile.name in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE", "該当ProfileのAWSアカウントID の入力を求める def test_when_no_aws_account_file_asks_for_user_input(set_fake_aws_account_files, delete_fake_aws_account_files, perfect_profile_list, capsys): # GIVEN a Profile profile =", "profile_name_list def test_prompt_displays_profile_name(capsys, perfect_profile_list): \"\"\"テスト:プロファイルの選択肢が表示されるかどうか\"\"\" # GIVEN: get perfect_profile_list # WHEN: execute prompt_user_selection()", "Create fake AWS_ACCOUNT_FOR_data_manager # GIVEN: No info for profile exists in fake AWS_ACCOUNT_FOR_SET_AWS_MFA", "profile = perfect_profile_list[0] # WHEN: call the function mfa_arn = data_manager.get_mfa_arn(profile) # THEN:", "is_int # テスト ユーザー入力の AWSアカウントID が int の場合、True が返される def test_user_input_is_int(monkeypatch): # GIVEN:", "AWS_ACCOUNT_FOR_SET_AWS_MFA replaced with fake path # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists", "out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # ~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する def", "message to ask for input aws account id for the profile out, err", "out.rstrip() # テスト該当プロファイルのMFA ARN を取得する def test_get_mfa_arn(perfect_profile_list): # GIVEN: a ProfileTuple profile =", "import IntObject from set_aws_mfa import prompts from tests.conftest import BUILTIN_INPUTS ######################## # Get", "k in credentials_lists: # GIVEN: CredentialsTuple の name だけを抽出する credentials_name_list.append(k.name) for in_both in", "-*- coding: utf-8 -*- from random import randint from set_aws_mfa.data.data_manager import ProfileTuple from", "assert \") \" + p.name in out.strip() def 
test_get_selected_profile(perfect_profile_list, monkeypatch): # GIVEN: perfect", "\"12345\" # GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: user_input_not_int) # WHEN:", "the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA is_the_file_exists = validate.check_aws_accounts_for_set_aws_mfa_existence() # THEN: The file is exist", "assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_BEFORE in out.rstrip() assert prompts.PROMPT_ASK_AWS_ACCOUNT_ID_FOR_PROFILE_AFTER in out.rstrip() # ~/.aws_accounts_for_set_aws_mfa から該当ProfileのAWSアカウントIDを取得する def test_get_aws_account_id_for_the_profile(perfect_profile_list):", "an int assert not is_int # テスト ユーザー入力の AWSアカウントID が int の場合、True が返される", "No info for profile exists in fake AWS_ACCOUNT_FOR_SET_AWS_MFA # GIVEN: 対象 profile を指定する", "AWS_ACCOUNT_FOR_SET_AWS_MFA and it is created data_manager.prepare_aws_account_id_file() # WHEN: Check the existence of AWS_ACCOUNT_FOR_SET_AWS_MFA", "name だけを抽出する credentials_name_list.append(k.name) for in_both in perfect_profile_list: assert isinstance(in_both, ProfileTuple) # WHEN: ProfileTuple", "# GIVEN: Mock user input string monkeypatch.setattr(BUILTIN_INPUTS, lambda _: aws_account_id_int) # WHEN: check", "aws account id for the profile prompts.prompt_for_asking_aws_account_id(profile) out, err = capsys.readouterr() assert profile.name", "len(perfect_profile_list)) # WHEN: get profile instance for the input number profile_instance = data_manager.get_specified_profile(" ]
[]
[ "<reponame>michaeljneely/model-uncertainty-pos-tagging<filename>dl4nlp_pos_tagging/models/modules/seq2seq_encoders/bi_feedforward_encoder.py from overrides import overrides from allennlp.modules.seq2seq_encoders.feedforward_encoder import FeedForwardEncoder from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder", "allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder @Seq2SeqEncoder.register(\"bi-feedforward\") class BiFeedForwardEncoder(FeedForwardEncoder): @overrides def is_bidirectional(self) -> bool: return True", "from allennlp.modules.seq2seq_encoders.feedforward_encoder import FeedForwardEncoder from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder @Seq2SeqEncoder.register(\"bi-feedforward\") class BiFeedForwardEncoder(FeedForwardEncoder): @overrides def", "overrides from allennlp.modules.seq2seq_encoders.feedforward_encoder import FeedForwardEncoder from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder @Seq2SeqEncoder.register(\"bi-feedforward\") class BiFeedForwardEncoder(FeedForwardEncoder): @overrides", "from overrides import overrides from allennlp.modules.seq2seq_encoders.feedforward_encoder import FeedForwardEncoder from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder @Seq2SeqEncoder.register(\"bi-feedforward\")", "FeedForwardEncoder from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder @Seq2SeqEncoder.register(\"bi-feedforward\") class BiFeedForwardEncoder(FeedForwardEncoder): @overrides def is_bidirectional(self) -> bool:", "import FeedForwardEncoder from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder @Seq2SeqEncoder.register(\"bi-feedforward\") class BiFeedForwardEncoder(FeedForwardEncoder): @overrides def is_bidirectional(self) ->", "allennlp.modules.seq2seq_encoders.feedforward_encoder import FeedForwardEncoder from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder 
@Seq2SeqEncoder.register(\"bi-feedforward\") class BiFeedForwardEncoder(FeedForwardEncoder): @overrides def is_bidirectional(self)", "overrides import overrides from allennlp.modules.seq2seq_encoders.feedforward_encoder import FeedForwardEncoder from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder @Seq2SeqEncoder.register(\"bi-feedforward\") class", "from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder @Seq2SeqEncoder.register(\"bi-feedforward\") class BiFeedForwardEncoder(FeedForwardEncoder): @overrides def is_bidirectional(self) -> bool: return", "import overrides from allennlp.modules.seq2seq_encoders.feedforward_encoder import FeedForwardEncoder from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder @Seq2SeqEncoder.register(\"bi-feedforward\") class BiFeedForwardEncoder(FeedForwardEncoder):" ]
[ "toml def deps(dict_1040): \"\"\" A function to calculate no. of dependents. This currently", "+ str(counter)][\"FN_LN\"] != \"\": counter += 1 dep_count += 1 return dep_count def", "!= \"\": counter += 1 dep_count += 1 return dep_count def proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"])", "\"\": counter += 1 dep_count += 1 return dep_count def proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def", "counter <= 4 and dict_1040[\"Dep\" + str(counter)][\"FN_LN\"] != \"\": counter += 1 dep_count", "deps(dict_1040): \"\"\" A function to calculate no. of dependents. This currently goes up", "<gh_stars>0 #!/usr/bin/env python3 \"\"\" Programs for processing form 1040 \"\"\" import toml def", "def proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def start(): \"\"\" This is the main function. \"\"\" d_1040", "1 dep_count += 1 return dep_count def proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def start(): \"\"\" This", "no. of dependents. This currently goes up to 4 dependents. \"\"\" dep_count =", "up to 4 dependents. \"\"\" dep_count = 0 #while counter = 1 while", "#print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040)) if d_1040[\"Main\"][\"i2a\"] > 0 or d_1040[\"Main\"][\"i3a\"] > 0: d_sched_B = toml.load(\"sched_B.case1.toml\")", "to 4 dependents. \"\"\" dep_count = 0 #while counter = 1 while counter", "counter += 1 dep_count += 1 return dep_count def proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def start():", "goes up to 4 dependents. \"\"\" dep_count = 0 #while counter = 1", "main function. 
\"\"\" d_1040 = toml.load(\"f1040.case1.toml\") #print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040)) if d_1040[\"Main\"][\"i2a\"]", "0 or d_1040[\"Main\"][\"i3a\"] > 0: d_sched_B = toml.load(\"sched_B.case1.toml\") proc_sched_B(d_sched_B) if __name__ == \"__main__\":", "= 1 while counter <= 4 and dict_1040[\"Dep\" + str(counter)][\"FN_LN\"] != \"\": counter", "def deps(dict_1040): \"\"\" A function to calculate no. of dependents. This currently goes", "1 while counter <= 4 and dict_1040[\"Dep\" + str(counter)][\"FN_LN\"] != \"\": counter +=", "#while counter = 1 while counter <= 4 and dict_1040[\"Dep\" + str(counter)][\"FN_LN\"] !=", "import toml def deps(dict_1040): \"\"\" A function to calculate no. of dependents. This", "#print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040)) if d_1040[\"Main\"][\"i2a\"] > 0 or d_1040[\"Main\"][\"i3a\"] > 0:", "4 dependents. \"\"\" dep_count = 0 #while counter = 1 while counter <=", "\"\"\" d_1040 = toml.load(\"f1040.case1.toml\") #print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040)) if d_1040[\"Main\"][\"i2a\"] > 0", "1040 \"\"\" import toml def deps(dict_1040): \"\"\" A function to calculate no. of", "= 0 #while counter = 1 while counter <= 4 and dict_1040[\"Dep\" +", "0 #while counter = 1 while counter <= 4 and dict_1040[\"Dep\" + str(counter)][\"FN_LN\"]", "\"\"\" This is the main function. \"\"\" d_1040 = toml.load(\"f1040.case1.toml\") #print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"])", "\"\"\" import toml def deps(dict_1040): \"\"\" A function to calculate no. of dependents.", "dependents. \"\"\" dep_count = 0 #while counter = 1 while counter <= 4", "start(): \"\"\" This is the main function. 
\"\"\" d_1040 = toml.load(\"f1040.case1.toml\") #print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"])", "for processing form 1040 \"\"\" import toml def deps(dict_1040): \"\"\" A function to", "the main function. \"\"\" d_1040 = toml.load(\"f1040.case1.toml\") #print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040)) if", "dict_1040[\"Dep\" + str(counter)][\"FN_LN\"] != \"\": counter += 1 dep_count += 1 return dep_count", "def start(): \"\"\" This is the main function. \"\"\" d_1040 = toml.load(\"f1040.case1.toml\") #print(d_1040)", "str(counter)][\"FN_LN\"] != \"\": counter += 1 dep_count += 1 return dep_count def proc_sched_B(dict_sched_B):", "#print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040)) if d_1040[\"Main\"][\"i2a\"] > 0 or d_1040[\"Main\"][\"i3a\"] > 0: d_sched_B =", "\"\"\" Programs for processing form 1040 \"\"\" import toml def deps(dict_1040): \"\"\" A", "return dep_count def proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def start(): \"\"\" This is the main function.", "dep_count = 0 #while counter = 1 while counter <= 4 and dict_1040[\"Dep\"", "+= 1 dep_count += 1 return dep_count def proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def start(): \"\"\"", "and dict_1040[\"Dep\" + str(counter)][\"FN_LN\"] != \"\": counter += 1 dep_count += 1 return", "Programs for processing form 1040 \"\"\" import toml def deps(dict_1040): \"\"\" A function", "A function to calculate no. of dependents. This currently goes up to 4", "#print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040)) if d_1040[\"Main\"][\"i2a\"] > 0 or d_1040[\"Main\"][\"i3a\"] > 0: d_sched_B", "\"\"\" A function to calculate no. of dependents. 
This currently goes up to", "python3 \"\"\" Programs for processing form 1040 \"\"\" import toml def deps(dict_1040): \"\"\"", "print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def start(): \"\"\" This is the main function. \"\"\" d_1040 = toml.load(\"f1040.case1.toml\")", "if d_1040[\"Main\"][\"i2a\"] > 0 or d_1040[\"Main\"][\"i3a\"] > 0: d_sched_B = toml.load(\"sched_B.case1.toml\") proc_sched_B(d_sched_B) if", "dep_count def proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def start(): \"\"\" This is the main function. \"\"\"", "is the main function. \"\"\" d_1040 = toml.load(\"f1040.case1.toml\") #print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040))", "counter = 1 while counter <= 4 and dict_1040[\"Dep\" + str(counter)][\"FN_LN\"] != \"\":", "> 0 or d_1040[\"Main\"][\"i3a\"] > 0: d_sched_B = toml.load(\"sched_B.case1.toml\") proc_sched_B(d_sched_B) if __name__ ==", "#!/usr/bin/env python3 \"\"\" Programs for processing form 1040 \"\"\" import toml def deps(dict_1040):", "form 1040 \"\"\" import toml def deps(dict_1040): \"\"\" A function to calculate no.", "proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def start(): \"\"\" This is the main function. \"\"\" d_1040 =", "+= 1 return dep_count def proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def start(): \"\"\" This is the", "of dependents. This currently goes up to 4 dependents. \"\"\" dep_count = 0", "toml.load(\"f1040.case1.toml\") #print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040)) if d_1040[\"Main\"][\"i2a\"] > 0 or d_1040[\"Main\"][\"i3a\"] >", "processing form 1040 \"\"\" import toml def deps(dict_1040): \"\"\" A function to calculate", "function. 
\"\"\" d_1040 = toml.load(\"f1040.case1.toml\") #print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040)) if d_1040[\"Main\"][\"i2a\"] >", "dep_count += 1 return dep_count def proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def start(): \"\"\" This is", "d_1040 = toml.load(\"f1040.case1.toml\") #print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040)) if d_1040[\"Main\"][\"i2a\"] > 0 or", "\"\"\" dep_count = 0 #while counter = 1 while counter <= 4 and", "<= 4 and dict_1040[\"Dep\" + str(counter)][\"FN_LN\"] != \"\": counter += 1 dep_count +=", "4 and dict_1040[\"Dep\" + str(counter)][\"FN_LN\"] != \"\": counter += 1 dep_count += 1", "or d_1040[\"Main\"][\"i3a\"] > 0: d_sched_B = toml.load(\"sched_B.case1.toml\") proc_sched_B(d_sched_B) if __name__ == \"__main__\": start()", "to calculate no. of dependents. This currently goes up to 4 dependents. \"\"\"", "= toml.load(\"f1040.case1.toml\") #print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"]) print(deps(d_1040)) if d_1040[\"Main\"][\"i2a\"] > 0 or d_1040[\"Main\"][\"i3a\"]", "while counter <= 4 and dict_1040[\"Dep\" + str(counter)][\"FN_LN\"] != \"\": counter += 1", "dependents. This currently goes up to 4 dependents. \"\"\" dep_count = 0 #while", "d_1040[\"Main\"][\"i2a\"] > 0 or d_1040[\"Main\"][\"i3a\"] > 0: d_sched_B = toml.load(\"sched_B.case1.toml\") proc_sched_B(d_sched_B) if __name__", "print(deps(d_1040)) if d_1040[\"Main\"][\"i2a\"] > 0 or d_1040[\"Main\"][\"i3a\"] > 0: d_sched_B = toml.load(\"sched_B.case1.toml\") proc_sched_B(d_sched_B)", "function to calculate no. of dependents. This currently goes up to 4 dependents.", "calculate no. of dependents. This currently goes up to 4 dependents. 
\"\"\" dep_count", "currently goes up to 4 dependents. \"\"\" dep_count = 0 #while counter =", "1 return dep_count def proc_sched_B(dict_sched_B): print(dict_sched_B[\"Part3_Foreign_Accounts_Trusts\"][\"i7a\"]) def start(): \"\"\" This is the main", "This is the main function. \"\"\" d_1040 = toml.load(\"f1040.case1.toml\") #print(d_1040) #print(d_1040[\"Dependents\"][\"Dep1\"]) #print(d_1040[\"Dep1\"][\"FN_LN\"]) #print(d_1040[\"Address\"][\"Street\"])", "This currently goes up to 4 dependents. \"\"\" dep_count = 0 #while counter" ]
[ "from . import exceptions from .scopedchainmap import ScopedChainMap __all__ = [\"interpret\", \"exceptions\", \"ScopedChainMap\"]", "<gh_stars>100-1000 from .interpreter import interpret from . import exceptions from .scopedchainmap import ScopedChainMap", "interpret from . import exceptions from .scopedchainmap import ScopedChainMap __all__ = [\"interpret\", \"exceptions\",", "import interpret from . import exceptions from .scopedchainmap import ScopedChainMap __all__ = [\"interpret\",", "from .interpreter import interpret from . import exceptions from .scopedchainmap import ScopedChainMap __all__", ".interpreter import interpret from . import exceptions from .scopedchainmap import ScopedChainMap __all__ =" ]
[ "trace = metric_group_table_meta(current_skyline_app, engine) if fail_msg != 'metric_group meta reflected OK': current_logger.error('error ::", "% str(err)) if engine: try: metric_group_table, fail_msg, trace = metric_group_table_meta(current_skyline_app, engine) if fail_msg", "sqlalchemy.sql import select from database import get_engine, engine_disposal, metric_group_table_meta from functions.metrics.get_base_name_from_metric_id import get_base_name_from_metric_id", "engine: try: metric_group_table, fail_msg, trace = metric_group_table_meta(current_skyline_app, engine) if fail_msg != 'metric_group meta", "get a MySQL engine fail_msg - %s' % str(fail_msg)) if trace != 'none':", "try: connection = engine.connect() if metric_id: stmt = select([metric_group_table]).where(metric_group_table.c.related_metric_id == metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else: stmt", "metric_groups that a metric is part of. \"\"\" current_skyline_app_logger = current_skyline_app + 'Log'", "to determine base_name from metric_id: %s - %s' % ( str(group_metric_id), str(err))) if", "shifted_counts # Remap the metric_id and related_metric_id for clarity related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_to_metric_id'] = related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id']", "= {} related_to_metric_groups_dict['metric'] = base_name related_to_metric_groups_dict['metric_id'] = metric_id related_to_metric_groups_dict['related_to_metrics'] = {} try: engine,", "current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: failed to build metric_groups dict - %s' %", "metric_groups dict - %s' % str(err)) if engine: engine_disposal(current_skyline_app, engine) for related_metric in", "from database import get_engine, engine_disposal, metric_group_table_meta from 
functions.metrics.get_base_name_from_metric_id import get_base_name_from_metric_id def related_to_metric_groups(current_skyline_app, base_name,", "a MySQL engine fail_msg - %s' % str(fail_msg)) if trace != 'none': current_logger.error('error", "related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if key == 'shifted_counts': try: shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts =", "for row in results: group_metric_id = row['metric_id'] group_base_name = None try: group_base_name =", "to build metric_groups dict - %s' % str(err)) if engine: engine_disposal(current_skyline_app, engine) for", "\"\"\" Returns a dict of all the metric_groups that a metric is part", "not get a MySQL engine - %s' % str(err)) if engine: try: metric_group_table,", "related_to_metric_groups :: could not get a MySQL engine trace - %s' % str(trace))", "current_logger.error('error :: related_to_metric_groups :: base_name_from_metric_id failed to determine base_name from metric_id: %s -", "current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine fail_msg - %s'", "ast import literal_eval from sqlalchemy.sql import select from database import get_engine, engine_disposal, metric_group_table_meta", "in results: group_metric_id = row['metric_id'] group_base_name = None try: group_base_name = get_base_name_from_metric_id(current_skyline_app, group_metric_id)", "group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row) connection.close() except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups", "= metric_id related_to_metric_groups_dict['related_to_metrics'] = {} try: engine, fail_msg, trace = get_engine(current_skyline_app) if fail_msg", "if fail_msg != 'got MySQL engine': 
current_logger.error('error :: related_to_metric_groups :: could not get", "MySQL engine fail_msg - %s' % str(fail_msg)) if trace != 'none': current_logger.error('error ::", "\"\"\" Get anomalies for a metric id \"\"\" import logging import traceback from", "%s' % str(fail_msg)) if trace != 'none': current_logger.error('error :: related_to_metric_groups :: could not", "Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: failed to build metric_groups dict", "as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine", "== 'shifted_counts': try: shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts = literal_eval(shifted_counts_str) except AttributeError: shifted_counts =", "import get_engine, engine_disposal, metric_group_table_meta from functions.metrics.get_base_name_from_metric_id import get_base_name_from_metric_id def related_to_metric_groups(current_skyline_app, base_name, metric_id): \"\"\"", "= current_skyline_app + 'Log' current_logger = logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict = {} related_to_metric_groups_dict['metric'] = base_name", "related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row) connection.close() except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups ::", "err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine -", "def related_to_metric_groups(current_skyline_app, base_name, metric_id): \"\"\" Returns a dict of all the metric_groups that", "engine) for related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()): for key in 
list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): if 'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])):", "!= 'metric_group meta reflected OK': current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta", "could not get metric_group_table_meta fail_msg - %s' % str(fail_msg)) if trace != 'none':", "related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts # Remap the metric_id and related_metric_id for clarity related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_to_metric_id'] =", "= str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if key == 'shifted_counts': try: shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts = literal_eval(shifted_counts_str)", ":: metric_group_table_meta - %s' % str(err)) try: connection = engine.connect() if metric_id: stmt", "import select from database import get_engine, engine_disposal, metric_group_table_meta from functions.metrics.get_base_name_from_metric_id import get_base_name_from_metric_id def", "Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: could not get a MySQL", "str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if key == 'shifted_counts': try: shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts", "- %s' % str(err)) try: connection = engine.connect() if metric_id: stmt = select([metric_group_table]).where(metric_group_table.c.related_metric_id", 
"list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): if 'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if 'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key]", "from ast import literal_eval from sqlalchemy.sql import select from database import get_engine, engine_disposal,", "group_metric_id) except Exception as err: current_logger.error('error :: related_to_metric_groups :: base_name_from_metric_id failed to determine", ":: could not get metric_group_table_meta fail_msg - %s' % str(fail_msg)) if trace !=", "get metric_group_table_meta trace - %s' % str(trace)) except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error", ":: could not get metric_group_table_meta trace - %s' % str(trace)) except Exception as", "a metric id \"\"\" import logging import traceback from ast import literal_eval from", "traceback from ast import literal_eval from sqlalchemy.sql import select from database import get_engine,", "% str(trace)) except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: metric_group_table_meta -", "as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: metric_group_table_meta - %s' % str(err)) try:", "err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: metric_group_table_meta - %s' % str(err)) try: connection", "except Exception as err: current_logger.error('error :: related_to_metric_groups :: base_name_from_metric_id failed to determine base_name", "- %s' % ( 
str(group_metric_id), str(err))) if group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row) connection.close() except", "%s - %s' % ( str(group_metric_id), str(err))) if group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row) connection.close()", "- %s' % str(err)) if engine: engine_disposal(current_skyline_app, engine) for related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()): for", "anomalies for a metric id \"\"\" import logging import traceback from ast import", "%s' % str(trace)) except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: could", "engine) if fail_msg != 'metric_group meta reflected OK': current_logger.error('error :: related_to_metric_groups :: could", "of all the metric_groups that a metric is part of. \"\"\" current_skyline_app_logger =", "related_to_metric_groups_dict['metric_id'] = metric_id related_to_metric_groups_dict['related_to_metrics'] = {} try: engine, fail_msg, trace = get_engine(current_skyline_app) if", "build metric_groups dict - %s' % str(err)) if engine: engine_disposal(current_skyline_app, engine) for related_metric", "metric is part of. 
\"\"\" current_skyline_app_logger = current_skyline_app + 'Log' current_logger = logging.getLogger(current_skyline_app_logger)", "str(trace)) except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: could not get", "{} try: engine, fail_msg, trace = get_engine(current_skyline_app) if fail_msg != 'got MySQL engine':", "Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: metric_group_table_meta - %s' % str(err))", "if trace != 'none': current_logger.error('error :: related_to_metric_groups :: could not get a MySQL", "'Log' current_logger = logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict = {} related_to_metric_groups_dict['metric'] = base_name related_to_metric_groups_dict['metric_id'] = metric_id", "import logging import traceback from ast import literal_eval from sqlalchemy.sql import select from", "!= 'none': current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta trace - %s'", "<reponame>datastreaming/skyline-1 \"\"\" Get anomalies for a metric id \"\"\" import logging import traceback", "group_base_name = get_base_name_from_metric_id(current_skyline_app, group_metric_id) except Exception as err: current_logger.error('error :: related_to_metric_groups :: base_name_from_metric_id", "trace != 'none': current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine", "+ 'Log' current_logger = logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict = {} related_to_metric_groups_dict['metric'] = base_name related_to_metric_groups_dict['metric_id'] =", "Remap the metric_id and related_metric_id for clarity related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_to_metric_id'] = related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] 
related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] = metric_id", "% ( str(group_metric_id), str(err))) if group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row) connection.close() except Exception as", "related_to_metric_groups :: metric_group_table_meta - %s' % str(err)) try: connection = engine.connect() if metric_id:", "- %s' % str(trace)) except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups ::", ":: related_to_metric_groups :: could not get metric_group_table_meta trace - %s' % str(trace)) except", "get a MySQL engine trace - %s' % str(trace)) except Exception as err:", "metric_group_table, fail_msg, trace = metric_group_table_meta(current_skyline_app, engine) if fail_msg != 'metric_group meta reflected OK':", "metric_group_table_meta fail_msg - %s' % str(fail_msg)) if trace != 'none': current_logger.error('error :: related_to_metric_groups", "metric_group_table_meta(current_skyline_app, engine) if fail_msg != 'metric_group meta reflected OK': current_logger.error('error :: related_to_metric_groups ::", "engine_disposal(current_skyline_app, engine) for related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()): for key in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): if 'decimal.Decimal' in", "related_to_metric_groups :: could not get metric_group_table_meta fail_msg - %s' % str(fail_msg)) if trace", "\"\"\" current_skyline_app_logger = current_skyline_app + 'Log' current_logger = logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict = {} related_to_metric_groups_dict['metric']", "not get metric_group_table_meta trace - %s' % str(trace)) except Exception as err: current_logger.error(traceback.format_exc())", "is part of. 
\"\"\" current_skyline_app_logger = current_skyline_app + 'Log' current_logger = logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict", "connection.close() except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: failed to build", "all the metric_groups that a metric is part of. \"\"\" current_skyline_app_logger = current_skyline_app", "a metric is part of. \"\"\" current_skyline_app_logger = current_skyline_app + 'Log' current_logger =", "\"\"\" import logging import traceback from ast import literal_eval from sqlalchemy.sql import select", ":: could not get a MySQL engine - %s' % str(err)) if engine:", ":: related_to_metric_groups :: could not get a MySQL engine trace - %s' %", "Returns a dict of all the metric_groups that a metric is part of.", "engine.connect() if metric_id: stmt = select([metric_group_table]).where(metric_group_table.c.related_metric_id == metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else: stmt = select([metric_group_table]) results", "'shifted_counts': try: shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts = literal_eval(shifted_counts_str) except AttributeError: shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key]", "= literal_eval(shifted_counts_str) except AttributeError: shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key] related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts # Remap the", "= select([metric_group_table]).where(metric_group_table.c.related_metric_id == metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else: stmt = select([metric_group_table]) results = connection.execute(stmt) for row", "Get anomalies for a metric id \"\"\" import logging import traceback from ast", "if key == 'shifted_counts': try: 
shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts = literal_eval(shifted_counts_str) except AttributeError:", "= get_base_name_from_metric_id(current_skyline_app, group_metric_id) except Exception as err: current_logger.error('error :: related_to_metric_groups :: base_name_from_metric_id failed", "fail_msg, trace = metric_group_table_meta(current_skyline_app, engine) if fail_msg != 'metric_group meta reflected OK': current_logger.error('error", "fail_msg != 'got MySQL engine': current_logger.error('error :: related_to_metric_groups :: could not get a", "dict of all the metric_groups that a metric is part of. \"\"\" current_skyline_app_logger", "a MySQL engine trace - %s' % str(trace)) except Exception as err: current_logger.error(traceback.format_exc())", ":: could not get a MySQL engine trace - %s' % str(trace)) except", "group_metric_id = row['metric_id'] group_base_name = None try: group_base_name = get_base_name_from_metric_id(current_skyline_app, group_metric_id) except Exception", "current_skyline_app_logger = current_skyline_app + 'Log' current_logger = logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict = {} related_to_metric_groups_dict['metric'] =", "metric_id: stmt = select([metric_group_table]).where(metric_group_table.c.related_metric_id == metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else: stmt = select([metric_group_table]) results = connection.execute(stmt)", "== metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else: stmt = select([metric_group_table]) results = connection.execute(stmt) for row in results:", "key == 'shifted_counts': try: shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts = literal_eval(shifted_counts_str) except AttributeError: shifted_counts", "determine base_name from metric_id: %s - %s' % ( str(group_metric_id), str(err))) 
if group_base_name:", "failed to determine base_name from metric_id: %s - %s' % ( str(group_metric_id), str(err)))", "try: shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts = literal_eval(shifted_counts_str) except AttributeError: shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key] related_to_metric_groups_dict['related_to_metrics'][related_metric][key]", "%s' % str(err)) if engine: engine_disposal(current_skyline_app, engine) for related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()): for key", "list(related_to_metric_groups_dict['related_to_metrics'].keys()): for key in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): if 'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if", "= base_name related_to_metric_groups_dict['metric_id'] = metric_id related_to_metric_groups_dict['related_to_metrics'] = {} try: engine, fail_msg, trace =", "MySQL engine - %s' % str(err)) if engine: try: metric_group_table, fail_msg, trace =", "as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: failed to build metric_groups dict -", "get_engine, engine_disposal, metric_group_table_meta from functions.metrics.get_base_name_from_metric_id import get_base_name_from_metric_id def related_to_metric_groups(current_skyline_app, base_name, metric_id): \"\"\" Returns", "a dict of all the metric_groups that a metric is part of. 
\"\"\"", "related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()): for key in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): if 'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] =", "metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else: stmt = select([metric_group_table]) results = connection.execute(stmt) for row in results: group_metric_id", "row in results: group_metric_id = row['metric_id'] group_base_name = None try: group_base_name = get_base_name_from_metric_id(current_skyline_app,", "metric_id): \"\"\" Returns a dict of all the metric_groups that a metric is", "str(err)) if engine: try: metric_group_table, fail_msg, trace = metric_group_table_meta(current_skyline_app, engine) if fail_msg !=", "in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if key == 'shifted_counts': try: shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8')", "related_to_metric_groups_dict['related_to_metrics'] = {} try: engine, fail_msg, trace = get_engine(current_skyline_app) if fail_msg != 'got", "OK': current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta fail_msg - %s' %", "( str(group_metric_id), str(err))) if group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row) connection.close() except Exception as err:", "fail_msg != 'metric_group meta reflected OK': current_logger.error('error :: related_to_metric_groups :: could not get", "select([metric_group_table]).where(metric_group_table.c.related_metric_id == 
metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else: stmt = select([metric_group_table]) results = connection.execute(stmt) for row in", "!= 'got MySQL engine': current_logger.error('error :: related_to_metric_groups :: could not get a MySQL", "trace = get_engine(current_skyline_app) if fail_msg != 'got MySQL engine': current_logger.error('error :: related_to_metric_groups ::", "engine, fail_msg, trace = get_engine(current_skyline_app) if fail_msg != 'got MySQL engine': current_logger.error('error ::", "engine - %s' % str(err)) if engine: try: metric_group_table, fail_msg, trace = metric_group_table_meta(current_skyline_app,", "could not get a MySQL engine - %s' % str(err)) if engine: try:", "could not get metric_group_table_meta trace - %s' % str(trace)) except Exception as err:", "reflected OK': current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta fail_msg - %s'", "literal_eval from sqlalchemy.sql import select from database import get_engine, engine_disposal, metric_group_table_meta from functions.metrics.get_base_name_from_metric_id", "import get_base_name_from_metric_id def related_to_metric_groups(current_skyline_app, base_name, metric_id): \"\"\" Returns a dict of all the", "connection = engine.connect() if metric_id: stmt = select([metric_group_table]).where(metric_group_table.c.related_metric_id == metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else: stmt =", "related_to_metric_groups_dict = {} related_to_metric_groups_dict['metric'] = base_name related_to_metric_groups_dict['metric_id'] = metric_id related_to_metric_groups_dict['related_to_metrics'] = {} try:", "MySQL engine trace - %s' % str(trace)) except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error", "shifted_counts = literal_eval(shifted_counts_str) except AttributeError: shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key] 
related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts # Remap", ":: related_to_metric_groups :: base_name_from_metric_id failed to determine base_name from metric_id: %s - %s'", "meta reflected OK': current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta fail_msg -", "in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if 'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if", "{} related_to_metric_groups_dict['metric'] = base_name related_to_metric_groups_dict['metric_id'] = metric_id related_to_metric_groups_dict['related_to_metrics'] = {} try: engine, fail_msg,", ":: base_name_from_metric_id failed to determine base_name from metric_id: %s - %s' % (", "'got MySQL engine': current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine", "logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict = {} related_to_metric_groups_dict['metric'] = base_name related_to_metric_groups_dict['metric_id'] = metric_id related_to_metric_groups_dict['related_to_metrics'] = {}", "select([metric_group_table]) results = connection.execute(stmt) for row in results: group_metric_id = row['metric_id'] group_base_name =", "% str(trace)) except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: could not", "if fail_msg != 'metric_group meta reflected OK': current_logger.error('error :: related_to_metric_groups :: could not", "not get a MySQL engine fail_msg - %s' % str(fail_msg)) if trace !=", "in 
list(related_to_metric_groups_dict['related_to_metrics'].keys()): for key in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): if 'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])", "if 'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if 'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] =", "if group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row) connection.close() except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error ::", "try: engine, fail_msg, trace = get_engine(current_skyline_app) if fail_msg != 'got MySQL engine': current_logger.error('error", "metric_group_table_meta - %s' % str(err)) try: connection = engine.connect() if metric_id: stmt =", "related_to_metric_groups_dict['metric'] = base_name related_to_metric_groups_dict['metric_id'] = metric_id related_to_metric_groups_dict['related_to_metrics'] = {} try: engine, fail_msg, trace", "select from database import get_engine, engine_disposal, metric_group_table_meta from functions.metrics.get_base_name_from_metric_id import get_base_name_from_metric_id def related_to_metric_groups(current_skyline_app,", "failed to build metric_groups dict - %s' % str(err)) if engine: engine_disposal(current_skyline_app, engine)", "engine trace - %s' % str(trace)) except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error ::", 
"current_logger.error('error :: related_to_metric_groups :: metric_group_table_meta - %s' % str(err)) try: connection = engine.connect()", "except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: could not get a", "%s' % str(trace)) except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: metric_group_table_meta", "current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta fail_msg - %s' % str(fail_msg))", "the metric_id and related_metric_id for clarity related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_to_metric_id'] = related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] = metric_id del", "get_base_name_from_metric_id(current_skyline_app, group_metric_id) except Exception as err: current_logger.error('error :: related_to_metric_groups :: base_name_from_metric_id failed to", ":: related_to_metric_groups :: metric_group_table_meta - %s' % str(err)) try: connection = engine.connect() if", "in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): if 'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if 'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])):", "get metric_group_table_meta fail_msg - %s' % str(fail_msg)) if trace != 'none': current_logger.error('error ::", "if engine: try: metric_group_table, fail_msg, trace = metric_group_table_meta(current_skyline_app, engine) if fail_msg != 'metric_group", "base_name_from_metric_id failed to determine base_name from 
metric_id: %s - %s' % ( str(group_metric_id),", "'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if 'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])", "current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine trace - %s'", "%s' % ( str(group_metric_id), str(err))) if group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row) connection.close() except Exception", "current_skyline_app + 'Log' current_logger = logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict = {} related_to_metric_groups_dict['metric'] = base_name related_to_metric_groups_dict['metric_id']", "from metric_id: %s - %s' % ( str(group_metric_id), str(err))) if group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name] =", "group_base_name = None try: group_base_name = get_base_name_from_metric_id(current_skyline_app, group_metric_id) except Exception as err: current_logger.error('error", ":: related_to_metric_groups :: could not get a MySQL engine - %s' % str(err))", "database import get_engine, engine_disposal, metric_group_table_meta from functions.metrics.get_base_name_from_metric_id import get_base_name_from_metric_id def related_to_metric_groups(current_skyline_app, base_name, metric_id):", "stmt = select([metric_group_table]) results = connection.execute(stmt) for row in results: group_metric_id = row['metric_id']", "related_to_metric_groups :: failed to build metric_groups dict - %s' % str(err)) if engine:", "%s' % str(err)) try: connection = engine.connect() 
if metric_id: stmt = select([metric_group_table]).where(metric_group_table.c.related_metric_id ==", "str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if key == 'shifted_counts': try: shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts = literal_eval(shifted_counts_str) except", "for related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()): for key in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): if 'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key]", "current_logger = logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict = {} related_to_metric_groups_dict['metric'] = base_name related_to_metric_groups_dict['metric_id'] = metric_id related_to_metric_groups_dict['related_to_metrics']", "'none': current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta trace - %s' %", "= connection.execute(stmt) for row in results: group_metric_id = row['metric_id'] group_base_name = None try:", "% str(err)) try: connection = engine.connect() if metric_id: stmt = select([metric_group_table]).where(metric_group_table.c.related_metric_id == metric_id).order_by(metric_group_table.c.avg_coefficient.desc())", "row['metric_id'] group_base_name = None try: group_base_name = get_base_name_from_metric_id(current_skyline_app, group_metric_id) except Exception as err:", "key in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): if 'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if 'datetime.datetime' in", 
"'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if key == 'shifted_counts': try: shifted_counts_str =", "trace != 'none': current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta trace -", "None try: group_base_name = get_base_name_from_metric_id(current_skyline_app, group_metric_id) except Exception as err: current_logger.error('error :: related_to_metric_groups", "import literal_eval from sqlalchemy.sql import select from database import get_engine, engine_disposal, metric_group_table_meta from", "shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key] related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts # Remap the metric_id and related_metric_id for", "could not get a MySQL engine fail_msg - %s' % str(fail_msg)) if trace", "fail_msg - %s' % str(fail_msg)) if trace != 'none': current_logger.error('error :: related_to_metric_groups ::", "metric_id and related_metric_id for clarity related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_to_metric_id'] = related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] = metric_id del related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_metric_id']", "related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts = literal_eval(shifted_counts_str) except AttributeError: shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key] related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts #", "fail_msg, trace = get_engine(current_skyline_app) if fail_msg != 
'got MySQL engine': current_logger.error('error :: related_to_metric_groups", "= {} try: engine, fail_msg, trace = get_engine(current_skyline_app) if fail_msg != 'got MySQL", "err: current_logger.error('error :: related_to_metric_groups :: base_name_from_metric_id failed to determine base_name from metric_id: %s", "part of. \"\"\" current_skyline_app_logger = current_skyline_app + 'Log' current_logger = logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict =", "shifted_counts_str = related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts = literal_eval(shifted_counts_str) except AttributeError: shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key] related_to_metric_groups_dict['related_to_metrics'][related_metric][key] =", "str(err)) if engine: engine_disposal(current_skyline_app, engine) for related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()): for key in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()):", "stmt = select([metric_group_table]).where(metric_group_table.c.related_metric_id == metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else: stmt = select([metric_group_table]) results = connection.execute(stmt) for", "related_to_metric_groups :: could not get metric_group_table_meta trace - %s' % str(trace)) except Exception", "metric id \"\"\" import logging import traceback from ast import literal_eval from sqlalchemy.sql", "not get a MySQL engine trace - %s' % str(trace)) except Exception as", "err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: failed to build metric_groups dict - %s'", "and related_metric_id for clarity related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_to_metric_id'] = related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] 
related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] = metric_id del related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_metric_id'] return", "engine fail_msg - %s' % str(fail_msg)) if trace != 'none': current_logger.error('error :: related_to_metric_groups", "except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: metric_group_table_meta - %s' %", "str(fail_msg)) if trace != 'none': current_logger.error('error :: related_to_metric_groups :: could not get a", "# Remap the metric_id and related_metric_id for clarity related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_to_metric_id'] = related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] =", "= logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict = {} related_to_metric_groups_dict['metric'] = base_name related_to_metric_groups_dict['metric_id'] = metric_id related_to_metric_groups_dict['related_to_metrics'] =", "str(group_metric_id), str(err))) if group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row) connection.close() except Exception as err: current_logger.error(traceback.format_exc())", "'none': current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine trace -", "if metric_id: stmt = select([metric_group_table]).where(metric_group_table.c.related_metric_id == metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else: stmt = select([metric_group_table]) results =", "%s' % str(err)) if engine: try: metric_group_table, fail_msg, trace = metric_group_table_meta(current_skyline_app, engine) if", "- %s' % str(err)) if engine: try: metric_group_table, fail_msg, trace = metric_group_table_meta(current_skyline_app, engine)", "= None try: group_base_name = 
get_base_name_from_metric_id(current_skyline_app, group_metric_id) except Exception as err: current_logger.error('error ::", "id \"\"\" import logging import traceback from ast import literal_eval from sqlalchemy.sql import", "get a MySQL engine - %s' % str(err)) if engine: try: metric_group_table, fail_msg,", "if 'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if key == 'shifted_counts': try: shifted_counts_str", "engine: engine_disposal(current_skyline_app, engine) for related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()): for key in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): if 'decimal.Decimal'", "engine': current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine fail_msg -", "try: group_base_name = get_base_name_from_metric_id(current_skyline_app, group_metric_id) except Exception as err: current_logger.error('error :: related_to_metric_groups ::", "that a metric is part of. 
\"\"\" current_skyline_app_logger = current_skyline_app + 'Log' current_logger", "str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if 'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if key", "except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: failed to build metric_groups", "related_to_metric_groups_dict['related_to_metrics'][related_metric][key] related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts # Remap the metric_id and related_metric_id for clarity related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_to_metric_id']", "= get_engine(current_skyline_app) if fail_msg != 'got MySQL engine': current_logger.error('error :: related_to_metric_groups :: could", "import traceback from ast import literal_eval from sqlalchemy.sql import select from database import", "related_to_metric_groups(current_skyline_app, base_name, metric_id): \"\"\" Returns a dict of all the metric_groups that a", "'metric_group meta reflected OK': current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta fail_msg", "connection.execute(stmt) for row in results: group_metric_id = row['metric_id'] group_base_name = None try: group_base_name", "metric_group_table_meta from functions.metrics.get_base_name_from_metric_id import get_base_name_from_metric_id def related_to_metric_groups(current_skyline_app, base_name, metric_id): \"\"\" Returns a dict", "str(trace)) except Exception as err: 
current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: metric_group_table_meta - %s'", "try: metric_group_table, fail_msg, trace = metric_group_table_meta(current_skyline_app, engine) if fail_msg != 'metric_group meta reflected", "related_to_metric_groups :: base_name_from_metric_id failed to determine base_name from metric_id: %s - %s' %", "float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if 'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if key == 'shifted_counts': try:", "if engine: engine_disposal(current_skyline_app, engine) for related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()): for key in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): if", "else: stmt = select([metric_group_table]) results = connection.execute(stmt) for row in results: group_metric_id =", "dict(row) connection.close() except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: failed to", "= shifted_counts # Remap the metric_id and related_metric_id for clarity related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_to_metric_id'] = related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id']", "a MySQL engine - %s' % str(err)) if engine: try: metric_group_table, fail_msg, trace", "= row['metric_id'] group_base_name = None try: group_base_name = get_base_name_from_metric_id(current_skyline_app, group_metric_id) except Exception as", "Exception as err: current_logger.error('error :: related_to_metric_groups :: base_name_from_metric_id failed to determine base_name from", "current_logger.error('error :: related_to_metric_groups :: 
could not get a MySQL engine - %s' %", "related_metric_id for clarity related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_to_metric_id'] = related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] related_to_metric_groups_dict['related_to_metrics'][related_metric]['metric_id'] = metric_id del related_to_metric_groups_dict['related_to_metrics'][related_metric]['related_metric_id'] return related_to_metric_groups_dict", "from functions.metrics.get_base_name_from_metric_id import get_base_name_from_metric_id def related_to_metric_groups(current_skyline_app, base_name, metric_id): \"\"\" Returns a dict of", "metric_id: %s - %s' % ( str(group_metric_id), str(err))) if group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row)", "results: group_metric_id = row['metric_id'] group_base_name = None try: group_base_name = get_base_name_from_metric_id(current_skyline_app, group_metric_id) except", ":: could not get a MySQL engine fail_msg - %s' % str(fail_msg)) if", "= float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if 'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if key == 'shifted_counts':", "= engine.connect() if metric_id: stmt = select([metric_group_table]).where(metric_group_table.c.related_metric_id == metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else: stmt = select([metric_group_table])", "AttributeError: shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key] related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts # Remap the metric_id and related_metric_id", "for key in list(related_to_metric_groups_dict['related_to_metrics'][related_metric].keys()): 
if 'decimal.Decimal' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if 'datetime.datetime'", "of. \"\"\" current_skyline_app_logger = current_skyline_app + 'Log' current_logger = logging.getLogger(current_skyline_app_logger) related_to_metric_groups_dict = {}", "the metric_groups that a metric is part of. \"\"\" current_skyline_app_logger = current_skyline_app +", "str(fail_msg)) if trace != 'none': current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta", "for a metric id \"\"\" import logging import traceback from ast import literal_eval", "str(err))) if group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name] = dict(row) connection.close() except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error", "related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = float(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if 'datetime.datetime' in str(type(related_to_metric_groups_dict['related_to_metrics'][related_metric][key])): related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = str(related_to_metric_groups_dict['related_to_metrics'][related_metric][key]) if key ==", "metric_id related_to_metric_groups_dict['related_to_metrics'] = {} try: engine, fail_msg, trace = get_engine(current_skyline_app) if fail_msg !=", "related_to_metric_groups :: could not get a MySQL engine fail_msg - %s' % str(fail_msg))", "current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta trace - %s' % str(trace))", "- %s' % str(fail_msg)) if trace != 'none': current_logger.error('error :: related_to_metric_groups :: could", "not get metric_group_table_meta fail_msg - %s' % str(fail_msg)) if trace 
!= 'none': current_logger.error('error", ":: related_to_metric_groups :: could not get metric_group_table_meta fail_msg - %s' % str(fail_msg)) if", ":: failed to build metric_groups dict - %s' % str(err)) if engine: engine_disposal(current_skyline_app,", "str(err)) try: connection = engine.connect() if metric_id: stmt = select([metric_group_table]).where(metric_group_table.c.related_metric_id == metric_id).order_by(metric_group_table.c.avg_coefficient.desc()) else:", "current_logger.error('error :: related_to_metric_groups :: failed to build metric_groups dict - %s' % str(err))", "dict - %s' % str(err)) if engine: engine_disposal(current_skyline_app, engine) for related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()):", ":: related_to_metric_groups :: failed to build metric_groups dict - %s' % str(err)) if", "= dict(row) connection.close() except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: failed", "except AttributeError: shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key] related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts # Remap the metric_id and", "current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine - %s'", "= related_to_metric_groups_dict['related_to_metrics'][related_metric][key].decode('utf-8') shifted_counts = literal_eval(shifted_counts_str) except AttributeError: shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key] related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts", "logging import traceback from ast import literal_eval from sqlalchemy.sql import select from database", "current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups :: metric_group_table_meta - %s' % str(err)) try: 
connection =", "if trace != 'none': current_logger.error('error :: related_to_metric_groups :: could not get metric_group_table_meta trace", "base_name from metric_id: %s - %s' % ( str(group_metric_id), str(err))) if group_base_name: related_to_metric_groups_dict['related_to_metrics'][group_base_name]", "!= 'none': current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine trace", "could not get a MySQL engine trace - %s' % str(trace)) except Exception", "related_to_metric_groups :: could not get a MySQL engine - %s' % str(err)) if", "functions.metrics.get_base_name_from_metric_id import get_base_name_from_metric_id def related_to_metric_groups(current_skyline_app, base_name, metric_id): \"\"\" Returns a dict of all", ":: related_to_metric_groups :: could not get a MySQL engine fail_msg - %s' %", "get_base_name_from_metric_id def related_to_metric_groups(current_skyline_app, base_name, metric_id): \"\"\" Returns a dict of all the metric_groups", "from sqlalchemy.sql import select from database import get_engine, engine_disposal, metric_group_table_meta from functions.metrics.get_base_name_from_metric_id import", "metric_group_table_meta trace - %s' % str(trace)) except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error ::", "literal_eval(shifted_counts_str) except AttributeError: shifted_counts = related_to_metric_groups_dict['related_to_metrics'][related_metric][key] related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts # Remap the metric_id", "% str(fail_msg)) if trace != 'none': current_logger.error('error :: related_to_metric_groups :: could not get", "= metric_group_table_meta(current_skyline_app, engine) if fail_msg != 'metric_group meta reflected OK': current_logger.error('error :: related_to_metric_groups", "results = connection.execute(stmt) for row in results: group_metric_id = row['metric_id'] group_base_name = None", "as err: 
current_logger.error('error :: related_to_metric_groups :: base_name_from_metric_id failed to determine base_name from metric_id:", "get_engine(current_skyline_app) if fail_msg != 'got MySQL engine': current_logger.error('error :: related_to_metric_groups :: could not", "= select([metric_group_table]) results = connection.execute(stmt) for row in results: group_metric_id = row['metric_id'] group_base_name", "engine_disposal, metric_group_table_meta from functions.metrics.get_base_name_from_metric_id import get_base_name_from_metric_id def related_to_metric_groups(current_skyline_app, base_name, metric_id): \"\"\" Returns a", "base_name related_to_metric_groups_dict['metric_id'] = metric_id related_to_metric_groups_dict['related_to_metrics'] = {} try: engine, fail_msg, trace = get_engine(current_skyline_app)", "trace - %s' % str(trace)) except Exception as err: current_logger.error(traceback.format_exc()) current_logger.error('error :: related_to_metric_groups", "base_name, metric_id): \"\"\" Returns a dict of all the metric_groups that a metric", "= related_to_metric_groups_dict['related_to_metrics'][related_metric][key] related_to_metric_groups_dict['related_to_metrics'][related_metric][key] = shifted_counts # Remap the metric_id and related_metric_id for clarity", "% str(err)) if engine: engine_disposal(current_skyline_app, engine) for related_metric in list(related_to_metric_groups_dict['related_to_metrics'].keys()): for key in", "MySQL engine': current_logger.error('error :: related_to_metric_groups :: could not get a MySQL engine fail_msg" ]
[ "error = \"There was an error importing the given file. Please try again.\"", "date_hierarchy = 'scheduled' list_filter = ('status', 'call_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name')", "'password_changed') @admin.register(StatusChange) class StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'old', 'new', 'type', 'created')", "@admin.register(mwbase.Note) class NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('participant_name', 'comment', 'created') date_hierarchy = 'created' @admin.register(mwbase.Connection)", "True, }) return response else: return JsonResponse({'success': False, 'message': 'Form Invalid',}) else: return", "= getattr(obj, self.participant_field) if participant is not None: return participant.facility.capitalize() facility.admin_order_field = '{}__facility'.format(participant_field)", "'modified') inlines = (ConnectionInline, NoteInline) actions = (mark_quit, revert_status,) class ParticipantAdminMixin(object): participant_field =", "duplicates, descriptions, total, errors ) = sms_bank.check_messages(file) url = reverse('admin:smsbank_import_view') response = JsonResponse({", "NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('participant_name', 'comment', 'created') date_hierarchy = 'created' @admin.register(mwbase.Connection) class ConnectionAdmin(admin.ModelAdmin,", "'counts': counts, 'existing': existing, 'diff': diff, 'error': error, **(extra_context or {}), } return", "'event', 'created') class PractitionerInline(admin.TabularInline): model = mwbase.Practitioner class UserAdmin(UserAdmin): inlines = (PractitionerInline,) #", "study_id.short_description = 'Study ID' study_id.admin_order_field = '{}__study_id'.format(participant_field) def phone_number(self, obj): connection = getattr(obj,", "'new', 'type', 'created') search_fields = ('participant__study_id', 'participant__display_name') 
@admin.register(mwbase.EventLog) class EventLogAdmin(admin.ModelAdmin): list_display = ('user',", "quit' def revert_status(modeladmin, request, queryset): ''' set the status for each participant in", "= (ConnectionInline, NoteInline) actions = (mark_quit, revert_status,) class ParticipantAdminMixin(object): participant_field = 'participant' def", "= 'attachment; filename=\"smsbank.xlsx\"' return response def smsbank_import_view(self, request, extra_context=None): opts = self.model._meta app_label", "and save ''' for c in queryset: c.set_status('quit', comment='Status set from bulk quit", "= \"admin/mwbase/automatedmessage/sms_bank_import.html\" def changelist_view(self, request, extra_context=None): extra_context = extra_context or {} extra_context['form'] =", "= ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Practitioner) class PractitionerAdmin(admin.ModelAdmin): list_display = ('facility', 'username', 'password_changed') @admin.register(StatusChange) class", "('facility', 'study_group', ('created', admin.DateFieldListFilter), 'preg_status', 'is_validated', 'language', 'send_day') ordering = ('study_id',) search_fields =", "comment='Status set from bulk quit action') mark_quit.short_description = 'Mark participant as quit' def", "'Number' phone_number.admin_order_field = '{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message) class MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('text', 'participant_name', 'identity',", "file = form.cleaned_data.get(\"file\") # try: counts, existing, diff= sms_bank.import_messages(file) # except Exception as", "'display_name', 'connection__identity', 'anc_num') readonly_fields = ('last_msg_client', 'last_msg_system', 'created', 'modified') inlines = (ConnectionInline, NoteInline)", "self.model._meta app_label = opts.app_label items = duplicates = descriptions = total = None", "from django.http.response import HttpResponse from django.http import 
JsonResponse from django.template.response import SimpleTemplateResponse, TemplateResponse", "'Study ID' study_id.admin_order_field = '{}__study_id'.format(participant_field) def phone_number(self, obj): connection = getattr(obj, self.participant_field).connection() if", "'connection__identity', 'anc_num') readonly_fields = ('last_msg_client', 'last_msg_system', 'created', 'modified') inlines = (ConnectionInline, NoteInline) actions", "as e: # print(e) # error = \"There was an error importing the", "response = JsonResponse({ 'url': url, 'duplicates': duplicates, 'errors': errors, 'total': total, 'success': True,", "= self.model._meta app_label = opts.app_label items = duplicates = descriptions = total =", "= ('identity', 'participant_name', 'facility', 'is_primary') search_fields = ('participant__study_id', 'participant__display_name', 'identity') @admin.register(mwbase.Visit) class VisitAdmin(admin.ModelAdmin,", "response['Content-Disposition'] = 'attachment; filename=\"smsbank.xlsx\"' return response def smsbank_import_view(self, request, extra_context=None): opts = self.model._meta", "study_id(self, obj): return getattr(obj, self.participant_field).study_id study_id.short_description = 'Study ID' study_id.admin_order_field = '{}__study_id'.format(participant_field) def", "search_fields = ('participant__study_id', 'participant__display_name', 'connection__identity') readonly_fields = ('created', 'modified') def identity(self, obj): return", "swapper.load_model(\"mwbase\", \"Participant\") StatusChange = swapper.load_model(\"mwbase\", \"StatusChange\") class ConnectionInline(admin.TabularInline): model = mwbase.Connection extra =", "obj): connection = getattr(obj, self.participant_field).connection() if connection is not None: return html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection))", "VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'visit_type', 'scheduled', 
'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy", "'Revert to last status' @admin.register(Participant) class ParticipantAdmin(admin.ModelAdmin): list_display = ('study_id', 'display_name', 'preg_status', 'sms_status',", "path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view), name='smsbank_check_view'), path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'), path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx), name='smsbank_create_xlsx') ] urls = my_urls", "('participant__study_id', 'participant__display_name') @admin.register(mwbase.Practitioner) class PractitionerAdmin(admin.ModelAdmin): list_display = ('facility', 'username', 'password_changed') @admin.register(StatusChange) class StatusChangeAdmin(admin.ModelAdmin,", "counts, existing, diff= sms_bank.import_messages(file) # except Exception as e: # print(e) # error", "import save_virtual_workbook import utils.admin as utils # Local Imports from mwbase import models", "models as mwbase from mwbase.forms import ImportXLSXForm from mwbase.utils import sms_bank import swapper", "to their previous status ''' for c in queryset: old_status = c.statuschange_set.last().old c.set_status(old_status,", "= { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'opts': opts, 'counts': counts, 'existing': existing, 'diff': diff,", "= my_urls + urls return urls def smsbank_create_xlsx(self, request, extra_context=None): wb = sms_bank.create_xlsx()", "revert_status(modeladmin, request, queryset): ''' set the status for each participant in queryset to", "if form.is_valid(): file = form.cleaned_data.get(\"file\") # try: counts, existing, diff= sms_bank.import_messages(file) # except", "as utils # Local Imports from mwbase import models as mwbase from mwbase.forms", "'url': url, 'duplicates': duplicates, 'errors': errors, 'total': total, 'success': True, 
}) return response", "@admin.register(StatusChange) class StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'old', 'new', 'type', 'created') search_fields", "queryset: c.set_status('quit', comment='Status set from bulk quit action') mark_quit.short_description = 'Mark participant as", "if request.method == 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") (items, duplicates, descriptions, total,", "('is_system', 'is_outgoing', 'external_status', ('participant', utils.NullFieldListFilter), ('created', admin.DateFieldListFilter), 'connection__participant__facility', 'translation_status', 'is_related', 'external_success') date_hierarchy =", "''' for c in queryset: old_status = c.statuschange_set.last().old c.set_status(old_status, comment='Status reverted from bulk", "(ConnectionInline, NoteInline) actions = (mark_quit, revert_status,) class ParticipantAdminMixin(object): participant_field = 'participant' def participant_name(self,", "'admin/sms_bank_import.html' ], context) def smsbank_check_view(self, request, extra_context=None): opts = self.model._meta app_label = opts.app_label", "context) def smsbank_check_view(self, request, extra_context=None): opts = self.model._meta app_label = opts.app_label items =", "facility(self, obj): participant = getattr(obj, self.participant_field) if participant is not None: return participant.facility.capitalize()", "django.http.response import HttpResponse from django.http import JsonResponse from django.template.response import SimpleTemplateResponse, TemplateResponse from", "list_display = ('study_id', 'participant_name', 'call_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled'", "to last status' @admin.register(Participant) class ParticipantAdmin(admin.ModelAdmin): list_display = ('study_id', 'display_name', 'preg_status', 'sms_status', 'description',", "import UserAdmin from 
django.contrib.auth.models import User from django.http.response import HttpResponse from django.http import", "getattr(obj, self.participant_field) if participant is not None: return html.format_html( \"<a href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant)) participant_name.short_description", "= ('participant__study_id', 'participant__display_name', 'identity') @admin.register(mwbase.Visit) class VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'visit_type',", "urls def smsbank_create_xlsx(self, request, extra_context=None): wb = sms_bank.create_xlsx() response = HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition']", "class AutomatedMessageAdmin(admin.ModelAdmin): list_display = ('description', 'english') list_filter = ('send_base', 'condition', 'group') change_list_template =", "total, errors ) = sms_bank.check_messages(file) url = reverse('admin:smsbank_import_view') response = JsonResponse({ 'url': url,", "'created' @admin.register(mwbase.Connection) class ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('identity', 'participant_name', 'facility', 'is_primary') search_fields =", "ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'call_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy", "\"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template = \"admin/mwbase/automatedmessage/sms_bank_import.html\" def changelist_view(self, request, extra_context=None): extra_context = extra_context or {}", "except Exception as e: # print(e) # error = \"There was an error", "= '{}__study_id'.format(participant_field) def phone_number(self, obj): connection = getattr(obj, self.participant_field).connection() if connection is not", "= 
form.cleaned_data.get(\"file\") (items, duplicates, descriptions, total, errors ) = sms_bank.check_messages(file) url = reverse('admin:smsbank_import_view')", "mwbase.forms import ImportXLSXForm from mwbase.utils import sms_bank import swapper AutomatedMessage = swapper.load_model(\"mwbase\", \"AutomatedMessage\")", "import sms_bank import swapper AutomatedMessage = swapper.load_model(\"mwbase\", \"AutomatedMessage\") Participant = swapper.load_model(\"mwbase\", \"Participant\") StatusChange", "django.contrib import admin from django.contrib.auth.admin import UserAdmin from django.contrib.auth.models import User from django.http.response", "'due_date', 'language', 'send_day', 'is_validated', 'created') list_display_links = ('study_id', 'display_name') list_filter = ('facility', 'study_group',", "ImportXLSXForm return super(AutomatedMessageAdmin, self).changelist_view(request, extra_context=extra_context) def get_urls(self): urls = super().get_urls() my_urls = [", "{ **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'opts': opts, 'counts': counts, 'existing': existing, 'diff': diff, 'error':", "html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description = 'Number' identity.admin_order_field = 'connection__identity' @admin.register(mwbase.PhoneCall) class PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display", "reverse from django.utils import html from openpyxl.writer.excel import save_virtual_workbook import utils.admin as utils", "1 def mark_quit(modeladmin, request, queryset): ''' mark all mwbase in queryset as quit", "diff= [], [], [] error = \"\" if request.method == 'POST': if form.is_valid():", "not None: return participant.facility.capitalize() facility.admin_order_field = '{}__facility'.format(participant_field) def study_id(self, obj): return getattr(obj, self.participant_field).study_id", "return urls def smsbank_create_xlsx(self, request, 
extra_context=None): wb = sms_bank.create_xlsx() response = HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')", "admin from django.contrib.auth.admin import UserAdmin from django.contrib.auth.models import User from django.http.response import HttpResponse", "None) if request.method == 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") (items, duplicates, descriptions,", "extra_context = extra_context or {} extra_context['form'] = ImportXLSXForm return super(AutomatedMessageAdmin, self).changelist_view(request, extra_context=extra_context) def", "HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] = 'attachment; filename=\"smsbank.xlsx\"' return response def smsbank_import_view(self, request, extra_context=None): opts", "import utils.admin as utils # Local Imports from mwbase import models as mwbase", "= swapper.load_model(\"mwbase\", \"StatusChange\") class ConnectionInline(admin.TabularInline): model = mwbase.Connection extra = 0 class NoteInline(admin.TabularInline):", "= 0 class NoteInline(admin.TabularInline): model = mwbase.Note extra = 1 def mark_quit(modeladmin, request,", "or {} extra_context['form'] = ImportXLSXForm return super(AutomatedMessageAdmin, self).changelist_view(request, extra_context=extra_context) def get_urls(self): urls =", "= descriptions = total = None form = ImportXLSXForm(request.POST or None, request.FILES or", "'modified') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Note) class NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('participant_name', 'comment',", "status' @admin.register(Participant) class ParticipantAdmin(admin.ModelAdmin): list_display = ('study_id', 'display_name', 'preg_status', 'sms_status', 'description', 'facility', 'phone_number',", "super().get_urls() my_urls 
= [ path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view), name='smsbank_check_view'), path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'), path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx), name='smsbank_create_xlsx')", "search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Note) class NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('participant_name', 'comment', 'created')", "import html from openpyxl.writer.excel import save_virtual_workbook import utils.admin as utils # Local Imports", "class EventLogAdmin(admin.ModelAdmin): list_display = ('user', 'event', 'created') class PractitionerInline(admin.TabularInline): model = mwbase.Practitioner class", "if request.method == 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") # try: counts, existing,", "'created') list_display_links = ('study_id', 'display_name') list_filter = ('facility', 'study_group', ('created', admin.DateFieldListFilter), 'preg_status', 'is_validated',", "def identity(self, obj): return html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description = 'Number' identity.admin_order_field = 'connection__identity' @admin.register(mwbase.PhoneCall)", "= 'participant' def participant_name(self, obj): participant = getattr(obj, self.participant_field) if participant is not", "= 'Number' phone_number.admin_order_field = '{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message) class MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('text', 'participant_name',", "= 'scheduled' list_filter = ('status', 'visit_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall)", "total, 'success': True, }) return response else: return 
JsonResponse({'success': False, 'message': 'Form Invalid',})", "save_virtual_workbook import utils.admin as utils # Local Imports from mwbase import models as", "bulk quit action') mark_quit.short_description = 'Mark participant as quit' def revert_status(modeladmin, request, queryset):", "if participant is not None: return html.format_html( \"<a href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant)) participant_name.short_description = 'SMS", "django.urls import path, reverse from django.utils import html from openpyxl.writer.excel import save_virtual_workbook import", "('send_base', 'condition', 'group') change_list_template = \"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template = \"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template = \"admin/mwbase/automatedmessage/sms_bank_import.html\" def", "'is_system', 'is_outgoing', 'is_reply', 'external_status', 'translation_status', 'created') list_filter = ('is_system', 'is_outgoing', 'external_status', ('participant', utils.NullFieldListFilter),", "name='smsbank_create_xlsx') ] urls = my_urls + urls return urls def smsbank_create_xlsx(self, request, extra_context=None):", "extra_context=None): wb = sms_bank.create_xlsx() response = HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] = 'attachment; filename=\"smsbank.xlsx\"' return", "in queryset: old_status = c.statuschange_set.last().old c.set_status(old_status, comment='Status reverted from bulk action') revert_status.short_description =", "'is_validated', 'language', 'send_day') ordering = ('study_id',) search_fields = ('study_id', 'display_name', 'connection__identity', 'anc_num') readonly_fields", "from mwbase.utils import sms_bank import swapper AutomatedMessage = swapper.load_model(\"mwbase\", \"AutomatedMessage\") Participant = swapper.load_model(\"mwbase\",", 
"facility.admin_order_field = '{}__facility'.format(participant_field) def study_id(self, obj): return getattr(obj, self.participant_field).study_id study_id.short_description = 'Study ID'", "date_hierarchy = 'created' list_filter = ('outcome', 'is_outgoing') readonly_fields = ('created', 'modified') search_fields =", "else: return JsonResponse({'success': False, 'message': 'Form Invalid',}) else: return JsonResponse({'success': False, 'message': 'Invalid", "JsonResponse from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import path, reverse from django.utils", "@admin.register(mwbase.EventLog) class EventLogAdmin(admin.ModelAdmin): list_display = ('user', 'event', 'created') class PractitionerInline(admin.TabularInline): model = mwbase.Practitioner", "self.model._meta app_label = opts.app_label form = ImportXLSXForm(request.POST or None, request.FILES or None) counts,", "import HttpResponse from django.http import JsonResponse from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls", "response = HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] = 'attachment; filename=\"smsbank.xlsx\"' return response def smsbank_import_view(self, request,", "search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.EventLog) class EventLogAdmin(admin.ModelAdmin): list_display = ('user', 'event', 'created') class", "revert_status.short_description = 'Revert to last status' @admin.register(Participant) class ParticipantAdmin(admin.ModelAdmin): list_display = ('study_id', 'display_name',", "is not None: return participant.facility.capitalize() facility.admin_order_field = '{}__facility'.format(participant_field) def study_id(self, obj): return getattr(obj,", "href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description = 
'Number' phone_number.admin_order_field = '{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message) class MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display =", "'display_name') list_filter = ('facility', 'study_group', ('created', admin.DateFieldListFilter), 'preg_status', 'is_validated', 'language', 'send_day') ordering =", "Re-register UserAdmin admin.site.unregister(User) admin.site.register(User, UserAdmin) @admin.register(AutomatedMessage) class AutomatedMessageAdmin(admin.ModelAdmin): list_display = ('description', 'english') list_filter", "= ('description', 'english') list_filter = ('send_base', 'condition', 'group') change_list_template = \"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template =", "urls return urls def smsbank_create_xlsx(self, request, extra_context=None): wb = sms_bank.create_xlsx() response = HttpResponse(save_virtual_workbook(wb),", "sms_bank.import_messages(file) # except Exception as e: # print(e) # error = \"There was", "Exception as e: # print(e) # error = \"There was an error importing", "was an error importing the given file. 
Please try again.\" context = {", "= duplicates = descriptions = total = None form = ImportXLSXForm(request.POST or None,", "= JsonResponse({ 'url': url, 'duplicates': duplicates, 'errors': errors, 'total': total, 'success': True, })", "for c in queryset: c.set_status('quit', comment='Status set from bulk quit action') mark_quit.short_description =", "}) return response else: return JsonResponse({'success': False, 'message': 'Form Invalid',}) else: return JsonResponse({'success':", "self.participant_field) if participant is not None: return html.format_html( \"<a href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant)) participant_name.short_description =", "@admin.register(mwbase.Message) class MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('text', 'participant_name', 'identity', 'is_system', 'is_outgoing', 'is_reply', 'external_status',", "'participant_name', 'old', 'new', 'type', 'created') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.EventLog) class EventLogAdmin(admin.ModelAdmin): list_display", "def changelist_view(self, request, extra_context=None): extra_context = extra_context or {} extra_context['form'] = ImportXLSXForm return", "ParticipantAdminMixin(object): participant_field = 'participant' def participant_name(self, obj): participant = getattr(obj, self.participant_field) if participant", "actions = (mark_quit, revert_status,) class ParticipantAdminMixin(object): participant_field = 'participant' def participant_name(self, obj): participant", "request, extra_context=None): wb = sms_bank.create_xlsx() response = HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] = 'attachment; filename=\"smsbank.xlsx\"'", "django.utils import html from openpyxl.writer.excel import save_virtual_workbook import utils.admin as utils # Local", "return 
html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description = 'Number' identity.admin_order_field = 'connection__identity' @admin.register(mwbase.PhoneCall) class PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin):", "def study_id(self, obj): return getattr(obj, self.participant_field).study_id study_id.short_description = 'Study ID' study_id.admin_order_field = '{}__study_id'.format(participant_field)", "participant_name.short_description = 'SMS Name' participant_name.admin_order_field = '{}__study_id'.format(participant_field) def facility(self, obj): participant = getattr(obj,", "'group') change_list_template = \"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template = \"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template = \"admin/mwbase/automatedmessage/sms_bank_import.html\" def changelist_view(self, request,", "mwbase.Practitioner class UserAdmin(UserAdmin): inlines = (PractitionerInline,) # Re-register UserAdmin admin.site.unregister(User) admin.site.register(User, UserAdmin) @admin.register(AutomatedMessage)", "importing the given file. 
Please try again.\" context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural),", "ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'call_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy =", "= ('study_id', 'participant_name', 'visit_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter", "('study_id', 'display_name') list_filter = ('facility', 'study_group', ('created', admin.DateFieldListFilter), 'preg_status', 'is_validated', 'language', 'send_day') ordering", "request.FILES or None) counts, existing, diff= [], [], [] error = \"\" if", "sms_bank import swapper AutomatedMessage = swapper.load_model(\"mwbase\", \"AutomatedMessage\") Participant = swapper.load_model(\"mwbase\", \"Participant\") StatusChange =", "app_label, 'admin/sms_bank_import.html' ], context) def smsbank_check_view(self, request, extra_context=None): opts = self.model._meta app_label =", "list_display = ('text', 'participant_name', 'identity', 'is_system', 'is_outgoing', 'is_reply', 'external_status', 'translation_status', 'created') list_filter =", "identity(self, obj): return html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description = 'Number' identity.admin_order_field = 'connection__identity' @admin.register(mwbase.PhoneCall) class", "= '{}__study_id'.format(participant_field) def facility(self, obj): participant = getattr(obj, self.participant_field) if participant is not", "'scheduled' list_filter = ('status', 'call_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Practitioner) class", "filename=\"smsbank.xlsx\"' return response def smsbank_import_view(self, request, extra_context=None): opts = self.model._meta app_label = opts.app_label", "file = form.cleaned_data.get(\"file\") 
(items, duplicates, descriptions, total, errors ) = sms_bank.check_messages(file) url =", "EventLogAdmin(admin.ModelAdmin): list_display = ('user', 'event', 'created') class PractitionerInline(admin.TabularInline): model = mwbase.Practitioner class UserAdmin(UserAdmin):", "UserAdmin admin.site.unregister(User) admin.site.register(User, UserAdmin) @admin.register(AutomatedMessage) class AutomatedMessageAdmin(admin.ModelAdmin): list_display = ('description', 'english') list_filter =", "'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status', 'visit_type', 'arrived', 'scheduled') search_fields =", "again.\" context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'opts': opts, 'counts': counts, 'existing': existing,", "('study_id', 'participant_name', 'visit_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter =", "request, queryset): ''' mark all mwbase in queryset as quit and save '''", "# except Exception as e: # print(e) # error = \"There was an", "as mwbase from mwbase.forms import ImportXLSXForm from mwbase.utils import sms_bank import swapper AutomatedMessage", "= 'Revert to last status' @admin.register(Participant) class ParticipantAdmin(admin.ModelAdmin): list_display = ('study_id', 'display_name', 'preg_status',", "'SMS Name' participant_name.admin_order_field = '{}__study_id'.format(participant_field) def facility(self, obj): participant = getattr(obj, self.participant_field) if", "= ('study_id',) search_fields = ('study_id', 'display_name', 'connection__identity', 'anc_num') readonly_fields = ('last_msg_client', 'last_msg_system', 'created',", "'modified') def identity(self, obj): return html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description = 'Number' identity.admin_order_field = 'connection__identity'", "list_filter = ('status', 'call_type', 'arrived', 'scheduled') 
search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Practitioner) class PractitionerAdmin(admin.ModelAdmin):", "'visit_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status', 'visit_type',", "error = \"\" if request.method == 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") #", "= ('is_system', 'is_outgoing', 'external_status', ('participant', utils.NullFieldListFilter), ('created', admin.DateFieldListFilter), 'connection__participant__facility', 'translation_status', 'is_related', 'external_success') date_hierarchy", "'diff': diff, 'error': error, **(extra_context or {}), } return TemplateResponse(request, self.smsbank_import_template or [", "'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status', 'visit_type', 'arrived', 'scheduled')", "'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status', 'visit_type', 'arrived', 'scheduled') search_fields", "context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'opts': opts, 'counts': counts, 'existing': existing, 'diff':", "= 'created' search_fields = ('participant__study_id', 'participant__display_name', 'connection__identity') readonly_fields = ('created', 'modified') def identity(self,", "'admin/%s/sms_bank_import.html' % app_label, 'admin/sms_bank_import.html' ], context) def smsbank_check_view(self, request, extra_context=None): opts = self.model._meta", "0 class NoteInline(admin.TabularInline): model = mwbase.Note extra = 1 def mark_quit(modeladmin, request, queryset):", "'Mark participant as quit' def revert_status(modeladmin, request, queryset): ''' set the status for", "model = mwbase.Connection extra = 0 class NoteInline(admin.TabularInline): model = mwbase.Note extra =", "openpyxl.writer.excel import save_virtual_workbook import 
utils.admin as utils # Local Imports from mwbase import", "search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Practitioner) class PractitionerAdmin(admin.ModelAdmin): list_display = ('facility', 'username', 'password_changed') @admin.register(StatusChange)", "opts, 'counts': counts, 'existing': existing, 'diff': diff, 'error': error, **(extra_context or {}), }", "**self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'opts': opts, 'counts': counts, 'existing': existing, 'diff': diff, 'error': error,", "\"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template = \"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template = \"admin/mwbase/automatedmessage/sms_bank_import.html\" def changelist_view(self, request, extra_context=None): extra_context =", "or [ 'admin/%s/%s/sms_bank_import.html' % (app_label, opts.model_name), 'admin/%s/sms_bank_import.html' % app_label, 'admin/sms_bank_import.html' ], context) def", "return JsonResponse({'success': False, 'message': 'Form Invalid',}) else: return JsonResponse({'success': False, 'message': 'Invalid method',})", "('created', 'modified') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Note) class NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('participant_name',", "'username', 'password_changed') @admin.register(StatusChange) class StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'old', 'new', 'type',", "'participant' def participant_name(self, obj): participant = getattr(obj, self.participant_field) if participant is not None:", "duplicates, 'errors': errors, 'total': total, 'success': True, }) return response else: return JsonResponse({'success':", "def facility(self, obj): participant = getattr(obj, self.participant_field) if participant is not None: return", "inlines = (PractitionerInline,) 
# Re-register UserAdmin admin.site.unregister(User) admin.site.register(User, UserAdmin) @admin.register(AutomatedMessage) class AutomatedMessageAdmin(admin.ModelAdmin): list_display", "= None form = ImportXLSXForm(request.POST or None, request.FILES or None) if request.method ==", "ImportXLSXForm(request.POST or None, request.FILES or None) if request.method == 'POST': if form.is_valid(): file", "from django.contrib.auth.admin import UserAdmin from django.contrib.auth.models import User from django.http.response import HttpResponse from", "'is_reply', 'external_status', 'translation_status', 'created') list_filter = ('is_system', 'is_outgoing', 'external_status', ('participant', utils.NullFieldListFilter), ('created', admin.DateFieldListFilter),", "class NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('participant_name', 'comment', 'created') date_hierarchy = 'created' @admin.register(mwbase.Connection) class", "or None) counts, existing, diff= [], [], [] error = \"\" if request.method", "== 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") # try: counts, existing, diff= sms_bank.import_messages(file)", "+ urls return urls def smsbank_create_xlsx(self, request, extra_context=None): wb = sms_bank.create_xlsx() response =", "'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status', 'call_type', 'arrived',", "c.statuschange_set.last().old c.set_status(old_status, comment='Status reverted from bulk action') revert_status.short_description = 'Revert to last status'", "Imports from mwbase import models as mwbase from mwbase.forms import ImportXLSXForm from mwbase.utils", "obj): participant = getattr(obj, self.participant_field) if participant is not None: return html.format_html( \"<a", "SimpleTemplateResponse, TemplateResponse from django.urls import path, reverse from django.utils import html from openpyxl.writer.excel", "('created', admin.DateFieldListFilter), 
'preg_status', 'is_validated', 'language', 'send_day') ordering = ('study_id',) search_fields = ('study_id', 'display_name',", "'participant__display_name') @admin.register(mwbase.EventLog) class EventLogAdmin(admin.ModelAdmin): list_display = ('user', 'event', 'created') class PractitionerInline(admin.TabularInline): model =", "list_filter = ('send_base', 'condition', 'group') change_list_template = \"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template = \"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template =", "self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'), path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx), name='smsbank_create_xlsx') ] urls = my_urls + urls return urls", "def smsbank_import_view(self, request, extra_context=None): opts = self.model._meta app_label = opts.app_label form = ImportXLSXForm(request.POST", "UserAdmin from django.contrib.auth.models import User from django.http.response import HttpResponse from django.http import JsonResponse", "self.participant_field).connection() if connection is not None: return html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description = 'Number' phone_number.admin_order_field", "url = reverse('admin:smsbank_import_view') response = JsonResponse({ 'url': url, 'duplicates': duplicates, 'errors': errors, 'total':", "comment='Status reverted from bulk action') revert_status.short_description = 'Revert to last status' @admin.register(Participant) class", "name='smsbank_check_view'), path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'), path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx), name='smsbank_create_xlsx') ] urls = my_urls + urls", "[] error = \"\" if request.method == 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\")", "= total = 
None form = ImportXLSXForm(request.POST or None, request.FILES or None) if", "phone_number.admin_order_field = '{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message) class MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('text', 'participant_name', 'identity', 'is_system',", "= ('facility', 'study_group', ('created', admin.DateFieldListFilter), 'preg_status', 'is_validated', 'language', 'send_day') ordering = ('study_id',) search_fields", "queryset: old_status = c.statuschange_set.last().old c.set_status(old_status, comment='Status reverted from bulk action') revert_status.short_description = 'Revert", "self.admin_site.admin_view(self.smsbank_check_view), name='smsbank_check_view'), path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'), path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx), name='smsbank_create_xlsx') ] urls = my_urls +", "'facility', 'phone_number', 'due_date', 'language', 'send_day', 'is_validated', 'created') list_display_links = ('study_id', 'display_name') list_filter =", "'module_name': str(opts.verbose_name_plural), 'opts': opts, 'counts': counts, 'existing': existing, 'diff': diff, 'error': error, **(extra_context", "ID' study_id.admin_order_field = '{}__study_id'.format(participant_field) def phone_number(self, obj): connection = getattr(obj, self.participant_field).connection() if connection", "'is_outgoing', 'external_status', ('participant', utils.NullFieldListFilter), ('created', admin.DateFieldListFilter), 'connection__participant__facility', 'translation_status', 'is_related', 'external_success') date_hierarchy = 'created'", "bulk action') revert_status.short_description = 'Revert to last status' @admin.register(Participant) class ParticipantAdmin(admin.ModelAdmin): list_display =", "utils # Local Imports from mwbase import models as mwbase from mwbase.forms import", "= ('study_id', 
'display_name') list_filter = ('facility', 'study_group', ('created', admin.DateFieldListFilter), 'preg_status', 'is_validated', 'language', 'send_day')", "StatusChange = swapper.load_model(\"mwbase\", \"StatusChange\") class ConnectionInline(admin.TabularInline): model = mwbase.Connection extra = 0 class", "\"StatusChange\") class ConnectionInline(admin.TabularInline): model = mwbase.Connection extra = 0 class NoteInline(admin.TabularInline): model =", "'identity', 'is_system', 'is_outgoing', 'is_reply', 'external_status', 'translation_status', 'created') list_filter = ('is_system', 'is_outgoing', 'external_status', ('participant',", "class UserAdmin(UserAdmin): inlines = (PractitionerInline,) # Re-register UserAdmin admin.site.unregister(User) admin.site.register(User, UserAdmin) @admin.register(AutomatedMessage) class", "participant_name.admin_order_field = '{}__study_id'.format(participant_field) def facility(self, obj): participant = getattr(obj, self.participant_field) if participant is", "descriptions = total = None form = ImportXLSXForm(request.POST or None, request.FILES or None)", "'attachment; filename=\"smsbank.xlsx\"' return response def smsbank_import_view(self, request, extra_context=None): opts = self.model._meta app_label =", "= 'SMS Name' participant_name.admin_order_field = '{}__study_id'.format(participant_field) def facility(self, obj): participant = getattr(obj, self.participant_field)", "request, extra_context=None): opts = self.model._meta app_label = opts.app_label items = duplicates = descriptions", "identity.short_description = 'Number' identity.admin_order_field = 'connection__identity' @admin.register(mwbase.PhoneCall) class PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment',", "NoteInline) actions = (mark_quit, revert_status,) class ParticipantAdminMixin(object): participant_field = 'participant' def participant_name(self, obj):", "AutomatedMessageAdmin(admin.ModelAdmin): list_display = ('description', 
'english') list_filter = ('send_base', 'condition', 'group') change_list_template = \"admin/mwbase/automatedmessage/change_list.html\"", "def mark_quit(modeladmin, request, queryset): ''' mark all mwbase in queryset as quit and", "'created') date_hierarchy = 'created' @admin.register(mwbase.Connection) class ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('identity', 'participant_name', 'facility',", "smsbank_import_template = \"admin/mwbase/automatedmessage/sms_bank_import.html\" def changelist_view(self, request, extra_context=None): extra_context = extra_context or {} extra_context['form']", "from mwbase import models as mwbase from mwbase.forms import ImportXLSXForm from mwbase.utils import", "= ('send_base', 'condition', 'group') change_list_template = \"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template = \"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template = \"admin/mwbase/automatedmessage/sms_bank_import.html\"", "= opts.app_label items = duplicates = descriptions = total = None form =", "class VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'visit_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status')", "'translation_status', 'is_related', 'external_success') date_hierarchy = 'created' search_fields = ('participant__study_id', 'participant__display_name', 'connection__identity') readonly_fields =", "request, extra_context=None): opts = self.model._meta app_label = opts.app_label form = ImportXLSXForm(request.POST or None,", "e: # print(e) # error = \"There was an error importing the given", "action') revert_status.short_description = 'Revert to last status' @admin.register(Participant) class ParticipantAdmin(admin.ModelAdmin): list_display = ('study_id',", "= ('participant__study_id', 'participant__display_name') @admin.register(mwbase.EventLog) class EventLogAdmin(admin.ModelAdmin): list_display = 
('user', 'event', 'created') class PractitionerInline(admin.TabularInline):", "django.contrib.auth.models import User from django.http.response import HttpResponse from django.http import JsonResponse from django.template.response", "request, queryset): ''' set the status for each participant in queryset to their", "= getattr(obj, self.participant_field) if participant is not None: return html.format_html( \"<a href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant))", "'visit_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall) class ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin): list_display =", "from django.contrib.auth.models import User from django.http.response import HttpResponse from django.http import JsonResponse from", "('study_id', 'display_name', 'connection__identity', 'anc_num') readonly_fields = ('last_msg_client', 'last_msg_system', 'created', 'modified') inlines = (ConnectionInline,", "'condition', 'group') change_list_template = \"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template = \"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template = \"admin/mwbase/automatedmessage/sms_bank_import.html\" def changelist_view(self,", "= ImportXLSXForm(request.POST or None, request.FILES or None) counts, existing, diff= [], [], []", "'total': total, 'success': True, }) return response else: return JsonResponse({'success': False, 'message': 'Form", "existing, 'diff': diff, 'error': error, **(extra_context or {}), } return TemplateResponse(request, self.smsbank_import_template or", "= 'created' list_filter = ('outcome', 'is_outgoing') readonly_fields = ('created', 'modified') search_fields = ('participant__study_id',", "('study_id', 'display_name', 'preg_status', 'sms_status', 'description', 'facility', 'phone_number', 'due_date', 'language', 'send_day', 'is_validated', 'created') 
list_display_links", "# try: counts, existing, diff= sms_bank.import_messages(file) # except Exception as e: # print(e)", "or None) if request.method == 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") (items, duplicates,", "('participant_name', 'comment', 'created') date_hierarchy = 'created' @admin.register(mwbase.Connection) class ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('identity',", "] urls = my_urls + urls return urls def smsbank_create_xlsx(self, request, extra_context=None): wb", "import admin from django.contrib.auth.admin import UserAdmin from django.contrib.auth.models import User from django.http.response import", "response def smsbank_import_view(self, request, extra_context=None): opts = self.model._meta app_label = opts.app_label form =", "from django.contrib import admin from django.contrib.auth.admin import UserAdmin from django.contrib.auth.models import User from", "phone_number(self, obj): connection = getattr(obj, self.participant_field).connection() if connection is not None: return html.format_html(\"<a", "'connection__participant__facility', 'translation_status', 'is_related', 'external_success') date_hierarchy = 'created' search_fields = ('participant__study_id', 'participant__display_name', 'connection__identity') readonly_fields", "\"AutomatedMessage\") Participant = swapper.load_model(\"mwbase\", \"Participant\") StatusChange = swapper.load_model(\"mwbase\", \"StatusChange\") class ConnectionInline(admin.TabularInline): model =", "= 'scheduled' list_filter = ('status', 'call_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Practitioner)", "= 'Study ID' study_id.admin_order_field = '{}__study_id'.format(participant_field) def phone_number(self, obj): connection = getattr(obj, self.participant_field).connection()", "# Local Imports from mwbase import models as mwbase from mwbase.forms import ImportXLSXForm", 
"old_status = c.statuschange_set.last().old c.set_status(old_status, comment='Status reverted from bulk action') revert_status.short_description = 'Revert to", "('user', 'event', 'created') class PractitionerInline(admin.TabularInline): model = mwbase.Practitioner class UserAdmin(UserAdmin): inlines = (PractitionerInline,)", "'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status', 'visit_type', 'arrived',", "utils.admin as utils # Local Imports from mwbase import models as mwbase from", "for c in queryset: old_status = c.statuschange_set.last().old c.set_status(old_status, comment='Status reverted from bulk action')", "form.cleaned_data.get(\"file\") # try: counts, existing, diff= sms_bank.import_messages(file) # except Exception as e: #", "smsbank_import_view(self, request, extra_context=None): opts = self.model._meta app_label = opts.app_label form = ImportXLSXForm(request.POST or", "duplicates = descriptions = total = None form = ImportXLSXForm(request.POST or None, request.FILES", "'errors': errors, 'total': total, 'success': True, }) return response else: return JsonResponse({'success': False,", "= ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Note) class NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('participant_name', 'comment', 'created') date_hierarchy", "{0.display_name}</a>\".format(participant)) participant_name.short_description = 'SMS Name' participant_name.admin_order_field = '{}__study_id'.format(participant_field) def facility(self, obj): participant =", "PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'phone_number', 'outcome', 'is_outgoing', 'created') date_hierarchy = 'created'", "= ('comment', 'participant_name', 'old', 'new', 'type', 'created') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.EventLog) class", "items = 
duplicates = descriptions = total = None form = ImportXLSXForm(request.POST or", "admin.DateFieldListFilter), 'preg_status', 'is_validated', 'language', 'send_day') ordering = ('study_id',) search_fields = ('study_id', 'display_name', 'connection__identity',", "= sms_bank.check_messages(file) url = reverse('admin:smsbank_import_view') response = JsonResponse({ 'url': url, 'duplicates': duplicates, 'errors':", "'type', 'created') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.EventLog) class EventLogAdmin(admin.ModelAdmin): list_display = ('user', 'event',", "= \"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template = \"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template = \"admin/mwbase/automatedmessage/sms_bank_import.html\" def changelist_view(self, request, extra_context=None): extra_context", "'preg_status', 'sms_status', 'description', 'facility', 'phone_number', 'due_date', 'language', 'send_day', 'is_validated', 'created') list_display_links = ('study_id',", "= getattr(obj, self.participant_field).connection() if connection is not None: return html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description =", "form.is_valid(): file = form.cleaned_data.get(\"file\") (items, duplicates, descriptions, total, errors ) = sms_bank.check_messages(file) url", "@admin.register(mwbase.Visit) class VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'visit_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived',", "extra = 1 def mark_quit(modeladmin, request, queryset): ''' mark all mwbase in queryset", "('outcome', 'is_outgoing') readonly_fields = ('created', 'modified') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Note) class NoteAdmin(admin.ModelAdmin,", "counts, 'existing': existing, 'diff': diff, 'error': 
error, **(extra_context or {}), } return TemplateResponse(request,", "'participant_name', 'phone_number', 'outcome', 'is_outgoing', 'created') date_hierarchy = 'created' list_filter = ('outcome', 'is_outgoing') readonly_fields", "swapper.load_model(\"mwbase\", \"AutomatedMessage\") Participant = swapper.load_model(\"mwbase\", \"Participant\") StatusChange = swapper.load_model(\"mwbase\", \"StatusChange\") class ConnectionInline(admin.TabularInline): model", "= ('study_id', 'display_name', 'connection__identity', 'anc_num') readonly_fields = ('last_msg_client', 'last_msg_system', 'created', 'modified') inlines =", "status for each participant in queryset to their previous status ''' for c", "status ''' for c in queryset: old_status = c.statuschange_set.last().old c.set_status(old_status, comment='Status reverted from", "def phone_number(self, obj): connection = getattr(obj, self.participant_field).connection() if connection is not None: return", "model = mwbase.Practitioner class UserAdmin(UserAdmin): inlines = (PractitionerInline,) # Re-register UserAdmin admin.site.unregister(User) admin.site.register(User,", "path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'), path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx), name='smsbank_create_xlsx') ] urls = my_urls + urls return", "@admin.register(mwbase.ScheduledPhoneCall) class ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'call_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived',", "('status', 'visit_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall) class ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin): list_display", "form = ImportXLSXForm(request.POST or None, request.FILES or None) counts, existing, diff= [], [],", "class 
ConnectionInline(admin.TabularInline): model = mwbase.Connection extra = 0 class NoteInline(admin.TabularInline): model = mwbase.Note", "participant_field = 'participant' def participant_name(self, obj): participant = getattr(obj, self.participant_field) if participant is", "getattr(obj, self.participant_field).connection() if connection is not None: return html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description = 'Number'", "PractitionerAdmin(admin.ModelAdmin): list_display = ('facility', 'username', 'password_changed') @admin.register(StatusChange) class StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment',", "list_display = ('description', 'english') list_filter = ('send_base', 'condition', 'group') change_list_template = \"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template", "swapper.load_model(\"mwbase\", \"StatusChange\") class ConnectionInline(admin.TabularInline): model = mwbase.Connection extra = 0 class NoteInline(admin.TabularInline): model", "= ('study_id', 'participant_name', 'call_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter", "in queryset as quit and save ''' for c in queryset: c.set_status('quit', comment='Status", "= reverse('admin:smsbank_import_view') response = JsonResponse({ 'url': url, 'duplicates': duplicates, 'errors': errors, 'total': total,", "= self.model._meta app_label = opts.app_label form = ImportXLSXForm(request.POST or None, request.FILES or None)", "JsonResponse({ 'url': url, 'duplicates': duplicates, 'errors': errors, 'total': total, 'success': True, }) return", "c.set_status(old_status, comment='Status reverted from bulk action') revert_status.short_description = 'Revert to last status' @admin.register(Participant)", "phone_number.short_description = 'Number' phone_number.admin_order_field = 
'{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message) class MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('text',", "c in queryset: c.set_status('quit', comment='Status set from bulk quit action') mark_quit.short_description = 'Mark", "href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant)) participant_name.short_description = 'SMS Name' participant_name.admin_order_field = '{}__study_id'.format(participant_field) def facility(self, obj): participant", "my_urls + urls return urls def smsbank_create_xlsx(self, request, extra_context=None): wb = sms_bank.create_xlsx() response", "quit and save ''' for c in queryset: c.set_status('quit', comment='Status set from bulk", "'display_name', 'preg_status', 'sms_status', 'description', 'facility', 'phone_number', 'due_date', 'language', 'send_day', 'is_validated', 'created') list_display_links =", "diff, 'error': error, **(extra_context or {}), } return TemplateResponse(request, self.smsbank_import_template or [ 'admin/%s/%s/sms_bank_import.html'", "None: return html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description = 'Number' phone_number.admin_order_field = '{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message) class MessageAdmin(admin.ModelAdmin,", "return response def smsbank_import_view(self, request, extra_context=None): opts = self.model._meta app_label = opts.app_label form", "import JsonResponse from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import path, reverse from", "admin.DateFieldListFilter), 'connection__participant__facility', 'translation_status', 'is_related', 'external_success') date_hierarchy = 'created' search_fields = ('participant__study_id', 'participant__display_name', 'connection__identity')", "class PractitionerInline(admin.TabularInline): model = mwbase.Practitioner 
class UserAdmin(UserAdmin): inlines = (PractitionerInline,) # Re-register UserAdmin", "list_filter = ('is_system', 'is_outgoing', 'external_status', ('participant', utils.NullFieldListFilter), ('created', admin.DateFieldListFilter), 'connection__participant__facility', 'translation_status', 'is_related', 'external_success')", "], context) def smsbank_check_view(self, request, extra_context=None): opts = self.model._meta app_label = opts.app_label items", "queryset): ''' mark all mwbase in queryset as quit and save ''' for", "''' for c in queryset: c.set_status('quit', comment='Status set from bulk quit action') mark_quit.short_description", "mwbase.utils import sms_bank import swapper AutomatedMessage = swapper.load_model(\"mwbase\", \"AutomatedMessage\") Participant = swapper.load_model(\"mwbase\", \"Participant\")", "('created', admin.DateFieldListFilter), 'connection__participant__facility', 'translation_status', 'is_related', 'external_success') date_hierarchy = 'created' search_fields = ('participant__study_id', 'participant__display_name',", "NoteInline(admin.TabularInline): model = mwbase.Note extra = 1 def mark_quit(modeladmin, request, queryset): ''' mark", "class ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('identity', 'participant_name', 'facility', 'is_primary') search_fields = ('participant__study_id', 'participant__display_name',", "smsbank_create_xlsx(self, request, extra_context=None): wb = sms_bank.create_xlsx() response = HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] = 'attachment;", "mwbase from mwbase.forms import ImportXLSXForm from mwbase.utils import sms_bank import swapper AutomatedMessage =", "% (app_label, opts.model_name), 'admin/%s/sms_bank_import.html' % app_label, 'admin/sms_bank_import.html' ], context) def smsbank_check_view(self, request, extra_context=None):", "class 
StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'old', 'new', 'type', 'created') search_fields =", "connection is not None: return html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description = 'Number' phone_number.admin_order_field = '{}__connection__identity'.format(participant_field)", "= 'connection__identity' @admin.register(mwbase.PhoneCall) class PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'phone_number', 'outcome', 'is_outgoing',", "'send_day', 'is_validated', 'created') list_display_links = ('study_id', 'display_name') list_filter = ('facility', 'study_group', ('created', admin.DateFieldListFilter),", "self.participant_field).study_id study_id.short_description = 'Study ID' study_id.admin_order_field = '{}__study_id'.format(participant_field) def phone_number(self, obj): connection =", "class PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'phone_number', 'outcome', 'is_outgoing', 'created') date_hierarchy =", "'participant__display_name') @admin.register(mwbase.Note) class NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('participant_name', 'comment', 'created') date_hierarchy = 'created'", "'status') date_hierarchy = 'scheduled' list_filter = ('status', 'visit_type', 'arrived', 'scheduled') search_fields = ('participant__study_id',", "self.admin_site.admin_view(self.smsbank_create_xlsx), name='smsbank_create_xlsx') ] urls = my_urls + urls return urls def smsbank_create_xlsx(self, request,", "if form.is_valid(): file = form.cleaned_data.get(\"file\") (items, duplicates, descriptions, total, errors ) = sms_bank.check_messages(file)", "('last_msg_client', 'last_msg_system', 'created', 'modified') inlines = (ConnectionInline, NoteInline) actions = (mark_quit, revert_status,) class", "\"\" if request.method == 
'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") # try: counts,", "ConnectionInline(admin.TabularInline): model = mwbase.Connection extra = 0 class NoteInline(admin.TabularInline): model = mwbase.Note extra", "'external_success') date_hierarchy = 'created' search_fields = ('participant__study_id', 'participant__display_name', 'connection__identity') readonly_fields = ('created', 'modified')", "return response else: return JsonResponse({'success': False, 'message': 'Form Invalid',}) else: return JsonResponse({'success': False,", "ordering = ('study_id',) search_fields = ('study_id', 'display_name', 'connection__identity', 'anc_num') readonly_fields = ('last_msg_client', 'last_msg_system',", "(mark_quit, revert_status,) class ParticipantAdminMixin(object): participant_field = 'participant' def participant_name(self, obj): participant = getattr(obj,", "in queryset to their previous status ''' for c in queryset: old_status =", "('participant', utils.NullFieldListFilter), ('created', admin.DateFieldListFilter), 'connection__participant__facility', 'translation_status', 'is_related', 'external_success') date_hierarchy = 'created' search_fields =", "list_display_links = ('study_id', 'display_name') list_filter = ('facility', 'study_group', ('created', admin.DateFieldListFilter), 'preg_status', 'is_validated', 'language',", "request.method == 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") (items, duplicates, descriptions, total, errors", "UserAdmin(UserAdmin): inlines = (PractitionerInline,) # Re-register UserAdmin admin.site.unregister(User) admin.site.register(User, UserAdmin) @admin.register(AutomatedMessage) class AutomatedMessageAdmin(admin.ModelAdmin):", "class MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('text', 'participant_name', 'identity', 'is_system', 'is_outgoing', 'is_reply', 'external_status', 'translation_status',", "existing, diff= [], [], [] error = \"\" if request.method == 'POST': if", "= 
mwbase.Note extra = 1 def mark_quit(modeladmin, request, queryset): ''' mark all mwbase", "= '{}__facility'.format(participant_field) def study_id(self, obj): return getattr(obj, self.participant_field).study_id study_id.short_description = 'Study ID' study_id.admin_order_field", "extra_context=None): extra_context = extra_context or {} extra_context['form'] = ImportXLSXForm return super(AutomatedMessageAdmin, self).changelist_view(request, extra_context=extra_context)", "''' mark all mwbase in queryset as quit and save ''' for c", "wb = sms_bank.create_xlsx() response = HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] = 'attachment; filename=\"smsbank.xlsx\"' return response", "'participant__display_name') @admin.register(mwbase.Practitioner) class PractitionerAdmin(admin.ModelAdmin): list_display = ('facility', 'username', 'password_changed') @admin.register(StatusChange) class StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin):", "mwbase import models as mwbase from mwbase.forms import ImportXLSXForm from mwbase.utils import sms_bank", "path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx), name='smsbank_create_xlsx') ] urls = my_urls + urls return urls def smsbank_create_xlsx(self,", "error importing the given file. 
Please try again.\" context = { **self.admin_site.each_context(request), 'module_name':", "Name' participant_name.admin_order_field = '{}__study_id'.format(participant_field) def facility(self, obj): participant = getattr(obj, self.participant_field) if participant", "UserAdmin) @admin.register(AutomatedMessage) class AutomatedMessageAdmin(admin.ModelAdmin): list_display = ('description', 'english') list_filter = ('send_base', 'condition', 'group')", "mwbase.Note extra = 1 def mark_quit(modeladmin, request, queryset): ''' mark all mwbase in", "queryset): ''' set the status for each participant in queryset to their previous", "import ImportXLSXForm from mwbase.utils import sms_bank import swapper AutomatedMessage = swapper.load_model(\"mwbase\", \"AutomatedMessage\") Participant", "file. Please try again.\" context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'opts': opts, 'counts':", "extra_context=None): opts = self.model._meta app_label = opts.app_label items = duplicates = descriptions =", "'created') list_filter = ('is_system', 'is_outgoing', 'external_status', ('participant', utils.NullFieldListFilter), ('created', admin.DateFieldListFilter), 'connection__participant__facility', 'translation_status', 'is_related',", "@admin.register(mwbase.PhoneCall) class PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'phone_number', 'outcome', 'is_outgoing', 'created') date_hierarchy", "obj): return html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description = 'Number' identity.admin_order_field = 'connection__identity' @admin.register(mwbase.PhoneCall) class PhoneCallAdmin(admin.ModelAdmin,", "action') mark_quit.short_description = 'Mark participant as quit' def revert_status(modeladmin, request, queryset): ''' set", "= ('last_msg_client', 'last_msg_system', 'created', 'modified') inlines = (ConnectionInline, 
NoteInline) actions = (mark_quit, revert_status,)", "= \"\" if request.method == 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") # try:", "try again.\" context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'opts': opts, 'counts': counts, 'existing':", "ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'old', 'new', 'type', 'created') search_fields = ('participant__study_id', 'participant__display_name')", "html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description = 'Number' phone_number.admin_order_field = '{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message) class MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display", "('participant__study_id', 'participant__display_name', 'connection__identity') readonly_fields = ('created', 'modified') def identity(self, obj): return html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection))", "= c.statuschange_set.last().old c.set_status(old_status, comment='Status reverted from bulk action') revert_status.short_description = 'Revert to last", "participant is not None: return participant.facility.capitalize() facility.admin_order_field = '{}__facility'.format(participant_field) def study_id(self, obj): return", "'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status', 'call_type', 'arrived', 'scheduled') search_fields", "'existing': existing, 'diff': diff, 'error': error, **(extra_context or {}), } return TemplateResponse(request, self.smsbank_import_template", "[ 'admin/%s/%s/sms_bank_import.html' % (app_label, opts.model_name), 'admin/%s/sms_bank_import.html' % app_label, 'admin/sms_bank_import.html' ], context) def smsbank_check_view(self,", "their previous status ''' for c in queryset: old_status = c.statuschange_set.last().old c.set_status(old_status, comment='Status", 
"an error importing the given file. Please try again.\" context = { **self.admin_site.each_context(request),", "form.is_valid(): file = form.cleaned_data.get(\"file\") # try: counts, existing, diff= sms_bank.import_messages(file) # except Exception", "\"There was an error importing the given file. Please try again.\" context =", "[ path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view), name='smsbank_check_view'), path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'), path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx), name='smsbank_create_xlsx') ] urls =", "'Number' identity.admin_order_field = 'connection__identity' @admin.register(mwbase.PhoneCall) class PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'phone_number',", "previous status ''' for c in queryset: old_status = c.statuschange_set.last().old c.set_status(old_status, comment='Status reverted", "participant = getattr(obj, self.participant_field) if participant is not None: return participant.facility.capitalize() facility.admin_order_field =", "not None: return html.format_html( \"<a href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant)) participant_name.short_description = 'SMS Name' participant_name.admin_order_field =", "model = mwbase.Note extra = 1 def mark_quit(modeladmin, request, queryset): ''' mark all", "extra_context or {} extra_context['form'] = ImportXLSXForm return super(AutomatedMessageAdmin, self).changelist_view(request, extra_context=extra_context) def get_urls(self): urls", "form = ImportXLSXForm(request.POST or None, request.FILES or None) if request.method == 'POST': if", "from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import path, reverse from django.utils import", "('study_id', 'participant_name', 'call_type', 'scheduled', 
'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter =", "from django.urls import path, reverse from django.utils import html from openpyxl.writer.excel import save_virtual_workbook", "c in queryset: old_status = c.statuschange_set.last().old c.set_status(old_status, comment='Status reverted from bulk action') revert_status.short_description", "class ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'call_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status')", "== 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") (items, duplicates, descriptions, total, errors )", "'created', 'modified') inlines = (ConnectionInline, NoteInline) actions = (mark_quit, revert_status,) class ParticipantAdminMixin(object): participant_field", "urls = my_urls + urls return urls def smsbank_create_xlsx(self, request, extra_context=None): wb =", "ImportXLSXForm(request.POST or None, request.FILES or None) counts, existing, diff= [], [], [] error", "def smsbank_create_xlsx(self, request, extra_context=None): wb = sms_bank.create_xlsx() response = HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] =", "= ('text', 'participant_name', 'identity', 'is_system', 'is_outgoing', 'is_reply', 'external_status', 'translation_status', 'created') list_filter = ('is_system',", "counts, existing, diff= [], [], [] error = \"\" if request.method == 'POST':", "self).changelist_view(request, extra_context=extra_context) def get_urls(self): urls = super().get_urls() my_urls = [ path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view), name='smsbank_check_view'),", "(PractitionerInline,) # Re-register UserAdmin admin.site.unregister(User) admin.site.register(User, UserAdmin) @admin.register(AutomatedMessage) class 
AutomatedMessageAdmin(admin.ModelAdmin): list_display = ('description',", "response else: return JsonResponse({'success': False, 'message': 'Form Invalid',}) else: return JsonResponse({'success': False, 'message':", "import User from django.http.response import HttpResponse from django.http import JsonResponse from django.template.response import", "django.contrib.auth.admin import UserAdmin from django.contrib.auth.models import User from django.http.response import HttpResponse from django.http", "'sms_status', 'description', 'facility', 'phone_number', 'due_date', 'language', 'send_day', 'is_validated', 'created') list_display_links = ('study_id', 'display_name')", "\"Participant\") StatusChange = swapper.load_model(\"mwbase\", \"StatusChange\") class ConnectionInline(admin.TabularInline): model = mwbase.Connection extra = 0", "None: return participant.facility.capitalize() facility.admin_order_field = '{}__facility'.format(participant_field) def study_id(self, obj): return getattr(obj, self.participant_field).study_id study_id.short_description", "\"admin/mwbase/automatedmessage/sms_bank_import.html\" def changelist_view(self, request, extra_context=None): extra_context = extra_context or {} extra_context['form'] = ImportXLSXForm", "quit action') mark_quit.short_description = 'Mark participant as quit' def revert_status(modeladmin, request, queryset): '''", "or {}), } return TemplateResponse(request, self.smsbank_import_template or [ 'admin/%s/%s/sms_bank_import.html' % (app_label, opts.model_name), 'admin/%s/sms_bank_import.html'", "each participant in queryset to their previous status ''' for c in queryset:", "'{}__facility'.format(participant_field) def study_id(self, obj): return getattr(obj, self.participant_field).study_id study_id.short_description = 'Study ID' study_id.admin_order_field =", "'anc_num') readonly_fields = ('last_msg_client', 'last_msg_system', 'created', 'modified') inlines = (ConnectionInline, NoteInline) actions =", "list_display = 
('comment', 'participant_name', 'old', 'new', 'type', 'created') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.EventLog)", "getattr(obj, self.participant_field).study_id study_id.short_description = 'Study ID' study_id.admin_order_field = '{}__study_id'.format(participant_field) def phone_number(self, obj): connection", "'is_validated', 'created') list_display_links = ('study_id', 'display_name') list_filter = ('facility', 'study_group', ('created', admin.DateFieldListFilter), 'preg_status',", "existing, diff= sms_bank.import_messages(file) # except Exception as e: # print(e) # error =", "= extra_context or {} extra_context['form'] = ImportXLSXForm return super(AutomatedMessageAdmin, self).changelist_view(request, extra_context=extra_context) def get_urls(self):", "= HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] = 'attachment; filename=\"smsbank.xlsx\"' return response def smsbank_import_view(self, request, extra_context=None):", "User from django.http.response import HttpResponse from django.http import JsonResponse from django.template.response import SimpleTemplateResponse,", "= ('participant_name', 'comment', 'created') date_hierarchy = 'created' @admin.register(mwbase.Connection) class ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display =", "if participant is not None: return participant.facility.capitalize() facility.admin_order_field = '{}__facility'.format(participant_field) def study_id(self, obj):", "extra_context['form'] = ImportXLSXForm return super(AutomatedMessageAdmin, self).changelist_view(request, extra_context=extra_context) def get_urls(self): urls = super().get_urls() my_urls", "participant as quit' def revert_status(modeladmin, request, queryset): ''' set the status for each", "return TemplateResponse(request, self.smsbank_import_template or [ 
'admin/%s/%s/sms_bank_import.html' % (app_label, opts.model_name), 'admin/%s/sms_bank_import.html' % app_label, 'admin/sms_bank_import.html'", "smsbank_check_view(self, request, extra_context=None): opts = self.model._meta app_label = opts.app_label items = duplicates =", "@admin.register(AutomatedMessage) class AutomatedMessageAdmin(admin.ModelAdmin): list_display = ('description', 'english') list_filter = ('send_base', 'condition', 'group') change_list_template", "print(e) # error = \"There was an error importing the given file. Please", "= ('study_id', 'display_name', 'preg_status', 'sms_status', 'description', 'facility', 'phone_number', 'due_date', 'language', 'send_day', 'is_validated', 'created')", "'preg_status', 'is_validated', 'language', 'send_day') ordering = ('study_id',) search_fields = ('study_id', 'display_name', 'connection__identity', 'anc_num')", "mwbase.Connection extra = 0 class NoteInline(admin.TabularInline): model = mwbase.Note extra = 1 def", "'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status', 'call_type', 'arrived', 'scheduled')", "self.smsbank_import_template or [ 'admin/%s/%s/sms_bank_import.html' % (app_label, opts.model_name), 'admin/%s/sms_bank_import.html' % app_label, 'admin/sms_bank_import.html' ], context)", "'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Practitioner) class PractitionerAdmin(admin.ModelAdmin): list_display = ('facility', 'username', 'password_changed')", "str(opts.verbose_name_plural), 'opts': opts, 'counts': counts, 'existing': existing, 'diff': diff, 'error': error, **(extra_context or", "= ('user', 'event', 'created') class PractitionerInline(admin.TabularInline): model = mwbase.Practitioner class UserAdmin(UserAdmin): inlines =", "'call_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Practitioner) class 
PractitionerAdmin(admin.ModelAdmin): list_display = ('facility',", "AutomatedMessage = swapper.load_model(\"mwbase\", \"AutomatedMessage\") Participant = swapper.load_model(\"mwbase\", \"Participant\") StatusChange = swapper.load_model(\"mwbase\", \"StatusChange\") class", "participant_name(self, obj): participant = getattr(obj, self.participant_field) if participant is not None: return html.format_html(", "'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") (items, duplicates, descriptions, total, errors ) =", "# error = \"There was an error importing the given file. Please try", "class ParticipantAdminMixin(object): participant_field = 'participant' def participant_name(self, obj): participant = getattr(obj, self.participant_field) if", "href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description = 'Number' identity.admin_order_field = 'connection__identity' @admin.register(mwbase.PhoneCall) class PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display =", "return participant.facility.capitalize() facility.admin_order_field = '{}__facility'.format(participant_field) def study_id(self, obj): return getattr(obj, self.participant_field).study_id study_id.short_description =", "'is_related', 'external_success') date_hierarchy = 'created' search_fields = ('participant__study_id', 'participant__display_name', 'connection__identity') readonly_fields = ('created',", "c.set_status('quit', comment='Status set from bulk quit action') mark_quit.short_description = 'Mark participant as quit'", "is not None: return html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description = 'Number' phone_number.admin_order_field = '{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message)", "= ('outcome', 'is_outgoing') readonly_fields = ('created', 'modified') search_fields = ('participant__study_id', 'participant__display_name') 
@admin.register(mwbase.Note) class", "= ('status', 'call_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Practitioner) class PractitionerAdmin(admin.ModelAdmin): list_display", "= [ path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view), name='smsbank_check_view'), path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'), path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx), name='smsbank_create_xlsx') ] urls", "('study_id',) search_fields = ('study_id', 'display_name', 'connection__identity', 'anc_num') readonly_fields = ('last_msg_client', 'last_msg_system', 'created', 'modified')", "def participant_name(self, obj): participant = getattr(obj, self.participant_field) if participant is not None: return", "'{}__study_id'.format(participant_field) def facility(self, obj): participant = getattr(obj, self.participant_field) if participant is not None:", "from bulk action') revert_status.short_description = 'Revert to last status' @admin.register(Participant) class ParticipantAdmin(admin.ModelAdmin): list_display", "obj): return getattr(obj, self.participant_field).study_id study_id.short_description = 'Study ID' study_id.admin_order_field = '{}__study_id'.format(participant_field) def phone_number(self,", "admin.site.unregister(User) admin.site.register(User, UserAdmin) @admin.register(AutomatedMessage) class AutomatedMessageAdmin(admin.ModelAdmin): list_display = ('description', 'english') list_filter = ('send_base',", "errors ) = sms_bank.check_messages(file) url = reverse('admin:smsbank_import_view') response = JsonResponse({ 'url': url, 'duplicates':", "participant in queryset to their previous status ''' for c in queryset: old_status", "'participant__display_name', 'connection__identity') readonly_fields = ('created', 'modified') def identity(self, obj): return 
html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description", "return html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description = 'Number' phone_number.admin_order_field = '{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message) class MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin):", "return html.format_html( \"<a href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant)) participant_name.short_description = 'SMS Name' participant_name.admin_order_field = '{}__study_id'.format(participant_field) def", "extra_context=extra_context) def get_urls(self): urls = super().get_urls() my_urls = [ path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view), name='smsbank_check_view'), path(r'smsbank_import_view/',", "('text', 'participant_name', 'identity', 'is_system', 'is_outgoing', 'is_reply', 'external_status', 'translation_status', 'created') list_filter = ('is_system', 'is_outgoing',", "(app_label, opts.model_name), 'admin/%s/sms_bank_import.html' % app_label, 'admin/sms_bank_import.html' ], context) def smsbank_check_view(self, request, extra_context=None): opts", "getattr(obj, self.participant_field) if participant is not None: return participant.facility.capitalize() facility.admin_order_field = '{}__facility'.format(participant_field) def", "inlines = (ConnectionInline, NoteInline) actions = (mark_quit, revert_status,) class ParticipantAdminMixin(object): participant_field = 'participant'", "'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status', 'call_type', 'arrived', 'scheduled') search_fields =", "readonly_fields = ('created', 'modified') def identity(self, obj): return html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description = 'Number'", "= ('facility', 'username', 
'password_changed') @admin.register(StatusChange) class StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'old',", "diff= sms_bank.import_messages(file) # except Exception as e: # print(e) # error = \"There", "changelist_view(self, request, extra_context=None): extra_context = extra_context or {} extra_context['form'] = ImportXLSXForm return super(AutomatedMessageAdmin,", "reverted from bulk action') revert_status.short_description = 'Revert to last status' @admin.register(Participant) class ParticipantAdmin(admin.ModelAdmin):", "not None: return html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description = 'Number' phone_number.admin_order_field = '{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message) class", "is not None: return html.format_html( \"<a href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant)) participant_name.short_description = 'SMS Name' participant_name.admin_order_field", "from django.utils import html from openpyxl.writer.excel import save_virtual_workbook import utils.admin as utils #", "('comment', 'participant_name', 'phone_number', 'outcome', 'is_outgoing', 'created') date_hierarchy = 'created' list_filter = ('outcome', 'is_outgoing')", "= ('participant__study_id', 'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall) class ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'call_type', 'scheduled',", "@admin.register(Participant) class ParticipantAdmin(admin.ModelAdmin): list_display = ('study_id', 'display_name', 'preg_status', 'sms_status', 'description', 'facility', 'phone_number', 'due_date',", "admin.site.register(User, UserAdmin) @admin.register(AutomatedMessage) class AutomatedMessageAdmin(admin.ModelAdmin): list_display = ('description', 'english') list_filter = ('send_base', 
'condition',", "ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('identity', 'participant_name', 'facility', 'is_primary') search_fields = ('participant__study_id', 'participant__display_name', 'identity')", "('facility', 'username', 'password_changed') @admin.register(StatusChange) class StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'old', 'new',", "= 'Number' identity.admin_order_field = 'connection__identity' @admin.register(mwbase.PhoneCall) class PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name',", "= ('status', 'visit_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall) class ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin):", "'created') class PractitionerInline(admin.TabularInline): model = mwbase.Practitioner class UserAdmin(UserAdmin): inlines = (PractitionerInline,) # Re-register", "identity.admin_order_field = 'connection__identity' @admin.register(mwbase.PhoneCall) class PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'phone_number', 'outcome',", "search_fields = ('participant__study_id', 'participant__display_name', 'identity') @admin.register(mwbase.Visit) class VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name',", "= 'created' @admin.register(mwbase.Connection) class ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('identity', 'participant_name', 'facility', 'is_primary') search_fields", "''' set the status for each participant in queryset to their previous status", "'{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message) class MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('text', 'participant_name', 'identity', 'is_system', 
'is_outgoing', 'is_reply',", "app_label = opts.app_label items = duplicates = descriptions = total = None form", "None, request.FILES or None) if request.method == 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\")", "list_display = ('identity', 'participant_name', 'facility', 'is_primary') search_fields = ('participant__study_id', 'participant__display_name', 'identity') @admin.register(mwbase.Visit) class", "@admin.register(mwbase.Practitioner) class PractitionerAdmin(admin.ModelAdmin): list_display = ('facility', 'username', 'password_changed') @admin.register(StatusChange) class StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display", "change_list_template = \"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template = \"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template = \"admin/mwbase/automatedmessage/sms_bank_import.html\" def changelist_view(self, request, extra_context=None):", "= \"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template = \"admin/mwbase/automatedmessage/sms_bank_import.html\" def changelist_view(self, request, extra_context=None): extra_context = extra_context or", "@admin.register(mwbase.Connection) class ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('identity', 'participant_name', 'facility', 'is_primary') search_fields = ('participant__study_id',", "sms_bank.check_messages(file) url = reverse('admin:smsbank_import_view') response = JsonResponse({ 'url': url, 'duplicates': duplicates, 'errors': errors,", "search_fields = ('study_id', 'display_name', 'connection__identity', 'anc_num') readonly_fields = ('last_msg_client', 'last_msg_system', 'created', 'modified') inlines", "extra = 0 class NoteInline(admin.TabularInline): model = mwbase.Note extra = 1 def mark_quit(modeladmin,", "'{}__study_id'.format(participant_field) def phone_number(self, obj): connection = getattr(obj, self.participant_field).connection() if 
connection is not None:", "= 1 def mark_quit(modeladmin, request, queryset): ''' mark all mwbase in queryset as", "Local Imports from mwbase import models as mwbase from mwbase.forms import ImportXLSXForm from", "get_urls(self): urls = super().get_urls() my_urls = [ path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view), name='smsbank_check_view'), path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'),", "'external_status', ('participant', utils.NullFieldListFilter), ('created', admin.DateFieldListFilter), 'connection__participant__facility', 'translation_status', 'is_related', 'external_success') date_hierarchy = 'created' search_fields", "list_filter = ('facility', 'study_group', ('created', admin.DateFieldListFilter), 'preg_status', 'is_validated', 'language', 'send_day') ordering = ('study_id',)", "= (PractitionerInline,) # Re-register UserAdmin admin.site.unregister(User) admin.site.register(User, UserAdmin) @admin.register(AutomatedMessage) class AutomatedMessageAdmin(admin.ModelAdmin): list_display =", "'opts': opts, 'counts': counts, 'existing': existing, 'diff': diff, 'error': error, **(extra_context or {}),", "('participant__study_id', 'participant__display_name') @admin.register(mwbase.Note) class NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('participant_name', 'comment', 'created') date_hierarchy =", "ImportXLSXForm from mwbase.utils import sms_bank import swapper AutomatedMessage = swapper.load_model(\"mwbase\", \"AutomatedMessage\") Participant =", "errors, 'total': total, 'success': True, }) return response else: return JsonResponse({'success': False, 'message':", "= ('comment', 'participant_name', 'phone_number', 'outcome', 'is_outgoing', 'created') date_hierarchy = 'created' list_filter = ('outcome',", "request.FILES or None) if request.method == 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") (items,", "'old', 'new', 
'type', 'created') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.EventLog) class EventLogAdmin(admin.ModelAdmin): list_display =", "'facility', 'is_primary') search_fields = ('participant__study_id', 'participant__display_name', 'identity') @admin.register(mwbase.Visit) class VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display =", "list_display = ('study_id', 'participant_name', 'visit_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled'", "= swapper.load_model(\"mwbase\", \"AutomatedMessage\") Participant = swapper.load_model(\"mwbase\", \"Participant\") StatusChange = swapper.load_model(\"mwbase\", \"StatusChange\") class ConnectionInline(admin.TabularInline):", "[], [] error = \"\" if request.method == 'POST': if form.is_valid(): file =", "'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") # try: counts, existing, diff= sms_bank.import_messages(file) #", "class PractitionerAdmin(admin.ModelAdmin): list_display = ('facility', 'username', 'password_changed') @admin.register(StatusChange) class StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display =", "date_hierarchy = 'created' @admin.register(mwbase.Connection) class ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('identity', 'participant_name', 'facility', 'is_primary')", "TemplateResponse from django.urls import path, reverse from django.utils import html from openpyxl.writer.excel import", "for each participant in queryset to their previous status ''' for c in", "ParticipantAdminMixin): list_display = ('participant_name', 'comment', 'created') date_hierarchy = 'created' @admin.register(mwbase.Connection) class ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin):", "app_label = opts.app_label form = ImportXLSXForm(request.POST or None, request.FILES or None) counts, existing,", "'is_outgoing', 'is_reply', 'external_status', 
'translation_status', 'created') list_filter = ('is_system', 'is_outgoing', 'external_status', ('participant', utils.NullFieldListFilter), ('created',", "participant is not None: return html.format_html( \"<a href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant)) participant_name.short_description = 'SMS Name'", "'comment', 'created') date_hierarchy = 'created' @admin.register(mwbase.Connection) class ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('identity', 'participant_name',", "= ('created', 'modified') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Note) class NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display =", "'identity') @admin.register(mwbase.Visit) class VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'visit_type', 'scheduled', 'notification_last_seen', 'notify_count',", "'created' list_filter = ('outcome', 'is_outgoing') readonly_fields = ('created', 'modified') search_fields = ('participant__study_id', 'participant__display_name')", ") = sms_bank.check_messages(file) url = reverse('admin:smsbank_import_view') response = JsonResponse({ 'url': url, 'duplicates': duplicates,", "'created') date_hierarchy = 'created' list_filter = ('outcome', 'is_outgoing') readonly_fields = ('created', 'modified') search_fields", "queryset to their previous status ''' for c in queryset: old_status = c.statuschange_set.last().old", "from bulk quit action') mark_quit.short_description = 'Mark participant as quit' def revert_status(modeladmin, request,", "'language', 'send_day', 'is_validated', 'created') list_display_links = ('study_id', 'display_name') list_filter = ('facility', 'study_group', ('created',", "list_display = ('facility', 'username', 'password_changed') @admin.register(StatusChange) class StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 
'participant_name',", "= form.cleaned_data.get(\"file\") # try: counts, existing, diff= sms_bank.import_messages(file) # except Exception as e:", "'phone_number', 'due_date', 'language', 'send_day', 'is_validated', 'created') list_display_links = ('study_id', 'display_name') list_filter = ('facility',", "'scheduled' list_filter = ('status', 'visit_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall) class", "'description', 'facility', 'phone_number', 'due_date', 'language', 'send_day', 'is_validated', 'created') list_display_links = ('study_id', 'display_name') list_filter", "html from openpyxl.writer.excel import save_virtual_workbook import utils.admin as utils # Local Imports from", "'participant_name', 'facility', 'is_primary') search_fields = ('participant__study_id', 'participant__display_name', 'identity') @admin.register(mwbase.Visit) class VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display", "html.format_html( \"<a href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant)) participant_name.short_description = 'SMS Name' participant_name.admin_order_field = '{}__study_id'.format(participant_field) def facility(self,", "list_filter = ('outcome', 'is_outgoing') readonly_fields = ('created', 'modified') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Note)", "'is_outgoing', 'created') date_hierarchy = 'created' list_filter = ('outcome', 'is_outgoing') readonly_fields = ('created', 'modified')", "from mwbase.forms import ImportXLSXForm from mwbase.utils import sms_bank import swapper AutomatedMessage = swapper.load_model(\"mwbase\",", "super(AutomatedMessageAdmin, self).changelist_view(request, extra_context=extra_context) def get_urls(self): urls = super().get_urls() my_urls = [ path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view),", "= ('created', 
'modified') def identity(self, obj): return html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description = 'Number' identity.admin_order_field", "revert_status,) class ParticipantAdminMixin(object): participant_field = 'participant' def participant_name(self, obj): participant = getattr(obj, self.participant_field)", "('created', 'modified') def identity(self, obj): return html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description = 'Number' identity.admin_order_field =", "save ''' for c in queryset: c.set_status('quit', comment='Status set from bulk quit action')", "'send_day') ordering = ('study_id',) search_fields = ('study_id', 'display_name', 'connection__identity', 'anc_num') readonly_fields = ('last_msg_client',", "'admin/%s/%s/sms_bank_import.html' % (app_label, opts.model_name), 'admin/%s/sms_bank_import.html' % app_label, 'admin/sms_bank_import.html' ], context) def smsbank_check_view(self, request,", "opts = self.model._meta app_label = opts.app_label form = ImportXLSXForm(request.POST or None, request.FILES or", "mark_quit.short_description = 'Mark participant as quit' def revert_status(modeladmin, request, queryset): ''' set the", "given file. 
Please try again.\" context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'opts': opts,", "error, **(extra_context or {}), } return TemplateResponse(request, self.smsbank_import_template or [ 'admin/%s/%s/sms_bank_import.html' % (app_label,", "None, request.FILES or None) counts, existing, diff= [], [], [] error = \"\"", "= mwbase.Connection extra = 0 class NoteInline(admin.TabularInline): model = mwbase.Note extra = 1", "'study_group', ('created', admin.DateFieldListFilter), 'preg_status', 'is_validated', 'language', 'send_day') ordering = ('study_id',) search_fields = ('study_id',", "request.method == 'POST': if form.is_valid(): file = form.cleaned_data.get(\"file\") # try: counts, existing, diff=", "'success': True, }) return response else: return JsonResponse({'success': False, 'message': 'Form Invalid',}) else:", "connection = getattr(obj, self.participant_field).connection() if connection is not None: return html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description", "url, 'duplicates': duplicates, 'errors': errors, 'total': total, 'success': True, }) return response else:", "('identity', 'participant_name', 'facility', 'is_primary') search_fields = ('participant__study_id', 'participant__display_name', 'identity') @admin.register(mwbase.Visit) class VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin):", "return getattr(obj, self.participant_field).study_id study_id.short_description = 'Study ID' study_id.admin_order_field = '{}__study_id'.format(participant_field) def phone_number(self, obj):", "date_hierarchy = 'scheduled' list_filter = ('status', 'visit_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name')", "reverse('admin:smsbank_import_view') response = JsonResponse({ 'url': url, 'duplicates': duplicates, 'errors': errors, 'total': total, 'success':", "ParticipantAdminMixin): list_display = 
('comment', 'participant_name', 'phone_number', 'outcome', 'is_outgoing', 'created') date_hierarchy = 'created' list_filter", "'status') date_hierarchy = 'scheduled' list_filter = ('status', 'call_type', 'arrived', 'scheduled') search_fields = ('participant__study_id',", "set the status for each participant in queryset to their previous status '''", "if connection is not None: return html.format_html(\"<a href='../connection/{0.pk}'>{0.identity}</a>\".format(connection)) phone_number.short_description = 'Number' phone_number.admin_order_field =", "(items, duplicates, descriptions, total, errors ) = sms_bank.check_messages(file) url = reverse('admin:smsbank_import_view') response =", "'english') list_filter = ('send_base', 'condition', 'group') change_list_template = \"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template = \"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template", "descriptions, total, errors ) = sms_bank.check_messages(file) url = reverse('admin:smsbank_import_view') response = JsonResponse({ 'url':", "**(extra_context or {}), } return TemplateResponse(request, self.smsbank_import_template or [ 'admin/%s/%s/sms_bank_import.html' % (app_label, opts.model_name),", "{}), } return TemplateResponse(request, self.smsbank_import_template or [ 'admin/%s/%s/sms_bank_import.html' % (app_label, opts.model_name), 'admin/%s/sms_bank_import.html' %", "request, extra_context=None): extra_context = extra_context or {} extra_context['form'] = ImportXLSXForm return super(AutomatedMessageAdmin, self).changelist_view(request,", "\"<a href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant)) participant_name.short_description = 'SMS Name' participant_name.admin_order_field = '{}__study_id'.format(participant_field) def facility(self, obj):", "('participant__study_id', 'participant__display_name') @admin.register(mwbase.EventLog) class EventLogAdmin(admin.ModelAdmin): list_display = ('user', 'event', 
'created') class PractitionerInline(admin.TabularInline): model", "= sms_bank.create_xlsx() response = HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] = 'attachment; filename=\"smsbank.xlsx\"' return response def", "import SimpleTemplateResponse, TemplateResponse from django.urls import path, reverse from django.utils import html from", "= mwbase.Practitioner class UserAdmin(UserAdmin): inlines = (PractitionerInline,) # Re-register UserAdmin admin.site.unregister(User) admin.site.register(User, UserAdmin)", "'last_msg_system', 'created', 'modified') inlines = (ConnectionInline, NoteInline) actions = (mark_quit, revert_status,) class ParticipantAdminMixin(object):", "'is_primary') search_fields = ('participant__study_id', 'participant__display_name', 'identity') @admin.register(mwbase.Visit) class VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id',", "StatusChangeAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'old', 'new', 'type', 'created') search_fields = ('participant__study_id',", "def smsbank_check_view(self, request, extra_context=None): opts = self.model._meta app_label = opts.app_label items = duplicates", "'participant__display_name', 'identity') @admin.register(mwbase.Visit) class VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'visit_type', 'scheduled', 'notification_last_seen',", "participant = getattr(obj, self.participant_field) if participant is not None: return html.format_html( \"<a href='../participant/{0.pk}'>({0.study_id})", "'call_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status', 'call_type',", "'participant_name', 'identity', 'is_system', 'is_outgoing', 'is_reply', 'external_status', 'translation_status', 'created') list_filter 
= ('is_system', 'is_outgoing', 'external_status',", "= ImportXLSXForm return super(AutomatedMessageAdmin, self).changelist_view(request, extra_context=extra_context) def get_urls(self): urls = super().get_urls() my_urls =", "ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'visit_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy =", "the status for each participant in queryset to their previous status ''' for", "opts.app_label form = ImportXLSXForm(request.POST or None, request.FILES or None) counts, existing, diff= [],", "import models as mwbase from mwbase.forms import ImportXLSXForm from mwbase.utils import sms_bank import", "content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] = 'attachment; filename=\"smsbank.xlsx\"' return response def smsbank_import_view(self, request, extra_context=None): opts =", "try: counts, existing, diff= sms_bank.import_messages(file) # except Exception as e: # print(e) #", "set from bulk quit action') mark_quit.short_description = 'Mark participant as quit' def revert_status(modeladmin,", "PractitionerInline(admin.TabularInline): model = mwbase.Practitioner class UserAdmin(UserAdmin): inlines = (PractitionerInline,) # Re-register UserAdmin admin.site.unregister(User)", "from openpyxl.writer.excel import save_virtual_workbook import utils.admin as utils # Local Imports from mwbase", "the given file. 
Please try again.\" context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'opts':", "} return TemplateResponse(request, self.smsbank_import_template or [ 'admin/%s/%s/sms_bank_import.html' % (app_label, opts.model_name), 'admin/%s/sms_bank_import.html' % app_label,", "= swapper.load_model(\"mwbase\", \"Participant\") StatusChange = swapper.load_model(\"mwbase\", \"StatusChange\") class ConnectionInline(admin.TabularInline): model = mwbase.Connection extra", "'connection__identity') readonly_fields = ('created', 'modified') def identity(self, obj): return html.format_html(\"<a href='./?q={0.identity}'>{0.identity}</a>\".format(obj.connection)) identity.short_description =", "queryset as quit and save ''' for c in queryset: c.set_status('quit', comment='Status set", "participant.facility.capitalize() facility.admin_order_field = '{}__facility'.format(participant_field) def study_id(self, obj): return getattr(obj, self.participant_field).study_id study_id.short_description = 'Study", "as quit' def revert_status(modeladmin, request, queryset): ''' set the status for each participant", "readonly_fields = ('last_msg_client', 'last_msg_system', 'created', 'modified') inlines = (ConnectionInline, NoteInline) actions = (mark_quit,", "('comment', 'participant_name', 'old', 'new', 'type', 'created') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.EventLog) class EventLogAdmin(admin.ModelAdmin):", "django.http import JsonResponse from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import path, reverse", "study_id.admin_order_field = '{}__study_id'.format(participant_field) def phone_number(self, obj): connection = getattr(obj, self.participant_field).connection() if connection is", "def get_urls(self): urls = super().get_urls() my_urls = [ path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view), 
name='smsbank_check_view'), path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view),", "'is_outgoing') readonly_fields = ('created', 'modified') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Note) class NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin):", "Please try again.\" context = { **self.admin_site.each_context(request), 'module_name': str(opts.verbose_name_plural), 'opts': opts, 'counts': counts,", "# print(e) # error = \"There was an error importing the given file.", "class ParticipantAdmin(admin.ModelAdmin): list_display = ('study_id', 'display_name', 'preg_status', 'sms_status', 'description', 'facility', 'phone_number', 'due_date', 'language',", "list_display = ('comment', 'participant_name', 'phone_number', 'outcome', 'is_outgoing', 'created') date_hierarchy = 'created' list_filter =", "class NoteInline(admin.TabularInline): model = mwbase.Note extra = 1 def mark_quit(modeladmin, request, queryset): '''", "'outcome', 'is_outgoing', 'created') date_hierarchy = 'created' list_filter = ('outcome', 'is_outgoing') readonly_fields = ('created',", "in queryset: c.set_status('quit', comment='Status set from bulk quit action') mark_quit.short_description = 'Mark participant", "django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import path, reverse from django.utils import html", "obj): participant = getattr(obj, self.participant_field) if participant is not None: return participant.facility.capitalize() facility.admin_order_field", "MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('text', 'participant_name', 'identity', 'is_system', 'is_outgoing', 'is_reply', 'external_status', 'translation_status', 'created')", "'translation_status', 'created') list_filter = ('is_system', 'is_outgoing', 'external_status', ('participant', utils.NullFieldListFilter), ('created', admin.DateFieldListFilter), 'connection__participant__facility', 
'translation_status',", "= \"There was an error importing the given file. Please try again.\" context", "None form = ImportXLSXForm(request.POST or None, request.FILES or None) if request.method == 'POST':", "{} extra_context['form'] = ImportXLSXForm return super(AutomatedMessageAdmin, self).changelist_view(request, extra_context=extra_context) def get_urls(self): urls = super().get_urls()", "def revert_status(modeladmin, request, queryset): ''' set the status for each participant in queryset", "'connection__identity' @admin.register(mwbase.PhoneCall) class PhoneCallAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('comment', 'participant_name', 'phone_number', 'outcome', 'is_outgoing', 'created')", "Participant = swapper.load_model(\"mwbase\", \"Participant\") StatusChange = swapper.load_model(\"mwbase\", \"StatusChange\") class ConnectionInline(admin.TabularInline): model = mwbase.Connection", "'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall) class ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name',", "mark all mwbase in queryset as quit and save ''' for c in", "utils.NullFieldListFilter), ('created', admin.DateFieldListFilter), 'connection__participant__facility', 'translation_status', 'is_related', 'external_success') date_hierarchy = 'created' search_fields = ('participant__study_id',", "readonly_fields = ('created', 'modified') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Note) class NoteAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display", "or None, request.FILES or None) counts, existing, diff= [], [], [] error =", "path, reverse from django.utils import html from openpyxl.writer.excel import save_virtual_workbook import utils.admin as", "opts = self.model._meta app_label = opts.app_label items = duplicates = descriptions = total", "import swapper 
AutomatedMessage = swapper.load_model(\"mwbase\", \"AutomatedMessage\") Participant = swapper.load_model(\"mwbase\", \"Participant\") StatusChange = swapper.load_model(\"mwbase\",", "swapper AutomatedMessage = swapper.load_model(\"mwbase\", \"AutomatedMessage\") Participant = swapper.load_model(\"mwbase\", \"Participant\") StatusChange = swapper.load_model(\"mwbase\", \"StatusChange\")", "('status', 'call_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Practitioner) class PractitionerAdmin(admin.ModelAdmin): list_display =", "as quit and save ''' for c in queryset: c.set_status('quit', comment='Status set from", "'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall) class ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id',", "TemplateResponse(request, self.smsbank_import_template or [ 'admin/%s/%s/sms_bank_import.html' % (app_label, opts.model_name), 'admin/%s/sms_bank_import.html' % app_label, 'admin/sms_bank_import.html' ],", "= ImportXLSXForm(request.POST or None, request.FILES or None) if request.method == 'POST': if form.is_valid():", "None: return html.format_html( \"<a href='../participant/{0.pk}'>({0.study_id}) {0.display_name}</a>\".format(participant)) participant_name.short_description = 'SMS Name' participant_name.admin_order_field = '{}__study_id'.format(participant_field)", "ParticipantAdminMixin): list_display = ('text', 'participant_name', 'identity', 'is_system', 'is_outgoing', 'is_reply', 'external_status', 'translation_status', 'created') list_filter", "list_display = ('study_id', 'display_name', 'preg_status', 'sms_status', 'description', 'facility', 'phone_number', 'due_date', 'language', 'send_day', 'is_validated',", "'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.Practitioner) 
class PractitionerAdmin(admin.ModelAdmin): list_display = ('facility', 'username',", "sms_bank.create_xlsx() response = HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') response['Content-Disposition'] = 'attachment; filename=\"smsbank.xlsx\"' return response def smsbank_import_view(self,", "last status' @admin.register(Participant) class ParticipantAdmin(admin.ModelAdmin): list_display = ('study_id', 'display_name', 'preg_status', 'sms_status', 'description', 'facility',", "list_display = ('user', 'event', 'created') class PractitionerInline(admin.TabularInline): model = mwbase.Practitioner class UserAdmin(UserAdmin): inlines", "None) counts, existing, diff= [], [], [] error = \"\" if request.method ==", "= ('participant__study_id', 'participant__display_name', 'connection__identity') readonly_fields = ('created', 'modified') def identity(self, obj): return html.format_html(\"<a", "('participant__study_id', 'participant__display_name', 'identity') @admin.register(mwbase.Visit) class VisitAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'visit_type', 'scheduled',", "ParticipantAdminMixin): list_display = ('identity', 'participant_name', 'facility', 'is_primary') search_fields = ('participant__study_id', 'participant__display_name', 'identity') @admin.register(mwbase.Visit)", "'phone_number', 'outcome', 'is_outgoing', 'created') date_hierarchy = 'created' list_filter = ('outcome', 'is_outgoing') readonly_fields =", "'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall) class ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'call_type', 'scheduled', 'notification_last_seen', 'notify_count',", "ParticipantAdmin(admin.ModelAdmin): list_display = ('study_id', 'display_name', 'preg_status', 'sms_status', 'description', 'facility', 'phone_number', 'due_date', 'language', 
'send_day',", "smsbank_check_template = \"admin/mwbase/automatedmessage/sms_bank_check.html\" smsbank_import_template = \"admin/mwbase/automatedmessage/sms_bank_import.html\" def changelist_view(self, request, extra_context=None): extra_context = extra_context", "= '{}__connection__identity'.format(participant_field) @admin.register(mwbase.Message) class MessageAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('text', 'participant_name', 'identity', 'is_system', 'is_outgoing',", "% app_label, 'admin/sms_bank_import.html' ], context) def smsbank_check_view(self, request, extra_context=None): opts = self.model._meta app_label", "'external_status', 'translation_status', 'created') list_filter = ('is_system', 'is_outgoing', 'external_status', ('participant', utils.NullFieldListFilter), ('created', admin.DateFieldListFilter), 'connection__participant__facility',", "opts.app_label items = duplicates = descriptions = total = None form = ImportXLSXForm(request.POST", "= opts.app_label form = ImportXLSXForm(request.POST or None, request.FILES or None) counts, existing, diff=", "list_filter = ('status', 'visit_type', 'arrived', 'scheduled') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall) class ScheduledPhoneCall(admin.ModelAdmin,", "date_hierarchy = 'created' search_fields = ('participant__study_id', 'participant__display_name', 'connection__identity') readonly_fields = ('created', 'modified') def", "my_urls = [ path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view), name='smsbank_check_view'), path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'), path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx), name='smsbank_create_xlsx') ]", "urls = super().get_urls() my_urls = [ path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view), name='smsbank_check_view'), 
path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'), path(r'smsbank_create_xlsx/',", "form.cleaned_data.get(\"file\") (items, duplicates, descriptions, total, errors ) = sms_bank.check_messages(file) url = reverse('admin:smsbank_import_view') response", "list_display = ('participant_name', 'comment', 'created') date_hierarchy = 'created' @admin.register(mwbase.Connection) class ConnectionAdmin(admin.ModelAdmin, ParticipantAdminMixin): list_display", "return super(AutomatedMessageAdmin, self).changelist_view(request, extra_context=extra_context) def get_urls(self): urls = super().get_urls() my_urls = [ path(r'smsbank_check_view/',", "= super().get_urls() my_urls = [ path(r'smsbank_check_view/', self.admin_site.admin_view(self.smsbank_check_view), name='smsbank_check_view'), path(r'smsbank_import_view/', self.admin_site.admin_view(self.smsbank_import_view), name='smsbank_import_view'), path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx),", "HttpResponse from django.http import JsonResponse from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import", "self.participant_field) if participant is not None: return participant.facility.capitalize() facility.admin_order_field = '{}__facility'.format(participant_field) def study_id(self,", "= 'Mark participant as quit' def revert_status(modeladmin, request, queryset): ''' set the status", "'language', 'send_day') ordering = ('study_id',) search_fields = ('study_id', 'display_name', 'connection__identity', 'anc_num') readonly_fields =", "('description', 'english') list_filter = ('send_base', 'condition', 'group') change_list_template = \"admin/mwbase/automatedmessage/change_list.html\" smsbank_check_template = \"admin/mwbase/automatedmessage/sms_bank_check.html\"", "name='smsbank_import_view'), path(r'smsbank_create_xlsx/', self.admin_site.admin_view(self.smsbank_create_xlsx), 
name='smsbank_create_xlsx') ] urls = my_urls + urls return urls def", "'participant_name', 'call_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status',", "'created' search_fields = ('participant__study_id', 'participant__display_name', 'connection__identity') readonly_fields = ('created', 'modified') def identity(self, obj):", "total = None form = ImportXLSXForm(request.POST or None, request.FILES or None) if request.method", "opts.model_name), 'admin/%s/sms_bank_import.html' % app_label, 'admin/sms_bank_import.html' ], context) def smsbank_check_view(self, request, extra_context=None): opts =", "all mwbase in queryset as quit and save ''' for c in queryset:", "= (mark_quit, revert_status,) class ParticipantAdminMixin(object): participant_field = 'participant' def participant_name(self, obj): participant =", "'created') search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.EventLog) class EventLogAdmin(admin.ModelAdmin): list_display = ('user', 'event', 'created')", "'duplicates': duplicates, 'errors': errors, 'total': total, 'success': True, }) return response else: return", "'error': error, **(extra_context or {}), } return TemplateResponse(request, self.smsbank_import_template or [ 'admin/%s/%s/sms_bank_import.html' %", "or None, request.FILES or None) if request.method == 'POST': if form.is_valid(): file =", "mark_quit(modeladmin, request, queryset): ''' mark all mwbase in queryset as quit and save", "from django.http import JsonResponse from django.template.response import SimpleTemplateResponse, TemplateResponse from django.urls import path,", "mwbase in queryset as quit and save ''' for c in queryset: c.set_status('quit',", "# Re-register UserAdmin admin.site.unregister(User) admin.site.register(User, UserAdmin) @admin.register(AutomatedMessage) class AutomatedMessageAdmin(admin.ModelAdmin): list_display = ('description', 'english')", 
"search_fields = ('participant__study_id', 'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall) class ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'call_type',", "('participant__study_id', 'participant__display_name') @admin.register(mwbase.ScheduledPhoneCall) class ScheduledPhoneCall(admin.ModelAdmin, ParticipantAdminMixin): list_display = ('study_id', 'participant_name', 'call_type', 'scheduled', 'notification_last_seen',", "extra_context=None): opts = self.model._meta app_label = opts.app_label form = ImportXLSXForm(request.POST or None, request.FILES", "[], [], [] error = \"\" if request.method == 'POST': if form.is_valid(): file", "'participant_name', 'visit_type', 'scheduled', 'notification_last_seen', 'notify_count', 'arrived', 'status') date_hierarchy = 'scheduled' list_filter = ('status',", "import path, reverse from django.utils import html from openpyxl.writer.excel import save_virtual_workbook import utils.admin" ]
[ "values nc.variables[vname][idx, :, :] = np.ma.array( ds[vname].values, mask=np.isnan(ds[vname].values) ) if __name__ == '__main__':", "= logger() if len(argv) == 6: valid = utc( int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4]))", "valid = utc( int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4])) ncfn = iemre.get_hourly_ncname(valid.year) idx = iemre.hourly_offset(valid)", "as np from pyiem.util import utc, ncopen, logger from pyiem import iemre def", "== 6: valid = utc( int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4])) ncfn = iemre.get_hourly_ncname(valid.year) idx", "could contain NaN values nc.variables[vname][idx, :, :] = np.ma.array( ds[vname].values, mask=np.isnan(ds[vname].values) ) if", "int(argv[3]), int(argv[4])) ncfn = iemre.get_hourly_ncname(valid.year) idx = iemre.hourly_offset(valid) else: valid = datetime.date(int(argv[1]), int(argv[2]),", "copy, otherwise hourly. see: akrherz/iem#199 \"\"\" import sys import datetime import numpy as", "int(argv[4])) ncfn = iemre.get_hourly_ncname(valid.year) idx = iemre.hourly_offset(valid) else: valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3]))", "import datetime import numpy as np from pyiem.util import utc, ncopen, logger from", "'a', timeout=600) as nc: for vname in ds: if vname not in nc.variables:", "<year> <month> <day> <utchour> If hour and minute are omitted, this is a", "iemre def main(argv): \"\"\"Go Main Go.\"\"\" log = logger() if len(argv) == 6:", "= iemre.get_hourly_ncname(valid.year) idx = iemre.hourly_offset(valid) else: valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) ncfn =", "in ds: if vname not in nc.variables: continue log.debug(\"copying database var %s to", "ncopen, logger from pyiem import iemre def main(argv): \"\"\"Go Main Go.\"\"\" log =", "<day> <utchour> If hour and minute are omitted, this is a daily copy,", "this is a daily copy, otherwise hourly. see: akrherz/iem#199 \"\"\" import sys import", "daily copy, otherwise hourly. 
see: akrherz/iem#199 \"\"\" import sys import datetime import numpy", "Example: python db_to_netcdf.py <year> <month> <day> <utchour> If hour and minute are omitted,", "netcdf. Example: python db_to_netcdf.py <year> <month> <day> <utchour> If hour and minute are", "hourly. see: akrherz/iem#199 \"\"\" import sys import datetime import numpy as np from", "len(argv) == 6: valid = utc( int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4])) ncfn = iemre.get_hourly_ncname(valid.year)", "from pyiem.util import utc, ncopen, logger from pyiem import iemre def main(argv): \"\"\"Go", "6: valid = utc( int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4])) ncfn = iemre.get_hourly_ncname(valid.year) idx =", "ncfn = iemre.get_daily_ncname(valid.year) idx = iemre.daily_offset(valid) ds = iemre.get_grids(valid) with ncopen(ncfn, 'a', timeout=600)", "logger() if len(argv) == 6: valid = utc( int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4])) ncfn", "ds: if vname not in nc.variables: continue log.debug(\"copying database var %s to netcdf\",", "\"\"\"Copy database grids to netcdf. 
Example: python db_to_netcdf.py <year> <month> <day> <utchour> If", "datetime import numpy as np from pyiem.util import utc, ncopen, logger from pyiem", "vname) # Careful here, ds could contain NaN values nc.variables[vname][idx, :, :] =", "%s to netcdf\", vname) # Careful here, ds could contain NaN values nc.variables[vname][idx,", "timeout=600) as nc: for vname in ds: if vname not in nc.variables: continue", "vname in ds: if vname not in nc.variables: continue log.debug(\"copying database var %s", "log = logger() if len(argv) == 6: valid = utc( int(argv[1]), int(argv[2]), int(argv[3]),", "iemre.get_daily_ncname(valid.year) idx = iemre.daily_offset(valid) ds = iemre.get_grids(valid) with ncopen(ncfn, 'a', timeout=600) as nc:", "else: valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) ncfn = iemre.get_daily_ncname(valid.year) idx = iemre.daily_offset(valid) ds", "<gh_stars>1-10 \"\"\"Copy database grids to netcdf. Example: python db_to_netcdf.py <year> <month> <day> <utchour>", "from pyiem import iemre def main(argv): \"\"\"Go Main Go.\"\"\" log = logger() if", "grids to netcdf. Example: python db_to_netcdf.py <year> <month> <day> <utchour> If hour and", "otherwise hourly. see: akrherz/iem#199 \"\"\" import sys import datetime import numpy as np", "utc, ncopen, logger from pyiem import iemre def main(argv): \"\"\"Go Main Go.\"\"\" log", "= iemre.get_grids(valid) with ncopen(ncfn, 'a', timeout=600) as nc: for vname in ds: if", "with ncopen(ncfn, 'a', timeout=600) as nc: for vname in ds: if vname not", "int(argv[3])) ncfn = iemre.get_daily_ncname(valid.year) idx = iemre.daily_offset(valid) ds = iemre.get_grids(valid) with ncopen(ncfn, 'a',", "= utc( int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4])) ncfn = iemre.get_hourly_ncname(valid.year) idx = iemre.hourly_offset(valid) else:", "minute are omitted, this is a daily copy, otherwise hourly. see: akrherz/iem#199 \"\"\"", "are omitted, this is a daily copy, otherwise hourly. 
see: akrherz/iem#199 \"\"\" import", "and minute are omitted, this is a daily copy, otherwise hourly. see: akrherz/iem#199", "ds = iemre.get_grids(valid) with ncopen(ncfn, 'a', timeout=600) as nc: for vname in ds:", "NaN values nc.variables[vname][idx, :, :] = np.ma.array( ds[vname].values, mask=np.isnan(ds[vname].values) ) if __name__ ==", "as nc: for vname in ds: if vname not in nc.variables: continue log.debug(\"copying", "iemre.hourly_offset(valid) else: valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) ncfn = iemre.get_daily_ncname(valid.year) idx = iemre.daily_offset(valid)", "if vname not in nc.variables: continue log.debug(\"copying database var %s to netcdf\", vname)", "vname not in nc.variables: continue log.debug(\"copying database var %s to netcdf\", vname) #", "hour and minute are omitted, this is a daily copy, otherwise hourly. see:", "import utc, ncopen, logger from pyiem import iemre def main(argv): \"\"\"Go Main Go.\"\"\"", "see: akrherz/iem#199 \"\"\" import sys import datetime import numpy as np from pyiem.util", "logger from pyiem import iemre def main(argv): \"\"\"Go Main Go.\"\"\" log = logger()", "nc: for vname in ds: if vname not in nc.variables: continue log.debug(\"copying database", "ds could contain NaN values nc.variables[vname][idx, :, :] = np.ma.array( ds[vname].values, mask=np.isnan(ds[vname].values) )", "nc.variables[vname][idx, :, :] = np.ma.array( ds[vname].values, mask=np.isnan(ds[vname].values) ) if __name__ == '__main__': main(sys.argv)", "here, ds could contain NaN values nc.variables[vname][idx, :, :] = np.ma.array( ds[vname].values, mask=np.isnan(ds[vname].values)", "= iemre.get_daily_ncname(valid.year) idx = iemre.daily_offset(valid) ds = iemre.get_grids(valid) with ncopen(ncfn, 'a', timeout=600) as", "<month> <day> <utchour> If hour and minute are omitted, this is a daily", "log.debug(\"copying database var %s to netcdf\", vname) # Careful here, ds could contain", "akrherz/iem#199 \"\"\" import sys import 
datetime import numpy as np from pyiem.util import", "If hour and minute are omitted, this is a daily copy, otherwise hourly.", "iemre.get_grids(valid) with ncopen(ncfn, 'a', timeout=600) as nc: for vname in ds: if vname", "= iemre.hourly_offset(valid) else: valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) ncfn = iemre.get_daily_ncname(valid.year) idx =", "db_to_netcdf.py <year> <month> <day> <utchour> If hour and minute are omitted, this is", "int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4])) ncfn = iemre.get_hourly_ncname(valid.year) idx = iemre.hourly_offset(valid) else: valid =", "nc.variables: continue log.debug(\"copying database var %s to netcdf\", vname) # Careful here, ds", "idx = iemre.hourly_offset(valid) else: valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) ncfn = iemre.get_daily_ncname(valid.year) idx", "idx = iemre.daily_offset(valid) ds = iemre.get_grids(valid) with ncopen(ncfn, 'a', timeout=600) as nc: for", "for vname in ds: if vname not in nc.variables: continue log.debug(\"copying database var", "pyiem.util import utc, ncopen, logger from pyiem import iemre def main(argv): \"\"\"Go Main", "import sys import datetime import numpy as np from pyiem.util import utc, ncopen,", "import numpy as np from pyiem.util import utc, ncopen, logger from pyiem import", "Main Go.\"\"\" log = logger() if len(argv) == 6: valid = utc( int(argv[1]),", "\"\"\"Go Main Go.\"\"\" log = logger() if len(argv) == 6: valid = utc(", "ncfn = iemre.get_hourly_ncname(valid.year) idx = iemre.hourly_offset(valid) else: valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) ncfn", "contain NaN values nc.variables[vname][idx, :, :] = np.ma.array( ds[vname].values, mask=np.isnan(ds[vname].values) ) if __name__", "netcdf\", vname) # Careful here, ds could contain NaN values nc.variables[vname][idx, :, :]", "int(argv[2]), int(argv[3]), int(argv[4])) ncfn = iemre.get_hourly_ncname(valid.year) idx = iemre.hourly_offset(valid) else: valid = 
datetime.date(int(argv[1]),", "import iemre def main(argv): \"\"\"Go Main Go.\"\"\" log = logger() if len(argv) ==", "to netcdf\", vname) # Careful here, ds could contain NaN values nc.variables[vname][idx, :,", "pyiem import iemre def main(argv): \"\"\"Go Main Go.\"\"\" log = logger() if len(argv)", "python db_to_netcdf.py <year> <month> <day> <utchour> If hour and minute are omitted, this", "in nc.variables: continue log.debug(\"copying database var %s to netcdf\", vname) # Careful here,", "def main(argv): \"\"\"Go Main Go.\"\"\" log = logger() if len(argv) == 6: valid", "to netcdf. Example: python db_to_netcdf.py <year> <month> <day> <utchour> If hour and minute", "omitted, this is a daily copy, otherwise hourly. see: akrherz/iem#199 \"\"\" import sys", "valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) ncfn = iemre.get_daily_ncname(valid.year) idx = iemre.daily_offset(valid) ds =", "<utchour> If hour and minute are omitted, this is a daily copy, otherwise", "ncopen(ncfn, 'a', timeout=600) as nc: for vname in ds: if vname not in", "Careful here, ds could contain NaN values nc.variables[vname][idx, :, :] = np.ma.array( ds[vname].values,", "iemre.daily_offset(valid) ds = iemre.get_grids(valid) with ncopen(ncfn, 'a', timeout=600) as nc: for vname in", "= iemre.daily_offset(valid) ds = iemre.get_grids(valid) with ncopen(ncfn, 'a', timeout=600) as nc: for vname", "= datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) ncfn = iemre.get_daily_ncname(valid.year) idx = iemre.daily_offset(valid) ds = iemre.get_grids(valid)", "\"\"\" import sys import datetime import numpy as np from pyiem.util import utc,", "continue log.debug(\"copying database var %s to netcdf\", vname) # Careful here, ds could", "a daily copy, otherwise hourly. 
see: akrherz/iem#199 \"\"\" import sys import datetime import", "Go.\"\"\" log = logger() if len(argv) == 6: valid = utc( int(argv[1]), int(argv[2]),", "database var %s to netcdf\", vname) # Careful here, ds could contain NaN", "int(argv[2]), int(argv[3])) ncfn = iemre.get_daily_ncname(valid.year) idx = iemre.daily_offset(valid) ds = iemre.get_grids(valid) with ncopen(ncfn,", "datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) ncfn = iemre.get_daily_ncname(valid.year) idx = iemre.daily_offset(valid) ds = iemre.get_grids(valid) with", "if len(argv) == 6: valid = utc( int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4])) ncfn =", "utc( int(argv[1]), int(argv[2]), int(argv[3]), int(argv[4])) ncfn = iemre.get_hourly_ncname(valid.year) idx = iemre.hourly_offset(valid) else: valid", "main(argv): \"\"\"Go Main Go.\"\"\" log = logger() if len(argv) == 6: valid =", "numpy as np from pyiem.util import utc, ncopen, logger from pyiem import iemre", "not in nc.variables: continue log.debug(\"copying database var %s to netcdf\", vname) # Careful", "iemre.get_hourly_ncname(valid.year) idx = iemre.hourly_offset(valid) else: valid = datetime.date(int(argv[1]), int(argv[2]), int(argv[3])) ncfn = iemre.get_daily_ncname(valid.year)", "is a daily copy, otherwise hourly. see: akrherz/iem#199 \"\"\" import sys import datetime", "var %s to netcdf\", vname) # Careful here, ds could contain NaN values", "np from pyiem.util import utc, ncopen, logger from pyiem import iemre def main(argv):", "sys import datetime import numpy as np from pyiem.util import utc, ncopen, logger", "# Careful here, ds could contain NaN values nc.variables[vname][idx, :, :] = np.ma.array(", "database grids to netcdf. Example: python db_to_netcdf.py <year> <month> <day> <utchour> If hour" ]
[ "#Kt = Function(TensorSpace) #hdf.read(MD, '/MD') #hdf.read(Kt, '/DTI') #File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd return mesh,SD,bnd def read_brain_scale(mesh):", "fenics import * from matplotlib.pyplot import show def read_brain_mesh_3D(): path = \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh", "= MeshFunction(\"size_t\", mesh,mesh.topology().dim()) #hdf.read(SD, \"/subdomains\") bnd = MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1) #hdf.read(bnd, \"/boundaries\") #lookup_table =", "\"/mesh\", False) SD = MeshFunction(\"size_t\", mesh,mesh.topology().dim()) #hdf.read(SD, \"/subdomains\") bnd = MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1) #hdf.read(bnd,", "\"/subdomains\") bnd = MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1) #hdf.read(bnd, \"/boundaries\") #lookup_table = MeshFunction(\"size_t\", mesh, mesh.topology().dim()) #hdf.read(lookup_table,", "= Function(TensorSpace) #hdf.read(MD, '/MD') #hdf.read(Kt, '/DTI') #File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd return mesh,SD,bnd def read_brain_scale(mesh): dx", "#hdf.read(lookup_table, '/lookup_table') #TensorSpace = TensorFunctionSpace(mesh, 'DG', 0) #MDSpace = FunctionSpace(mesh, 'DG', 0) #MD", "mesh,SD,bnd def read_brain_scale(mesh): dx = Measure(\"dx\", domain=mesh) tot_parenchyma_vol = assemble(1*dx) vol_scale = 1.0/tot_parenchyma_vol", "#hdf.read(mesh, \"/mesh\", False) SD = MeshFunction(\"size_t\", mesh,mesh.topology().dim()) #hdf.read(SD, \"/subdomains\") bnd = MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1)", "def read_brain_mesh_3D(): path = \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh = Mesh() #hdf = HDF5File(mesh.mpi_comm(),path , \"r\")", "* from matplotlib.pyplot import show def read_brain_mesh_3D(): path = \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh = Mesh()", "\"/boundaries\") #lookup_table = MeshFunction(\"size_t\", mesh, mesh.topology().dim()) 
#hdf.read(lookup_table, '/lookup_table') #TensorSpace = TensorFunctionSpace(mesh, 'DG', 0)", "= assemble(1*dx) vol_scale = 1.0/tot_parenchyma_vol print(\"Volume of parenchyma in mm³: \",tot_parenchyma_vol) return vol_scale", "read_brain_mesh_3D(): path = \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh = Mesh() #hdf = HDF5File(mesh.mpi_comm(),path , \"r\") #hdf.read(mesh,", "path = \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh = Mesh() #hdf = HDF5File(mesh.mpi_comm(),path , \"r\") #hdf.read(mesh, \"/mesh\",", "#MD = Function(MDSpace) #Kt = Function(TensorSpace) #hdf.read(MD, '/MD') #hdf.read(Kt, '/DTI') #File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd return", "Measure(\"dx\", domain=mesh) tot_parenchyma_vol = assemble(1*dx) vol_scale = 1.0/tot_parenchyma_vol print(\"Volume of parenchyma in mm³:", "matplotlib.pyplot import show def read_brain_mesh_3D(): path = \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh = Mesh() #hdf =", "#File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd return mesh,SD,bnd def read_brain_scale(mesh): dx = Measure(\"dx\", domain=mesh) tot_parenchyma_vol = assemble(1*dx)", "'DG', 0) #MD = Function(MDSpace) #Kt = Function(TensorSpace) #hdf.read(MD, '/MD') #hdf.read(Kt, '/DTI') #File('subdomains.pvd')<<SD", "#hdf.read(Kt, '/DTI') #File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd return mesh,SD,bnd def read_brain_scale(mesh): dx = Measure(\"dx\", domain=mesh) tot_parenchyma_vol", "mesh = Mesh() #hdf = HDF5File(mesh.mpi_comm(),path , \"r\") #hdf.read(mesh, \"/mesh\", False) SD =", "parenchyma in mm³: \",tot_parenchyma_vol) return vol_scale if __name__ == \"__main__\": mesh = read_brain_mesh_3D()", "print(\"Volume of parenchyma in mm³: \",tot_parenchyma_vol) return vol_scale if __name__ == \"__main__\": mesh", "import show def read_brain_mesh_3D(): path = \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh = Mesh() #hdf = 
HDF5File(mesh.mpi_comm(),path", "\"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh = Mesh() #hdf = HDF5File(mesh.mpi_comm(),path , \"r\") #hdf.read(mesh, \"/mesh\", False) SD", "'/MD') #hdf.read(Kt, '/DTI') #File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd return mesh,SD,bnd def read_brain_scale(mesh): dx = Measure(\"dx\", domain=mesh)", "= \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh = Mesh() #hdf = HDF5File(mesh.mpi_comm(),path , \"r\") #hdf.read(mesh, \"/mesh\", False)", "= Mesh() #hdf = HDF5File(mesh.mpi_comm(),path , \"r\") #hdf.read(mesh, \"/mesh\", False) SD = MeshFunction(\"size_t\",", "Function(TensorSpace) #hdf.read(MD, '/MD') #hdf.read(Kt, '/DTI') #File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd return mesh,SD,bnd def read_brain_scale(mesh): dx =", "MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1) #hdf.read(bnd, \"/boundaries\") #lookup_table = MeshFunction(\"size_t\", mesh, mesh.topology().dim()) #hdf.read(lookup_table, '/lookup_table') #TensorSpace =", "Mesh() #hdf = HDF5File(mesh.mpi_comm(),path , \"r\") #hdf.read(mesh, \"/mesh\", False) SD = MeshFunction(\"size_t\", mesh,mesh.topology().dim())", "in mm³: \",tot_parenchyma_vol) return vol_scale if __name__ == \"__main__\": mesh = read_brain_mesh_3D() scale", "import * from matplotlib.pyplot import show def read_brain_mesh_3D(): path = \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh =", "from matplotlib.pyplot import show def read_brain_mesh_3D(): path = \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh = Mesh() #hdf", "Function(MDSpace) #Kt = Function(TensorSpace) #hdf.read(MD, '/MD') #hdf.read(Kt, '/DTI') #File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd return mesh,SD,bnd def", "of parenchyma in mm³: \",tot_parenchyma_vol) return vol_scale if __name__ == \"__main__\": mesh =", "= MeshFunction(\"size_t\", mesh, mesh.topology().dim()) #hdf.read(lookup_table, '/lookup_table') 
#TensorSpace = TensorFunctionSpace(mesh, 'DG', 0) #MDSpace =", "bnd = MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1) #hdf.read(bnd, \"/boundaries\") #lookup_table = MeshFunction(\"size_t\", mesh, mesh.topology().dim()) #hdf.read(lookup_table, '/lookup_table')", "TensorFunctionSpace(mesh, 'DG', 0) #MDSpace = FunctionSpace(mesh, 'DG', 0) #MD = Function(MDSpace) #Kt =", "#hdf.read(MD, '/MD') #hdf.read(Kt, '/DTI') #File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd return mesh,SD,bnd def read_brain_scale(mesh): dx = Measure(\"dx\",", "0) #MDSpace = FunctionSpace(mesh, 'DG', 0) #MD = Function(MDSpace) #Kt = Function(TensorSpace) #hdf.read(MD,", "domain=mesh) tot_parenchyma_vol = assemble(1*dx) vol_scale = 1.0/tot_parenchyma_vol print(\"Volume of parenchyma in mm³: \",tot_parenchyma_vol)", "from fenics import * from matplotlib.pyplot import show def read_brain_mesh_3D(): path = \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\"", "= Function(MDSpace) #Kt = Function(TensorSpace) #hdf.read(MD, '/MD') #hdf.read(Kt, '/DTI') #File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd return mesh,SD,bnd", "#hdf.read(bnd, \"/boundaries\") #lookup_table = MeshFunction(\"size_t\", mesh, mesh.topology().dim()) #hdf.read(lookup_table, '/lookup_table') #TensorSpace = TensorFunctionSpace(mesh, 'DG',", "= TensorFunctionSpace(mesh, 'DG', 0) #MDSpace = FunctionSpace(mesh, 'DG', 0) #MD = Function(MDSpace) #Kt", "mesh,mesh.topology().dim()-1) #hdf.read(bnd, \"/boundaries\") #lookup_table = MeshFunction(\"size_t\", mesh, mesh.topology().dim()) #hdf.read(lookup_table, '/lookup_table') #TensorSpace = TensorFunctionSpace(mesh,", "read_brain_scale(mesh): dx = Measure(\"dx\", domain=mesh) tot_parenchyma_vol = assemble(1*dx) vol_scale = 1.0/tot_parenchyma_vol print(\"Volume of", "= HDF5File(mesh.mpi_comm(),path , \"r\") #hdf.read(mesh, \"/mesh\", False) SD = MeshFunction(\"size_t\", mesh,mesh.topology().dim()) #hdf.read(SD, \"/subdomains\")", "mm³: \",tot_parenchyma_vol) return 
vol_scale if __name__ == \"__main__\": mesh = read_brain_mesh_3D() scale =", ", \"r\") #hdf.read(mesh, \"/mesh\", False) SD = MeshFunction(\"size_t\", mesh,mesh.topology().dim()) #hdf.read(SD, \"/subdomains\") bnd =", "1.0/tot_parenchyma_vol print(\"Volume of parenchyma in mm³: \",tot_parenchyma_vol) return vol_scale if __name__ == \"__main__\":", "'/lookup_table') #TensorSpace = TensorFunctionSpace(mesh, 'DG', 0) #MDSpace = FunctionSpace(mesh, 'DG', 0) #MD =", "#hdf.read(SD, \"/subdomains\") bnd = MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1) #hdf.read(bnd, \"/boundaries\") #lookup_table = MeshFunction(\"size_t\", mesh, mesh.topology().dim())", "SD = MeshFunction(\"size_t\", mesh,mesh.topology().dim()) #hdf.read(SD, \"/subdomains\") bnd = MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1) #hdf.read(bnd, \"/boundaries\") #lookup_table", "assemble(1*dx) vol_scale = 1.0/tot_parenchyma_vol print(\"Volume of parenchyma in mm³: \",tot_parenchyma_vol) return vol_scale if", "tot_parenchyma_vol = assemble(1*dx) vol_scale = 1.0/tot_parenchyma_vol print(\"Volume of parenchyma in mm³: \",tot_parenchyma_vol) return", "mesh.topology().dim()) #hdf.read(lookup_table, '/lookup_table') #TensorSpace = TensorFunctionSpace(mesh, 'DG', 0) #MDSpace = FunctionSpace(mesh, 'DG', 0)", "#hdf = HDF5File(mesh.mpi_comm(),path , \"r\") #hdf.read(mesh, \"/mesh\", False) SD = MeshFunction(\"size_t\", mesh,mesh.topology().dim()) #hdf.read(SD,", "HDF5File(mesh.mpi_comm(),path , \"r\") #hdf.read(mesh, \"/mesh\", False) SD = MeshFunction(\"size_t\", mesh,mesh.topology().dim()) #hdf.read(SD, \"/subdomains\") bnd", "'/DTI') #File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd return mesh,SD,bnd def read_brain_scale(mesh): dx = Measure(\"dx\", domain=mesh) tot_parenchyma_vol =", "#TensorSpace = TensorFunctionSpace(mesh, 'DG', 0) #MDSpace = FunctionSpace(mesh, 'DG', 0) #MD = Function(MDSpace)", "False) SD = MeshFunction(\"size_t\", mesh,mesh.topology().dim()) #hdf.read(SD, \"/subdomains\") bnd = 
MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1) #hdf.read(bnd, \"/boundaries\")", "0) #MD = Function(MDSpace) #Kt = Function(TensorSpace) #hdf.read(MD, '/MD') #hdf.read(Kt, '/DTI') #File('subdomains.pvd')<<SD #File('bnd.pvd')<<bnd", "mesh,mesh.topology().dim()) #hdf.read(SD, \"/subdomains\") bnd = MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1) #hdf.read(bnd, \"/boundaries\") #lookup_table = MeshFunction(\"size_t\", mesh,", "dx = Measure(\"dx\", domain=mesh) tot_parenchyma_vol = assemble(1*dx) vol_scale = 1.0/tot_parenchyma_vol print(\"Volume of parenchyma", "show def read_brain_mesh_3D(): path = \"/home/asmund/dev/FEniCS-Brain-Flow/meshes/parenchyma16_with_DTI.h5\" mesh = Mesh() #hdf = HDF5File(mesh.mpi_comm(),path ,", "#lookup_table = MeshFunction(\"size_t\", mesh, mesh.topology().dim()) #hdf.read(lookup_table, '/lookup_table') #TensorSpace = TensorFunctionSpace(mesh, 'DG', 0) #MDSpace", "#File('bnd.pvd')<<bnd return mesh,SD,bnd def read_brain_scale(mesh): dx = Measure(\"dx\", domain=mesh) tot_parenchyma_vol = assemble(1*dx) vol_scale", "= Measure(\"dx\", domain=mesh) tot_parenchyma_vol = assemble(1*dx) vol_scale = 1.0/tot_parenchyma_vol print(\"Volume of parenchyma in", "#MDSpace = FunctionSpace(mesh, 'DG', 0) #MD = Function(MDSpace) #Kt = Function(TensorSpace) #hdf.read(MD, '/MD')", "return mesh,SD,bnd def read_brain_scale(mesh): dx = Measure(\"dx\", domain=mesh) tot_parenchyma_vol = assemble(1*dx) vol_scale =", "= FunctionSpace(mesh, 'DG', 0) #MD = Function(MDSpace) #Kt = Function(TensorSpace) #hdf.read(MD, '/MD') #hdf.read(Kt,", "\"r\") #hdf.read(mesh, \"/mesh\", False) SD = MeshFunction(\"size_t\", mesh,mesh.topology().dim()) #hdf.read(SD, \"/subdomains\") bnd = MeshFunction(\"size_t\",", "mesh, mesh.topology().dim()) #hdf.read(lookup_table, '/lookup_table') #TensorSpace = TensorFunctionSpace(mesh, 'DG', 0) #MDSpace = FunctionSpace(mesh, 'DG',", "FunctionSpace(mesh, 'DG', 0) #MD = Function(MDSpace) #Kt = Function(TensorSpace) #hdf.read(MD, 
'/MD') #hdf.read(Kt, '/DTI')", "= 1.0/tot_parenchyma_vol print(\"Volume of parenchyma in mm³: \",tot_parenchyma_vol) return vol_scale if __name__ ==", "= MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1) #hdf.read(bnd, \"/boundaries\") #lookup_table = MeshFunction(\"size_t\", mesh, mesh.topology().dim()) #hdf.read(lookup_table, '/lookup_table') #TensorSpace", "MeshFunction(\"size_t\", mesh, mesh.topology().dim()) #hdf.read(lookup_table, '/lookup_table') #TensorSpace = TensorFunctionSpace(mesh, 'DG', 0) #MDSpace = FunctionSpace(mesh,", "'DG', 0) #MDSpace = FunctionSpace(mesh, 'DG', 0) #MD = Function(MDSpace) #Kt = Function(TensorSpace)", "vol_scale = 1.0/tot_parenchyma_vol print(\"Volume of parenchyma in mm³: \",tot_parenchyma_vol) return vol_scale if __name__", "\",tot_parenchyma_vol) return vol_scale if __name__ == \"__main__\": mesh = read_brain_mesh_3D() scale = read_brain_scale(mesh)", "def read_brain_scale(mesh): dx = Measure(\"dx\", domain=mesh) tot_parenchyma_vol = assemble(1*dx) vol_scale = 1.0/tot_parenchyma_vol print(\"Volume", "MeshFunction(\"size_t\", mesh,mesh.topology().dim()) #hdf.read(SD, \"/subdomains\") bnd = MeshFunction(\"size_t\", mesh,mesh.topology().dim()-1) #hdf.read(bnd, \"/boundaries\") #lookup_table = MeshFunction(\"size_t\"," ]
[ "from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('djangosheet', '0003_lineup_1to1'), ]", "-*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations", "= [ ('djangosheet', '0003_lineup_1to1'), ] operations = [ migrations.AlterModelOptions( name='lineupentry', options={'verbose_name_plural': 'lineup entries',", "import models, migrations class Migration(migrations.Migration): dependencies = [ ('djangosheet', '0003_lineup_1to1'), ] operations =", "('djangosheet', '0003_lineup_1to1'), ] operations = [ migrations.AlterModelOptions( name='lineupentry', options={'verbose_name_plural': 'lineup entries', 'ordering': ['batting_position']},", "unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('djangosheet', '0003_lineup_1to1'),", "migrations class Migration(migrations.Migration): dependencies = [ ('djangosheet', '0003_lineup_1to1'), ] operations = [ migrations.AlterModelOptions(", "class Migration(migrations.Migration): dependencies = [ ('djangosheet', '0003_lineup_1to1'), ] operations = [ migrations.AlterModelOptions( name='lineupentry',", "coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class", "<gh_stars>1-10 # -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import", "import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('djangosheet',", "-*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies", "models, migrations class Migration(migrations.Migration): dependencies = [ ('djangosheet', '0003_lineup_1to1'), ] operations = [", "Migration(migrations.Migration): dependencies = [ ('djangosheet', '0003_lineup_1to1'), ] operations = [ migrations.AlterModelOptions( name='lineupentry', 
options={'verbose_name_plural':", "from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies =", "__future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [", "] operations = [ migrations.AlterModelOptions( name='lineupentry', options={'verbose_name_plural': 'lineup entries', 'ordering': ['batting_position']}, ), ]", "dependencies = [ ('djangosheet', '0003_lineup_1to1'), ] operations = [ migrations.AlterModelOptions( name='lineupentry', options={'verbose_name_plural': 'lineup", "'0003_lineup_1to1'), ] operations = [ migrations.AlterModelOptions( name='lineupentry', options={'verbose_name_plural': 'lineup entries', 'ordering': ['batting_position']}, ),", "utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration):", "django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('djangosheet', '0003_lineup_1to1'), ] operations", "[ ('djangosheet', '0003_lineup_1to1'), ] operations = [ migrations.AlterModelOptions( name='lineupentry', options={'verbose_name_plural': 'lineup entries', 'ordering':", "# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models," ]
[ "import time CACHE_TIMEOUT = 30 def update_cache(self, endpoint, **kwargs): data = raw_get(self, endpoint,", "cached_check return func_wrapper class CachedResponse(ndb.Model): endpoint = ndb.StringProperty('e', required=True, indexed=True) data = ndb.JsonProperty('d',", "oldtime if td.seconds > timeout: try: task_name = endpoint.replace('/', '-') + \\ '-%d'", "raw_get(self, endpoint, **kwargs) key = ndb.Key(CachedResponse, endpoint) cr = key.get() cr.data = data", "except TaskAlreadyExistsError: logging.critical('Task <%s> already exists.' % task_name) logging.critical('Could not update cache.') except", "from google.appengine.ext import ndb from google.appengine.ext import deferred from datetime import datetime from", "**kwargs) key = ndb.Key(CachedResponse, endpoint) cr = key.get() cr.data = data cr.put() def", "= cr.timestamp ts = time() currtime = datetime.utcfromtimestamp(ts) td = currtime - oldtime", "endpoint, **kwargs): data = raw_get(self, endpoint, **kwargs) key = ndb.Key(CachedResponse, endpoint) cr =", "deferred from datetime import datetime from time import time CACHE_TIMEOUT = 30 def", "import raw_get from google.appengine.api.taskqueue import TaskAlreadyExistsError from google.appengine.api.taskqueue import TombstonedTaskError from google.appengine.ext import", "= ndb.Key(CachedResponse, endpoint) cr = key.get() if not cr: data = func(self, endpoint,", "endpoint=endpoint, data=data) cr.put() else: oldtime = cr.timestamp ts = time() currtime = datetime.utcfromtimestamp(ts)", "from datetime import datetime from time import time CACHE_TIMEOUT = 30 def update_cache(self,", "key.get() cr.data = data cr.put() def cached(timeout=CACHE_TIMEOUT): def func_wrapper(func): def cached_check(self, endpoint, **kwargs):", "was: %s' % str(cr.timestamp)) logging.critical('Current time is: %s' % str(currtime)) return cr.data return", "if not cr: data = func(self, endpoint, **kwargs) cr = CachedResponse(key=key, endpoint=endpoint, data=data)", "CachedResponse(key=key, 
endpoint=endpoint, data=data) cr.put() else: oldtime = cr.timestamp ts = time() currtime =", "td = currtime - oldtime if td.seconds > timeout: try: task_name = endpoint.replace('/',", "% (int(ts)) deferred.defer(update_cache, self, endpoint, _name=task_name, **kwargs) except TaskAlreadyExistsError: logging.critical('Task <%s> already exists.'", "timeout: try: task_name = endpoint.replace('/', '-') + \\ '-%d' % (int(ts)) deferred.defer(update_cache, self,", "logging.critical('Stored timestamp was: %s' % str(cr.timestamp)) logging.critical('Current time is: %s' % str(currtime)) return", "= key.get() cr.data = data cr.put() def cached(timeout=CACHE_TIMEOUT): def func_wrapper(func): def cached_check(self, endpoint,", "= CachedResponse(key=key, endpoint=endpoint, data=data) cr.put() else: oldtime = cr.timestamp ts = time() currtime", "not update cache.') except TombstonedTaskError: logging.critical('Tombstoned task <%s> encountered.' % task_name) logging.critical('Attempting to", "data = func(self, endpoint, **kwargs) cr = CachedResponse(key=key, endpoint=endpoint, data=data) cr.put() else: oldtime", "def update_cache(self, endpoint, **kwargs): data = raw_get(self, endpoint, **kwargs) key = ndb.Key(CachedResponse, endpoint)", "task <%s> encountered.' 
% task_name) logging.critical('Attempting to serve old cache data.') logging.critical('Stored timestamp", "import logging from .util import raw_get from google.appengine.api.taskqueue import TaskAlreadyExistsError from google.appengine.api.taskqueue import", "key = ndb.Key(CachedResponse, endpoint) cr = key.get() if not cr: data = func(self,", "task_name = endpoint.replace('/', '-') + \\ '-%d' % (int(ts)) deferred.defer(update_cache, self, endpoint, _name=task_name,", "from google.appengine.api.taskqueue import TaskAlreadyExistsError from google.appengine.api.taskqueue import TombstonedTaskError from google.appengine.ext import ndb from", "raw_get from google.appengine.api.taskqueue import TaskAlreadyExistsError from google.appengine.api.taskqueue import TombstonedTaskError from google.appengine.ext import ndb", "**kwargs): data = raw_get(self, endpoint, **kwargs) key = ndb.Key(CachedResponse, endpoint) cr = key.get()", "exists.' % task_name) logging.critical('Could not update cache.') except TombstonedTaskError: logging.critical('Tombstoned task <%s> encountered.'", "deferred.defer(update_cache, self, endpoint, _name=task_name, **kwargs) except TaskAlreadyExistsError: logging.critical('Task <%s> already exists.' 
% task_name)", "import deferred from datetime import datetime from time import time CACHE_TIMEOUT = 30", "str(cr.timestamp)) logging.critical('Current time is: %s' % str(currtime)) return cr.data return cached_check return func_wrapper", "= func(self, endpoint, **kwargs) cr = CachedResponse(key=key, endpoint=endpoint, data=data) cr.put() else: oldtime =", "CachedResponse(ndb.Model): endpoint = ndb.StringProperty('e', required=True, indexed=True) data = ndb.JsonProperty('d', required=True) timestamp = ndb.DateTimeProperty('t',", "key = ndb.Key(CachedResponse, endpoint) cr = key.get() cr.data = data cr.put() def cached(timeout=CACHE_TIMEOUT):", "= ndb.Key(CachedResponse, endpoint) cr = key.get() cr.data = data cr.put() def cached(timeout=CACHE_TIMEOUT): def", "def func_wrapper(func): def cached_check(self, endpoint, **kwargs): key = ndb.Key(CachedResponse, endpoint) cr = key.get()", "old cache data.') logging.critical('Stored timestamp was: %s' % str(cr.timestamp)) logging.critical('Current time is: %s'", "cr.timestamp ts = time() currtime = datetime.utcfromtimestamp(ts) td = currtime - oldtime if", "encountered.' 
% task_name) logging.critical('Attempting to serve old cache data.') logging.critical('Stored timestamp was: %s'", "func_wrapper class CachedResponse(ndb.Model): endpoint = ndb.StringProperty('e', required=True, indexed=True) data = ndb.JsonProperty('d', required=True) timestamp", "cr.data return cached_check return func_wrapper class CachedResponse(ndb.Model): endpoint = ndb.StringProperty('e', required=True, indexed=True) data", "td.seconds > timeout: try: task_name = endpoint.replace('/', '-') + \\ '-%d' % (int(ts))", ".util import raw_get from google.appengine.api.taskqueue import TaskAlreadyExistsError from google.appengine.api.taskqueue import TombstonedTaskError from google.appengine.ext", "logging from .util import raw_get from google.appengine.api.taskqueue import TaskAlreadyExistsError from google.appengine.api.taskqueue import TombstonedTaskError", "%s' % str(currtime)) return cr.data return cached_check return func_wrapper class CachedResponse(ndb.Model): endpoint =", "<%s> encountered.' % task_name) logging.critical('Attempting to serve old cache data.') logging.critical('Stored timestamp was:", "except TombstonedTaskError: logging.critical('Tombstoned task <%s> encountered.' 
% task_name) logging.critical('Attempting to serve old cache", "cr.data = data cr.put() def cached(timeout=CACHE_TIMEOUT): def func_wrapper(func): def cached_check(self, endpoint, **kwargs): key", "import datetime from time import time CACHE_TIMEOUT = 30 def update_cache(self, endpoint, **kwargs):", "ts = time() currtime = datetime.utcfromtimestamp(ts) td = currtime - oldtime if td.seconds", "= 30 def update_cache(self, endpoint, **kwargs): data = raw_get(self, endpoint, **kwargs) key =", "import TombstonedTaskError from google.appengine.ext import ndb from google.appengine.ext import deferred from datetime import", "endpoint = ndb.StringProperty('e', required=True, indexed=True) data = ndb.JsonProperty('d', required=True) timestamp = ndb.DateTimeProperty('t', auto_now=True)", "google.appengine.api.taskqueue import TaskAlreadyExistsError from google.appengine.api.taskqueue import TombstonedTaskError from google.appengine.ext import ndb from google.appengine.ext", "data=data) cr.put() else: oldtime = cr.timestamp ts = time() currtime = datetime.utcfromtimestamp(ts) td", "time() currtime = datetime.utcfromtimestamp(ts) td = currtime - oldtime if td.seconds > timeout:", "str(currtime)) return cr.data return cached_check return func_wrapper class CachedResponse(ndb.Model): endpoint = ndb.StringProperty('e', required=True,", "logging.critical('Current time is: %s' % str(currtime)) return cr.data return cached_check return func_wrapper class", "oldtime = cr.timestamp ts = time() currtime = datetime.utcfromtimestamp(ts) td = currtime -", "% str(currtime)) return cr.data return cached_check return func_wrapper class CachedResponse(ndb.Model): endpoint = ndb.StringProperty('e',", "google.appengine.api.taskqueue import TombstonedTaskError from google.appengine.ext import ndb from google.appengine.ext import deferred from datetime", "= currtime - oldtime if td.seconds > timeout: try: task_name = endpoint.replace('/', '-')", "+ \\ '-%d' % (int(ts)) deferred.defer(update_cache, 
self, endpoint, _name=task_name, **kwargs) except TaskAlreadyExistsError: logging.critical('Task", "logging.critical('Attempting to serve old cache data.') logging.critical('Stored timestamp was: %s' % str(cr.timestamp)) logging.critical('Current", "TaskAlreadyExistsError: logging.critical('Task <%s> already exists.' % task_name) logging.critical('Could not update cache.') except TombstonedTaskError:", "from time import time CACHE_TIMEOUT = 30 def update_cache(self, endpoint, **kwargs): data =", "ndb from google.appengine.ext import deferred from datetime import datetime from time import time", "from .util import raw_get from google.appengine.api.taskqueue import TaskAlreadyExistsError from google.appengine.api.taskqueue import TombstonedTaskError from", "endpoint.replace('/', '-') + \\ '-%d' % (int(ts)) deferred.defer(update_cache, self, endpoint, _name=task_name, **kwargs) except", "data.') logging.critical('Stored timestamp was: %s' % str(cr.timestamp)) logging.critical('Current time is: %s' % str(currtime))", "**kwargs) except TaskAlreadyExistsError: logging.critical('Task <%s> already exists.' % task_name) logging.critical('Could not update cache.')", "% str(cr.timestamp)) logging.critical('Current time is: %s' % str(currtime)) return cr.data return cached_check return", "cr = CachedResponse(key=key, endpoint=endpoint, data=data) cr.put() else: oldtime = cr.timestamp ts = time()", "currtime = datetime.utcfromtimestamp(ts) td = currtime - oldtime if td.seconds > timeout: try:", "endpoint, _name=task_name, **kwargs) except TaskAlreadyExistsError: logging.critical('Task <%s> already exists.' % task_name) logging.critical('Could not", "cr.put() def cached(timeout=CACHE_TIMEOUT): def func_wrapper(func): def cached_check(self, endpoint, **kwargs): key = ndb.Key(CachedResponse, endpoint)", "logging.critical('Task <%s> already exists.' 
% task_name) logging.critical('Could not update cache.') except TombstonedTaskError: logging.critical('Tombstoned", "TombstonedTaskError: logging.critical('Tombstoned task <%s> encountered.' % task_name) logging.critical('Attempting to serve old cache data.')", "time CACHE_TIMEOUT = 30 def update_cache(self, endpoint, **kwargs): data = raw_get(self, endpoint, **kwargs)", "task_name) logging.critical('Attempting to serve old cache data.') logging.critical('Stored timestamp was: %s' % str(cr.timestamp))", "(int(ts)) deferred.defer(update_cache, self, endpoint, _name=task_name, **kwargs) except TaskAlreadyExistsError: logging.critical('Task <%s> already exists.' %", "endpoint) cr = key.get() cr.data = data cr.put() def cached(timeout=CACHE_TIMEOUT): def func_wrapper(func): def", "func_wrapper(func): def cached_check(self, endpoint, **kwargs): key = ndb.Key(CachedResponse, endpoint) cr = key.get() if", "update cache.') except TombstonedTaskError: logging.critical('Tombstoned task <%s> encountered.' 
% task_name) logging.critical('Attempting to serve", "%s' % str(cr.timestamp)) logging.critical('Current time is: %s' % str(currtime)) return cr.data return cached_check", "return cr.data return cached_check return func_wrapper class CachedResponse(ndb.Model): endpoint = ndb.StringProperty('e', required=True, indexed=True)", "func(self, endpoint, **kwargs) cr = CachedResponse(key=key, endpoint=endpoint, data=data) cr.put() else: oldtime = cr.timestamp", "= raw_get(self, endpoint, **kwargs) key = ndb.Key(CachedResponse, endpoint) cr = key.get() cr.data =", "ndb.Key(CachedResponse, endpoint) cr = key.get() if not cr: data = func(self, endpoint, **kwargs)", "% task_name) logging.critical('Attempting to serve old cache data.') logging.critical('Stored timestamp was: %s' %", "is: %s' % str(currtime)) return cr.data return cached_check return func_wrapper class CachedResponse(ndb.Model): endpoint", "to serve old cache data.') logging.critical('Stored timestamp was: %s' % str(cr.timestamp)) logging.critical('Current time", "cr = key.get() if not cr: data = func(self, endpoint, **kwargs) cr =", "= datetime.utcfromtimestamp(ts) td = currtime - oldtime if td.seconds > timeout: try: task_name", "datetime.utcfromtimestamp(ts) td = currtime - oldtime if td.seconds > timeout: try: task_name =", "task_name) logging.critical('Could not update cache.') except TombstonedTaskError: logging.critical('Tombstoned task <%s> encountered.' % task_name)", "time is: %s' % str(currtime)) return cr.data return cached_check return func_wrapper class CachedResponse(ndb.Model):", "cache.') except TombstonedTaskError: logging.critical('Tombstoned task <%s> encountered.' 
% task_name) logging.critical('Attempting to serve old", "> timeout: try: task_name = endpoint.replace('/', '-') + \\ '-%d' % (int(ts)) deferred.defer(update_cache,", "'-%d' % (int(ts)) deferred.defer(update_cache, self, endpoint, _name=task_name, **kwargs) except TaskAlreadyExistsError: logging.critical('Task <%s> already", "endpoint, **kwargs) cr = CachedResponse(key=key, endpoint=endpoint, data=data) cr.put() else: oldtime = cr.timestamp ts", "_name=task_name, **kwargs) except TaskAlreadyExistsError: logging.critical('Task <%s> already exists.' % task_name) logging.critical('Could not update", "google.appengine.ext import deferred from datetime import datetime from time import time CACHE_TIMEOUT =", "cr.put() else: oldtime = cr.timestamp ts = time() currtime = datetime.utcfromtimestamp(ts) td =", "% task_name) logging.critical('Could not update cache.') except TombstonedTaskError: logging.critical('Tombstoned task <%s> encountered.' %", "import ndb from google.appengine.ext import deferred from datetime import datetime from time import", "def cached_check(self, endpoint, **kwargs): key = ndb.Key(CachedResponse, endpoint) cr = key.get() if not", "import TaskAlreadyExistsError from google.appengine.api.taskqueue import TombstonedTaskError from google.appengine.ext import ndb from google.appengine.ext import", "datetime import datetime from time import time CACHE_TIMEOUT = 30 def update_cache(self, endpoint,", "- oldtime if td.seconds > timeout: try: task_name = endpoint.replace('/', '-') + \\", "from google.appengine.ext import deferred from datetime import datetime from time import time CACHE_TIMEOUT", "update_cache(self, endpoint, **kwargs): data = raw_get(self, endpoint, **kwargs) key = ndb.Key(CachedResponse, endpoint) cr", "logging.critical('Could not update cache.') except TombstonedTaskError: logging.critical('Tombstoned task <%s> encountered.' 
% task_name) logging.critical('Attempting", "if td.seconds > timeout: try: task_name = endpoint.replace('/', '-') + \\ '-%d' %", "key.get() if not cr: data = func(self, endpoint, **kwargs) cr = CachedResponse(key=key, endpoint=endpoint,", "= data cr.put() def cached(timeout=CACHE_TIMEOUT): def func_wrapper(func): def cached_check(self, endpoint, **kwargs): key =", "cr: data = func(self, endpoint, **kwargs) cr = CachedResponse(key=key, endpoint=endpoint, data=data) cr.put() else:", "already exists.' % task_name) logging.critical('Could not update cache.') except TombstonedTaskError: logging.critical('Tombstoned task <%s>", "CACHE_TIMEOUT = 30 def update_cache(self, endpoint, **kwargs): data = raw_get(self, endpoint, **kwargs) key", "self, endpoint, _name=task_name, **kwargs) except TaskAlreadyExistsError: logging.critical('Task <%s> already exists.' % task_name) logging.critical('Could", "timestamp was: %s' % str(cr.timestamp)) logging.critical('Current time is: %s' % str(currtime)) return cr.data", "def cached(timeout=CACHE_TIMEOUT): def func_wrapper(func): def cached_check(self, endpoint, **kwargs): key = ndb.Key(CachedResponse, endpoint) cr", "google.appengine.ext import ndb from google.appengine.ext import deferred from datetime import datetime from time", "= key.get() if not cr: data = func(self, endpoint, **kwargs) cr = CachedResponse(key=key,", "ndb.Key(CachedResponse, endpoint) cr = key.get() cr.data = data cr.put() def cached(timeout=CACHE_TIMEOUT): def func_wrapper(func):", "TaskAlreadyExistsError from google.appengine.api.taskqueue import TombstonedTaskError from google.appengine.ext import ndb from google.appengine.ext import deferred", "not cr: data = func(self, endpoint, **kwargs) cr = CachedResponse(key=key, endpoint=endpoint, data=data) cr.put()", "logging.critical('Tombstoned task <%s> encountered.' 
% task_name) logging.critical('Attempting to serve old cache data.') logging.critical('Stored", "time import time CACHE_TIMEOUT = 30 def update_cache(self, endpoint, **kwargs): data = raw_get(self,", "TombstonedTaskError from google.appengine.ext import ndb from google.appengine.ext import deferred from datetime import datetime", "cached(timeout=CACHE_TIMEOUT): def func_wrapper(func): def cached_check(self, endpoint, **kwargs): key = ndb.Key(CachedResponse, endpoint) cr =", "= time() currtime = datetime.utcfromtimestamp(ts) td = currtime - oldtime if td.seconds >", "\\ '-%d' % (int(ts)) deferred.defer(update_cache, self, endpoint, _name=task_name, **kwargs) except TaskAlreadyExistsError: logging.critical('Task <%s>", "endpoint, **kwargs): key = ndb.Key(CachedResponse, endpoint) cr = key.get() if not cr: data", "cr = key.get() cr.data = data cr.put() def cached(timeout=CACHE_TIMEOUT): def func_wrapper(func): def cached_check(self,", "**kwargs) cr = CachedResponse(key=key, endpoint=endpoint, data=data) cr.put() else: oldtime = cr.timestamp ts =", "try: task_name = endpoint.replace('/', '-') + \\ '-%d' % (int(ts)) deferred.defer(update_cache, self, endpoint,", "30 def update_cache(self, endpoint, **kwargs): data = raw_get(self, endpoint, **kwargs) key = ndb.Key(CachedResponse,", "endpoint, **kwargs) key = ndb.Key(CachedResponse, endpoint) cr = key.get() cr.data = data cr.put()", "else: oldtime = cr.timestamp ts = time() currtime = datetime.utcfromtimestamp(ts) td = currtime", "datetime from time import time CACHE_TIMEOUT = 30 def update_cache(self, endpoint, **kwargs): data", "'-') + \\ '-%d' % (int(ts)) deferred.defer(update_cache, self, endpoint, _name=task_name, **kwargs) except TaskAlreadyExistsError:", "= endpoint.replace('/', '-') + \\ '-%d' % (int(ts)) deferred.defer(update_cache, self, endpoint, _name=task_name, **kwargs)", "data cr.put() def cached(timeout=CACHE_TIMEOUT): def func_wrapper(func): def cached_check(self, endpoint, **kwargs): key = 
ndb.Key(CachedResponse,", "data = raw_get(self, endpoint, **kwargs) key = ndb.Key(CachedResponse, endpoint) cr = key.get() cr.data", "serve old cache data.') logging.critical('Stored timestamp was: %s' % str(cr.timestamp)) logging.critical('Current time is:", "currtime - oldtime if td.seconds > timeout: try: task_name = endpoint.replace('/', '-') +", "**kwargs): key = ndb.Key(CachedResponse, endpoint) cr = key.get() if not cr: data =", "return func_wrapper class CachedResponse(ndb.Model): endpoint = ndb.StringProperty('e', required=True, indexed=True) data = ndb.JsonProperty('d', required=True)", "cached_check(self, endpoint, **kwargs): key = ndb.Key(CachedResponse, endpoint) cr = key.get() if not cr:", "class CachedResponse(ndb.Model): endpoint = ndb.StringProperty('e', required=True, indexed=True) data = ndb.JsonProperty('d', required=True) timestamp =", "from google.appengine.api.taskqueue import TombstonedTaskError from google.appengine.ext import ndb from google.appengine.ext import deferred from", "endpoint) cr = key.get() if not cr: data = func(self, endpoint, **kwargs) cr", "return cached_check return func_wrapper class CachedResponse(ndb.Model): endpoint = ndb.StringProperty('e', required=True, indexed=True) data =", "cache data.') logging.critical('Stored timestamp was: %s' % str(cr.timestamp)) logging.critical('Current time is: %s' %", "<%s> already exists.' % task_name) logging.critical('Could not update cache.') except TombstonedTaskError: logging.critical('Tombstoned task" ]
[ "para ser convertido: ')) centimetro = metro * 100 milimetro = metro *", "= int(input('Informe um valor inteiro: ')) raiz = math.sqrt(n1) # também pode ser", "n1 = int(input('Informe um valor inteiro: ')) raiz = math.sqrt(n1) # também pode", ".format(n1 * 2, n1 * 3, raiz)) n2 = float(input('Informe uma nota de", "0 a 10: ')) media = ((n2 + n3) / 2) print('A sua", "{}.'.format(n, n - 1, n + 1)) n1 = int(input('Informe um valor inteiro:", "{}, a raiz quadrada é {}.' .format(n1 * 2, n1 * 3, raiz))", "valor inteiro: ')) raiz = math.sqrt(n1) # também pode ser feito como raiz", "de {} metros, é igual a {:.0f} centimetros e {:.0f}' 'milimetros.'.format(metro, centimetro, milimetro))", "uma nota de 0 a 10: ')) n3 = float(input('Informe uma segunda nota", "* 3, raiz)) n2 = float(input('Informe uma nota de 0 a 10: '))", "antecessor {}, este é seu' ' sucessor {}.'.format(n, n - 1, n +", "uma segunda nota de 0 a 10: ')) media = ((n2 + n3)", "metros, é igual a {:.0f} centimetros e {:.0f}' 'milimetros.'.format(metro, centimetro, milimetro)) tabuada =", "um valor inteiro: ')) raiz = math.sqrt(n1) # também pode ser feito como", "float(input('Digite um valor em metros para ser convertido: ')) centimetro = metro *", "n = int(input('Informe um valor inteiro: ')) print('Esté é o número digitado {},", "metro * 1000 print('Este valor de {} metros, é igual a {:.0f} centimetros", "valor inteiro: ')) print('Esté é o número digitado {}, este é seu antecessor", "um valor em metros para ser convertido: ')) centimetro = metro * 100", "format(tabuada, count, (tabuada * count))) # OUTRA FORMA DE FAZER A TABUADA, count", "print('A sua média é {:.2f}'.format(media)) metro = float(input('Digite um valor em metros para", "um número para apresentar sua tabuada')) contador = 0 for count in range(1,", "igual a {:.0f} centimetros e {:.0f}' 'milimetros.'.format(metro, centimetro, milimetro)) tabuada = int(input('Informe um", "seu antecessor {}, este é seu' ' sucessor {}.'.format(n, n - 1, n", "sua 
tabuada')) contador = 0 for count in range(1, 10 + 1): print('{}", "0 for count in range(1, 10 + 1): print('{} x {} = {}'.", "# OUTRA FORMA DE FAZER A TABUADA, count é a mesma coisa que", "de 0 a 10: ')) media = ((n2 + n3) / 2) print('A", "** (1/2) print('O dobro é {}, o triplo é {}, a raiz quadrada", "# também pode ser feito como raiz = n1 ** (1/2) print('O dobro", "FAZER A TABUADA, count é a mesma coisa que X ou a mesma", "é igual a {:.0f} centimetros e {:.0f}' 'milimetros.'.format(metro, centimetro, milimetro)) tabuada = int(input('Informe", "{:.0f} centimetros e {:.0f}' 'milimetros.'.format(metro, centimetro, milimetro)) tabuada = int(input('Informe um número para", "a {:.0f} centimetros e {:.0f}' 'milimetros.'.format(metro, centimetro, milimetro)) tabuada = int(input('Informe um número", "{}, este é seu' ' sucessor {}.'.format(n, n - 1, n + 1))", "')) print('Esté é o número digitado {}, este é seu antecessor {}, este", "int(input('Informe um número para apresentar sua tabuada')) contador = 0 for count in", "tabuada = int(input('Informe um número para apresentar sua tabuada')) contador = 0 for", "média é {:.2f}'.format(media)) metro = float(input('Digite um valor em metros para ser convertido:", "print('Esté é o número digitado {}, este é seu antecessor {}, este é", "o triplo é {}, a raiz quadrada é {}.' .format(n1 * 2, n1", "2) print('A sua média é {:.2f}'.format(media)) metro = float(input('Digite um valor em metros", "apresentar sua tabuada')) contador = 0 for count in range(1, 10 + 1):", "print('O dobro é {}, o triplo é {}, a raiz quadrada é {}.'", "centimetro = metro * 100 milimetro = metro * 1000 print('Este valor de", "= 0 for count in range(1, 10 + 1): print('{} x {} =", "este é seu' ' sucessor {}.'.format(n, n - 1, n + 1)) n1", "1): print('{} x {} = {}'. 
format(tabuada, count, (tabuada * count))) # OUTRA", "count))) # OUTRA FORMA DE FAZER A TABUADA, count é a mesma coisa", "= float(input('Digite um valor em metros para ser convertido: ')) centimetro = metro", "= {}'. format(tabuada, count, (tabuada * count))) # OUTRA FORMA DE FAZER A", "tabuada')) contador = 0 for count in range(1, 10 + 1): print('{} x", "milimetro)) tabuada = int(input('Informe um número para apresentar sua tabuada')) contador = 0", "int(input('Informe um valor inteiro: ')) print('Esté é o número digitado {}, este é", "10: ')) media = ((n2 + n3) / 2) print('A sua média é", "+ n3) / 2) print('A sua média é {:.2f}'.format(media)) metro = float(input('Digite um", "número digitado {}, este é seu antecessor {}, este é seu' ' sucessor", "= math.sqrt(n1) # também pode ser feito como raiz = n1 ** (1/2)", "convertido: ')) centimetro = metro * 100 milimetro = metro * 1000 print('Este", "é {}, a raiz quadrada é {}.' .format(n1 * 2, n1 * 3,", "FORMA DE FAZER A TABUADA, count é a mesma coisa que X ou", "mesma coisa que X ou a mesma # coisa que um contador no", "como raiz = n1 ** (1/2) print('O dobro é {}, o triplo é", "= n1 ** (1/2) print('O dobro é {}, o triplo é {}, a", "+ 1)) n1 = int(input('Informe um valor inteiro: ')) raiz = math.sqrt(n1) #", "metros para ser convertido: ')) centimetro = metro * 100 milimetro = metro", "a 10: ')) media = ((n2 + n3) / 2) print('A sua média", "' sucessor {}.'.format(n, n - 1, n + 1)) n1 = int(input('Informe um", "é seu' ' sucessor {}.'.format(n, n - 1, n + 1)) n1 =", "{} = {}'. format(tabuada, count, (tabuada * count))) # OUTRA FORMA DE FAZER", "int(input('Informe um valor inteiro: ')) raiz = math.sqrt(n1) # também pode ser feito", "digitado {}, este é seu antecessor {}, este é seu' ' sucessor {}.'.format(n,", "= metro * 1000 print('Este valor de {} metros, é igual a {:.0f}", "in range(1, 10 + 1): print('{} x {} = {}'. 
format(tabuada, count, (tabuada", "count, (tabuada * count))) # OUTRA FORMA DE FAZER A TABUADA, count é", "math n = int(input('Informe um valor inteiro: ')) print('Esté é o número digitado", "n + 1)) n1 = int(input('Informe um valor inteiro: ')) raiz = math.sqrt(n1)", "n3 = float(input('Informe uma segunda nota de 0 a 10: ')) media =", "* 2, n1 * 3, raiz)) n2 = float(input('Informe uma nota de 0", "n - 1, n + 1)) n1 = int(input('Informe um valor inteiro: '))", "= float(input('Informe uma nota de 0 a 10: ')) n3 = float(input('Informe uma", "0 a 10: ')) n3 = float(input('Informe uma segunda nota de 0 a", "ser convertido: ')) centimetro = metro * 100 milimetro = metro * 1000", "float(input('Informe uma segunda nota de 0 a 10: ')) media = ((n2 +", "nota de 0 a 10: ')) media = ((n2 + n3) / 2)", "A TABUADA, count é a mesma coisa que X ou a mesma #", "for count in range(1, 10 + 1): print('{} x {} = {}'. format(tabuada,", "um valor inteiro: ')) print('Esté é o número digitado {}, este é seu", "* 1000 print('Este valor de {} metros, é igual a {:.0f} centimetros e", "a mesma coisa que X ou a mesma # coisa que um contador", "((n2 + n3) / 2) print('A sua média é {:.2f}'.format(media)) metro = float(input('Digite", "/ 2) print('A sua média é {:.2f}'.format(media)) metro = float(input('Digite um valor em", "coisa que X ou a mesma # coisa que um contador no WHILE", "seu' ' sucessor {}.'.format(n, n - 1, n + 1)) n1 = int(input('Informe", "10 + 1): print('{} x {} = {}'. 
format(tabuada, count, (tabuada * count)))", "(tabuada * count))) # OUTRA FORMA DE FAZER A TABUADA, count é a", "é {:.2f}'.format(media)) metro = float(input('Digite um valor em metros para ser convertido: '))", "feito como raiz = n1 ** (1/2) print('O dobro é {}, o triplo", "{:.0f}' 'milimetros.'.format(metro, centimetro, milimetro)) tabuada = int(input('Informe um número para apresentar sua tabuada'))", "raiz)) n2 = float(input('Informe uma nota de 0 a 10: ')) n3 =", "centimetro, milimetro)) tabuada = int(input('Informe um número para apresentar sua tabuada')) contador =", "este é seu antecessor {}, este é seu' ' sucessor {}.'.format(n, n -", "')) media = ((n2 + n3) / 2) print('A sua média é {:.2f}'.format(media))", "também pode ser feito como raiz = n1 ** (1/2) print('O dobro é", "x {} = {}'. format(tabuada, count, (tabuada * count))) # OUTRA FORMA DE", "= ((n2 + n3) / 2) print('A sua média é {:.2f}'.format(media)) metro =", "1, n + 1)) n1 = int(input('Informe um valor inteiro: ')) raiz =", "n1 * 3, raiz)) n2 = float(input('Informe uma nota de 0 a 10:", "centimetros e {:.0f}' 'milimetros.'.format(metro, centimetro, milimetro)) tabuada = int(input('Informe um número para apresentar", "{} metros, é igual a {:.0f} centimetros e {:.0f}' 'milimetros.'.format(metro, centimetro, milimetro)) tabuada", "é {}.' .format(n1 * 2, n1 * 3, raiz)) n2 = float(input('Informe uma", "1000 print('Este valor de {} metros, é igual a {:.0f} centimetros e {:.0f}'", "= int(input('Informe um valor inteiro: ')) print('Esté é o número digitado {}, este", "é o número digitado {}, este é seu antecessor {}, este é seu'", "raiz quadrada é {}.' .format(n1 * 2, n1 * 3, raiz)) n2 =", "{}'. format(tabuada, count, (tabuada * count))) # OUTRA FORMA DE FAZER A TABUADA,", "+ 1): print('{} x {} = {}'. 
format(tabuada, count, (tabuada * count))) #", "valor em metros para ser convertido: ')) centimetro = metro * 100 milimetro", "(1/2) print('O dobro é {}, o triplo é {}, a raiz quadrada é", "número para apresentar sua tabuada')) contador = 0 for count in range(1, 10", "DE FAZER A TABUADA, count é a mesma coisa que X ou a", "é a mesma coisa que X ou a mesma # coisa que um", "sucessor {}.'.format(n, n - 1, n + 1)) n1 = int(input('Informe um valor", "{}, este é seu antecessor {}, este é seu' ' sucessor {}.'.format(n, n", "nota de 0 a 10: ')) n3 = float(input('Informe uma segunda nota de", "de 0 a 10: ')) n3 = float(input('Informe uma segunda nota de 0", "metro * 100 milimetro = metro * 1000 print('Este valor de {} metros,", "{:.2f}'.format(media)) metro = float(input('Digite um valor em metros para ser convertido: ')) centimetro", "segunda nota de 0 a 10: ')) media = ((n2 + n3) /", "math.sqrt(n1) # também pode ser feito como raiz = n1 ** (1/2) print('O", "media = ((n2 + n3) / 2) print('A sua média é {:.2f}'.format(media)) metro", "n2 = float(input('Informe uma nota de 0 a 10: ')) n3 = float(input('Informe", "milimetro = metro * 1000 print('Este valor de {} metros, é igual a", "raiz = math.sqrt(n1) # também pode ser feito como raiz = n1 **", "o número digitado {}, este é seu antecessor {}, este é seu' '", "contador = 0 for count in range(1, 10 + 1): print('{} x {}", "quadrada é {}.' 
.format(n1 * 2, n1 * 3, raiz)) n2 = float(input('Informe", "= int(input('Informe um número para apresentar sua tabuada')) contador = 0 for count", "OUTRA FORMA DE FAZER A TABUADA, count é a mesma coisa que X", "e {:.0f}' 'milimetros.'.format(metro, centimetro, milimetro)) tabuada = int(input('Informe um número para apresentar sua", "import math n = int(input('Informe um valor inteiro: ')) print('Esté é o número", "metro = float(input('Digite um valor em metros para ser convertido: ')) centimetro =", "'milimetros.'.format(metro, centimetro, milimetro)) tabuada = int(input('Informe um número para apresentar sua tabuada')) contador", "para apresentar sua tabuada')) contador = 0 for count in range(1, 10 +", "10: ')) n3 = float(input('Informe uma segunda nota de 0 a 10: '))", "{}.' .format(n1 * 2, n1 * 3, raiz)) n2 = float(input('Informe uma nota", "inteiro: ')) raiz = math.sqrt(n1) # também pode ser feito como raiz =", "triplo é {}, a raiz quadrada é {}.' .format(n1 * 2, n1 *", "')) n3 = float(input('Informe uma segunda nota de 0 a 10: ')) media", "ser feito como raiz = n1 ** (1/2) print('O dobro é {}, o", "TABUADA, count é a mesma coisa que X ou a mesma # coisa", "n3) / 2) print('A sua média é {:.2f}'.format(media)) metro = float(input('Digite um valor", "a raiz quadrada é {}.' .format(n1 * 2, n1 * 3, raiz)) n2", "3, raiz)) n2 = float(input('Informe uma nota de 0 a 10: ')) n3", "1)) n1 = int(input('Informe um valor inteiro: ')) raiz = math.sqrt(n1) # também", "{}, o triplo é {}, a raiz quadrada é {}.' 
.format(n1 * 2,", "2, n1 * 3, raiz)) n2 = float(input('Informe uma nota de 0 a", "= float(input('Informe uma segunda nota de 0 a 10: ')) media = ((n2", "- 1, n + 1)) n1 = int(input('Informe um valor inteiro: ')) raiz", "')) raiz = math.sqrt(n1) # também pode ser feito como raiz = n1", "raiz = n1 ** (1/2) print('O dobro é {}, o triplo é {},", "pode ser feito como raiz = n1 ** (1/2) print('O dobro é {},", "100 milimetro = metro * 1000 print('Este valor de {} metros, é igual", "* 100 milimetro = metro * 1000 print('Este valor de {} metros, é", "a 10: ')) n3 = float(input('Informe uma segunda nota de 0 a 10:", "print('Este valor de {} metros, é igual a {:.0f} centimetros e {:.0f}' 'milimetros.'.format(metro,", "é seu antecessor {}, este é seu' ' sucessor {}.'.format(n, n - 1,", "n1 ** (1/2) print('O dobro é {}, o triplo é {}, a raiz", "print('{} x {} = {}'. format(tabuada, count, (tabuada * count))) # OUTRA FORMA", "inteiro: ')) print('Esté é o número digitado {}, este é seu antecessor {},", "valor de {} metros, é igual a {:.0f} centimetros e {:.0f}' 'milimetros.'.format(metro, centimetro,", "é {}, o triplo é {}, a raiz quadrada é {}.' .format(n1 *", "range(1, 10 + 1): print('{} x {} = {}'. format(tabuada, count, (tabuada *", "count é a mesma coisa que X ou a mesma # coisa que", "<reponame>vnnstar/Python-Mundo1-CursoEmVideo<filename>ex08-BibliotecaMath.py import math n = int(input('Informe um valor inteiro: ')) print('Esté é o", "sua média é {:.2f}'.format(media)) metro = float(input('Digite um valor em metros para ser", "float(input('Informe uma nota de 0 a 10: ')) n3 = float(input('Informe uma segunda", "')) centimetro = metro * 100 milimetro = metro * 1000 print('Este valor", "em metros para ser convertido: ')) centimetro = metro * 100 milimetro =", "count in range(1, 10 + 1): print('{} x {} = {}'. 
format(tabuada, count,", "* count))) # OUTRA FORMA DE FAZER A TABUADA, count é a mesma", "= metro * 100 milimetro = metro * 1000 print('Este valor de {}", "dobro é {}, o triplo é {}, a raiz quadrada é {}.' .format(n1" ]
[ "of `ListViewer`. Color items can be selected in any of the usual ways", "g = event.message.infoPtr command = event.message.command if command == cmNewColorItem: self._items = g.items", "'ColorItemList' def __init__(self, bounds, scrollBar, items): super().__init__(bounds, 1, 0, scrollBar) self._items = items", "message(self.owner, evBroadcast, cmNewColorIndex, curItem.index) def getText(self, item, maxChars): curItem = self._items[item] return curItem.name[:maxChars]", "evBroadcast: g = event.message.infoPtr command = event.message.command if command == cmNewColorItem: self._items =", ":param item: Item number to focus \"\"\" super().focusItem(item) message(self.owner, evBroadcast, cmSaveColorIndex, item) curItem", "handleEvent(self, event): super().handleEvent(event) if event.what == evBroadcast: g = event.message.infoPtr command = event.message.command", "super().focusItem(item) message(self.owner, evBroadcast, cmSaveColorIndex, item) curItem = self._items[item] message(self.owner, evBroadcast, cmNewColorIndex, curItem.index) def", "user can select and change the color assignments from available palettes with immediate", "items): super().__init__(bounds, 1, 0, scrollBar) self._items = items self.eventMask |= evBroadcast self.setRange(len(items)) def", "cmSaveColorIndex, item) curItem = self._items[item] message(self.owner, evBroadcast, cmNewColorIndex, curItem.index) def getText(self, item, maxChars):", "from available palettes with immediate effect on the screen. `ColorItemList` is a simpler", "(by mouse or keyboard). Unlike `ColorGroupList`, `ColorItemList` overrides the `ListViewer` event handler. \"\"\"", "= event.message.infoPtr command = event.message.command if command == cmNewColorItem: self._items = g.items self.setRange(len(g.items))", "handler. 
\"\"\" name = 'ColorItemList' def __init__(self, bounds, scrollBar, items): super().__init__(bounds, 1, 0,", "provide viewers and dialog boxes from which the user can select and change", "1, 0, scrollBar) self._items = items self.eventMask |= evBroadcast self.setRange(len(items)) def focusItem(self, item):", "= logging.getLogger(__name__) class ColorItemList(ListViewer): \"\"\" The interrelated classes `ColorItem`, `ColorGroup`, `ColorSelector`, `MonoSelector`, `ColorDisplay`,", "vindauga.misc.message import message from .list_viewer import ListViewer logger = logging.getLogger(__name__) class ColorItemList(ListViewer): \"\"\"", "Color items can be selected in any of the usual ways (by mouse", "import evBroadcast from vindauga.misc.message import message from .list_viewer import ListViewer logger = logging.getLogger(__name__)", "viewing and selecting single color items rather than groups of colors. Like `ColorGroupList`,", "self.eventMask |= evBroadcast self.setRange(len(items)) def focusItem(self, item): \"\"\" Selects the given item by", "the usual ways (by mouse or keyboard). Unlike `ColorGroupList`, `ColorItemList` overrides the `ListViewer`", "|= evBroadcast self.setRange(len(items)) def focusItem(self, item): \"\"\" Selects the given item by calling", "def focusItem(self, item): \"\"\" Selects the given item by calling `super().focusItem(item)`, then broadcasts", "`ColorGroupList`, `ColorItemList` and `ColorDialog` provide viewers and dialog boxes from which the user", "curItem.name[:maxChars] def handleEvent(self, event): super().handleEvent(event) if event.what == evBroadcast: g = event.message.infoPtr command", "`ColorGroupList` for viewing and selecting single color items rather than groups of colors.", "evBroadcast self.setRange(len(items)) def focusItem(self, item): \"\"\" Selects the given item by calling `super().focusItem(item)`,", "then broadcasts a `cmNewColorIndex` event. 
:param item: Item number to focus \"\"\" super().focusItem(item)", "logging.getLogger(__name__) class ColorItemList(ListViewer): \"\"\" The interrelated classes `ColorItem`, `ColorGroup`, `ColorSelector`, `MonoSelector`, `ColorDisplay`, `ColorGroupList`,", "`MonoSelector`, `ColorDisplay`, `ColorGroupList`, `ColorItemList` and `ColorDialog` provide viewers and dialog boxes from which", "super().handleEvent(event) if event.what == evBroadcast: g = event.message.infoPtr command = event.message.command if command", "`ColorDisplay`, `ColorGroupList`, `ColorItemList` and `ColorDialog` provide viewers and dialog boxes from which the", "`ColorGroup`, `ColorSelector`, `MonoSelector`, `ColorDisplay`, `ColorGroupList`, `ColorItemList` and `ColorDialog` provide viewers and dialog boxes", "event handler. \"\"\" name = 'ColorItemList' def __init__(self, bounds, scrollBar, items): super().__init__(bounds, 1,", "overrides the `ListViewer` event handler. \"\"\" name = 'ColorItemList' def __init__(self, bounds, scrollBar,", "maxChars): curItem = self._items[item] return curItem.name[:maxChars] def handleEvent(self, event): super().handleEvent(event) if event.what ==", "items self.eventMask |= evBroadcast self.setRange(len(items)) def focusItem(self, item): \"\"\" Selects the given item", "immediate effect on the screen. `ColorItemList` is a simpler variant of `ColorGroupList` for", "the screen. `ColorItemList` is a simpler variant of `ColorGroupList` for viewing and selecting", "vindauga.constants.event_codes import evBroadcast from vindauga.misc.message import message from .list_viewer import ListViewer logger =", "the `ListViewer` event handler. \"\"\" name = 'ColorItemList' def __init__(self, bounds, scrollBar, items):", "color items rather than groups of colors. 
Like `ColorGroupList`, `ColorItemList` is specialized derivative", "interrelated classes `ColorItem`, `ColorGroup`, `ColorSelector`, `MonoSelector`, `ColorDisplay`, `ColorGroupList`, `ColorItemList` and `ColorDialog` provide viewers", "`ColorGroupList`, `ColorItemList` is specialized derivative of `ListViewer`. Color items can be selected in", "scrollBar, items): super().__init__(bounds, 1, 0, scrollBar) self._items = items self.eventMask |= evBroadcast self.setRange(len(items))", "effect on the screen. `ColorItemList` is a simpler variant of `ColorGroupList` for viewing", "return curItem.name[:maxChars] def handleEvent(self, event): super().handleEvent(event) if event.what == evBroadcast: g = event.message.infoPtr", "self.setRange(len(items)) def focusItem(self, item): \"\"\" Selects the given item by calling `super().focusItem(item)`, then", "event): super().handleEvent(event) if event.what == evBroadcast: g = event.message.infoPtr command = event.message.command if", "items rather than groups of colors. Like `ColorGroupList`, `ColorItemList` is specialized derivative of", "calling `super().focusItem(item)`, then broadcasts a `cmNewColorIndex` event. :param item: Item number to focus", "single color items rather than groups of colors. 
Like `ColorGroupList`, `ColorItemList` is specialized", "\"\"\" super().focusItem(item) message(self.owner, evBroadcast, cmSaveColorIndex, item) curItem = self._items[item] message(self.owner, evBroadcast, cmNewColorIndex, curItem.index)", "is a simpler variant of `ColorGroupList` for viewing and selecting single color items", "ColorItemList(ListViewer): \"\"\" The interrelated classes `ColorItem`, `ColorGroup`, `ColorSelector`, `MonoSelector`, `ColorDisplay`, `ColorGroupList`, `ColorItemList` and", "cmSaveColorIndex, cmNewColorIndex, cmNewColorItem from vindauga.constants.event_codes import evBroadcast from vindauga.misc.message import message from .list_viewer", "a simpler variant of `ColorGroupList` for viewing and selecting single color items rather", "than groups of colors. Like `ColorGroupList`, `ColorItemList` is specialized derivative of `ListViewer`. Color", "given item by calling `super().focusItem(item)`, then broadcasts a `cmNewColorIndex` event. :param item: Item", "logger = logging.getLogger(__name__) class ColorItemList(ListViewer): \"\"\" The interrelated classes `ColorItem`, `ColorGroup`, `ColorSelector`, `MonoSelector`,", "evBroadcast from vindauga.misc.message import message from .list_viewer import ListViewer logger = logging.getLogger(__name__) class", "on the screen. `ColorItemList` is a simpler variant of `ColorGroupList` for viewing and", "and dialog boxes from which the user can select and change the color", "select and change the color assignments from available palettes with immediate effect on", "`cmNewColorIndex` event. :param item: Item number to focus \"\"\" super().focusItem(item) message(self.owner, evBroadcast, cmSaveColorIndex,", "can select and change the color assignments from available palettes with immediate effect", "simpler variant of `ColorGroupList` for viewing and selecting single color items rather than", "or keyboard). Unlike `ColorGroupList`, `ColorItemList` overrides the `ListViewer` event handler. 
\"\"\" name =", "getText(self, item, maxChars): curItem = self._items[item] return curItem.name[:maxChars] def handleEvent(self, event): super().handleEvent(event) if", "import message from .list_viewer import ListViewer logger = logging.getLogger(__name__) class ColorItemList(ListViewer): \"\"\" The", "and selecting single color items rather than groups of colors. Like `ColorGroupList`, `ColorItemList`", "boxes from which the user can select and change the color assignments from", "which the user can select and change the color assignments from available palettes", "Like `ColorGroupList`, `ColorItemList` is specialized derivative of `ListViewer`. Color items can be selected", "`ColorItemList` and `ColorDialog` provide viewers and dialog boxes from which the user can", "available palettes with immediate effect on the screen. `ColorItemList` is a simpler variant", "\"\"\" Selects the given item by calling `super().focusItem(item)`, then broadcasts a `cmNewColorIndex` event.", "-*- coding: utf-8 -*- import logging from vindauga.constants.colors import cmSaveColorIndex, cmNewColorIndex, cmNewColorItem from", "`super().focusItem(item)`, then broadcasts a `cmNewColorIndex` event. :param item: Item number to focus \"\"\"", "mouse or keyboard). Unlike `ColorGroupList`, `ColorItemList` overrides the `ListViewer` event handler. \"\"\" name", "-*- import logging from vindauga.constants.colors import cmSaveColorIndex, cmNewColorIndex, cmNewColorItem from vindauga.constants.event_codes import evBroadcast", "dialog boxes from which the user can select and change the color assignments", "selecting single color items rather than groups of colors. Like `ColorGroupList`, `ColorItemList` is", "`ColorItemList` is specialized derivative of `ListViewer`. 
Color items can be selected in any", "evBroadcast, cmSaveColorIndex, item) curItem = self._items[item] message(self.owner, evBroadcast, cmNewColorIndex, curItem.index) def getText(self, item,", "import logging from vindauga.constants.colors import cmSaveColorIndex, cmNewColorIndex, cmNewColorItem from vindauga.constants.event_codes import evBroadcast from", "if event.what == evBroadcast: g = event.message.infoPtr command = event.message.command if command ==", "event.message.infoPtr command = event.message.command if command == cmNewColorItem: self._items = g.items self.setRange(len(g.items)) self.focusItem(g.index)", "`ColorItem`, `ColorGroup`, `ColorSelector`, `MonoSelector`, `ColorDisplay`, `ColorGroupList`, `ColorItemList` and `ColorDialog` provide viewers and dialog", "cmNewColorIndex, cmNewColorItem from vindauga.constants.event_codes import evBroadcast from vindauga.misc.message import message from .list_viewer import", "color assignments from available palettes with immediate effect on the screen. `ColorItemList` is", "= self._items[item] return curItem.name[:maxChars] def handleEvent(self, event): super().handleEvent(event) if event.what == evBroadcast: g", "event.what == evBroadcast: g = event.message.infoPtr command = event.message.command if command == cmNewColorItem:", "= self._items[item] message(self.owner, evBroadcast, cmNewColorIndex, curItem.index) def getText(self, item, maxChars): curItem = self._items[item]", "screen. `ColorItemList` is a simpler variant of `ColorGroupList` for viewing and selecting single", "colors. Like `ColorGroupList`, `ColorItemList` is specialized derivative of `ListViewer`. Color items can be", "= 'ColorItemList' def __init__(self, bounds, scrollBar, items): super().__init__(bounds, 1, 0, scrollBar) self._items =", "rather than groups of colors. Like `ColorGroupList`, `ColorItemList` is specialized derivative of `ListViewer`.", "palettes with immediate effect on the screen. 
`ColorItemList` is a simpler variant of", "cmNewColorIndex, curItem.index) def getText(self, item, maxChars): curItem = self._items[item] return curItem.name[:maxChars] def handleEvent(self,", "curItem = self._items[item] return curItem.name[:maxChars] def handleEvent(self, event): super().handleEvent(event) if event.what == evBroadcast:", "derivative of `ListViewer`. Color items can be selected in any of the usual", "`ColorItemList` is a simpler variant of `ColorGroupList` for viewing and selecting single color", "`ColorItemList` overrides the `ListViewer` event handler. \"\"\" name = 'ColorItemList' def __init__(self, bounds,", "message from .list_viewer import ListViewer logger = logging.getLogger(__name__) class ColorItemList(ListViewer): \"\"\" The interrelated", "variant of `ColorGroupList` for viewing and selecting single color items rather than groups", "== evBroadcast: g = event.message.infoPtr command = event.message.command if command == cmNewColorItem: self._items", "vindauga.constants.colors import cmSaveColorIndex, cmNewColorIndex, cmNewColorItem from vindauga.constants.event_codes import evBroadcast from vindauga.misc.message import message", "focus \"\"\" super().focusItem(item) message(self.owner, evBroadcast, cmSaveColorIndex, item) curItem = self._items[item] message(self.owner, evBroadcast, cmNewColorIndex,", "the color assignments from available palettes with immediate effect on the screen. 
`ColorItemList`", "cmNewColorItem from vindauga.constants.event_codes import evBroadcast from vindauga.misc.message import message from .list_viewer import ListViewer", "number to focus \"\"\" super().focusItem(item) message(self.owner, evBroadcast, cmSaveColorIndex, item) curItem = self._items[item] message(self.owner,", "from vindauga.misc.message import message from .list_viewer import ListViewer logger = logging.getLogger(__name__) class ColorItemList(ListViewer):", "item: Item number to focus \"\"\" super().focusItem(item) message(self.owner, evBroadcast, cmSaveColorIndex, item) curItem =", "logging from vindauga.constants.colors import cmSaveColorIndex, cmNewColorIndex, cmNewColorItem from vindauga.constants.event_codes import evBroadcast from vindauga.misc.message", "any of the usual ways (by mouse or keyboard). Unlike `ColorGroupList`, `ColorItemList` overrides", "broadcasts a `cmNewColorIndex` event. :param item: Item number to focus \"\"\" super().focusItem(item) message(self.owner,", "of the usual ways (by mouse or keyboard). Unlike `ColorGroupList`, `ColorItemList` overrides the", "ListViewer logger = logging.getLogger(__name__) class ColorItemList(ListViewer): \"\"\" The interrelated classes `ColorItem`, `ColorGroup`, `ColorSelector`,", "usual ways (by mouse or keyboard). Unlike `ColorGroupList`, `ColorItemList` overrides the `ListViewer` event", "by calling `super().focusItem(item)`, then broadcasts a `cmNewColorIndex` event. 
:param item: Item number to", "import ListViewer logger = logging.getLogger(__name__) class ColorItemList(ListViewer): \"\"\" The interrelated classes `ColorItem`, `ColorGroup`,", "and `ColorDialog` provide viewers and dialog boxes from which the user can select", "= items self.eventMask |= evBroadcast self.setRange(len(items)) def focusItem(self, item): \"\"\" Selects the given", "`ColorSelector`, `MonoSelector`, `ColorDisplay`, `ColorGroupList`, `ColorItemList` and `ColorDialog` provide viewers and dialog boxes from", "for viewing and selecting single color items rather than groups of colors. Like", "Unlike `ColorGroupList`, `ColorItemList` overrides the `ListViewer` event handler. \"\"\" name = 'ColorItemList' def", "utf-8 -*- import logging from vindauga.constants.colors import cmSaveColorIndex, cmNewColorIndex, cmNewColorItem from vindauga.constants.event_codes import", "message(self.owner, evBroadcast, cmSaveColorIndex, item) curItem = self._items[item] message(self.owner, evBroadcast, cmNewColorIndex, curItem.index) def getText(self,", "curItem = self._items[item] message(self.owner, evBroadcast, cmNewColorIndex, curItem.index) def getText(self, item, maxChars): curItem =", "and change the color assignments from available palettes with immediate effect on the", "viewers and dialog boxes from which the user can select and change the", "event. 
:param item: Item number to focus \"\"\" super().focusItem(item) message(self.owner, evBroadcast, cmSaveColorIndex, item)", "coding: utf-8 -*- import logging from vindauga.constants.colors import cmSaveColorIndex, cmNewColorIndex, cmNewColorItem from vindauga.constants.event_codes", "`ColorDialog` provide viewers and dialog boxes from which the user can select and", "name = 'ColorItemList' def __init__(self, bounds, scrollBar, items): super().__init__(bounds, 1, 0, scrollBar) self._items", "curItem.index) def getText(self, item, maxChars): curItem = self._items[item] return curItem.name[:maxChars] def handleEvent(self, event):", "evBroadcast, cmNewColorIndex, curItem.index) def getText(self, item, maxChars): curItem = self._items[item] return curItem.name[:maxChars] def", "Selects the given item by calling `super().focusItem(item)`, then broadcasts a `cmNewColorIndex` event. :param", "selected in any of the usual ways (by mouse or keyboard). Unlike `ColorGroupList`,", "item, maxChars): curItem = self._items[item] return curItem.name[:maxChars] def handleEvent(self, event): super().handleEvent(event) if event.what", "self._items = items self.eventMask |= evBroadcast self.setRange(len(items)) def focusItem(self, item): \"\"\" Selects the", "from .list_viewer import ListViewer logger = logging.getLogger(__name__) class ColorItemList(ListViewer): \"\"\" The interrelated classes", "\"\"\" The interrelated classes `ColorItem`, `ColorGroup`, `ColorSelector`, `MonoSelector`, `ColorDisplay`, `ColorGroupList`, `ColorItemList` and `ColorDialog`", "<reponame>gabbpuy/vindauga<gh_stars>1-10 # -*- coding: utf-8 -*- import logging from vindauga.constants.colors import cmSaveColorIndex, cmNewColorIndex,", "items can be selected in any of the usual ways (by mouse or", "to focus \"\"\" super().focusItem(item) message(self.owner, evBroadcast, cmSaveColorIndex, item) curItem = self._items[item] message(self.owner, evBroadcast,", "of colors. 
Like `ColorGroupList`, `ColorItemList` is specialized derivative of `ListViewer`. Color items can", "`ListViewer` event handler. \"\"\" name = 'ColorItemList' def __init__(self, bounds, scrollBar, items): super().__init__(bounds,", "classes `ColorItem`, `ColorGroup`, `ColorSelector`, `MonoSelector`, `ColorDisplay`, `ColorGroupList`, `ColorItemList` and `ColorDialog` provide viewers and", "assignments from available palettes with immediate effect on the screen. `ColorItemList` is a", "scrollBar) self._items = items self.eventMask |= evBroadcast self.setRange(len(items)) def focusItem(self, item): \"\"\" Selects", "item): \"\"\" Selects the given item by calling `super().focusItem(item)`, then broadcasts a `cmNewColorIndex`", "__init__(self, bounds, scrollBar, items): super().__init__(bounds, 1, 0, scrollBar) self._items = items self.eventMask |=", "be selected in any of the usual ways (by mouse or keyboard). Unlike", "def getText(self, item, maxChars): curItem = self._items[item] return curItem.name[:maxChars] def handleEvent(self, event): super().handleEvent(event)", "specialized derivative of `ListViewer`. Color items can be selected in any of the", "class ColorItemList(ListViewer): \"\"\" The interrelated classes `ColorItem`, `ColorGroup`, `ColorSelector`, `MonoSelector`, `ColorDisplay`, `ColorGroupList`, `ColorItemList`", "groups of colors. Like `ColorGroupList`, `ColorItemList` is specialized derivative of `ListViewer`. Color items", "import cmSaveColorIndex, cmNewColorIndex, cmNewColorItem from vindauga.constants.event_codes import evBroadcast from vindauga.misc.message import message from", "from vindauga.constants.colors import cmSaveColorIndex, cmNewColorIndex, cmNewColorItem from vindauga.constants.event_codes import evBroadcast from vindauga.misc.message import", "in any of the usual ways (by mouse or keyboard). 
Unlike `ColorGroupList`, `ColorItemList`", "from vindauga.constants.event_codes import evBroadcast from vindauga.misc.message import message from .list_viewer import ListViewer logger", "def __init__(self, bounds, scrollBar, items): super().__init__(bounds, 1, 0, scrollBar) self._items = items self.eventMask", "focusItem(self, item): \"\"\" Selects the given item by calling `super().focusItem(item)`, then broadcasts a", "a `cmNewColorIndex` event. :param item: Item number to focus \"\"\" super().focusItem(item) message(self.owner, evBroadcast,", "The interrelated classes `ColorItem`, `ColorGroup`, `ColorSelector`, `MonoSelector`, `ColorDisplay`, `ColorGroupList`, `ColorItemList` and `ColorDialog` provide", "super().__init__(bounds, 1, 0, scrollBar) self._items = items self.eventMask |= evBroadcast self.setRange(len(items)) def focusItem(self,", "self._items[item] return curItem.name[:maxChars] def handleEvent(self, event): super().handleEvent(event) if event.what == evBroadcast: g =", "0, scrollBar) self._items = items self.eventMask |= evBroadcast self.setRange(len(items)) def focusItem(self, item): \"\"\"", "item by calling `super().focusItem(item)`, then broadcasts a `cmNewColorIndex` event. :param item: Item number", "\"\"\" name = 'ColorItemList' def __init__(self, bounds, scrollBar, items): super().__init__(bounds, 1, 0, scrollBar)", "keyboard). Unlike `ColorGroupList`, `ColorItemList` overrides the `ListViewer` event handler. \"\"\" name = 'ColorItemList'", ".list_viewer import ListViewer logger = logging.getLogger(__name__) class ColorItemList(ListViewer): \"\"\" The interrelated classes `ColorItem`,", "bounds, scrollBar, items): super().__init__(bounds, 1, 0, scrollBar) self._items = items self.eventMask |= evBroadcast", "command = event.message.command if command == cmNewColorItem: self._items = g.items self.setRange(len(g.items)) self.focusItem(g.index) self.drawView()", "`ListViewer`. 
Color items can be selected in any of the usual ways (by", "change the color assignments from available palettes with immediate effect on the screen.", "item) curItem = self._items[item] message(self.owner, evBroadcast, cmNewColorIndex, curItem.index) def getText(self, item, maxChars): curItem", "ways (by mouse or keyboard). Unlike `ColorGroupList`, `ColorItemList` overrides the `ListViewer` event handler.", "is specialized derivative of `ListViewer`. Color items can be selected in any of", "can be selected in any of the usual ways (by mouse or keyboard).", "the user can select and change the color assignments from available palettes with", "from which the user can select and change the color assignments from available", "the given item by calling `super().focusItem(item)`, then broadcasts a `cmNewColorIndex` event. :param item:", "self._items[item] message(self.owner, evBroadcast, cmNewColorIndex, curItem.index) def getText(self, item, maxChars): curItem = self._items[item] return", "def handleEvent(self, event): super().handleEvent(event) if event.what == evBroadcast: g = event.message.infoPtr command =", "# -*- coding: utf-8 -*- import logging from vindauga.constants.colors import cmSaveColorIndex, cmNewColorIndex, cmNewColorItem", "Item number to focus \"\"\" super().focusItem(item) message(self.owner, evBroadcast, cmSaveColorIndex, item) curItem = self._items[item]", "`ColorGroupList`, `ColorItemList` overrides the `ListViewer` event handler. \"\"\" name = 'ColorItemList' def __init__(self,", "of `ColorGroupList` for viewing and selecting single color items rather than groups of", "with immediate effect on the screen. `ColorItemList` is a simpler variant of `ColorGroupList`" ]
[ "if db_engine == 'SQLite': return SQLiteDatasetMetadataCache(*args, **kwargs) elif db_engine == 'Postgres': return PostgresDatasetMetadataCache(*args,", "''' if db_engine == 'SQLite': return SQLiteDatasetMetadataCache(*args, **kwargs) elif db_engine == 'Postgres': return", "return subclass of DatasetMetadataCache for specified db_engine ''' if db_engine == 'SQLite': return", "PostgresDatasetMetadataCache from ._sqlite_dataset_metadata_cache import SQLiteDatasetMetadataCache def get_dataset_metadata_cache(db_engine='SQLite', *args, **kwargs): ''' Class factory function", "subclass of DatasetMetadataCache for specified db_engine ''' if db_engine == 'SQLite': return SQLiteDatasetMetadataCache(*args,", "settings, DatasetMetadataCache, Dataset, Distribution from ._postgres_dataset_metadata_cache import PostgresDatasetMetadataCache from ._sqlite_dataset_metadata_cache import SQLiteDatasetMetadataCache def", "._dataset_metadata_cache import settings, DatasetMetadataCache, Dataset, Distribution from ._postgres_dataset_metadata_cache import PostgresDatasetMetadataCache from ._sqlite_dataset_metadata_cache import", "function to return subclass of DatasetMetadataCache for specified db_engine ''' if db_engine ==", "**kwargs): ''' Class factory function to return subclass of DatasetMetadataCache for specified db_engine", "'SQLite': return SQLiteDatasetMetadataCache(*args, **kwargs) elif db_engine == 'Postgres': return PostgresDatasetMetadataCache(*args, **kwargs) else: raise", "Jul. 
2018 @author: Alex ''' from ._dataset_metadata_cache import settings, DatasetMetadataCache, Dataset, Distribution from", "from ._sqlite_dataset_metadata_cache import SQLiteDatasetMetadataCache def get_dataset_metadata_cache(db_engine='SQLite', *args, **kwargs): ''' Class factory function to", "DatasetMetadataCache, Dataset, Distribution from ._postgres_dataset_metadata_cache import PostgresDatasetMetadataCache from ._sqlite_dataset_metadata_cache import SQLiteDatasetMetadataCache def get_dataset_metadata_cache(db_engine='SQLite',", "specified db_engine ''' if db_engine == 'SQLite': return SQLiteDatasetMetadataCache(*args, **kwargs) elif db_engine ==", "to return subclass of DatasetMetadataCache for specified db_engine ''' if db_engine == 'SQLite':", "factory function to return subclass of DatasetMetadataCache for specified db_engine ''' if db_engine", "''' Created on 20 Jul. 2018 @author: Alex ''' from ._dataset_metadata_cache import settings,", "== 'SQLite': return SQLiteDatasetMetadataCache(*args, **kwargs) elif db_engine == 'Postgres': return PostgresDatasetMetadataCache(*args, **kwargs) else:", "Distribution from ._postgres_dataset_metadata_cache import PostgresDatasetMetadataCache from ._sqlite_dataset_metadata_cache import SQLiteDatasetMetadataCache def get_dataset_metadata_cache(db_engine='SQLite', *args, **kwargs):", "of DatasetMetadataCache for specified db_engine ''' if db_engine == 'SQLite': return SQLiteDatasetMetadataCache(*args, **kwargs)", "SQLiteDatasetMetadataCache(*args, **kwargs) elif db_engine == 'Postgres': return PostgresDatasetMetadataCache(*args, **kwargs) else: raise BaseException('Unhandled db_engine", "*args, **kwargs): ''' Class factory function to return subclass of DatasetMetadataCache for specified", "import PostgresDatasetMetadataCache from ._sqlite_dataset_metadata_cache import SQLiteDatasetMetadataCache def get_dataset_metadata_cache(db_engine='SQLite', *args, **kwargs): ''' Class factory", "''' Class factory function to return 
subclass of DatasetMetadataCache for specified db_engine '''", "db_engine == 'SQLite': return SQLiteDatasetMetadataCache(*args, **kwargs) elif db_engine == 'Postgres': return PostgresDatasetMetadataCache(*args, **kwargs)", "Dataset, Distribution from ._postgres_dataset_metadata_cache import PostgresDatasetMetadataCache from ._sqlite_dataset_metadata_cache import SQLiteDatasetMetadataCache def get_dataset_metadata_cache(db_engine='SQLite', *args,", "import settings, DatasetMetadataCache, Dataset, Distribution from ._postgres_dataset_metadata_cache import PostgresDatasetMetadataCache from ._sqlite_dataset_metadata_cache import SQLiteDatasetMetadataCache", "def get_dataset_metadata_cache(db_engine='SQLite', *args, **kwargs): ''' Class factory function to return subclass of DatasetMetadataCache", "get_dataset_metadata_cache(db_engine='SQLite', *args, **kwargs): ''' Class factory function to return subclass of DatasetMetadataCache for", "from ._dataset_metadata_cache import settings, DatasetMetadataCache, Dataset, Distribution from ._postgres_dataset_metadata_cache import PostgresDatasetMetadataCache from ._sqlite_dataset_metadata_cache", "on 20 Jul. 2018 @author: Alex ''' from ._dataset_metadata_cache import settings, DatasetMetadataCache, Dataset,", "Created on 20 Jul. 2018 @author: Alex ''' from ._dataset_metadata_cache import settings, DatasetMetadataCache,", "Class factory function to return subclass of DatasetMetadataCache for specified db_engine ''' if", "**kwargs) elif db_engine == 'Postgres': return PostgresDatasetMetadataCache(*args, **kwargs) else: raise BaseException('Unhandled db_engine \"{}\"'.format(db_engine))", "._postgres_dataset_metadata_cache import PostgresDatasetMetadataCache from ._sqlite_dataset_metadata_cache import SQLiteDatasetMetadataCache def get_dataset_metadata_cache(db_engine='SQLite', *args, **kwargs): ''' Class", "20 Jul. 
2018 @author: Alex ''' from ._dataset_metadata_cache import settings, DatasetMetadataCache, Dataset, Distribution", "import SQLiteDatasetMetadataCache def get_dataset_metadata_cache(db_engine='SQLite', *args, **kwargs): ''' Class factory function to return subclass", "2018 @author: Alex ''' from ._dataset_metadata_cache import settings, DatasetMetadataCache, Dataset, Distribution from ._postgres_dataset_metadata_cache", "DatasetMetadataCache for specified db_engine ''' if db_engine == 'SQLite': return SQLiteDatasetMetadataCache(*args, **kwargs) elif", "from ._postgres_dataset_metadata_cache import PostgresDatasetMetadataCache from ._sqlite_dataset_metadata_cache import SQLiteDatasetMetadataCache def get_dataset_metadata_cache(db_engine='SQLite', *args, **kwargs): '''", "db_engine ''' if db_engine == 'SQLite': return SQLiteDatasetMetadataCache(*args, **kwargs) elif db_engine == 'Postgres':", "SQLiteDatasetMetadataCache def get_dataset_metadata_cache(db_engine='SQLite', *args, **kwargs): ''' Class factory function to return subclass of", "for specified db_engine ''' if db_engine == 'SQLite': return SQLiteDatasetMetadataCache(*args, **kwargs) elif db_engine", "@author: Alex ''' from ._dataset_metadata_cache import settings, DatasetMetadataCache, Dataset, Distribution from ._postgres_dataset_metadata_cache import", "return SQLiteDatasetMetadataCache(*args, **kwargs) elif db_engine == 'Postgres': return PostgresDatasetMetadataCache(*args, **kwargs) else: raise BaseException('Unhandled", "._sqlite_dataset_metadata_cache import SQLiteDatasetMetadataCache def get_dataset_metadata_cache(db_engine='SQLite', *args, **kwargs): ''' Class factory function to return", "Alex ''' from ._dataset_metadata_cache import settings, DatasetMetadataCache, Dataset, Distribution from ._postgres_dataset_metadata_cache import PostgresDatasetMetadataCache", "''' from ._dataset_metadata_cache import settings, DatasetMetadataCache, Dataset, Distribution from ._postgres_dataset_metadata_cache 
import PostgresDatasetMetadataCache from" ]
[ "= defaultdict(lambda: defaultdict(lambda : defaultdict(lambda: defaultdict(int)))) # For each position for pos in", "in refProt]) # In blocks of 70, while smaller than the length of", "+ refProtStr + '\\n' PTM_HTML.append(markdowner.convert(refProtStr)) # Create PAN string: same as ARP string", "defaultdict(lambda: defaultdict(lambda : defaultdict(int))) vaccSample = defaultdict(lambda: defaultdict((int))) # For each sequence for", "Create PAN string: same as ARP string PAN_str = color['PAN'][0] + 'PAN:&nbsp;&nbsp;' +", "[seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['ARP'] - seqMut[pos][ptm]['ARP'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher", "+ '&mdash;'*(pos - last_pos -1 - i) + color['PAN'][1] + refProt[pos-1] last_pos =", "vacc in list(MUT_stats[pos][mut].keys()): PAN_mut_str = PAN_mut_str + 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str)) # Separate", "== 'strong': core = [idx - i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx", "in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['PAN'] for mut in seqMut[pos]): PAN_str", "for mut in seqMut[pos]: mut_dict[pos][mut]['FOC'] = seqMut[pos][mut]['FOC'] last_pos = pos - i FOC_str", "in zip(AAnonPTM, range(init_pos, init_pos + len(AAnonPTM))): # Count instances vaccSample[pos][seq[3]] += 1 #", "del seqMUT[pos] return seqMUT, vaccSample def map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample):", "len(refProt): # Create string of reference protein (taking 70 AA) refProtStr = refProt[i:i+70]", "refProt[i:i+70] count = 0 # For each binding core and class for core,", "fragment: same elif core[1] in range(i, i + 70): core = [idx -i", "seqMut[pos]): PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(pos - last_pos -1 - i)", "in seqMut[pos]): ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(pos - last_pos -1 -", "coreIdxs, 
coreClass = getBindingCore(options, refProt) # Map mutations seqMut, vaccSample = mapMutations(data, refProt,", "mut_dict[pos][mut][vacc] > 0: vacc_prop = seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp = vaccSample[pos][vacc] PAN_prop = seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp", "ARP_str + color['ARP'][0] + '&mdash;'*(70 - last_pos) + color['ARP'][1] PTM_HTML.append(markdowner.convert(ARP_str)) # Create FOC", "and type for pos in list(mut_dict.keys()): for mut in list(mut_dict[pos].keys()): for vacc in", "return MUT_stats def mapMutations(data, refProt, options): # Initialize outputs seqMUT = defaultdict(lambda: defaultdict(lambda", "range(i, i + 70): core = [idx -i for idx in core] core", "pos - i PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(70 - last_pos) +", "while i < len(refProt): # Create string of reference protein (taking 70 AA)", "append if AA is not refProt[pos]: seqMUT[pos][AA][seq[3]] += 1 # Filter positions where", "ptm in list(seqMut[pos].keys()): if 'PAN' and 'ARP' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive", "of the core overlaps with that fragment if core[0] in range(i, i +", "del seqMUT[pos][ptm] if len(seqMUT[pos]) < 1: del seqMUT[pos] return seqMUT, vaccSample def map2HTML(options,", "PAN_str + color['PAN'][0] + '&mdash;'*(70 - last_pos) + color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str)) # Create ARP", "else idx for idx in core] if coreCl == 'strong': refProtStr = color['strongBinder'][0]", "test MUT_stats = statisticalTest(options, seqMut, vaccSample, refProt) # Create HTML output map2HTML(options, coreIdxs,", "seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['FOC'] - seqMut[pos][ptm]['FOC'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test", "= defaultdict(lambda: defaultdict(lambda: defaultdict(int))) last_pos = 0 for pos in range(i,i+70): if pos", "+ refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # Append to", 
"Compute Fisher exact test MUT_stats = statisticalTest(options, seqMut, vaccSample, refProt) # Create HTML", "getBindingCore, getRandomColor def statisticalTest(options, seqMut, vaccSample, refProt): # Initialize MUT_stats = defaultdict(lambda: defaultdict(lambda", "pos in list(seqMut.keys()): if any(seqMut[pos][mut]['PAN'] for mut in seqMut[pos]): PAN_str = PAN_str +", "''.join([refProt[pos] for pos in refProt]) # In blocks of 70, while smaller than", "smaller than the length of the protein of reference i = 0 while", "color['PAN'][0] + '&mdash;'*(70 - last_pos) + color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str)) # Create ARP string, highlighting", "-i for idx in core] if coreCl == 'strong': refProtStr = refProtStr[0:core[0]] +", "segment, update idx and highlight based on class else: if coreCl == 'strong':", "core = [idx -i for idx in core] if coreCl == 'strong': refProtStr", "seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['ARP'] - seqMut[pos][ptm]['ARP'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test", "list(mut_dict.keys()): for mut in list(mut_dict[pos].keys()): for vacc in list(mut_dict[pos][mut].keys()): if mut_dict[pos][mut][vacc] > 0:", "pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue'] = pvalue MUT_stats[pos][ptm]['FOC']['oddsratio'] = oddsratio return MUT_stats def", "coreCl == 'strong': refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1]", "= refProt[i:i+70] count = 0 # For each binding core and class for", "+ color['ARP'][1] mut_dict = defaultdict(lambda: defaultdict(lambda: defaultdict(int))) last_pos = 0 for pos in", "json import csv import re import random import subprocess from markdown2 import Markdown", "+ \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: core = [idx -", "In blocks of 70, while smaller than the length of the protein of", "is not refProt[pos]: 
seqMUT[pos][AA][seq[3]] += 1 # Filter positions where there is no", "PAN_samp = vaccSample[pos]['PAN'] PAN_mut_str = '&nbsp;'*(pos -i -3+ 6) + \\ color['mut'][0] +", "# If there is a mutation append if AA is not refProt[pos]: seqMUT[pos][AA][seq[3]]", "'PAN:&nbsp;&nbsp;' + color['PAN'][1] last_pos = 0 for pos in range(i,i+70): if pos in", "data: # Initialize: sequence with and without PTM, initial position AAseq = seq[1][2:-2]", "PAN string: same as ARP string PAN_str = color['PAN'][0] + 'PAN:&nbsp;&nbsp;' + color['PAN'][1]", "core[1] in range(i, i + 70): core = [idx -i for idx in", "last_pos = pos - i PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(70 -", "vaccSample def map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample): # Initialize PTM_HTML =", "pvalue MUT_stats[pos][ptm]['FOC']['oddsratio'] = oddsratio return MUT_stats def mapMutations(data, refProt, options): # Initialize outputs", "binding cores and binding core positions coreIdxs, coreClass = getBindingCore(options, refProt) # Map", "len(AAnonPTM))): # Count instances vaccSample[pos][seq[3]] += 1 # If there is a mutation", "positon and type for pos in list(mut_dict.keys()): for mut in list(mut_dict[pos].keys()): for vacc", "the protein of reference i = 0 while i < len(refProt): # Create", "= defaultdict(lambda: defaultdict(lambda : defaultdict(int))) vaccSample = defaultdict(lambda: defaultdict((int))) # For each sequence", "position AAseq = seq[1][2:-2] AAnonPTM = re.sub('\\[.+?\\]', '', AAseq) init_pos = int(seq[2]) #", "no samples from any of the # vaccines for pos in list(seqMUT.keys()): for", "if count == 0: # Update core idxes, and highlight based on class", "string PAN_str = color['PAN'][0] + 'PAN:&nbsp;&nbsp;' + color['PAN'][1] last_pos = 0 for pos", "= PAN_mut_str + color['red'][0] + 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' elif pos in list(MUT_stats.keys()) and", "refProtStr = str(i+1) + '.' 
+ '&nbsp;'*(6 -len(str(i))-1) + refProtStr + '\\n' PTM_HTML.append(markdowner.convert(refProtStr))", "for idx in core] if coreCl == 'strong': refProtStr = color['strongBinder'][0] + refProtStr[core[0]:core[1]]", "positions coreIdxs, coreClass = getBindingCore(options, refProt) # Map mutations seqMut, vaccSample = mapMutations(data,", "Bio import Entrez from Bio import SeqIO from collections import defaultdict, OrderedDict from", "refProtStr = refProt[i:i+70] count = 0 # For each binding core and class", "refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count", "seqMUT, vaccSample def map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample): # Initialize PTM_HTML", "':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp, PAN_prop, PAN_samp) if pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()) \\", "seqMUT[pos][ptm]['PAN']): del seqMUT[pos][ptm] if len(seqMUT[pos]) < 1: del seqMUT[pos] return seqMUT, vaccSample def", "range(init_pos, init_pos + len(AAnonPTM))): # Count instances vaccSample[pos][seq[3]] += 1 # If there", "= statisticalTest(options, seqMut, vaccSample, refProt) # Create HTML output map2HTML(options, coreIdxs, coreClass, refProt,", "\\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0]", "in list(seqMut.keys()): if any(seqMut[pos][mut]['ARP'] for mut in seqMut[pos]): ARP_str = ARP_str + color['ARP'][0]", "class else: if coreCl == 'strong': core = [idx - i + count*(len(color['strongBinder'][0])", "vaccSample[pos][vacc] PAN_prop = seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp = vaccSample[pos]['PAN'] PAN_mut_str = '&nbsp;'*(pos -i -3+ 6)", "FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(pos - last_pos -1 - i) +", "on class else: if coreCl == 'strong': core = [idx - i +", "in list(MUT_stats.keys()) and vacc in 
list(MUT_stats[pos][mut].keys()) \\ and MUT_stats[pos][mut][vacc]['pvalue'] < 0.05: PAN_mut_str =", "outFile.write(line) outFile.writelines(PTM_HTML) def main(): # Read options with open('options.json','r') as inFile: options =", "Markdown() color = getRandomColor(options) refProt = ''.join([refProt[pos] for pos in refProt]) # In", "0 for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['PAN'] for mut", "the length of the protein of reference i = 0 while i <", "'strong': core = [idx - i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx in", "any of the # vaccines for pos in list(seqMUT.keys()): for ptm in list(seqMUT[pos].keys()):", "PTM positon and type for pos in list(mut_dict.keys()): for mut in list(mut_dict[pos].keys()): for", "'FOC' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['FOC']", "based on class core = [idx -i for idx in core] if coreCl", "FOC string, highlighting positions of PTMs, and append FOC_str = color['FOC'][0] + 'FOC:&nbsp;&nbsp;'", "# Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n')) # Update index i += 70 # Print and save", "# Compute Fisher exact test MUT_stats = statisticalTest(options, seqMut, vaccSample, refProt) # Create", "if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['PAN'] for mut in seqMut[pos]): PAN_str = PAN_str", "= pos - i ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(70 - last_pos)", "\\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: refProtStr = color['weakBinder'][0] + refProtStr[core[0]:core[1]]", "= ARP_str + color['ARP'][0] + '&mdash;'*(70 - last_pos) + color['ARP'][1] PTM_HTML.append(markdowner.convert(ARP_str)) # Create", "- last_pos) + color['ARP'][1] PTM_HTML.append(markdowner.convert(ARP_str)) # Create FOC string, highlighting positions of PTMs,", "= color['FOC'][0] + 'FOC:&nbsp;&nbsp;' + color['FOC'][1] last_pos = 0 for pos 
in range(i,i+70):", "each PTM positon and type for pos in list(mut_dict.keys()): for mut in list(mut_dict[pos].keys()):", "color['ARP'][1] PTM_HTML.append(markdowner.convert(ARP_str)) # Create FOC string, highlighting positions of PTMs, and append FOC_str", "color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: core", "numpy as np import sys import os import json import csv import re", "each position for pos in range(options['pos_range'][0], options['pos_range'][1]+1): if pos in list(seqMut.keys()): for ptm", "'&mdash;'*(70 - last_pos) + color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str)) # Create ARP string, highlighting positions of", "= stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue'] = pvalue MUT_stats[pos][ptm]['FOC']['oddsratio'] = oddsratio return MUT_stats def mapMutations(data,", "0 for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['FOC'] for mut", "FOC_str + color['FOC'][0] + '&mdash;'*(70 - last_pos) + color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str)) # Create strings", "init_pos = int(seq[2]) # Check for mutations for AA, pos in zip(AAnonPTM, range(init_pos,", "6) + \\ color['mut'][0] + mut + color['mut'][1] + \\ '(' + vacc", "+ refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # If previous", "overlaps with that fragment if core[0] in range(i, i + 70): # If", "and class for core, coreCl in zip(coreIdxs, coreClass): # If initial position of", "import subprocess from markdown2 import Markdown from Bio import Entrez from Bio import", "'strong': refProtStr = color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count +=", "MUT_stats, seqMut, vaccSample): # Initialize PTM_HTML = list() markdowner = Markdown() color =", "for vacc in list(mut_dict[pos][mut].keys()): if mut_dict[pos][mut][vacc] > 0: vacc_prop = 
seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp =", "- i ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(70 - last_pos) + color['ARP'][1]", "for pos in list(mut_dict.keys()): for mut in list(mut_dict[pos].keys()): for vacc in list(mut_dict[pos][mut].keys()): if", "vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and append to output oddsratio, pvalue =", "in data: # Initialize: sequence with and without PTM, initial position AAseq =", "seqMut[pos][ptm]['ARP'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and append to output oddsratio,", "ARP_str = color['ARP'][0] + 'ARP:&nbsp;&nbsp;' + color['ARP'][1] mut_dict = defaultdict(lambda: defaultdict(lambda: defaultdict(int))) last_pos", "seqMUT[pos][AA][seq[3]] += 1 # Filter positions where there is no samples from any", "+ len(color['strongBinder'][1])) for idx in core] refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]]", "= ''.join([refProt[pos] for pos in refProt]) # In blocks of 70, while smaller", "+ 70): core = [idx -i for idx in core] core = [0", "0 while i < len(refProt): # Create string of reference protein (taking 70", "PAN_mut_str = PAN_mut_str + color['red'][0] + 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' elif pos in list(MUT_stats.keys())", "(taking 70 AA) refProtStr = refProt[i:i+70] count = 0 # For each binding", "- i PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(70 - last_pos) + color['PAN'][1]", "+ color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str)) # Create ARP string, highlighting positions of PTMs, and append", "+ color['ARP'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['ARP'][1] + refProt[pos-1]", "[seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['FOC'] - seqMut[pos][ptm]['FOC'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher", "import os import json import csv import re import random import subprocess from", "of PTMs, and append 
FOC_str = color['FOC'][0] + 'FOC:&nbsp;&nbsp;' + color['FOC'][1] last_pos =", "Initialize outputs seqMUT = defaultdict(lambda: defaultdict(lambda : defaultdict(int))) vaccSample = defaultdict(lambda: defaultdict((int))) #", "zip(coreIdxs, coreClass): # If initial position of the core overlaps with that fragment", "-i for idx in core] core = [0 if idx < 0 else", "+ color['FOC'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['FOC'][1] + refProt[pos-1]", "previous hightlight if count == 0: # Update core idxes, and highlight based", "PTMs, and append ARP_str = color['ARP'][0] + 'ARP:&nbsp;&nbsp;' + color['ARP'][1] mut_dict = defaultdict(lambda:", "if any(seqMut[pos][mut]['ARP'] for mut in seqMut[pos]): ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(pos", "data data = importData(options) # Import protein of reference refProt = reference_retreive(options['refProt']) #", "in list(seqMut.keys()): if any(seqMut[pos][mut]['FOC'] for mut in seqMut[pos]): FOC_str = FOC_str + color['FOC'][0]", "seq in data: # Initialize: sequence with and without PTM, initial position AAseq", "pos - i FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(70 - last_pos) +", "mapMutations(data, refProt, options): # Initialize outputs seqMUT = defaultdict(lambda: defaultdict(lambda : defaultdict(int))) vaccSample", "< 0.05: PAN_mut_str = PAN_mut_str + color['red'][0] + 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' elif pos", "core = [idx -i for idx in core] core = [0 if idx", "defaultdict, OrderedDict from scipy import stats from utils import getBindingCore, importBindData,\\ importData, reference_retreive,", "and append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['ARP']['pvalue'] = pvalue MUT_stats[pos][ptm]['ARP']['oddsratio']", "test and append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue'] = pvalue", "vaccSample, refProt) # Create HTML output 
map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample)", "color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: core = [idx - i +", "pos in range(options['pos_range'][0], options['pos_range'][1]+1): if pos in list(seqMut.keys()): for ptm in list(seqMut[pos].keys()): if", "If previous binding core in segment, update idx and highlight based on class", "re import random import subprocess from markdown2 import Markdown from Bio import Entrez", "core] refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:]", "# For each sequence for seq in data: # Initialize: sequence with and", "count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx in core] refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] +", "= stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['ARP']['pvalue'] = pvalue MUT_stats[pos][ptm]['ARP']['oddsratio'] = oddsratio if 'PAN' and 'FOC'", "in list(mut_dict.keys()): for mut in list(mut_dict[pos].keys()): for vacc in list(mut_dict[pos][mut].keys()): if mut_dict[pos][mut][vacc] >", "last_pos -1 - i) + color['PAN'][1] + refProt[pos-1] last_pos = pos - i", "ptm_positive = [seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['ARP'] - seqMut[pos][ptm]['ARP'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']]", "to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['ARP']['pvalue'] = pvalue MUT_stats[pos][ptm]['ARP']['oddsratio'] = oddsratio", "= oddsratio if 'PAN' and 'FOC' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive =", "= 0 # For each binding core and class for core, coreCl in", "# If ending position of the core overlaps with the fragment: same elif", "in seqMut[pos]): PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(pos - last_pos -1 -", "color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # If 
ending position of the core", "+ 'ARP:&nbsp;&nbsp;' + color['ARP'][1] mut_dict = defaultdict(lambda: defaultdict(lambda: defaultdict(int))) last_pos = 0 for", "0 for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['ARP'] for mut", "list(seqMut.keys()): if any(seqMut[pos][mut]['FOC'] for mut in seqMut[pos]): FOC_str = FOC_str + color['FOC'][0] +", "seqMut[pos]): FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(pos - last_pos -1 - i)", "in range(options['pos_range'][0], options['pos_range'][1]+1): if pos in list(seqMut.keys()): for ptm in list(seqMut[pos].keys()): if 'PAN'", "if 'PAN' and 'ARP' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']]", "string of reference protein (taking 70 AA) refProtStr = refProt[i:i+70] count = 0", "refProt]) # In blocks of 70, while smaller than the length of the", "ptm_negative = [vaccSample[pos]['ARP'] - seqMut[pos][ptm]['ARP'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and", "stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue'] = pvalue MUT_stats[pos][ptm]['FOC']['oddsratio'] = oddsratio return MUT_stats def mapMutations(data, refProt,", "color['FOC'][0] + '&mdash;'*(70 - last_pos) + color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str)) # Create strings for each", "+ color['PAN'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['PAN'][1] + refProt[pos-1]", "options = json.load(inFile) # Import data data = importData(options) # Import protein of", "# Get binding cores and binding core positions coreIdxs, coreClass = getBindingCore(options, refProt)", "= importData(options) # Import protein of reference refProt = reference_retreive(options['refProt']) # Get binding", "< len(refProt): # Create string of reference protein (taking 70 AA) refProtStr =", "for mut in seqMut[pos]): ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(pos - last_pos", "# For each binding core and class for 
core, coreCl in zip(coreIdxs, coreClass):", "core and class for core, coreCl in zip(coreIdxs, coreClass): # If initial position", "+ len(color['strongBinder'][1])) for idx in core] refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]]", "+ color['ARP'][1] + refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['ARP'] = seqMut[pos][mut]['ARP'] last_pos =", "ptm_negative]) MUT_stats[pos][ptm]['ARP']['pvalue'] = pvalue MUT_stats[pos][ptm]['ARP']['oddsratio'] = oddsratio if 'PAN' and 'FOC' in list(seqMut[pos][ptm].keys()):", "defaultdict(lambda: defaultdict(lambda: defaultdict(int))) last_pos = 0 for pos in range(i,i+70): if pos in", "'\\n' PTM_HTML.append(markdowner.convert(refProtStr)) # Create PAN string: same as ARP string PAN_str = color['PAN'][0]", "and vacc in list(MUT_stats[pos][mut].keys()): PAN_mut_str = PAN_mut_str + 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str)) #", "each binding core and class for core, coreCl in zip(coreIdxs, coreClass): # If", "seqMut[pos]: mut_dict[pos][mut]['FOC'] = seqMut[pos][mut]['FOC'] last_pos = pos - i FOC_str = FOC_str +", "not refProt[pos]: seqMUT[pos][AA][seq[3]] += 1 # Filter positions where there is no samples", "refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: refProtStr = refProtStr[0:core[0]]", "PTM_HTML.append(markdowner.convert(ARP_str)) # Create FOC string, highlighting positions of PTMs, and append FOC_str =", "+ mut + color['mut'][1] + \\ '(' + vacc + ':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp, PAN_prop,", "refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: refProtStr = color['weakBinder'][0]", "csv import re import random import subprocess from markdown2 import Markdown from Bio", "last_pos) + color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str)) # Create ARP string, highlighting positions of 
PTMs, and", "color['ARP'][1] + refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['ARP'] = seqMut[pos][mut]['ARP'] last_pos = pos", "than the length of the protein of reference i = 0 while i", "if pos in list(seqMut.keys()): for ptm in list(seqMut[pos].keys()): if 'PAN' and 'ARP' in", "+ refProtStr[core[1]:] count += 1 # Append to HTML output refProtStr = str(i+1)", "+ \\ color['mut'][0] + mut + color['mut'][1] + \\ '(' + vacc +", "of reference protein (taking 70 AA) refProtStr = refProt[i:i+70] count = 0 #", "np import sys import os import json import csv import re import random", "from markdown2 import Markdown from Bio import Entrez from Bio import SeqIO from", "0 else idx for idx in core] if coreCl == 'strong': refProtStr =", "= 0 while i < len(refProt): # Create string of reference protein (taking", "- i FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(70 - last_pos) + color['FOC'][1]", "append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['ARP']['pvalue'] = pvalue MUT_stats[pos][ptm]['ARP']['oddsratio'] =", "if 'PAN' and 'FOC' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']]", "pos in list(mut_dict.keys()): for mut in list(mut_dict[pos].keys()): for vacc in list(mut_dict[pos][mut].keys()): if mut_dict[pos][mut][vacc]", "data = importData(options) # Import protein of reference refProt = reference_retreive(options['refProt']) # Get", "MUT_stats = statisticalTest(options, seqMut, vaccSample, refProt) # Create HTML output map2HTML(options, coreIdxs, coreClass,", "1 # If previous binding core in segment, update idx and highlight based", "= 0 for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['ARP'] for", "count += 1 else: refProtStr = color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] +", "Entrez from Bio import SeqIO from collections import defaultdict, OrderedDict from scipy 
import", "'', AAseq) init_pos = int(seq[2]) # Check for mutations for AA, pos in", "statisticalTest(options, seqMut, vaccSample, refProt): # Initialize MUT_stats = defaultdict(lambda: defaultdict(lambda : defaultdict(lambda: defaultdict(int))))", "in segment, update idx and highlight based on class else: if coreCl ==", "If ending position of the core overlaps with the fragment: same elif core[1]", "= str(i+1) + '.' + '&nbsp;'*(6 -len(str(i))-1) + refProtStr + '\\n' PTM_HTML.append(markdowner.convert(refProtStr)) #", "last_pos) + color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str)) # Create strings for each PTM positon and type", "+= 1 else: refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1]", "core] refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:]", "cores and binding core positions coreIdxs, coreClass = getBindingCore(options, refProt) # Map mutations", "any(seqMut[pos][mut]['ARP'] for mut in seqMut[pos]): ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(pos -", "in list(MUT_stats[pos][mut].keys()) \\ and MUT_stats[pos][mut][vacc]['pvalue'] < 0.05: PAN_mut_str = PAN_mut_str + color['red'][0] +", "idx in core] core = [0 if idx < 0 else idx for", "class core = [idx -i for idx in core] if coreCl == 'strong':", "color['mut'][1] + \\ '(' + vacc + ':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp, PAN_prop, PAN_samp) if pos", "last_pos = 0 for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['FOC']", "binding core in segment, update idx and highlight based on class else: if", "PTMs, and append FOC_str = color['FOC'][0] + 'FOC:&nbsp;&nbsp;' + color['FOC'][1] last_pos = 0", "+ count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx in core] refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0]", 
"PTM_HTML.append(markdowner.convert(PAN_mut_str)) # Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n')) # Update index i += 70 # Print and", "'&mdash;'*(pos - last_pos -1 - i) + color['PAN'][1] + refProt[pos-1] last_pos = pos", "collections import defaultdict, OrderedDict from scipy import stats from utils import getBindingCore, importBindData,\\", "[idx - i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx in core] refProtStr =", "seq[1][2:-2] AAnonPTM = re.sub('\\[.+?\\]', '', AAseq) init_pos = int(seq[2]) # Check for mutations", "sequence for seq in data: # Initialize: sequence with and without PTM, initial", "refProtStr = color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1", "for pos in list(seqMUT.keys()): for ptm in list(seqMUT[pos].keys()): if not(seqMUT[pos][ptm]['ARP'] and seqMUT[pos][ptm]['PAN']) \\", "in range(i, i + 70): # If no previous hightlight if count ==", "count == 0: # Update core idxes, and highlight based on class core", "PAN_mut_str + 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str)) # Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n')) # Update index i", "MUT_stats[pos][mut][vacc]['pvalue'] < 0.05: PAN_mut_str = PAN_mut_str + color['red'][0] + 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' elif", "seqMut[pos]): ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(pos - last_pos -1 - i)", "reference_retreive, div0, getBindingCore, getRandomColor def statisticalTest(options, seqMut, vaccSample, refProt): # Initialize MUT_stats =", "in list(seqMUT.keys()): for ptm in list(seqMUT[pos].keys()): if not(seqMUT[pos][ptm]['ARP'] and seqMUT[pos][ptm]['PAN']) \\ and not(seqMUT[pos][ptm]['FOC']", "in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['ARP'] for mut in seqMut[pos]): ARP_str", "FOC_str = color['FOC'][0] + 'FOC:&nbsp;&nbsp;' + color['FOC'][1] 
last_pos = 0 for pos in", "list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['ARP'] - seqMut[pos][ptm]['ARP'],", "refProt, options) # Compute Fisher exact test MUT_stats = statisticalTest(options, seqMut, vaccSample, refProt)", "\\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # If ending position of the", "initial position of the core overlaps with that fragment if core[0] in range(i,", "refProtStr[core[1]:] count += 1 else: refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] +", "# Append to HTML output refProtStr = str(i+1) + '.' + '&nbsp;'*(6 -len(str(i))-1)", "as inFile: options = json.load(inFile) # Import data data = importData(options) # Import", "refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['FOC'] = seqMut[pos][mut]['FOC'] last_pos = pos - i", "= list() markdowner = Markdown() color = getRandomColor(options) refProt = ''.join([refProt[pos] for pos", "FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(70 - last_pos) + color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str)) #", "stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['ARP']['pvalue'] = pvalue MUT_stats[pos][ptm]['ARP']['oddsratio'] = oddsratio if 'PAN' and 'FOC' in", "# If initial position of the core overlaps with that fragment if core[0]", "import re import random import subprocess from markdown2 import Markdown from Bio import", "color['mut'][0] + mut + color['mut'][1] + \\ '(' + vacc + ':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp,", "PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(70 - last_pos) + color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str)) #", "protein (taking 70 AA) refProtStr = refProt[i:i+70] count = 0 # For each", "\\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # If previous binding core in", "# Create ARP string, highlighting positions of PTMs, and append ARP_str = 
color['ARP'][0]", "vacc_samp, PAN_prop, PAN_samp) if pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()) \\ and", "there is no samples from any of the # vaccines for pos in", "Update index i += 70 # Print and save with open(options['html'][\"scroll-template\"], 'r') as", "any(seqMut[pos][mut]['FOC'] for mut in seqMut[pos]): FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(pos -", "1: del seqMUT[pos] return seqMUT, vaccSample def map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut,", "no previous hightlight if count == 0: # Update core idxes, and highlight", "line in inFile: outFile.write(line) outFile.writelines(PTM_HTML) def main(): # Read options with open('options.json','r') as", "and append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue'] = pvalue MUT_stats[pos][ptm]['FOC']['oddsratio']", "core = [idx - i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx in core]", "+ 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str)) # Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n')) # Update index i +=", "+ '\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str)) # Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n')) # Update index i += 70 #", "with the fragment: same elif core[1] in range(i, i + 70): core =", "+ '&nbsp;'*(6 -len(str(i))-1) + refProtStr + '\\n' PTM_HTML.append(markdowner.convert(refProtStr)) # Create PAN string: same", "'\\n' elif pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()): PAN_mut_str = PAN_mut_str +", "refProt, options): # Initialize outputs seqMUT = defaultdict(lambda: defaultdict(lambda : defaultdict(int))) vaccSample =", "seqMut[pos][ptm]['FOC'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and append to output oddsratio,", "to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, 
ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue'] = pvalue MUT_stats[pos][ptm]['FOC']['oddsratio'] = oddsratio", "+ refProtStr[core[1]:] count += 1 else: core = [idx - i + count*(len(color['strongBinder'][0])", "-1 - i) + color['ARP'][1] + refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['ARP'] =", "last_pos -1 - i) + color['FOC'][1] + refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['FOC']", "PAN_samp) if pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()) \\ and MUT_stats[pos][mut][vacc]['pvalue'] <", "idx in core] refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1]", "if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['FOC'] for mut in seqMut[pos]): FOC_str = FOC_str", "+ ':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp, PAN_prop, PAN_samp) if pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys())", "not(seqMUT[pos][ptm]['FOC'] and seqMUT[pos][ptm]['PAN']): del seqMUT[pos][ptm] if len(seqMUT[pos]) < 1: del seqMUT[pos] return seqMUT,", "= color['PAN'][0] + 'PAN:&nbsp;&nbsp;' + color['PAN'][1] last_pos = 0 for pos in range(i,i+70):", "# If no previous hightlight if count == 0: # Update core idxes,", "AA is not refProt[pos]: seqMUT[pos][AA][seq[3]] += 1 # Filter positions where there is", "oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue'] = pvalue MUT_stats[pos][ptm]['FOC']['oddsratio'] = oddsratio return MUT_stats", "If there is a mutation append if AA is not refProt[pos]: seqMUT[pos][AA][seq[3]] +=", "positions of PTMs, and append ARP_str = color['ARP'][0] + 'ARP:&nbsp;&nbsp;' + color['ARP'][1] mut_dict", "import csv import re import random import subprocess from markdown2 import Markdown from", "the fragment: same elif core[1] in range(i, i + 70): core = [idx", "+= 1 else: refProtStr = color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + 
refProtStr[core[1]:]", "MUT_stats[pos][ptm]['FOC']['oddsratio'] = oddsratio return MUT_stats def mapMutations(data, refProt, options): # Initialize outputs seqMUT", "pos in list(seqMUT.keys()): for ptm in list(seqMUT[pos].keys()): if not(seqMUT[pos][ptm]['ARP'] and seqMUT[pos][ptm]['PAN']) \\ and", "instances vaccSample[pos][seq[3]] += 1 # If there is a mutation append if AA", "import json import csv import re import random import subprocess from markdown2 import", "map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample): # Initialize PTM_HTML = list() markdowner", "# Create FOC string, highlighting positions of PTMs, and append FOC_str = color['FOC'][0]", "defaultdict(int))) last_pos = 0 for pos in range(i,i+70): if pos in list(seqMut.keys()): if", "'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' elif pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()): PAN_mut_str =", "mut_dict = defaultdict(lambda: defaultdict(lambda: defaultdict(int))) last_pos = 0 for pos in range(i,i+70): if", "+= 1 # If there is a mutation append if AA is not", "refProtStr[core[1]:] count += 1 # If ending position of the core overlaps with", "Count instances vaccSample[pos][seq[3]] += 1 # If there is a mutation append if", "Import protein of reference refProt = reference_retreive(options['refProt']) # Get binding cores and binding", "core] if coreCl == 'strong': refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] +", "color['ARP'][0] + 'ARP:&nbsp;&nbsp;' + color['ARP'][1] mut_dict = defaultdict(lambda: defaultdict(lambda: defaultdict(int))) last_pos = 0", "+= 1 # If previous binding core in segment, update idx and highlight", "mut in seqMut[pos]): FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(pos - last_pos -1", "+ color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 #", "color['FOC'][1] last_pos = 0 for pos in range(i,i+70): if 
pos in list(seqMut.keys()): if", "positions of PTMs, and append FOC_str = color['FOC'][0] + 'FOC:&nbsp;&nbsp;' + color['FOC'][1] last_pos", "PTM_HTML = list() markdowner = Markdown() color = getRandomColor(options) refProt = ''.join([refProt[pos] for", "json.load(inFile) # Import data data = importData(options) # Import protein of reference refProt", "and seqMUT[pos][ptm]['PAN']) \\ and not(seqMUT[pos][ptm]['FOC'] and seqMUT[pos][ptm]['PAN']): del seqMUT[pos][ptm] if len(seqMUT[pos]) < 1:", "length of the protein of reference i = 0 while i < len(refProt):", "= seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp = vaccSample[pos]['PAN'] PAN_mut_str = '&nbsp;'*(pos -i -3+ 6) + \\", "mut in list(mut_dict[pos].keys()): for vacc in list(mut_dict[pos][mut].keys()): if mut_dict[pos][mut][vacc] > 0: vacc_prop =", "if any(seqMut[pos][mut]['FOC'] for mut in seqMut[pos]): FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(pos", "70, while smaller than the length of the protein of reference i =", "1 else: core = [idx - i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx", "and 'ARP' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']] ptm_negative =", "as inFile: with open(options['files']['mutMapJacob.html'], 'w') as outFile: for line in inFile: outFile.write(line) outFile.writelines(PTM_HTML)", "Append to HTML output refProtStr = str(i+1) + '.' 
+ '&nbsp;'*(6 -len(str(i))-1) +", "+ color['FOC'][0] + '&mdash;'*(70 - last_pos) + color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str)) # Create strings for", "inFile: with open(options['files']['mutMapJacob.html'], 'w') as outFile: for line in inFile: outFile.write(line) outFile.writelines(PTM_HTML) def", "array ptm_positive = [seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['ARP'] - seqMut[pos][ptm]['ARP'], \\ vaccSample[pos]['PAN'] -", "range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['ARP'] for mut in seqMut[pos]): ARP_str =", "'r') as inFile: with open(options['files']['mutMapJacob.html'], 'w') as outFile: for line in inFile: outFile.write(line)", "defaultdict((int))) # For each sequence for seq in data: # Initialize: sequence with", "For each position for pos in range(options['pos_range'][0], options['pos_range'][1]+1): if pos in list(seqMut.keys()): for", "reference i = 0 while i < len(refProt): # Create string of reference", "with open(options['files']['mutMapJacob.html'], 'w') as outFile: for line in inFile: outFile.write(line) outFile.writelines(PTM_HTML) def main():", "= seq[1][2:-2] AAnonPTM = re.sub('\\[.+?\\]', '', AAseq) init_pos = int(seq[2]) # Check for", "outFile: for line in inFile: outFile.write(line) outFile.writelines(PTM_HTML) def main(): # Read options with", "# Fisher test and append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue']", "i PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(70 - last_pos) + color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str))", "in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['FOC'] -", "defaultdict(lambda : defaultdict(int))) vaccSample = defaultdict(lambda: defaultdict((int))) # For each sequence for seq", "= [0 if idx < 0 else idx for idx in core] if", "importData, 
reference_retreive, div0, getBindingCore, getRandomColor def statisticalTest(options, seqMut, vaccSample, refProt): # Initialize MUT_stats", "random import subprocess from markdown2 import Markdown from Bio import Entrez from Bio", "+= 1 else: core = [idx - i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for", "# Initialize outputs seqMUT = defaultdict(lambda: defaultdict(lambda : defaultdict(int))) vaccSample = defaultdict(lambda: defaultdict((int)))", "ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(pos - last_pos -1 - i) +", "idx in core] if coreCl == 'strong': refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] +", "core, coreCl in zip(coreIdxs, coreClass): # If initial position of the core overlaps", "# Create PAN string: same as ARP string PAN_str = color['PAN'][0] + 'PAN:&nbsp;&nbsp;'", "and vacc in list(MUT_stats[pos][mut].keys()) \\ and MUT_stats[pos][mut][vacc]['pvalue'] < 0.05: PAN_mut_str = PAN_mut_str +", "elif pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()): PAN_mut_str = PAN_mut_str + 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue'])", "\\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: core = [idx - i", "list(MUT_stats[pos][mut].keys()): PAN_mut_str = PAN_mut_str + 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str)) # Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n')) #", "from scipy import stats from utils import getBindingCore, importBindData,\\ importData, reference_retreive, div0, getBindingCore,", "the # vaccines for pos in list(seqMUT.keys()): for ptm in list(seqMUT[pos].keys()): if not(seqMUT[pos][ptm]['ARP']", "for seq in data: # Initialize: sequence with and without PTM, initial position", "idx in core] if coreCl == 'strong': refProtStr = color['strongBinder'][0] + refProtStr[core[0]:core[1]] +", "test and append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) 
MUT_stats[pos][ptm]['ARP']['pvalue'] = pvalue", "= color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 #", "range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['PAN'] for mut in seqMut[pos]): PAN_str =", "'&mdash;'*(pos - last_pos -1 - i) + color['ARP'][1] + refProt[pos-1] for mut in", "# Initialize MUT_stats = defaultdict(lambda: defaultdict(lambda : defaultdict(lambda: defaultdict(int)))) # For each position", "seqMUT[pos][ptm] if len(seqMUT[pos]) < 1: del seqMUT[pos] return seqMUT, vaccSample def map2HTML(options, coreIdxs,", "+ '&mdash;'*(pos - last_pos -1 - i) + color['FOC'][1] + refProt[pos-1] for mut", "pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()): PAN_mut_str = PAN_mut_str + 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) +", "in seqMut[pos]): FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(pos - last_pos -1 -", "Create HTML output map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample) if __name__ ==", "pos - i ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(70 - last_pos) +", "PAN_prop, PAN_samp) if pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()) \\ and MUT_stats[pos][mut][vacc]['pvalue']", "list(seqMUT.keys()): for ptm in list(seqMUT[pos].keys()): if not(seqMUT[pos][ptm]['ARP'] and seqMUT[pos][ptm]['PAN']) \\ and not(seqMUT[pos][ptm]['FOC'] and", "seqMut[pos][mut]['FOC'] last_pos = pos - i FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(70", "AAnonPTM = re.sub('\\[.+?\\]', '', AAseq) init_pos = int(seq[2]) # Check for mutations for", "core] core = [0 if idx < 0 else idx for idx in", "= FOC_str + color['FOC'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['FOC'][1]", "= seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp = vaccSample[pos][vacc] PAN_prop = seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp = vaccSample[pos]['PAN'] PAN_mut_str =", "save with 
open(options['html'][\"scroll-template\"], 'r') as inFile: with open(options['files']['mutMapJacob.html'], 'w') as outFile: for line", "mut_dict[pos][mut]['ARP'] = seqMut[pos][mut]['ARP'] last_pos = pos - i ARP_str = ARP_str + color['ARP'][0]", "# Update core idxes, and highlight based on class core = [idx -i", "\\ '(' + vacc + ':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp, PAN_prop, PAN_samp) if pos in list(MUT_stats.keys())", "if coreCl == 'strong': refProtStr = color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] +", "string: same as ARP string PAN_str = color['PAN'][0] + 'PAN:&nbsp;&nbsp;' + color['PAN'][1] last_pos", "PAN_str + color['PAN'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['PAN'][1] +", "0: # Update core idxes, and highlight based on class core = [idx", "if coreCl == 'strong': core = [idx - i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1]))", "import defaultdict, OrderedDict from scipy import stats from utils import getBindingCore, importBindData,\\ importData,", "statisticalTest(options, seqMut, vaccSample, refProt) # Create HTML output map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats,", "+ color['ARP'][0] + '&mdash;'*(70 - last_pos) + color['ARP'][1] PTM_HTML.append(markdowner.convert(ARP_str)) # Create FOC string,", "# Print and save with open(options['html'][\"scroll-template\"], 'r') as inFile: with open(options['files']['mutMapJacob.html'], 'w') as", "MUT_stats def mapMutations(data, refProt, options): # Initialize outputs seqMUT = defaultdict(lambda: defaultdict(lambda :", "count = 0 # For each binding core and class for core, coreCl", "with that fragment if core[0] in range(i, i + 70): # If no", "+ refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['FOC'] = seqMut[pos][mut]['FOC'] last_pos = pos -", "for mut in seqMut[pos]): FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(pos - last_pos", "for mut in list(mut_dict[pos].keys()): for vacc in 
list(mut_dict[pos][mut].keys()): if mut_dict[pos][mut][vacc] > 0: vacc_prop", "+ color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str)) # Create strings for each PTM positon and type for", "getBindingCore(options, refProt) # Map mutations seqMut, vaccSample = mapMutations(data, refProt, options) # Compute", "refProtStr[core[1]:] count += 1 else: refProtStr = color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1]", "+ color['PAN'][0] + '&mdash;'*(70 - last_pos) + color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str)) # Create ARP string,", "seqMut, vaccSample = mapMutations(data, refProt, options) # Compute Fisher exact test MUT_stats =", "+ 'PAN:&nbsp;&nbsp;' + color['PAN'][1] last_pos = 0 for pos in range(i,i+70): if pos", "seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp = vaccSample[pos][vacc] PAN_prop = seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp = vaccSample[pos]['PAN'] PAN_mut_str = '&nbsp;'*(pos", "int(seq[2]) # Check for mutations for AA, pos in zip(AAnonPTM, range(init_pos, init_pos +", "+ '&mdash;'*(70 - last_pos) + color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str)) # Create ARP string, highlighting positions", "markdown2 import Markdown from Bio import Entrez from Bio import SeqIO from collections", "< 0 else idx for idx in core] if coreCl == 'strong': refProtStr", "= mapMutations(data, refProt, options) # Compute Fisher exact test MUT_stats = statisticalTest(options, seqMut,", "'&mdash;'*(70 - last_pos) + color['ARP'][1] PTM_HTML.append(markdowner.convert(ARP_str)) # Create FOC string, highlighting positions of", "> 0: vacc_prop = seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp = vaccSample[pos][vacc] PAN_prop = seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp =", "os import json import csv import re import random import subprocess from markdown2", "color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # If", "of 
the protein of reference i = 0 while i < len(refProt): #", "if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['ARP'] for mut in seqMut[pos]): ARP_str = ARP_str", "+ refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: core =", "refProtStr[core[1]:] count += 1 # Append to HTML output refProtStr = str(i+1) +", "init_pos + len(AAnonPTM))): # Count instances vaccSample[pos][seq[3]] += 1 # If there is", "getRandomColor def statisticalTest(options, seqMut, vaccSample, refProt): # Initialize MUT_stats = defaultdict(lambda: defaultdict(lambda :", "of PTMs, and append ARP_str = color['ARP'][0] + 'ARP:&nbsp;&nbsp;' + color['ARP'][1] mut_dict =", "strings for each PTM positon and type for pos in list(mut_dict.keys()): for mut", "Import data data = importData(options) # Import protein of reference refProt = reference_retreive(options['refProt'])", "Fisher test and append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue'] =", "# Initialize: sequence with and without PTM, initial position AAseq = seq[1][2:-2] AAnonPTM", "refProtStr[core[1]:] count += 1 # If previous binding core in segment, update idx", "importBindData,\\ importData, reference_retreive, div0, getBindingCore, getRandomColor def statisticalTest(options, seqMut, vaccSample, refProt): # Initialize", "without PTM, initial position AAseq = seq[1][2:-2] AAnonPTM = re.sub('\\[.+?\\]', '', AAseq) init_pos", "# Map mutations seqMut, vaccSample = mapMutations(data, refProt, options) # Compute Fisher exact", "last_pos = 0 for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['PAN']", "'PAN' and 'FOC' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']] ptm_negative", "count += 1 # Append to HTML output refProtStr = str(i+1) + '.'", "binding core and class for core, coreCl in zip(coreIdxs, coreClass): # If initial", 
"mutations seqMut, vaccSample = mapMutations(data, refProt, options) # Compute Fisher exact test MUT_stats", "and MUT_stats[pos][mut][vacc]['pvalue'] < 0.05: PAN_mut_str = PAN_mut_str + color['red'][0] + 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n'", "defaultdict(lambda: defaultdict(int)))) # For each position for pos in range(options['pos_range'][0], options['pos_range'][1]+1): if pos", "- seqMut[pos][ptm]['PAN']] # Fisher test and append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive,", "as ARP string PAN_str = color['PAN'][0] + 'PAN:&nbsp;&nbsp;' + color['PAN'][1] last_pos = 0", "'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str)) # Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n')) # Update index i += 70", "i) + color['PAN'][1] + refProt[pos-1] last_pos = pos - i PAN_str = PAN_str", "ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue'] = pvalue MUT_stats[pos][ptm]['FOC']['oddsratio'] = oddsratio return MUT_stats def mapMutations(data, refProt, options):", "options) # Compute Fisher exact test MUT_stats = statisticalTest(options, seqMut, vaccSample, refProt) #", "MUT_stats[pos][ptm]['ARP']['pvalue'] = pvalue MUT_stats[pos][ptm]['ARP']['oddsratio'] = oddsratio if 'PAN' and 'FOC' in list(seqMut[pos][ptm].keys()): #", "1 else: refProtStr = color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count", "for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['PAN'] for mut in", "+ '&mdash;'*(70 - last_pos) + color['ARP'][1] PTM_HTML.append(markdowner.convert(ARP_str)) # Create FOC string, highlighting positions", "for mut in seqMut[pos]): PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(pos - last_pos", "Create strings for each PTM positon and type for pos in list(mut_dict.keys()): for", "PAN_mut_str + color['red'][0] + 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' elif pos in 
list(MUT_stats.keys()) and vacc", "defaultdict(lambda : defaultdict(lambda: defaultdict(int)))) # For each position for pos in range(options['pos_range'][0], options['pos_range'][1]+1):", "list() markdowner = Markdown() color = getRandomColor(options) refProt = ''.join([refProt[pos] for pos in", "seqMut, vaccSample, refProt) # Create HTML output map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut,", "+ refProtStr[core[1]:] count += 1 # If ending position of the core overlaps", "coreCl == 'strong': core = [idx - i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for", "for idx in core] core = [0 if idx < 0 else idx", "in seqMut[pos]: mut_dict[pos][mut]['FOC'] = seqMut[pos][mut]['FOC'] last_pos = pos - i FOC_str = FOC_str", "-1 - i) + color['FOC'][1] + refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['FOC'] =", "highlight based on class else: if coreCl == 'strong': core = [idx -", "Filter positions where there is no samples from any of the # vaccines", "= [idx - i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx in core] refProtStr", "refProtStr = color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1", "in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['ARP'] -", "70): # If no previous hightlight if count == 0: # Update core", "color['PAN'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['PAN'][1] + refProt[pos-1] last_pos", "+= 1 # Append to HTML output refProtStr = str(i+1) + '.' 
+", "any(seqMut[pos][mut]['PAN'] for mut in seqMut[pos]): PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(pos -", "if any(seqMut[pos][mut]['PAN'] for mut in seqMut[pos]): PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(pos", "= PAN_str + color['PAN'][0] + '&mdash;'*(70 - last_pos) + color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str)) # Create", "0: vacc_prop = seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp = vaccSample[pos][vacc] PAN_prop = seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp = vaccSample[pos]['PAN']", "vacc in list(mut_dict[pos][mut].keys()): if mut_dict[pos][mut][vacc] > 0: vacc_prop = seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp = vaccSample[pos][vacc]", "\\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and append to output oddsratio, pvalue", "in list(seqMut.keys()): if any(seqMut[pos][mut]['PAN'] for mut in seqMut[pos]): PAN_str = PAN_str + color['PAN'][0]", "color['ARP'][1] mut_dict = defaultdict(lambda: defaultdict(lambda: defaultdict(int))) last_pos = 0 for pos in range(i,i+70):", "+ '&mdash;'*(pos - last_pos -1 - i) + color['ARP'][1] + refProt[pos-1] for mut", "70 AA) refProtStr = refProt[i:i+70] count = 0 # For each binding core", "+ \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # Append to HTML output", "ARP string PAN_str = color['PAN'][0] + 'PAN:&nbsp;&nbsp;' + color['PAN'][1] last_pos = 0 for", "in seqMut[pos]: mut_dict[pos][mut]['ARP'] = seqMut[pos][mut]['ARP'] last_pos = pos - i ARP_str = ARP_str", "last_pos = pos - i ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(70 -", "= [idx -i for idx in core] if coreCl == 'strong': refProtStr =", "coreCl == 'strong': refProtStr = color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:]", "= pvalue MUT_stats[pos][ptm]['FOC']['oddsratio'] = oddsratio return MUT_stats def mapMutations(data, refProt, options): # Initialize", "< 1: del seqMUT[pos] return seqMUT, vaccSample def 
map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats,", "Initialize: sequence with and without PTM, initial position AAseq = seq[1][2:-2] AAnonPTM =", "= getRandomColor(options) refProt = ''.join([refProt[pos] for pos in refProt]) # In blocks of", "while smaller than the length of the protein of reference i = 0", "core = [0 if idx < 0 else idx for idx in core]", "i + 70): core = [idx -i for idx in core] core =", "ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(70 - last_pos) + color['ARP'][1] PTM_HTML.append(markdowner.convert(ARP_str)) #", "MUT_stats[pos][ptm]['FOC']['pvalue'] = pvalue MUT_stats[pos][ptm]['FOC']['oddsratio'] = oddsratio return MUT_stats def mapMutations(data, refProt, options): #", "there is a mutation append if AA is not refProt[pos]: seqMUT[pos][AA][seq[3]] += 1", "from any of the # vaccines for pos in list(seqMUT.keys()): for ptm in", "list(seqMut.keys()): if any(seqMut[pos][mut]['PAN'] for mut in seqMut[pos]): PAN_str = PAN_str + color['PAN'][0] +", "i FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(70 - last_pos) + color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str))", "seqMut[pos]: mut_dict[pos][mut]['ARP'] = seqMut[pos][mut]['ARP'] last_pos = pos - i ARP_str = ARP_str +", "initial position AAseq = seq[1][2:-2] AAnonPTM = re.sub('\\[.+?\\]', '', AAseq) init_pos = int(seq[2])", "vacc + ':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp, PAN_prop, PAN_samp) if pos in list(MUT_stats.keys()) and vacc in", "# Create array ptm_positive = [seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['FOC'] - seqMut[pos][ptm]['FOC'], \\", "else: refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:]", "[idx -i for idx in core] core = [0 if idx < 0", "output map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample) if __name__ == \"__main__\": main()", "= int(seq[2]) # Check for mutations for AA, pos in 
zip(AAnonPTM, range(init_pos, init_pos", "# Create strings for each PTM positon and type for pos in list(mut_dict.keys()):", "count += 1 # If ending position of the core overlaps with the", "# For each position for pos in range(options['pos_range'][0], options['pos_range'][1]+1): if pos in list(seqMut.keys()):", "def statisticalTest(options, seqMut, vaccSample, refProt): # Initialize MUT_stats = defaultdict(lambda: defaultdict(lambda : defaultdict(lambda:", "70): core = [idx -i for idx in core] core = [0 if", "refProt) # Create HTML output map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample) if", "def main(): # Read options with open('options.json','r') as inFile: options = json.load(inFile) #", "same elif core[1] in range(i, i + 70): core = [idx -i for", "defaultdict(int))) vaccSample = defaultdict(lambda: defaultdict((int))) # For each sequence for seq in data:", "append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue'] = pvalue MUT_stats[pos][ptm]['FOC']['oddsratio'] =", "vaccines for pos in list(seqMUT.keys()): for ptm in list(seqMUT[pos].keys()): if not(seqMUT[pos][ptm]['ARP'] and seqMUT[pos][ptm]['PAN'])", "ptm in list(seqMUT[pos].keys()): if not(seqMUT[pos][ptm]['ARP'] and seqMUT[pos][ptm]['PAN']) \\ and not(seqMUT[pos][ptm]['FOC'] and seqMUT[pos][ptm]['PAN']): del", "\\ and MUT_stats[pos][mut][vacc]['pvalue'] < 0.05: PAN_mut_str = PAN_mut_str + color['red'][0] + 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) +", "+ \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: refProtStr = refProtStr[0:core[0]] +", "output refProtStr = str(i+1) + '.' 
+ '&nbsp;'*(6 -len(str(i))-1) + refProtStr + '\\n'", "count += 1 else: core = [idx - i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1]))", "options with open('options.json','r') as inFile: options = json.load(inFile) # Import data data =", "with open('options.json','r') as inFile: options = json.load(inFile) # Import data data = importData(options)", "Create array ptm_positive = [seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['FOC'] - seqMut[pos][ptm]['FOC'], \\ vaccSample[pos]['PAN']", "of reference i = 0 while i < len(refProt): # Create string of", "pos in refProt]) # In blocks of 70, while smaller than the length", "'&mdash;'*(70 - last_pos) + color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str)) # Create strings for each PTM positon", "inFile: outFile.write(line) outFile.writelines(PTM_HTML) def main(): # Read options with open('options.json','r') as inFile: options", "# Create array ptm_positive = [seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['ARP'] - seqMut[pos][ptm]['ARP'], \\", "FOC_str + color['FOC'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['FOC'][1] +", "for ptm in list(seqMut[pos].keys()): if 'PAN' and 'ARP' in list(seqMut[pos][ptm].keys()): # Create array", "color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] +", "refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: core = [idx", "# In blocks of 70, while smaller than the length of the protein", "in list(seqMUT[pos].keys()): if not(seqMUT[pos][ptm]['ARP'] and seqMUT[pos][ptm]['PAN']) \\ and not(seqMUT[pos][ptm]['FOC'] and seqMUT[pos][ptm]['PAN']): del seqMUT[pos][ptm]", "= refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count +=", "= color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ 
color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else:", "'PAN' and 'ARP' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']] ptm_negative", "+= 1 # If ending position of the core overlaps with the fragment:", "+= 1 # Filter positions where there is no samples from any of", "subprocess from markdown2 import Markdown from Bio import Entrez from Bio import SeqIO", "vaccSample = defaultdict(lambda: defaultdict((int))) # For each sequence for seq in data: #", "if not(seqMUT[pos][ptm]['ARP'] and seqMUT[pos][ptm]['PAN']) \\ and not(seqMUT[pos][ptm]['FOC'] and seqMUT[pos][ptm]['PAN']): del seqMUT[pos][ptm] if len(seqMUT[pos])", "refProt) # Map mutations seqMut, vaccSample = mapMutations(data, refProt, options) # Compute Fisher", "core positions coreIdxs, coreClass = getBindingCore(options, refProt) # Map mutations seqMut, vaccSample =", "color['ARP'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['ARP'][1] + refProt[pos-1] for", "refProt = ''.join([refProt[pos] for pos in refProt]) # In blocks of 70, while", "last_pos = 0 for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['ARP']", "Check for mutations for AA, pos in zip(AAnonPTM, range(init_pos, init_pos + len(AAnonPTM))): #", "+ 'FOC:&nbsp;&nbsp;' + color['FOC'][1] last_pos = 0 for pos in range(i,i+70): if pos", "+ \\ '(' + vacc + ':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp, PAN_prop, PAN_samp) if pos in", "mutations for AA, pos in zip(AAnonPTM, range(init_pos, init_pos + len(AAnonPTM))): # Count instances", "as outFile: for line in inFile: outFile.write(line) outFile.writelines(PTM_HTML) def main(): # Read options", "open(options['html'][\"scroll-template\"], 'r') as inFile: with open(options['files']['mutMapJacob.html'], 'w') as outFile: for line in inFile:", "= refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count +=", "for 
AA, pos in zip(AAnonPTM, range(init_pos, init_pos + len(AAnonPTM))): # Count instances vaccSample[pos][seq[3]]", "append ARP_str = color['ARP'][0] + 'ARP:&nbsp;&nbsp;' + color['ARP'][1] mut_dict = defaultdict(lambda: defaultdict(lambda: defaultdict(int)))", "i < len(refProt): # Create string of reference protein (taking 70 AA) refProtStr", "MUT_stats[pos][ptm]['ARP']['oddsratio'] = oddsratio if 'PAN' and 'FOC' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive", "= color['ARP'][0] + 'ARP:&nbsp;&nbsp;' + color['ARP'][1] mut_dict = defaultdict(lambda: defaultdict(lambda: defaultdict(int))) last_pos =", "color['FOC'][0] + 'FOC:&nbsp;&nbsp;' + color['FOC'][1] last_pos = 0 for pos in range(i,i+70): if", "in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['FOC'] for mut in seqMut[pos]): FOC_str", "in core] if coreCl == 'strong': refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]]", "seqMut, vaccSample, refProt): # Initialize MUT_stats = defaultdict(lambda: defaultdict(lambda : defaultdict(lambda: defaultdict(int)))) #", "= [idx -i for idx in core] core = [0 if idx <", "Create FOC string, highlighting positions of PTMs, and append FOC_str = color['FOC'][0] +", "Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n')) # Update index i += 70 # Print and save with", "last_pos -1 - i) + color['ARP'][1] + refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['ARP']", "position of the core overlaps with the fragment: same elif core[1] in range(i,", "main(): # Read options with open('options.json','r') as inFile: options = json.load(inFile) # Import", "# Check for mutations for AA, pos in zip(AAnonPTM, range(init_pos, init_pos + len(AAnonPTM))):", "OrderedDict from scipy import stats from utils import getBindingCore, importBindData,\\ importData, reference_retreive, div0,", "pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['PAN'] for mut in seqMut[pos]):", "pos in 
list(seqMut.keys()): for ptm in list(seqMut[pos].keys()): if 'PAN' and 'ARP' in list(seqMut[pos][ptm].keys()):", "== 'strong': refProtStr = color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count", "[idx -i for idx in core] if coreCl == 'strong': refProtStr = refProtStr[0:core[0]]", "-i -3+ 6) + \\ color['mut'][0] + mut + color['mut'][1] + \\ '('", "with open(options['html'][\"scroll-template\"], 'r') as inFile: with open(options['files']['mutMapJacob.html'], 'w') as outFile: for line in", "# Initialize PTM_HTML = list() markdowner = Markdown() color = getRandomColor(options) refProt =", "scipy import stats from utils import getBindingCore, importBindData,\\ importData, reference_retreive, div0, getBindingCore, getRandomColor", "import SeqIO from collections import defaultdict, OrderedDict from scipy import stats from utils", "+= 70 # Print and save with open(options['html'][\"scroll-template\"], 'r') as inFile: with open(options['files']['mutMapJacob.html'],", "# Import protein of reference refProt = reference_retreive(options['refProt']) # Get binding cores and", "= [seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['ARP'] - seqMut[pos][ptm]['ARP'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] #", "is no samples from any of the # vaccines for pos in list(seqMUT.keys()):", "1 # Append to HTML output refProtStr = str(i+1) + '.' 
+ '&nbsp;'*(6", "= Markdown() color = getRandomColor(options) refProt = ''.join([refProt[pos] for pos in refProt]) #", "seqMut[pos][ptm]['PAN']] # Fisher test and append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative])", "\\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # Append to HTML output refProtStr", "[vaccSample[pos]['FOC'] - seqMut[pos][ptm]['FOC'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and append to", "defaultdict(int)))) # For each position for pos in range(options['pos_range'][0], options['pos_range'][1]+1): if pos in", "oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['ARP']['pvalue'] = pvalue MUT_stats[pos][ptm]['ARP']['oddsratio'] = oddsratio if 'PAN'", "color['ARP'][0] + '&mdash;'*(70 - last_pos) + color['ARP'][1] PTM_HTML.append(markdowner.convert(ARP_str)) # Create FOC string, highlighting", "color['PAN'][1] + refProt[pos-1] last_pos = pos - i PAN_str = PAN_str + color['PAN'][0]", "refProtStr[core[1]:] count += 1 else: core = [idx - i + count*(len(color['strongBinder'][0]) +", "For each sequence for seq in data: # Initialize: sequence with and without", "+ color['FOC'][1] last_pos = 0 for pos in range(i,i+70): if pos in list(seqMut.keys()):", "PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(pos - last_pos -1 - i) +", "'ARP' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['ARP']", "color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # Append to HTML output refProtStr =", "# Filter positions where there is no samples from any of the #", "for pos in refProt]) # In blocks of 70, while smaller than the", "'&nbsp;'*(6 -len(str(i))-1) + refProtStr + '\\n' PTM_HTML.append(markdowner.convert(refProtStr)) # Create PAN string: same as", "1 # If there is a mutation append if AA is not refProt[pos]:", "color['FOC'][1] + refProt[pos-1] for mut in 
seqMut[pos]: mut_dict[pos][mut]['FOC'] = seqMut[pos][mut]['FOC'] last_pos = pos", "of reference refProt = reference_retreive(options['refProt']) # Get binding cores and binding core positions", "protein of reference i = 0 while i < len(refProt): # Create string", "mut in seqMut[pos]): ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(pos - last_pos -1", "for mut in seqMut[pos]: mut_dict[pos][mut]['ARP'] = seqMut[pos][mut]['ARP'] last_pos = pos - i ARP_str", "HTML output map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample) if __name__ == \"__main__\":", "from utils import getBindingCore, importBindData,\\ importData, reference_retreive, div0, getBindingCore, getRandomColor def statisticalTest(options, seqMut,", "= 0 for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['PAN'] for", "pos in list(seqMut.keys()): if any(seqMut[pos][mut]['ARP'] for mut in seqMut[pos]): ARP_str = ARP_str +", "PAN_mut_str = PAN_mut_str + 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str)) # Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n')) # Update", "for idx in core] refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\", "= seqMut[pos][mut]['ARP'] last_pos = pos - i ARP_str = ARP_str + color['ARP'][0] +", "else: core = [idx - i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx in", "coreClass): # If initial position of the core overlaps with that fragment if", "core overlaps with that fragment if core[0] in range(i, i + 70): #", "and 'FOC' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']] ptm_negative =", "HTML output refProtStr = str(i+1) + '.' 
+ '&nbsp;'*(6 -len(str(i))-1) + refProtStr +", "for pos in range(options['pos_range'][0], options['pos_range'][1]+1): if pos in list(seqMut.keys()): for ptm in list(seqMut[pos].keys()):", "+ refProtStr[core[1]:] count += 1 else: refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]]", "If initial position of the core overlaps with that fragment if core[0] in", "- last_pos) + color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str)) # Create ARP string, highlighting positions of PTMs,", "- seqMut[pos][ptm]['ARP'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and append to output", "and append FOC_str = color['FOC'][0] + 'FOC:&nbsp;&nbsp;' + color['FOC'][1] last_pos = 0 for", "MUT_stats = defaultdict(lambda: defaultdict(lambda : defaultdict(lambda: defaultdict(int)))) # For each position for pos", "core[0] in range(i, i + 70): # If no previous hightlight if count", "color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: refProtStr = color['weakBinder'][0] + refProtStr[core[0]:core[1]] +", "idx < 0 else idx for idx in core] if coreCl == 'strong':", "+ color['FOC'][1] + refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['FOC'] = seqMut[pos][mut]['FOC'] last_pos =", ": defaultdict(lambda: defaultdict(int)))) # For each position for pos in range(options['pos_range'][0], options['pos_range'][1]+1): if", "= [vaccSample[pos]['FOC'] - seqMut[pos][ptm]['FOC'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and append", "list(seqMUT[pos].keys()): if not(seqMUT[pos][ptm]['ARP'] and seqMUT[pos][ptm]['PAN']) \\ and not(seqMUT[pos][ptm]['FOC'] and seqMUT[pos][ptm]['PAN']): del seqMUT[pos][ptm] if", "from collections import defaultdict, OrderedDict from scipy import stats from utils import getBindingCore,", "list(seqMut.keys()): if any(seqMut[pos][mut]['ARP'] for mut in seqMut[pos]): ARP_str = ARP_str + color['ARP'][0] +", "i) + color['ARP'][1] + refProt[pos-1] for mut in seqMut[pos]: 
mut_dict[pos][mut]['ARP'] = seqMut[pos][mut]['ARP'] last_pos", "seqMut, vaccSample): # Initialize PTM_HTML = list() markdowner = Markdown() color = getRandomColor(options)", "PAN_mut_str = '&nbsp;'*(pos -i -3+ 6) + \\ color['mut'][0] + mut + color['mut'][1]", "core overlaps with the fragment: same elif core[1] in range(i, i + 70):", "import Markdown from Bio import Entrez from Bio import SeqIO from collections import", "count += 1 else: refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\", "oddsratio if 'PAN' and 'FOC' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['FOC'],", "with and without PTM, initial position AAseq = seq[1][2:-2] AAnonPTM = re.sub('\\[.+?\\]', '',", "in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()): PAN_mut_str = PAN_mut_str + 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n'", "same as ARP string PAN_str = color['PAN'][0] + 'PAN:&nbsp;&nbsp;' + color['PAN'][1] last_pos =", "refProt = reference_retreive(options['refProt']) # Get binding cores and binding core positions coreIdxs, coreClass", "and without PTM, initial position AAseq = seq[1][2:-2] AAnonPTM = re.sub('\\[.+?\\]', '', AAseq)", "from Bio import SeqIO from collections import defaultdict, OrderedDict from scipy import stats", "color['PAN'][1] last_pos = 0 for pos in range(i,i+70): if pos in list(seqMut.keys()): if", "refProt[pos-1] last_pos = pos - i PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(70", "idx in core] refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1]", "import sys import os import json import csv import re import random import", "seqMut[pos][mut]['ARP'] last_pos = pos - i ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(70", "options['pos_range'][1]+1): if pos in list(seqMut.keys()): for ptm in list(seqMut[pos].keys()): if 'PAN' and 'ARP'", "stats from utils import getBindingCore, 
importBindData,\\ importData, reference_retreive, div0, getBindingCore, getRandomColor def statisticalTest(options,", "# If previous binding core in segment, update idx and highlight based on", "ptm_negative = [vaccSample[pos]['FOC'] - seqMut[pos][ptm]['FOC'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and", "Initialize PTM_HTML = list() markdowner = Markdown() color = getRandomColor(options) refProt = ''.join([refProt[pos]", "+ count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx in core] refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0]", "outputs seqMUT = defaultdict(lambda: defaultdict(lambda : defaultdict(int))) vaccSample = defaultdict(lambda: defaultdict((int))) # For", "1 # Filter positions where there is no samples from any of the", "-len(str(i))-1) + refProtStr + '\\n' PTM_HTML.append(markdowner.convert(refProtStr)) # Create PAN string: same as ARP", "string, highlighting positions of PTMs, and append FOC_str = color['FOC'][0] + 'FOC:&nbsp;&nbsp;' +", "import numpy as np import sys import os import json import csv import", "= seqMut[pos][mut]['FOC'] last_pos = pos - i FOC_str = FOC_str + color['FOC'][0] +", "= vaccSample[pos]['PAN'] PAN_mut_str = '&nbsp;'*(pos -i -3+ 6) + \\ color['mut'][0] + mut", "str(i+1) + '.' 
+ '&nbsp;'*(6 -len(str(i))-1) + refProtStr + '\\n' PTM_HTML.append(markdowner.convert(refProtStr)) # Create", "+ vacc + ':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp, PAN_prop, PAN_samp) if pos in list(MUT_stats.keys()) and vacc", "+ refProt[pos-1] last_pos = pos - i PAN_str = PAN_str + color['PAN'][0] +", "list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()) \\ and MUT_stats[pos][mut][vacc]['pvalue'] < 0.05: PAN_mut_str = PAN_mut_str", "i = 0 while i < len(refProt): # Create string of reference protein", "in core] refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] +", "len(color['strongBinder'][1])) for idx in core] refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] +", "SeqIO from collections import defaultdict, OrderedDict from scipy import stats from utils import", "len(color['strongBinder'][1])) for idx in core] refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] +", "color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: refProtStr", "protein of reference refProt = reference_retreive(options['refProt']) # Get binding cores and binding core", "len(seqMUT[pos]) < 1: del seqMUT[pos] return seqMUT, vaccSample def map2HTML(options, coreIdxs, coreClass, refProt,", "based on class else: if coreCl == 'strong': core = [idx - i", "reference protein (taking 70 AA) refProtStr = refProt[i:i+70] count = 0 # For", "output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['FOC']['pvalue'] = pvalue MUT_stats[pos][ptm]['FOC']['oddsratio'] = oddsratio return", "previous binding core in segment, update idx and highlight based on class else:", "mutation append if AA is not refProt[pos]: seqMUT[pos][AA][seq[3]] += 1 # Filter positions", "'FOC:&nbsp;&nbsp;' + color['FOC'][1] last_pos = 0 for pos in 
range(i,i+70): if pos in", "for ptm in list(seqMUT[pos].keys()): if not(seqMUT[pos][ptm]['ARP'] and seqMUT[pos][ptm]['PAN']) \\ and not(seqMUT[pos][ptm]['FOC'] and seqMUT[pos][ptm]['PAN']):", "else: if coreCl == 'strong': core = [idx - i + count*(len(color['strongBinder'][0]) +", "pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['ARP']['pvalue'] = pvalue MUT_stats[pos][ptm]['ARP']['oddsratio'] = oddsratio if 'PAN' and", "that fragment if core[0] in range(i, i + 70): # If no previous", "fragment if core[0] in range(i, i + 70): # If no previous hightlight", "and not(seqMUT[pos][ptm]['FOC'] and seqMUT[pos][ptm]['PAN']): del seqMUT[pos][ptm] if len(seqMUT[pos]) < 1: del seqMUT[pos] return", "- i) + color['PAN'][1] + refProt[pos-1] last_pos = pos - i PAN_str =", "Markdown from Bio import Entrez from Bio import SeqIO from collections import defaultdict,", "0 # For each binding core and class for core, coreCl in zip(coreIdxs,", "else: refProtStr = color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count +=", "mut in seqMut[pos]: mut_dict[pos][mut]['ARP'] = seqMut[pos][mut]['ARP'] last_pos = pos - i ARP_str =", "list(mut_dict[pos][mut].keys()): if mut_dict[pos][mut][vacc] > 0: vacc_prop = seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp = vaccSample[pos][vacc] PAN_prop =", "= [seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['FOC'] - seqMut[pos][ptm]['FOC'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] #", "+ len(AAnonPTM))): # Count instances vaccSample[pos][seq[3]] += 1 # If there is a", "refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # If previous binding", "in core] if coreCl == 'strong': refProtStr = color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\", "- last_pos -1 - i) + color['PAN'][1] + refProt[pos-1] last_pos = pos -", "seqMUT = defaultdict(lambda: defaultdict(lambda : 
defaultdict(int))) vaccSample = defaultdict(lambda: defaultdict((int))) # For each", "color['PAN'][1] PTM_HTML.append(markdowner.convert(PAN_str)) # Create ARP string, highlighting positions of PTMs, and append ARP_str", "seqMUT[pos] return seqMUT, vaccSample def map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample): #", "i + 70): # If no previous hightlight if count == 0: #", "+ color['ARP'][1] PTM_HTML.append(markdowner.convert(ARP_str)) # Create FOC string, highlighting positions of PTMs, and append", "vacc_prop = seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp = vaccSample[pos][vacc] PAN_prop = seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp = vaccSample[pos]['PAN'] PAN_mut_str", "coreClass, refProt, MUT_stats, seqMut, vaccSample): # Initialize PTM_HTML = list() markdowner = Markdown()", "+ refProtStr[core[1]:] count += 1 # If previous binding core in segment, update", "'(' + vacc + ':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp, PAN_prop, PAN_samp) if pos in list(MUT_stats.keys()) and", "reference_retreive(options['refProt']) # Get binding cores and binding core positions coreIdxs, coreClass = getBindingCore(options,", "= [vaccSample[pos]['ARP'] - seqMut[pos][ptm]['ARP'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and append", "Map mutations seqMut, vaccSample = mapMutations(data, refProt, options) # Compute Fisher exact test", "- i) + color['ARP'][1] + refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['ARP'] = seqMut[pos][mut]['ARP']", "pvalue MUT_stats[pos][ptm]['ARP']['oddsratio'] = oddsratio if 'PAN' and 'FOC' in list(seqMut[pos][ptm].keys()): # Create array", "range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['FOC'] for mut in seqMut[pos]): FOC_str =", "+ \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # If ending position of", "+ '\\n' PTM_HTML.append(markdowner.convert(refProtStr)) # Create PAN string: same as ARP string PAN_str =", "+ 
refProtStr[core[1]:] count += 1 else: refProtStr = color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\", "Read options with open('options.json','r') as inFile: options = json.load(inFile) # Import data data", "refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # Append to HTML", "mut in seqMut[pos]: mut_dict[pos][mut]['FOC'] = seqMut[pos][mut]['FOC'] last_pos = pos - i FOC_str =", ": defaultdict(int))) vaccSample = defaultdict(lambda: defaultdict((int))) # For each sequence for seq in", "Create array ptm_positive = [seqMut[pos][ptm]['ARP'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['ARP'] - seqMut[pos][ptm]['ARP'], \\ vaccSample[pos]['PAN']", "color['red'][0] + 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' elif pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()):", "if AA is not refProt[pos]: seqMUT[pos][AA][seq[3]] += 1 # Filter positions where there", "idx and highlight based on class else: if coreCl == 'strong': core =", "color['PAN'][0] + 'PAN:&nbsp;&nbsp;' + color['PAN'][1] last_pos = 0 for pos in range(i,i+70): if", "- last_pos -1 - i) + color['FOC'][1] + refProt[pos-1] for mut in seqMut[pos]:", "= getBindingCore(options, refProt) # Map mutations seqMut, vaccSample = mapMutations(data, refProt, options) #", "# Update index i += 70 # Print and save with open(options['html'][\"scroll-template\"], 'r')", "core in segment, update idx and highlight based on class else: if coreCl", "of the core overlaps with the fragment: same elif core[1] in range(i, i", "coreCl in zip(coreIdxs, coreClass): # If initial position of the core overlaps with", "list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()): PAN_mut_str = PAN_mut_str + 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str))", "i ARP_str = ARP_str + color['ARP'][0] + '&mdash;'*(70 - last_pos) + color['ARP'][1] 
PTM_HTML.append(markdowner.convert(ARP_str))", "importData(options) # Import protein of reference refProt = reference_retreive(options['refProt']) # Get binding cores", "samples from any of the # vaccines for pos in list(seqMUT.keys()): for ptm", "if mut_dict[pos][mut][vacc] > 0: vacc_prop = seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp = vaccSample[pos][vacc] PAN_prop = seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN']", "vaccSample[pos][seq[3]] += 1 # If there is a mutation append if AA is", "i += 70 # Print and save with open(options['html'][\"scroll-template\"], 'r') as inFile: with", "coreClass = getBindingCore(options, refProt) # Map mutations seqMut, vaccSample = mapMutations(data, refProt, options)", "pos in zip(AAnonPTM, range(init_pos, init_pos + len(AAnonPTM))): # Count instances vaccSample[pos][seq[3]] += 1", "positions where there is no samples from any of the # vaccines for", "type for pos in list(mut_dict.keys()): for mut in list(mut_dict[pos].keys()): for vacc in list(mut_dict[pos][mut].keys()):", "return seqMUT, vaccSample def map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample): # Initialize", "color['FOC'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['FOC'][1] + refProt[pos-1] for", "[vaccSample[pos]['ARP'] - seqMut[pos][ptm]['ARP'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and append to", "+ 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' elif pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()): PAN_mut_str", "of 70, while smaller than the length of the protein of reference i", "+ \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: refProtStr = color['weakBinder'][0] +", "pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['ARP'] for mut in seqMut[pos]):", "PTM_HTML.append(markdowner.convert('&nbsp;\\n')) # Update index i += 70 # Print and save with open(options['html'][\"scroll-template\"],", "= pos - i 
FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(70 - last_pos)", "Fisher test and append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['ARP']['pvalue'] =", "AAseq) init_pos = int(seq[2]) # Check for mutations for AA, pos in zip(AAnonPTM,", "AAseq = seq[1][2:-2] AAnonPTM = re.sub('\\[.+?\\]', '', AAseq) init_pos = int(seq[2]) # Check", "= re.sub('\\[.+?\\]', '', AAseq) init_pos = int(seq[2]) # Check for mutations for AA,", "vacc_samp = vaccSample[pos][vacc] PAN_prop = seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp = vaccSample[pos]['PAN'] PAN_mut_str = '&nbsp;'*(pos -i", "core idxes, and highlight based on class core = [idx -i for idx", "PAN_str = color['PAN'][0] + 'PAN:&nbsp;&nbsp;' + color['PAN'][1] last_pos = 0 for pos in", "in core] refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] +", "AA) refProtStr = refProt[i:i+70] count = 0 # For each binding core and", "= json.load(inFile) # Import data data = importData(options) # Import protein of reference", "[0 if idx < 0 else idx for idx in core] if coreCl", "markdowner = Markdown() color = getRandomColor(options) refProt = ''.join([refProt[pos] for pos in refProt])", "= '&nbsp;'*(pos -i -3+ 6) + \\ color['mut'][0] + mut + color['mut'][1] +", "and seqMUT[pos][ptm]['PAN']): del seqMUT[pos][ptm] if len(seqMUT[pos]) < 1: del seqMUT[pos] return seqMUT, vaccSample", "PTM, initial position AAseq = seq[1][2:-2] AAnonPTM = re.sub('\\[.+?\\]', '', AAseq) init_pos =", "for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['ARP'] for mut in", "ARP_str + color['ARP'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['ARP'][1] +", "refProtStr + '\\n' PTM_HTML.append(markdowner.convert(refProtStr)) # Create PAN string: same as ARP string PAN_str", "color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # If previous binding core in segment,", "array ptm_positive = 
[seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['FOC'] - seqMut[pos][ptm]['FOC'], \\ vaccSample[pos]['PAN'] -", "-3+ 6) + \\ color['mut'][0] + mut + color['mut'][1] + \\ '(' +", "+ '\\n' elif pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()): PAN_mut_str = PAN_mut_str", "refProt): # Initialize MUT_stats = defaultdict(lambda: defaultdict(lambda : defaultdict(lambda: defaultdict(int)))) # For each", "PTM_HTML.append(markdowner.convert(FOC_str)) # Create strings for each PTM positon and type for pos in", "= PAN_mut_str + 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str)) # Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n')) # Update index", "vacc in list(MUT_stats[pos][mut].keys()) \\ and MUT_stats[pos][mut][vacc]['pvalue'] < 0.05: PAN_mut_str = PAN_mut_str + color['red'][0]", "+ \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # If previous binding core", "PAN_prop = seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp = vaccSample[pos]['PAN'] PAN_mut_str = '&nbsp;'*(pos -i -3+ 6) +", "and save with open(options['html'][\"scroll-template\"], 'r') as inFile: with open(options['files']['mutMapJacob.html'], 'w') as outFile: for", "def map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample): # Initialize PTM_HTML = list()", "last_pos = pos - i FOC_str = FOC_str + color['FOC'][0] + '&mdash;'*(70 -", "0.05: PAN_mut_str = PAN_mut_str + color['red'][0] + 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' elif pos in", "count += 1 # If previous binding core in segment, update idx and", "append FOC_str = color['FOC'][0] + 'FOC:&nbsp;&nbsp;' + color['FOC'][1] last_pos = 0 for pos", "# Fisher test and append to output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['ARP']['pvalue']", "is a mutation append if AA is not refProt[pos]: seqMUT[pos][AA][seq[3]] += 1 #", 
"Get binding cores and binding core positions coreIdxs, coreClass = getBindingCore(options, refProt) #", "as np import sys import os import json import csv import re import", "\\ and not(seqMUT[pos][ptm]['FOC'] and seqMUT[pos][ptm]['PAN']): del seqMUT[pos][ptm] if len(seqMUT[pos]) < 1: del seqMUT[pos]", "'strong': refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:]", "sequence with and without PTM, initial position AAseq = seq[1][2:-2] AAnonPTM = re.sub('\\[.+?\\]',", "in list(seqMut.keys()): for ptm in list(seqMut[pos].keys()): if 'PAN' and 'ARP' in list(seqMut[pos][ptm].keys()): #", "in zip(coreIdxs, coreClass): # If initial position of the core overlaps with that", "= 0 for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['FOC'] for", "'.' + '&nbsp;'*(6 -len(str(i))-1) + refProtStr + '\\n' PTM_HTML.append(markdowner.convert(refProtStr)) # Create PAN string:", "of the # vaccines for pos in list(seqMUT.keys()): for ptm in list(seqMUT[pos].keys()): if", "If no previous hightlight if count == 0: # Update core idxes, and", "and highlight based on class else: if coreCl == 'strong': core = [idx", "count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx in core] refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] +", "import Entrez from Bio import SeqIO from collections import defaultdict, OrderedDict from scipy", "coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample): # Initialize PTM_HTML = list() markdowner =", "seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp = vaccSample[pos]['PAN'] PAN_mut_str = '&nbsp;'*(pos -i -3+ 6) + \\ color['mut'][0]", "= pvalue MUT_stats[pos][ptm]['ARP']['oddsratio'] = oddsratio if 'PAN' and 'FOC' in list(seqMut[pos][ptm].keys()): # Create", "1 else: refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] +", "+ '.' 
+ '&nbsp;'*(6 -len(str(i))-1) + refProtStr + '\\n' PTM_HTML.append(markdowner.convert(refProtStr)) # Create PAN", "color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str)) # Create strings for each PTM positon and type for pos", "update idx and highlight based on class else: if coreCl == 'strong': core", "for line in inFile: outFile.write(line) outFile.writelines(PTM_HTML) def main(): # Read options with open('options.json','r')", "= ARP_str + color['ARP'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['ARP'][1]", "i) + color['FOC'][1] + refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['FOC'] = seqMut[pos][mut]['FOC'] last_pos", "not(seqMUT[pos][ptm]['ARP'] and seqMUT[pos][ptm]['PAN']) \\ and not(seqMUT[pos][ptm]['FOC'] and seqMUT[pos][ptm]['PAN']): del seqMUT[pos][ptm] if len(seqMUT[pos]) <", "mut + color['mut'][1] + \\ '(' + vacc + ':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp, PAN_prop, PAN_samp)", "= pos - i PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(70 - last_pos)", "1 # If ending position of the core overlaps with the fragment: same", "in inFile: outFile.write(line) outFile.writelines(PTM_HTML) def main(): # Read options with open('options.json','r') as inFile:", "mapMutations(data, refProt, options) # Compute Fisher exact test MUT_stats = statisticalTest(options, seqMut, vaccSample,", "import random import subprocess from markdown2 import Markdown from Bio import Entrez from", "blocks of 70, while smaller than the length of the protein of reference", "for idx in core] refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\", "# Read options with open('options.json','r') as inFile: options = json.load(inFile) # Import data", "pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['FOC'] for mut in seqMut[pos]):", "'&nbsp;'*(pos -i -3+ 6) + \\ color['mut'][0] + mut + color['mut'][1] + \\", "+ color['mut'][1] + \\ '(' + vacc + 
':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop, vacc_samp, PAN_prop, PAN_samp) if", "in list(mut_dict[pos].keys()): for vacc in list(mut_dict[pos][mut].keys()): if mut_dict[pos][mut][vacc] > 0: vacc_prop = seqMut[pos][mut][vacc]/vaccSample[pos][vacc]", "list(seqMut[pos].keys()): if 'PAN' and 'ARP' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['ARP'],", "+ color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else:", "ARP string, highlighting positions of PTMs, and append ARP_str = color['ARP'][0] + 'ARP:&nbsp;&nbsp;'", "list(seqMut[pos][ptm].keys()): # Create array ptm_positive = [seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['FOC'] - seqMut[pos][ptm]['FOC'],", "outFile.writelines(PTM_HTML) def main(): # Read options with open('options.json','r') as inFile: options = json.load(inFile)", "a mutation append if AA is not refProt[pos]: seqMUT[pos][AA][seq[3]] += 1 # Filter", "Create string of reference protein (taking 70 AA) refProtStr = refProt[i:i+70] count =", "on class core = [idx -i for idx in core] if coreCl ==", "refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1", "# vaccines for pos in list(seqMUT.keys()): for ptm in list(seqMUT[pos].keys()): if not(seqMUT[pos][ptm]['ARP'] and", "in range(i, i + 70): core = [idx -i for idx in core]", "range(i, i + 70): # If no previous hightlight if count == 0:", "PTM_HTML.append(markdowner.convert(PAN_str)) # Create ARP string, highlighting positions of PTMs, and append ARP_str =", "mut_dict[pos][mut]['FOC'] = seqMut[pos][mut]['FOC'] last_pos = pos - i FOC_str = FOC_str + color['FOC'][0]", "+ color['PAN'][1] last_pos = 0 for pos in range(i,i+70): if pos in list(seqMut.keys()):", "== 'strong': refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ 
color['strongBinder'][1] +", "== 0: # Update core idxes, and highlight based on class core =", "Bio import SeqIO from collections import defaultdict, OrderedDict from scipy import stats from", "70 # Print and save with open(options['html'][\"scroll-template\"], 'r') as inFile: with open(options['files']['mutMapJacob.html'], 'w')", "if coreCl == 'strong': refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\", "= oddsratio return MUT_stats def mapMutations(data, refProt, options): # Initialize outputs seqMUT =", "getRandomColor(options) refProt = ''.join([refProt[pos] for pos in refProt]) # In blocks of 70,", "output oddsratio, pvalue = stats.fisher_exact([ptm_positive, ptm_negative]) MUT_stats[pos][ptm]['ARP']['pvalue'] = pvalue MUT_stats[pos][ptm]['ARP']['oddsratio'] = oddsratio if", "to HTML output refProtStr = str(i+1) + '.' + '&nbsp;'*(6 -len(str(i))-1) + refProtStr", "last_pos) + color['ARP'][1] PTM_HTML.append(markdowner.convert(ARP_str)) # Create FOC string, highlighting positions of PTMs, and", "highlighting positions of PTMs, and append FOC_str = color['FOC'][0] + 'FOC:&nbsp;&nbsp;' + color['FOC'][1]", "list(seqMut.keys()): for ptm in list(seqMut[pos].keys()): if 'PAN' and 'ARP' in list(seqMut[pos][ptm].keys()): # Create", "sys import os import json import csv import re import random import subprocess", "PTM_HTML.append(markdowner.convert(refProtStr)) # Create PAN string: same as ARP string PAN_str = color['PAN'][0] +", "the core overlaps with the fragment: same elif core[1] in range(i, i +", "elif core[1] in range(i, i + 70): core = [idx -i for idx", "= defaultdict(lambda: defaultdict((int))) # For each sequence for seq in data: # Initialize:", "reference refProt = reference_retreive(options['refProt']) # Get binding cores and binding core positions coreIdxs,", "binding core positions coreIdxs, coreClass = getBindingCore(options, refProt) # Map mutations seqMut, vaccSample", "in list(MUT_stats[pos][mut].keys()): 
PAN_mut_str = PAN_mut_str + 'p={:.2})'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str)) # Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n'))", "# Create HTML output map2HTML(options, coreIdxs, coreClass, refProt, MUT_stats, seqMut, vaccSample) if __name__", "the core overlaps with that fragment if core[0] in range(i, i + 70):", "refProtStr = refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count", "core] if coreCl == 'strong': refProtStr = color['strongBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1]", "-1 - i) + color['PAN'][1] + refProt[pos-1] last_pos = pos - i PAN_str", "for pos in range(i,i+70): if pos in list(seqMut.keys()): if any(seqMut[pos][mut]['FOC'] for mut in", "'&mdash;'*(pos - last_pos -1 - i) + color['FOC'][1] + refProt[pos-1] for mut in", "in list(mut_dict[pos][mut].keys()): if mut_dict[pos][mut][vacc] > 0: vacc_prop = seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp = vaccSample[pos][vacc] PAN_prop", "idx for idx in core] if coreCl == 'strong': refProtStr = color['strongBinder'][0] +", "'\\n' PTM_HTML.append(markdowner.convert(PAN_mut_str)) # Separate PTM_HTML.append(markdowner.convert('&nbsp;\\n')) # Update index i += 70 # Print", "defaultdict(lambda: defaultdict((int))) # For each sequence for seq in data: # Initialize: sequence", "and binding core positions coreIdxs, coreClass = getBindingCore(options, refProt) # Map mutations seqMut,", "string, highlighting positions of PTMs, and append ARP_str = color['ARP'][0] + 'ARP:&nbsp;&nbsp;' +", "mut in seqMut[pos]): PAN_str = PAN_str + color['PAN'][0] + '&mdash;'*(pos - last_pos -1", "'ARP:&nbsp;&nbsp;' + color['ARP'][1] mut_dict = defaultdict(lambda: defaultdict(lambda: defaultdict(int))) last_pos = 0 for pos", "utils import getBindingCore, importBindData,\\ importData, reference_retreive, div0, getBindingCore, getRandomColor def 
statisticalTest(options, seqMut, vaccSample,", "ending position of the core overlaps with the fragment: same elif core[1] in", "for each PTM positon and type for pos in list(mut_dict.keys()): for mut in", "open('options.json','r') as inFile: options = json.load(inFile) # Import data data = importData(options) #", "AA, pos in zip(AAnonPTM, range(init_pos, init_pos + len(AAnonPTM))): # Count instances vaccSample[pos][seq[3]] +=", "Initialize MUT_stats = defaultdict(lambda: defaultdict(lambda : defaultdict(lambda: defaultdict(int)))) # For each position for", "re.sub('\\[.+?\\]', '', AAseq) init_pos = int(seq[2]) # Check for mutations for AA, pos", "# Count instances vaccSample[pos][seq[3]] += 1 # If there is a mutation append", "and highlight based on class core = [idx -i for idx in core]", "and append ARP_str = color['ARP'][0] + 'ARP:&nbsp;&nbsp;' + color['ARP'][1] mut_dict = defaultdict(lambda: defaultdict(lambda:", "refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['ARP'] = seqMut[pos][mut]['ARP'] last_pos = pos - i", "for idx in core] if coreCl == 'strong': refProtStr = refProtStr[0:core[0]] + color['strongBinder'][0]", "color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # Append", "zip(AAnonPTM, range(init_pos, init_pos + len(AAnonPTM))): # Count instances vaccSample[pos][seq[3]] += 1 # If", "+ refProtStr[core[0]:core[1]] + \\ color['strongBinder'][1] + refProtStr[core[1]:] count += 1 else: refProtStr =", "# Create string of reference protein (taking 70 AA) refProtStr = refProt[i:i+70] count", "range(options['pos_range'][0], options['pos_range'][1]+1): if pos in list(seqMut.keys()): for ptm in list(seqMut[pos].keys()): if 'PAN' and", "options): # Initialize outputs seqMUT = defaultdict(lambda: defaultdict(lambda : defaultdict(int))) vaccSample = defaultdict(lambda:", "# Import data data = importData(options) # Import protein of reference refProt =", "highlighting positions of PTMs, and 
append ARP_str = color['ARP'][0] + 'ARP:&nbsp;&nbsp;' + color['ARP'][1]", "import getBindingCore, importBindData,\\ importData, reference_retreive, div0, getBindingCore, getRandomColor def statisticalTest(options, seqMut, vaccSample, refProt):", "for core, coreCl in zip(coreIdxs, coreClass): # If initial position of the core", "pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()) \\ and MUT_stats[pos][mut][vacc]['pvalue'] < 0.05: PAN_mut_str", "+ refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['ARP'] = seqMut[pos][mut]['ARP'] last_pos = pos -", "= FOC_str + color['FOC'][0] + '&mdash;'*(70 - last_pos) + color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str)) # Create", "Create ARP string, highlighting positions of PTMs, and append ARP_str = color['ARP'][0] +", "class for core, coreCl in zip(coreIdxs, coreClass): # If initial position of the", "hightlight if count == 0: # Update core idxes, and highlight based on", "position of the core overlaps with that fragment if core[0] in range(i, i", "index i += 70 # Print and save with open(options['html'][\"scroll-template\"], 'r') as inFile:", "- last_pos) + color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str)) # Create strings for each PTM positon and", "from Bio import Entrez from Bio import SeqIO from collections import defaultdict, OrderedDict", "vaccSample): # Initialize PTM_HTML = list() markdowner = Markdown() color = getRandomColor(options) refProt", "refProtStr[0:core[0]] + color['weakBinder'][0] + refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1", "= reference_retreive(options['refProt']) # Get binding cores and binding core positions coreIdxs, coreClass =", "defaultdict(lambda: defaultdict(int))) last_pos = 0 for pos in range(i,i+70): if pos in list(seqMut.keys()):", "- last_pos -1 - i) + color['ARP'][1] + refProt[pos-1] for mut in seqMut[pos]:", "getBindingCore, importBindData,\\ importData, reference_retreive, div0, 
getBindingCore, getRandomColor def statisticalTest(options, seqMut, vaccSample, refProt): #", "refProt, MUT_stats, seqMut, vaccSample): # Initialize PTM_HTML = list() markdowner = Markdown() color", "list(mut_dict[pos].keys()): for vacc in list(mut_dict[pos][mut].keys()): if mut_dict[pos][mut][vacc] > 0: vacc_prop = seqMut[pos][mut][vacc]/vaccSample[pos][vacc] vacc_samp", "where there is no samples from any of the # vaccines for pos", "= PAN_str + color['PAN'][0] + '&mdash;'*(pos - last_pos -1 - i) + color['PAN'][1]", "each sequence for seq in data: # Initialize: sequence with and without PTM,", "open(options['files']['mutMapJacob.html'], 'w') as outFile: for line in inFile: outFile.write(line) outFile.writelines(PTM_HTML) def main(): #", "if pos in list(MUT_stats.keys()) and vacc in list(MUT_stats[pos][mut].keys()) \\ and MUT_stats[pos][mut][vacc]['pvalue'] < 0.05:", "vaccSample, refProt): # Initialize MUT_stats = defaultdict(lambda: defaultdict(lambda : defaultdict(lambda: defaultdict(int)))) # For", "Update core idxes, and highlight based on class core = [idx -i for", "vaccSample = mapMutations(data, refProt, options) # Compute Fisher exact test MUT_stats = statisticalTest(options,", "Fisher exact test MUT_stats = statisticalTest(options, seqMut, vaccSample, refProt) # Create HTML output", "highlight based on class core = [idx -i for idx in core] if", "if len(seqMUT[pos]) < 1: del seqMUT[pos] return seqMUT, vaccSample def map2HTML(options, coreIdxs, coreClass,", "\\ color['mut'][0] + mut + color['mut'][1] + \\ '(' + vacc + ':{:.2%}({}),PAN:{:.2%}({}),'.format(vacc_prop,", "position for pos in range(options['pos_range'][0], options['pos_range'][1]+1): if pos in list(seqMut.keys()): for ptm in", "- i) + color['FOC'][1] + refProt[pos-1] for mut in seqMut[pos]: mut_dict[pos][mut]['FOC'] = seqMut[pos][mut]['FOC']", "Print and save with open(options['html'][\"scroll-template\"], 'r') as inFile: with open(options['files']['mutMapJacob.html'], 'w') as outFile:", 
"inFile: options = json.load(inFile) # Import data data = importData(options) # Import protein", "+ color['PAN'][1] + refProt[pos-1] last_pos = pos - i PAN_str = PAN_str +", "seqMUT[pos][ptm]['PAN']) \\ and not(seqMUT[pos][ptm]['FOC'] and seqMUT[pos][ptm]['PAN']): del seqMUT[pos][ptm] if len(seqMUT[pos]) < 1: del", "i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx in core] refProtStr = refProtStr[0:core[0]] +", "in core] core = [0 if idx < 0 else idx for idx", "idxes, and highlight based on class core = [idx -i for idx in", "+ color['red'][0] + 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue']) + '\\n' elif pos in list(MUT_stats.keys()) and vacc in", "if core[0] in range(i, i + 70): # If no previous hightlight if", "in list(seqMut[pos].keys()): if 'PAN' and 'ARP' in list(seqMut[pos][ptm].keys()): # Create array ptm_positive =", "exact test MUT_stats = statisticalTest(options, seqMut, vaccSample, refProt) # Create HTML output map2HTML(options,", "refProt[pos]: seqMUT[pos][AA][seq[3]] += 1 # Filter positions where there is no samples from", "For each binding core and class for core, coreCl in zip(coreIdxs, coreClass): #", "def mapMutations(data, refProt, options): # Initialize outputs seqMUT = defaultdict(lambda: defaultdict(lambda : defaultdict(int)))", "refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # If ending position", "div0, getBindingCore, getRandomColor def statisticalTest(options, seqMut, vaccSample, refProt): # Initialize MUT_stats = defaultdict(lambda:", "defaultdict(lambda: defaultdict(lambda : defaultdict(lambda: defaultdict(int)))) # For each position for pos in range(options['pos_range'][0],", "ptm_positive = [seqMut[pos][ptm]['FOC'], seqMut[pos][ptm]['PAN']] ptm_negative = [vaccSample[pos]['FOC'] - seqMut[pos][ptm]['FOC'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']]", "oddsratio return MUT_stats def mapMutations(data, refProt, options): # Initialize outputs seqMUT 
= defaultdict(lambda:", "pos in list(seqMut.keys()): if any(seqMut[pos][mut]['FOC'] for mut in seqMut[pos]): FOC_str = FOC_str +", "vaccSample[pos]['PAN'] PAN_mut_str = '&nbsp;'*(pos -i -3+ 6) + \\ color['mut'][0] + mut +", "+ 70): # If no previous hightlight if count == 0: # Update", "if idx < 0 else idx for idx in core] if coreCl ==", "overlaps with the fragment: same elif core[1] in range(i, i + 70): core", "color = getRandomColor(options) refProt = ''.join([refProt[pos] for pos in refProt]) # In blocks", "= vaccSample[pos][vacc] PAN_prop = seqMut[pos][mut]['PAN']/vaccSample[pos]['PAN'] PAN_samp = vaccSample[pos]['PAN'] PAN_mut_str = '&nbsp;'*(pos -i -3+", "for mutations for AA, pos in zip(AAnonPTM, range(init_pos, init_pos + len(AAnonPTM))): # Count", "+ refProtStr[core[0]:core[1]] + \\ color['weakBinder'][1] + refProtStr[core[1]:] count += 1 # If ending", "import stats from utils import getBindingCore, importBindData,\\ importData, reference_retreive, div0, getBindingCore, getRandomColor def", "- seqMut[pos][ptm]['FOC'], \\ vaccSample[pos]['PAN'] - seqMut[pos][ptm]['PAN']] # Fisher test and append to output", "list(MUT_stats[pos][mut].keys()) \\ and MUT_stats[pos][mut][vacc]['pvalue'] < 0.05: PAN_mut_str = PAN_mut_str + color['red'][0] + 'p={:.2}'.format(MUT_stats[pos][mut][vacc]['pvalue'])", "- i + count*(len(color['strongBinder'][0]) + len(color['strongBinder'][1])) for idx in core] refProtStr = refProtStr[0:core[0]]", "'w') as outFile: for line in inFile: outFile.write(line) outFile.writelines(PTM_HTML) def main(): # Read", "+ '&mdash;'*(70 - last_pos) + color['FOC'][1] PTM_HTML.append(markdowner.convert(FOC_str)) # Create strings for each PTM" ]
[ "script_type == ScriptType.BASH: from orchestration.integration.custom_scripts.bash_execution import BashScriptExecution bash = BashScriptExecution() result = bash.execute(command)", "self, result: dict, output_file_path: str): helper.save_json_file( result['output'], output_file_path) def modify_json_file( self, result: dict,", "BashScriptExecution() result = bash.execute(command) else: return ValueError('Invalid type received') if output_file_path is not", "helper class CustomScriptExecution(object): def execute( self, script_type: ScriptType, command: str, output_file_path: str =", "class CustomScriptExecution(object): def execute( self, script_type: ScriptType, command: str, output_file_path: str = None,", "= None) -> dict: if script_type == ScriptType.POWERSHELL: from orchestration.integration.custom_scripts.powershell_execution import PowershellScriptExecution pwsh", "orchestration.integration.custom_scripts.bash_execution import BashScriptExecution bash = BashScriptExecution() result = bash.execute(command) else: return ValueError('Invalid type", "file_path_to_update is not None and\\ len(file_path_to_update) > 0: self.modify_json_file( result= result, property_path= property_path,", "property_path= property_path, file_path_to_update= file_path_to_update) return result['output'] def save_json_file( self, result: dict, output_file_path: str):", "from orchestration.integration.custom_scripts.bash_execution import BashScriptExecution bash = BashScriptExecution() result = bash.execute(command) else: return ValueError('Invalid", "len(output_file_path) > 0: self.save_json_file( result, output_file_path) if property_path is not None and\\ len(property_path)", "else: return ValueError('Invalid type received') if output_file_path is not None and\\ len(output_file_path) >", "ScriptType, command: str, output_file_path: str = None, property_path: str = None, file_path_to_update: str", "= None, file_path_to_update: str = None) -> dict: if script_type == 
ScriptType.POWERSHELL: from", "= None, property_path: str = None, file_path_to_update: str = None) -> dict: if", "None and\\ len(property_path) > 0 and\\ file_path_to_update is not None and\\ len(file_path_to_update) >", "ScriptType.POWERSHELL: from orchestration.integration.custom_scripts.powershell_execution import PowershellScriptExecution pwsh = PowershellScriptExecution() result = pwsh.execute(command) elif script_type", "script_type: ScriptType, command: str, output_file_path: str = None, property_path: str = None, file_path_to_update:", "import ScriptType from orchestration.common import helper class CustomScriptExecution(object): def execute( self, script_type: ScriptType,", "output_file_path: str): helper.save_json_file( result['output'], output_file_path) def modify_json_file( self, result: dict, property_path: str, file_path_to_update:", "not None and\\ len(output_file_path) > 0: self.save_json_file( result, output_file_path) if property_path is not", "script_type == ScriptType.POWERSHELL: from orchestration.integration.custom_scripts.powershell_execution import PowershellScriptExecution pwsh = PowershellScriptExecution() result = pwsh.execute(command)", "result['output'], output_file_path) def modify_json_file( self, result: dict, property_path: str, file_path_to_update: str): helper.modify_json_file( prop_value=", "-> dict: if script_type == ScriptType.POWERSHELL: from orchestration.integration.custom_scripts.powershell_execution import PowershellScriptExecution pwsh = PowershellScriptExecution()", "str, output_file_path: str = None, property_path: str = None, file_path_to_update: str = None)", "is not None and\\ len(property_path) > 0 and\\ file_path_to_update is not None and\\", "None and\\ len(file_path_to_update) > 0: self.modify_json_file( result= result, property_path= property_path, file_path_to_update= file_path_to_update) return", "> 0 and\\ file_path_to_update is not None and\\ len(file_path_to_update) > 0: self.modify_json_file( result=", 
"len(property_path) > 0 and\\ file_path_to_update is not None and\\ len(file_path_to_update) > 0: self.modify_json_file(", "from orchestration.common import helper class CustomScriptExecution(object): def execute( self, script_type: ScriptType, command: str,", "output_file_path: str = None, property_path: str = None, file_path_to_update: str = None) ->", "> 0: self.save_json_file( result, output_file_path) if property_path is not None and\\ len(property_path) >", "import helper class CustomScriptExecution(object): def execute( self, script_type: ScriptType, command: str, output_file_path: str", "PowershellScriptExecution() result = pwsh.execute(command) elif script_type == ScriptType.BASH: from orchestration.integration.custom_scripts.bash_execution import BashScriptExecution bash", "None, property_path: str = None, file_path_to_update: str = None) -> dict: if script_type", "self, result: dict, property_path: str, file_path_to_update: str): helper.modify_json_file( prop_value= result['output'], prop_key= property_path, path=", "self, script_type: ScriptType, command: str, output_file_path: str = None, property_path: str = None,", "ScriptType.BASH: from orchestration.integration.custom_scripts.bash_execution import BashScriptExecution bash = BashScriptExecution() result = bash.execute(command) else: return", "== ScriptType.POWERSHELL: from orchestration.integration.custom_scripts.powershell_execution import PowershellScriptExecution pwsh = PowershellScriptExecution() result = pwsh.execute(command) elif", "from orchestration.integration.custom_scripts.powershell_execution import PowershellScriptExecution pwsh = PowershellScriptExecution() result = pwsh.execute(command) elif script_type ==", "is not None and\\ len(file_path_to_update) > 0: self.modify_json_file( result= result, property_path= property_path, file_path_to_update=", "> 0: self.modify_json_file( result= result, property_path= property_path, file_path_to_update= file_path_to_update) return result['output'] 
def save_json_file(", "file_path_to_update= file_path_to_update) return result['output'] def save_json_file( self, result: dict, output_file_path: str): helper.save_json_file( result['output'],", "file_path_to_update) return result['output'] def save_json_file( self, result: dict, output_file_path: str): helper.save_json_file( result['output'], output_file_path)", "0: self.modify_json_file( result= result, property_path= property_path, file_path_to_update= file_path_to_update) return result['output'] def save_json_file( self,", "result['output'] def save_json_file( self, result: dict, output_file_path: str): helper.save_json_file( result['output'], output_file_path) def modify_json_file(", "None and\\ len(output_file_path) > 0: self.save_json_file( result, output_file_path) if property_path is not None", "len(file_path_to_update) > 0: self.modify_json_file( result= result, property_path= property_path, file_path_to_update= file_path_to_update) return result['output'] def", "str = None, property_path: str = None, file_path_to_update: str = None) -> dict:", "<reponame>dave-read/vdc from orchestration.models.script_type import ScriptType from orchestration.common import helper class CustomScriptExecution(object): def execute(", "property_path, file_path_to_update= file_path_to_update) return result['output'] def save_json_file( self, result: dict, output_file_path: str): helper.save_json_file(", "ValueError('Invalid type received') if output_file_path is not None and\\ len(output_file_path) > 0: self.save_json_file(", "PowershellScriptExecution pwsh = PowershellScriptExecution() result = pwsh.execute(command) elif script_type == ScriptType.BASH: from orchestration.integration.custom_scripts.bash_execution", "pwsh = PowershellScriptExecution() result = pwsh.execute(command) elif script_type == ScriptType.BASH: from orchestration.integration.custom_scripts.bash_execution import", "and\\ len(file_path_to_update) > 0: self.modify_json_file( result= result, property_path= 
property_path, file_path_to_update= file_path_to_update) return result['output']", "received') if output_file_path is not None and\\ len(output_file_path) > 0: self.save_json_file( result, output_file_path)", "self.save_json_file( result, output_file_path) if property_path is not None and\\ len(property_path) > 0 and\\", "return ValueError('Invalid type received') if output_file_path is not None and\\ len(output_file_path) > 0:", "0: self.save_json_file( result, output_file_path) if property_path is not None and\\ len(property_path) > 0", "orchestration.common import helper class CustomScriptExecution(object): def execute( self, script_type: ScriptType, command: str, output_file_path:", "ScriptType from orchestration.common import helper class CustomScriptExecution(object): def execute( self, script_type: ScriptType, command:", "None, file_path_to_update: str = None) -> dict: if script_type == ScriptType.POWERSHELL: from orchestration.integration.custom_scripts.powershell_execution", "self.modify_json_file( result= result, property_path= property_path, file_path_to_update= file_path_to_update) return result['output'] def save_json_file( self, result:", "output_file_path) def modify_json_file( self, result: dict, property_path: str, file_path_to_update: str): helper.modify_json_file( prop_value= result['output'],", "= pwsh.execute(command) elif script_type == ScriptType.BASH: from orchestration.integration.custom_scripts.bash_execution import BashScriptExecution bash = BashScriptExecution()", "bash.execute(command) else: return ValueError('Invalid type received') if output_file_path is not None and\\ len(output_file_path)", "save_json_file( self, result: dict, output_file_path: str): helper.save_json_file( result['output'], output_file_path) def modify_json_file( self, result:", "dict, output_file_path: str): helper.save_json_file( result['output'], output_file_path) def modify_json_file( self, result: dict, property_path: str,", "output_file_path) if property_path 
is not None and\\ len(property_path) > 0 and\\ file_path_to_update is", "def execute( self, script_type: ScriptType, command: str, output_file_path: str = None, property_path: str", "execute( self, script_type: ScriptType, command: str, output_file_path: str = None, property_path: str =", "str = None, file_path_to_update: str = None) -> dict: if script_type == ScriptType.POWERSHELL:", "output_file_path is not None and\\ len(output_file_path) > 0: self.save_json_file( result, output_file_path) if property_path", "file_path_to_update: str = None) -> dict: if script_type == ScriptType.POWERSHELL: from orchestration.integration.custom_scripts.powershell_execution import", "= BashScriptExecution() result = bash.execute(command) else: return ValueError('Invalid type received') if output_file_path is", "property_path: str = None, file_path_to_update: str = None) -> dict: if script_type ==", "and\\ len(property_path) > 0 and\\ file_path_to_update is not None and\\ len(file_path_to_update) > 0:", "def save_json_file( self, result: dict, output_file_path: str): helper.save_json_file( result['output'], output_file_path) def modify_json_file( self,", "modify_json_file( self, result: dict, property_path: str, file_path_to_update: str): helper.modify_json_file( prop_value= result['output'], prop_key= property_path,", "from orchestration.models.script_type import ScriptType from orchestration.common import helper class CustomScriptExecution(object): def execute( self,", "and\\ file_path_to_update is not None and\\ len(file_path_to_update) > 0: self.modify_json_file( result= result, property_path=", "command: str, output_file_path: str = None, property_path: str = None, file_path_to_update: str =", "result = pwsh.execute(command) elif script_type == ScriptType.BASH: from orchestration.integration.custom_scripts.bash_execution import BashScriptExecution bash =", "None) -> dict: if script_type == ScriptType.POWERSHELL: from 
orchestration.integration.custom_scripts.powershell_execution import PowershellScriptExecution pwsh =", "result: dict, property_path: str, file_path_to_update: str): helper.modify_json_file( prop_value= result['output'], prop_key= property_path, path= file_path_to_update)", "not None and\\ len(file_path_to_update) > 0: self.modify_json_file( result= result, property_path= property_path, file_path_to_update= file_path_to_update)", "= PowershellScriptExecution() result = pwsh.execute(command) elif script_type == ScriptType.BASH: from orchestration.integration.custom_scripts.bash_execution import BashScriptExecution", "str = None) -> dict: if script_type == ScriptType.POWERSHELL: from orchestration.integration.custom_scripts.powershell_execution import PowershellScriptExecution", "== ScriptType.BASH: from orchestration.integration.custom_scripts.bash_execution import BashScriptExecution bash = BashScriptExecution() result = bash.execute(command) else:", "import BashScriptExecution bash = BashScriptExecution() result = bash.execute(command) else: return ValueError('Invalid type received')", "if property_path is not None and\\ len(property_path) > 0 and\\ file_path_to_update is not", "orchestration.models.script_type import ScriptType from orchestration.common import helper class CustomScriptExecution(object): def execute( self, script_type:", "result= result, property_path= property_path, file_path_to_update= file_path_to_update) return result['output'] def save_json_file( self, result: dict,", "pwsh.execute(command) elif script_type == ScriptType.BASH: from orchestration.integration.custom_scripts.bash_execution import BashScriptExecution bash = BashScriptExecution() result", "dict: if script_type == ScriptType.POWERSHELL: from orchestration.integration.custom_scripts.powershell_execution import PowershellScriptExecution pwsh = PowershellScriptExecution() result", "return result['output'] def save_json_file( self, result: dict, output_file_path: str): 
helper.save_json_file( result['output'], output_file_path) def", "if script_type == ScriptType.POWERSHELL: from orchestration.integration.custom_scripts.powershell_execution import PowershellScriptExecution pwsh = PowershellScriptExecution() result =", "BashScriptExecution bash = BashScriptExecution() result = bash.execute(command) else: return ValueError('Invalid type received') if", "and\\ len(output_file_path) > 0: self.save_json_file( result, output_file_path) if property_path is not None and\\", "str): helper.save_json_file( result['output'], output_file_path) def modify_json_file( self, result: dict, property_path: str, file_path_to_update: str):", "import PowershellScriptExecution pwsh = PowershellScriptExecution() result = pwsh.execute(command) elif script_type == ScriptType.BASH: from", "result, property_path= property_path, file_path_to_update= file_path_to_update) return result['output'] def save_json_file( self, result: dict, output_file_path:", "property_path is not None and\\ len(property_path) > 0 and\\ file_path_to_update is not None", "0 and\\ file_path_to_update is not None and\\ len(file_path_to_update) > 0: self.modify_json_file( result= result,", "def modify_json_file( self, result: dict, property_path: str, file_path_to_update: str): helper.modify_json_file( prop_value= result['output'], prop_key=", "bash = BashScriptExecution() result = bash.execute(command) else: return ValueError('Invalid type received') if output_file_path", "= bash.execute(command) else: return ValueError('Invalid type received') if output_file_path is not None and\\", "type received') if output_file_path is not None and\\ len(output_file_path) > 0: self.save_json_file( result,", "helper.save_json_file( result['output'], output_file_path) def modify_json_file( self, result: dict, property_path: str, file_path_to_update: str): helper.modify_json_file(", "if output_file_path is not None and\\ len(output_file_path) > 0: self.save_json_file( result, output_file_path) if", 
"result: dict, output_file_path: str): helper.save_json_file( result['output'], output_file_path) def modify_json_file( self, result: dict, property_path:", "orchestration.integration.custom_scripts.powershell_execution import PowershellScriptExecution pwsh = PowershellScriptExecution() result = pwsh.execute(command) elif script_type == ScriptType.BASH:", "result = bash.execute(command) else: return ValueError('Invalid type received') if output_file_path is not None", "not None and\\ len(property_path) > 0 and\\ file_path_to_update is not None and\\ len(file_path_to_update)", "CustomScriptExecution(object): def execute( self, script_type: ScriptType, command: str, output_file_path: str = None, property_path:", "is not None and\\ len(output_file_path) > 0: self.save_json_file( result, output_file_path) if property_path is", "result, output_file_path) if property_path is not None and\\ len(property_path) > 0 and\\ file_path_to_update", "elif script_type == ScriptType.BASH: from orchestration.integration.custom_scripts.bash_execution import BashScriptExecution bash = BashScriptExecution() result =" ]
[ "window to about less than quarter of the screen at 1920*1080 resolution screen", ", 778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 360] ) ## time.sleep(0.1) keys =", "keys = key_check() while screen[778,250] < 130 or screen[778,250] > 200 : if", "if screen[765 , 360]<130 or screen[765 , 360]>200 : Move1(420 , 778) screen", "= key_check() if 'X' in keys: break Move2(0,0) while screen [778 , 480]<130", "keys: break Move2(0,0) while screen[778 , 590]<130 or screen[778 , 590]>200: if screen[765", "Move1(620 , 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 600] ) keys", ", Move2 import time from getkeys import key_check import cv2 def main ()", "360]<130 or screen[778 , 360]>200 : if screen[765 , 360]<130 or screen[765 ,", ", Move1 , Move2 import time from getkeys import key_check import cv2 def", ": if screen[765 , 360]<130 or screen[765 , 360]>200 : Move1(420 , 778)", "as np from grabscreen import grab_screen from directkeys import Up , Down ,", "screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 480] ) keys = key_check() if 'X' in", ": #Resize the game window to about less than quarter of the screen", "break Move2(0,0) while screen[778 , 360]<130 or screen[778 , 360]>200 : if screen[765", "screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 250] ) keys = key_check() ## time.sleep(0.1) if", ": Move1(420 , 778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 360] ) ## time.sleep(0.1)", "screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 600] ) keys = key_check() if 'X' in", ", 360] ) ## time.sleep(0.1) keys = key_check() if 'X' in keys: break", "480]<130 or screen [778 , 480]>200 : if screen [765 , 480]<130 or", "screen[778,250] < 130 or screen[778,250] > 200 : if 
screen[765,250] < 130 or", "or screen[765 , 590]>200: Move1(620 , 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778", "time from getkeys import key_check import cv2 def main () : while(True) :", "cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 480] ) keys = key_check() if 'X' in keys: break", ": if screen [765 , 480]<130 or screen [765 , 480]>200 : Move1(525", "= key_check() ## time.sleep(0.1) if 'X' in keys: break Move2(0,0) while screen[778 ,", "ReleaseKey , Move1 , Move2 import time from getkeys import key_check import cv2", "600] ) keys = key_check() if 'X' in keys: break Move2(0,0) if 'X'", "keys: break Move2(0,0) while screen[778 , 360]<130 or screen[778 , 360]>200 : if", "from grabscreen import grab_screen from directkeys import Up , Down , PressKey ,", ", 360]>200 : if screen[765 , 360]<130 or screen[765 , 360]>200 : Move1(420", "in keys: break Move2(0,0) while screen[778 , 590]<130 or screen[778 , 590]>200: if", ", 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 600] ) keys =", "200 : Move1(307,778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 250] ) keys = key_check()", "360] ) ## time.sleep(0.1) keys = key_check() if 'X' in keys: break Move2(0,0)", "while screen[778 , 590]<130 or screen[778 , 590]>200: if screen[765 , 590]<130 or", "PressKey , ReleaseKey , Move1 , Move2 import time from getkeys import key_check", "resolution screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys = key_check() while screen[778,250] < 130 or screen[778,250]", "screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys = key_check() while screen[778,250] < 130 or screen[778,250] >", "Move2(0,0) while screen [778 , 480]<130 or screen [778 , 480]>200 : if", "keys = key_check() if 'X' in keys: break 
Move2(0,0) while screen [778 ,", "Move2(0,0) while screen[778 , 590]<130 or screen[778 , 590]>200: if screen[765 , 590]<130", "break Move2(0,0) while screen[778 , 590]<130 or screen[778 , 590]>200: if screen[765 ,", "the game window to about less than quarter of the screen at 1920*1080", "or screen[765 , 360]>200 : Move1(420 , 778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 ,", "360]>200 : if screen[765 , 360]<130 or screen[765 , 360]>200 : Move1(420 ,", "print(screen[778 , 360] ) ## time.sleep(0.1) keys = key_check() if 'X' in keys:", "[765 , 480]>200 : Move1(525 , 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778", "time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 480] ) keys = key_check() if 'X'", "or screen[778 , 590]>200: if screen[765 , 590]<130 or screen[765 , 590]>200: Move1(620", "key_check() if 'X' in keys: break Move2(0,0) while screen [778 , 480]<130 or", "keys = key_check() if 'X' in keys: break Move2(0,0) while screen[778 , 590]<130", "screen [765 , 480]<130 or screen [765 , 480]>200 : Move1(525 , 778)", "590]<130 or screen[765 , 590]>200: Move1(620 , 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY)", "numpy as np from grabscreen import grab_screen from directkeys import Up , Down", "250] ) keys = key_check() ## time.sleep(0.1) if 'X' in keys: break Move2(0,0)", "grab_screen from directkeys import Up , Down , PressKey , ReleaseKey , Move1", "if 'X' in keys: break Move2(0,0) while screen[778 , 360]<130 or screen[778 ,", "getkeys import key_check import cv2 def main () : while(True) : #Resize the", "while screen[778,250] < 130 or screen[778,250] > 200 : if screen[765,250] < 130", ": Move1(307,778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 250] ) keys = key_check() ##", 
"screen [765 , 480]>200 : Move1(525 , 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY)", "Move1 , Move2 import time from getkeys import key_check import cv2 def main", "#Resize the game window to about less than quarter of the screen at", "Move1(420 , 778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 360] ) ## time.sleep(0.1) keys", "or screen [778 , 480]>200 : if screen [765 , 480]<130 or screen", "'X' in keys: break Move2(0,0) while screen [778 , 480]<130 or screen [778", "import time from getkeys import key_check import cv2 def main () : while(True)", "import cv2 def main () : while(True) : #Resize the game window to", "screen[765 , 590]<130 or screen[765 , 590]>200: Move1(620 , 778) ## time.sleep(0.1) screen", "778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 480] ) keys = key_check()", "if 'X' in keys: break Move2(0,0) while screen[778 , 590]<130 or screen[778 ,", "than quarter of the screen at 1920*1080 resolution screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys =", "< 130 or screen[778,250] > 200 : if screen[765,250] < 130 or screen[765,250]", "= cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys = key_check() while screen[778,250] < 130 or screen[778,250] > 200", "= key_check() if 'X' in keys: break Move2(0,0) if 'X' in keys: break", "screen[778 , 590]<130 or screen[778 , 590]>200: if screen[765 , 590]<130 or screen[765", "screen at 1920*1080 resolution screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys = key_check() while screen[778,250] <", "590]>200: Move1(620 , 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 600] )", "480]>200 : Move1(525 , 778) ## time.sleep(0.1) screen = 
cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 480]", "= key_check() while screen[778,250] < 130 or screen[778,250] > 200 : if screen[765,250]", "778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 600] ) keys = key_check()", "the screen at 1920*1080 resolution screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys = key_check() while screen[778,250]", "import key_check import cv2 def main () : while(True) : #Resize the game", "1920*1080 resolution screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys = key_check() while screen[778,250] < 130 or", "[765 , 480]<130 or screen [765 , 480]>200 : Move1(525 , 778) ##", ", Down , PressKey , ReleaseKey , Move1 , Move2 import time from", "if screen[765 , 590]<130 or screen[765 , 590]>200: Move1(620 , 778) ## time.sleep(0.1)", "= cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 480] ) keys = key_check() if 'X' in keys:", "Down , PressKey , ReleaseKey , Move1 , Move2 import time from getkeys", "'X' in keys: break Move2(0,0) while screen[778 , 590]<130 or screen[778 , 590]>200:", "[778 , 480]>200 : if screen [765 , 480]<130 or screen [765 ,", ", ReleaseKey , Move1 , Move2 import time from getkeys import key_check import", ", 360]>200 : Move1(420 , 778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 360] )", "= key_check() if 'X' in keys: break Move2(0,0) while screen[778 , 590]<130 or", "time.sleep(0.1) keys = key_check() if 'X' in keys: break Move2(0,0) while screen [778", "or screen [765 , 480]>200 : Move1(525 , 778) ## time.sleep(0.1) screen =", "def main () : while(True) : #Resize the game window to about less", "cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 600] ) keys = key_check() if 'X' in keys: break", "grabscreen import grab_screen from 
directkeys import Up , Down , PressKey , ReleaseKey", "key_check() ## time.sleep(0.1) if 'X' in keys: break Move2(0,0) while screen[778 , 360]<130", ", 590]>200: if screen[765 , 590]<130 or screen[765 , 590]>200: Move1(620 , 778)", "## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 600] ) keys = key_check() if", "or screen[765,250] > 200 : Move1(307,778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 250] )", "cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys = key_check() while screen[778,250] < 130 or screen[778,250] > 200 :", "screen[778 , 360]<130 or screen[778 , 360]>200 : if screen[765 , 360]<130 or", "480]>200 : if screen [765 , 480]<130 or screen [765 , 480]>200 :", "## time.sleep(0.1) keys = key_check() if 'X' in keys: break Move2(0,0) while screen", "480] ) keys = key_check() if 'X' in keys: break Move2(0,0) while screen[778", ", 250] ) keys = key_check() ## time.sleep(0.1) if 'X' in keys: break", "130 or screen[778,250] > 200 : if screen[765,250] < 130 or screen[765,250] >", "= cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 250] ) keys = key_check() ## time.sleep(0.1) if 'X'", "from directkeys import Up , Down , PressKey , ReleaseKey , Move1 ,", "main () : while(True) : #Resize the game window to about less than", "at 1920*1080 resolution screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys = key_check() while screen[778,250] < 130", "screen[765 , 590]>200: Move1(620 , 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 ,", "< 130 or screen[765,250] > 200 : Move1(307,778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 ,", ", 590]<130 or screen[778 , 590]>200: if screen[765 , 590]<130 or screen[765 ,", ", PressKey , ReleaseKey , Move1 , Move2 import 
time from getkeys import", "Move1(525 , 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 480] ) keys", "in keys: break Move2(0,0) while screen [778 , 480]<130 or screen [778 ,", "= cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 600] ) keys = key_check() if 'X' in keys:", "break Move2(0,0) while screen [778 , 480]<130 or screen [778 , 480]>200 :", "about less than quarter of the screen at 1920*1080 resolution screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY)", "quarter of the screen at 1920*1080 resolution screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys = key_check()", "import numpy as np from grabscreen import grab_screen from directkeys import Up ,", "if 'X' in keys: break Move2(0,0) while screen [778 , 480]<130 or screen", "screen[765 , 360]>200 : Move1(420 , 778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 360]", "[778 , 480]<130 or screen [778 , 480]>200 : if screen [765 ,", "590]>200: if screen[765 , 590]<130 or screen[765 , 590]>200: Move1(620 , 778) ##", "while(True) : #Resize the game window to about less than quarter of the", "if screen[765,250] < 130 or screen[765,250] > 200 : Move1(307,778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY)", "screen[765,250] < 130 or screen[765,250] > 200 : Move1(307,778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778", "screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 360] ) ## time.sleep(0.1) keys = key_check() if", "screen[778,250] > 200 : if screen[765,250] < 130 or screen[765,250] > 200 :", "Move2 import time from getkeys import key_check import cv2 def main () :", "## time.sleep(0.1) if 'X' in keys: break Move2(0,0) while screen[778 , 360]<130 or", "game window to about less than 
quarter of the screen at 1920*1080 resolution", "or screen[778 , 360]>200 : if screen[765 , 360]<130 or screen[765 , 360]>200", "while screen [778 , 480]<130 or screen [778 , 480]>200 : if screen", "200 : if screen[765,250] < 130 or screen[765,250] > 200 : Move1(307,778) screen", "np from grabscreen import grab_screen from directkeys import Up , Down , PressKey", "screen[778 , 360]>200 : if screen[765 , 360]<130 or screen[765 , 360]>200 :", ") keys = key_check() if 'X' in keys: break Move2(0,0) while screen[778 ,", ", 590]>200: Move1(620 , 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 600]", ": if screen[765,250] < 130 or screen[765,250] > 200 : Move1(307,778) screen =", "if screen [765 , 480]<130 or screen [765 , 480]>200 : Move1(525 ,", "480]<130 or screen [765 , 480]>200 : Move1(525 , 778) ## time.sleep(0.1) screen", "778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 360] ) ## time.sleep(0.1) keys = key_check()", ", 360]<130 or screen[765 , 360]>200 : Move1(420 , 778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY)", "360]<130 or screen[765 , 360]>200 : Move1(420 , 778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778", "> 200 : Move1(307,778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 250] ) keys =", ", 480]>200 : if screen [765 , 480]<130 or screen [765 , 480]>200", ", 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 480] ) keys =", "key_check() if 'X' in keys: break Move2(0,0) while screen[778 , 590]<130 or screen[778", "cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 360] ) ## time.sleep(0.1) keys = key_check() if 'X' in", ", 600] ) keys = key_check() if 'X' in keys: break Move2(0,0) if", "print(screen[778 , 
480] ) keys = key_check() if 'X' in keys: break Move2(0,0)", "time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 600] ) keys = key_check() if 'X'", "'X' in keys: break Move2(0,0) while screen[778 , 360]<130 or screen[778 , 360]>200", "import grab_screen from directkeys import Up , Down , PressKey , ReleaseKey ,", "360]>200 : Move1(420 , 778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 360] ) ##", ", 480]<130 or screen [765 , 480]>200 : Move1(525 , 778) ## time.sleep(0.1)", ") keys = key_check() ## time.sleep(0.1) if 'X' in keys: break Move2(0,0) while", "in keys: break Move2(0,0) while screen[778 , 360]<130 or screen[778 , 360]>200 :", ", 480]>200 : Move1(525 , 778) ## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 ,", "screen[778 , 590]>200: if screen[765 , 590]<130 or screen[765 , 590]>200: Move1(620 ,", "Up , Down , PressKey , ReleaseKey , Move1 , Move2 import time", ", 590]<130 or screen[765 , 590]>200: Move1(620 , 778) ## time.sleep(0.1) screen =", "key_check() while screen[778,250] < 130 or screen[778,250] > 200 : if screen[765,250] <", "130 or screen[765,250] > 200 : Move1(307,778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 250]", "Move2(0,0) while screen[778 , 360]<130 or screen[778 , 360]>200 : if screen[765 ,", "print(screen[778 , 600] ) keys = key_check() if 'X' in keys: break Move2(0,0)", "keys = key_check() ## time.sleep(0.1) if 'X' in keys: break Move2(0,0) while screen[778", "cv2 def main () : while(True) : #Resize the game window to about", "less than quarter of the screen at 1920*1080 resolution screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys", "cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 250] ) keys = key_check() ## time.sleep(0.1) if 'X' in", ", 
480]<130 or screen [778 , 480]>200 : if screen [765 , 480]<130", "to about less than quarter of the screen at 1920*1080 resolution screen =", "keys: break Move2(0,0) while screen [778 , 480]<130 or screen [778 , 480]>200", "key_check import cv2 def main () : while(True) : #Resize the game window", "590]<130 or screen[778 , 590]>200: if screen[765 , 590]<130 or screen[765 , 590]>200:", ", 480] ) keys = key_check() if 'X' in keys: break Move2(0,0) while", "or screen[778,250] > 200 : if screen[765,250] < 130 or screen[765,250] > 200", "## time.sleep(0.1) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 480] ) keys = key_check() if", ") ## time.sleep(0.1) keys = key_check() if 'X' in keys: break Move2(0,0) while", "screen[765,250] > 200 : Move1(307,778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 250] ) keys", "while screen[778 , 360]<130 or screen[778 , 360]>200 : if screen[765 , 360]<130", "directkeys import Up , Down , PressKey , ReleaseKey , Move1 , Move2", "screen [778 , 480]>200 : if screen [765 , 480]<130 or screen [765", "time.sleep(0.1) if 'X' in keys: break Move2(0,0) while screen[778 , 360]<130 or screen[778", "keys = key_check() if 'X' in keys: break Move2(0,0) if 'X' in keys:", "() : while(True) : #Resize the game window to about less than quarter", ") keys = key_check() if 'X' in keys: break Move2(0,0) if 'X' in", "Move1(307,778) screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 250] ) keys = key_check() ## time.sleep(0.1)", "of the screen at 1920*1080 resolution screen = cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) keys = key_check() while", "= cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 360] ) ## time.sleep(0.1) keys = key_check() if 'X'", ": Move1(525 , 778) ## time.sleep(0.1) screen = 
cv2.cvtColor(grab_screen(region=(0,0,800,800)),cv2.COLOR_RGB2GRAY) print(screen[778 , 480] )", "screen [778 , 480]<130 or screen [778 , 480]>200 : if screen [765", ", 360]<130 or screen[778 , 360]>200 : if screen[765 , 360]<130 or screen[765", "import Up , Down , PressKey , ReleaseKey , Move1 , Move2 import", ": while(True) : #Resize the game window to about less than quarter of", "print(screen[778 , 250] ) keys = key_check() ## time.sleep(0.1) if 'X' in keys:", "screen[765 , 360]<130 or screen[765 , 360]>200 : Move1(420 , 778) screen =", "key_check() if 'X' in keys: break Move2(0,0) if 'X' in keys: break main()", "> 200 : if screen[765,250] < 130 or screen[765,250] > 200 : Move1(307,778)", "from getkeys import key_check import cv2 def main () : while(True) : #Resize" ]
[ "from django.urls import path from graph.api import views urlpatterns = [ path('', views.GraphList.as_view(),", "path('', views.GraphList.as_view(), name='graph-list'), path('<int:pk>', views.GraphDetail.as_view(), name='graph-list'), path('create/', views.GraphCreate.as_view(), name='graph-list'), path('get_graphs/', views.get_graphs, name='get_graphs'), ]", "<filename>src/backend/graph/api/urls.py<gh_stars>0 from django.urls import path from graph.api import views urlpatterns = [ path('',", "path from graph.api import views urlpatterns = [ path('', views.GraphList.as_view(), name='graph-list'), path('<int:pk>', views.GraphDetail.as_view(),", "django.urls import path from graph.api import views urlpatterns = [ path('', views.GraphList.as_view(), name='graph-list'),", "import path from graph.api import views urlpatterns = [ path('', views.GraphList.as_view(), name='graph-list'), path('<int:pk>',", "from graph.api import views urlpatterns = [ path('', views.GraphList.as_view(), name='graph-list'), path('<int:pk>', views.GraphDetail.as_view(), name='graph-list'),", "views urlpatterns = [ path('', views.GraphList.as_view(), name='graph-list'), path('<int:pk>', views.GraphDetail.as_view(), name='graph-list'), path('create/', views.GraphCreate.as_view(), name='graph-list'),", "graph.api import views urlpatterns = [ path('', views.GraphList.as_view(), name='graph-list'), path('<int:pk>', views.GraphDetail.as_view(), name='graph-list'), path('create/',", "urlpatterns = [ path('', views.GraphList.as_view(), name='graph-list'), path('<int:pk>', views.GraphDetail.as_view(), name='graph-list'), path('create/', views.GraphCreate.as_view(), name='graph-list'), path('get_graphs/',", "= [ path('', views.GraphList.as_view(), name='graph-list'), path('<int:pk>', views.GraphDetail.as_view(), name='graph-list'), path('create/', views.GraphCreate.as_view(), name='graph-list'), path('get_graphs/', views.get_graphs,", "import views urlpatterns = [ path('', views.GraphList.as_view(), 
name='graph-list'), path('<int:pk>', views.GraphDetail.as_view(), name='graph-list'), path('create/', views.GraphCreate.as_view(),", "[ path('', views.GraphList.as_view(), name='graph-list'), path('<int:pk>', views.GraphDetail.as_view(), name='graph-list'), path('create/', views.GraphCreate.as_view(), name='graph-list'), path('get_graphs/', views.get_graphs, name='get_graphs')," ]
[ "url = f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs = self.send_get(url) root = BeautifulSoup(rs.content, 'html.parser') # http://squarefaction.ru/main/search/games?q=dead+space if", "game list') for game_block in root.select('#games > .entry'): title = self.get_norm_text(game_block.select_one('.name')) if not", "in rs.url: self.log_info(f'Parsing of game list') for game_block in root.select('#games > .entry'): title", "genres = self.get_norm_text(game_block.select_one('.infos')).split(',') # Сойдет первый, совпадающий по имени, вариант return genres #", "'?genre=' in a['href'] ] # Сойдет первый, совпадающий по имени, вариант return genres", "совпадающий по имени, вариант return genres self.log_info(f'Not found game {self.game_name!r}') return [] def", "self.log_info(f'Not found game {self.game_name!r}') return [] def get_game_genres(game_name: str, *args, **kwargs) -> List[str]:", "вариант return genres self.log_info(f'Not found game {self.game_name!r}') return [] def get_game_genres(game_name: str, *args,", "'__main__': from common import _common_test _common_test(get_game_genres) # Search 'Hellgate: London'... 
# Genres: ['Action", "'html.parser') # http://squarefaction.ru/main/search/games?q=dead+space if '/main/search/games' in rs.url: self.log_info(f'Parsing of game list') for game_block", "<a href=\"/games?genre=tps\">TPS</a>, # <a href=\"/games?genre=survival-horror\">Survival Horror</a>, # <a href=\"/games?genre=action\">Action</a> # </td> genres =", "of game page') game_block = root.select_one('#page-info') if game_block: title = self.get_norm_text(game_block.select_one('#title')) if not", "по имени, вариант return genres # http://squarefaction.ru/game/dead-space else: self.log_info(f'Parsing of game page') game_block", "not self.is_found_game(title): continue # <div class=\"infos\">TPS,Survival Horror,Action</div> genres = self.get_norm_text(game_block.select_one('.infos')).split(',') # Сойдет первый,", "Search 'The Incredible Adventures of Van Helsing'... # Genres: ['Action RPG'] # #", "[] # # Search 'Twin Sector'... # Genres: [] # # Search 'Call", "list') for game_block in root.select('#games > .entry'): title = self.get_norm_text(game_block.select_one('.name')) if not self.is_found_game(title):", "первый, совпадающий по имени, вариант return genres # http://squarefaction.ru/game/dead-space else: self.log_info(f'Parsing of game", "title = self.get_norm_text(game_block.select_one('#title')) if not self.is_found_game(title): self.log_warn(f'Not match game title {title!r}') # <td", "if __name__ == '__main__': from common import _common_test _common_test(get_game_genres) # Search 'Hellgate: London'...", "__name__ == '__main__': from common import _common_test _common_test(get_game_genres) # Search 'Hellgate: London'... #", "[] # # Search 'Call of Cthulhu: Dark Corners of the Earth'... 
#", "[] def get_game_genres(game_name: str, *args, **kwargs) -> List[str]: return SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name) if __name__", "List from bs4 import BeautifulSoup from base_parser import BaseParser class SquarefactionRu_Parser(BaseParser): def _parse(self)", "SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name) if __name__ == '__main__': from common import _common_test _common_test(get_game_genres) # Search", "def get_game_genres(game_name: str, *args, **kwargs) -> List[str]: return SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name) if __name__ ==", "_common_test(get_game_genres) # Search 'Hellgate: London'... # Genres: ['Action RPG'] # # Search 'The", "_parse(self) -> List[str]: url = f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs = self.send_get(url) root = BeautifulSoup(rs.content, 'html.parser')", "not self.is_found_game(title): self.log_warn(f'Not match game title {title!r}') # <td class=\"nowraps-links\"> # <a href=\"/games?genre=tps\">TPS</a>,", "http://squarefaction.ru/game/dead-space else: self.log_info(f'Parsing of game page') game_block = root.select_one('#page-info') if game_block: title =", "BeautifulSoup from base_parser import BaseParser class SquarefactionRu_Parser(BaseParser): def _parse(self) -> List[str]: url =", "genres = [ self.get_norm_text(a) for a in game_block.select('a') if '?genre=' in a['href'] ]", "http://squarefaction.ru/main/search/games?q=dead+space if '/main/search/games' in rs.url: self.log_info(f'Parsing of game list') for game_block in root.select('#games", "self.is_found_game(title): self.log_warn(f'Not match game title {title!r}') # <td class=\"nowraps-links\"> # <a href=\"/games?genre=tps\">TPS</a>, #", "utf-8 -*- __author__ = 'ipetrash' from typing import List from bs4 import BeautifulSoup", "# Сойдет первый, совпадающий по имени, вариант return genres self.log_info(f'Not found game {self.game_name!r}')", "return 
SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name) if __name__ == '__main__': from common import _common_test _common_test(get_game_genres) #", "else: self.log_info(f'Parsing of game page') game_block = root.select_one('#page-info') if game_block: title = self.get_norm_text(game_block.select_one('#title'))", "Die Edition'... # Genres: [] # # Search 'Twin Sector'... # Genres: []", "*args, **kwargs) -> List[str]: return SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name) if __name__ == '__main__': from common", "if not self.is_found_game(title): self.log_warn(f'Not match game title {title!r}') # <td class=\"nowraps-links\"> # <a", "game title {title!r}') # <td class=\"nowraps-links\"> # <a href=\"/games?genre=tps\">TPS</a>, # <a href=\"/games?genre=survival-horror\">Survival Horror</a>,", "href=\"/games?genre=tps\">TPS</a>, # <a href=\"/games?genre=survival-horror\">Survival Horror</a>, # <a href=\"/games?genre=action\">Action</a> # </td> genres = [", "href=\"/games?genre=action\">Action</a> # </td> genres = [ self.get_norm_text(a) for a in game_block.select('a') if '?genre='", "from common import _common_test _common_test(get_game_genres) # Search 'Hellgate: London'... # Genres: ['Action RPG']", "# </td> genres = [ self.get_norm_text(a) for a in game_block.select('a') if '?genre=' in", "= self.send_get(url) root = BeautifulSoup(rs.content, 'html.parser') # http://squarefaction.ru/main/search/games?q=dead+space if '/main/search/games' in rs.url: self.log_info(f'Parsing", "BaseParser class SquarefactionRu_Parser(BaseParser): def _parse(self) -> List[str]: url = f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs = self.send_get(url)", "# Search 'Hellgate: London'... # Genres: ['Action RPG'] # # Search 'The Incredible", "# Search 'Dark Souls: Prepare to Die Edition'... # Genres: [] # #", "rs.url: self.log_info(f'Parsing of game list') for game_block in root.select('#games > .entry'): title =", "# Search 'Twin Sector'... 
# Genres: [] # # Search 'Call of Cthulhu:", "-> List[str]: url = f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs = self.send_get(url) root = BeautifulSoup(rs.content, 'html.parser') #", "== '__main__': from common import _common_test _common_test(get_game_genres) # Search 'Hellgate: London'... # Genres:", "root = BeautifulSoup(rs.content, 'html.parser') # http://squarefaction.ru/main/search/games?q=dead+space if '/main/search/games' in rs.url: self.log_info(f'Parsing of game", "continue # <div class=\"infos\">TPS,Survival Horror,Action</div> genres = self.get_norm_text(game_block.select_one('.infos')).split(',') # Сойдет первый, совпадающий по", "game {self.game_name!r}') return [] def get_game_genres(game_name: str, *args, **kwargs) -> List[str]: return SquarefactionRu_Parser(*args,", "of game list') for game_block in root.select('#games > .entry'): title = self.get_norm_text(game_block.select_one('.name')) if", "str, *args, **kwargs) -> List[str]: return SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name) if __name__ == '__main__': from", "__author__ = 'ipetrash' from typing import List from bs4 import BeautifulSoup from base_parser", "return genres self.log_info(f'Not found game {self.game_name!r}') return [] def get_game_genres(game_name: str, *args, **kwargs)", "**kwargs) -> List[str]: return SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name) if __name__ == '__main__': from common import", "get_game_genres(game_name: str, *args, **kwargs) -> List[str]: return SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name) if __name__ == '__main__':", "rs = self.send_get(url) root = BeautifulSoup(rs.content, 'html.parser') # http://squarefaction.ru/main/search/games?q=dead+space if '/main/search/games' in rs.url:", "self.get_norm_text(game_block.select_one('#title')) if not self.is_found_game(title): self.log_warn(f'Not match game title {title!r}') # <td class=\"nowraps-links\"> #", "RPG'] # # Search 'The 
Incredible Adventures of Van Helsing'... # Genres: ['Action", "import _common_test _common_test(get_game_genres) # Search 'Hellgate: London'... # Genres: ['Action RPG'] # #", "-*- coding: utf-8 -*- __author__ = 'ipetrash' from typing import List from bs4", "имени, вариант return genres self.log_info(f'Not found game {self.game_name!r}') return [] def get_game_genres(game_name: str,", "<td class=\"nowraps-links\"> # <a href=\"/games?genre=tps\">TPS</a>, # <a href=\"/games?genre=survival-horror\">Survival Horror</a>, # <a href=\"/games?genre=action\">Action</a> #", "= BeautifulSoup(rs.content, 'html.parser') # http://squarefaction.ru/main/search/games?q=dead+space if '/main/search/games' in rs.url: self.log_info(f'Parsing of game list')", "self.log_info(f'Parsing of game list') for game_block in root.select('#games > .entry'): title = self.get_norm_text(game_block.select_one('.name'))", "'/main/search/games' in rs.url: self.log_info(f'Parsing of game list') for game_block in root.select('#games > .entry'):", "= root.select_one('#page-info') if game_block: title = self.get_norm_text(game_block.select_one('#title')) if not self.is_found_game(title): self.log_warn(f'Not match game", "Search 'Hellgate: London'... # Genres: ['Action RPG'] # # Search 'The Incredible Adventures", "# # Search 'Call of Cthulhu: Dark Corners of the Earth'... # Genres:", "of Van Helsing'... # Genres: ['Action RPG'] # # Search 'Dark Souls: Prepare", "['Action RPG'] # # Search 'The Incredible Adventures of Van Helsing'... 
# Genres:", "in game_block.select('a') if '?genre=' in a['href'] ] # Сойдет первый, совпадающий по имени,", "{self.game_name!r}') return [] def get_game_genres(game_name: str, *args, **kwargs) -> List[str]: return SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name)", "совпадающий по имени, вариант return genres # http://squarefaction.ru/game/dead-space else: self.log_info(f'Parsing of game page')", "# Genres: ['Action RPG'] # # Search 'The Incredible Adventures of Van Helsing'...", "'Hellgate: London'... # Genres: ['Action RPG'] # # Search 'The Incredible Adventures of", "genres self.log_info(f'Not found game {self.game_name!r}') return [] def get_game_genres(game_name: str, *args, **kwargs) ->", "game_block in root.select('#games > .entry'): title = self.get_norm_text(game_block.select_one('.name')) if not self.is_found_game(title): continue #", "for a in game_block.select('a') if '?genre=' in a['href'] ] # Сойдет первый, совпадающий", "# <div class=\"infos\">TPS,Survival Horror,Action</div> genres = self.get_norm_text(game_block.select_one('.infos')).split(',') # Сойдет первый, совпадающий по имени,", "# Search 'The Incredible Adventures of Van Helsing'... # Genres: ['Action RPG'] #", "</td> genres = [ self.get_norm_text(a) for a in game_block.select('a') if '?genre=' in a['href']", "# -*- coding: utf-8 -*- __author__ = 'ipetrash' from typing import List from", "первый, совпадающий по имени, вариант return genres self.log_info(f'Not found game {self.game_name!r}') return []", "['Action RPG'] # # Search 'Dark Souls: Prepare to Die Edition'... # Genres:", "Сойдет первый, совпадающий по имени, вариант return genres # http://squarefaction.ru/game/dead-space else: self.log_info(f'Parsing of", "if '?genre=' in a['href'] ] # Сойдет первый, совпадающий по имени, вариант return", "Genres: [] # # Search 'Twin Sector'... 
# Genres: [] # # Search", "base_parser import BaseParser class SquarefactionRu_Parser(BaseParser): def _parse(self) -> List[str]: url = f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs", "'Twin Sector'... # Genres: [] # # Search 'Call of Cthulhu: Dark Corners", "Helsing'... # Genres: ['Action RPG'] # # Search 'Dark Souls: Prepare to Die", "= self.get_norm_text(game_block.select_one('.infos')).split(',') # Сойдет первый, совпадающий по имени, вариант return genres # http://squarefaction.ru/game/dead-space", "from base_parser import BaseParser class SquarefactionRu_Parser(BaseParser): def _parse(self) -> List[str]: url = f'http://squarefaction.ru/main/search/games?q={self.game_name}'", "Incredible Adventures of Van Helsing'... # Genres: ['Action RPG'] # # Search 'Dark", "from typing import List from bs4 import BeautifulSoup from base_parser import BaseParser class", "List[str]: url = f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs = self.send_get(url) root = BeautifulSoup(rs.content, 'html.parser') # http://squarefaction.ru/main/search/games?q=dead+space", "= 'ipetrash' from typing import List from bs4 import BeautifulSoup from base_parser import", "from bs4 import BeautifulSoup from base_parser import BaseParser class SquarefactionRu_Parser(BaseParser): def _parse(self) ->", "# <a href=\"/games?genre=action\">Action</a> # </td> genres = [ self.get_norm_text(a) for a in game_block.select('a')", "SquarefactionRu_Parser(BaseParser): def _parse(self) -> List[str]: url = f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs = self.send_get(url) root =", "'The Incredible Adventures of Van Helsing'... # Genres: ['Action RPG'] # # Search", "# # Search 'Dark Souls: Prepare to Die Edition'... 
# Genres: [] #", "import BaseParser class SquarefactionRu_Parser(BaseParser): def _parse(self) -> List[str]: url = f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs =", "def _parse(self) -> List[str]: url = f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs = self.send_get(url) root = BeautifulSoup(rs.content,", "found game {self.game_name!r}') return [] def get_game_genres(game_name: str, *args, **kwargs) -> List[str]: return", "# Genres: [] # # Search 'Call of Cthulhu: Dark Corners of the", "genres # http://squarefaction.ru/game/dead-space else: self.log_info(f'Parsing of game page') game_block = root.select_one('#page-info') if game_block:", "a['href'] ] # Сойдет первый, совпадающий по имени, вариант return genres self.log_info(f'Not found", ".entry'): title = self.get_norm_text(game_block.select_one('.name')) if not self.is_found_game(title): continue # <div class=\"infos\">TPS,Survival Horror,Action</div> genres", "class=\"infos\">TPS,Survival Horror,Action</div> genres = self.get_norm_text(game_block.select_one('.infos')).split(',') # Сойдет первый, совпадающий по имени, вариант return", "coding: utf-8 -*- __author__ = 'ipetrash' from typing import List from bs4 import", "bs4 import BeautifulSoup from base_parser import BaseParser class SquarefactionRu_Parser(BaseParser): def _parse(self) -> List[str]:", "> .entry'): title = self.get_norm_text(game_block.select_one('.name')) if not self.is_found_game(title): continue # <div class=\"infos\">TPS,Survival Horror,Action</div>", "London'... # Genres: ['Action RPG'] # # Search 'The Incredible Adventures of Van", "RPG'] # # Search 'Dark Souls: Prepare to Die Edition'... # Genres: []", "Prepare to Die Edition'... # Genres: [] # # Search 'Twin Sector'... 
#", "[ self.get_norm_text(a) for a in game_block.select('a') if '?genre=' in a['href'] ] # Сойдет", "python3 # -*- coding: utf-8 -*- __author__ = 'ipetrash' from typing import List", "<div class=\"infos\">TPS,Survival Horror,Action</div> genres = self.get_norm_text(game_block.select_one('.infos')).split(',') # Сойдет первый, совпадающий по имени, вариант", "= [ self.get_norm_text(a) for a in game_block.select('a') if '?genre=' in a['href'] ] #", "game_block: title = self.get_norm_text(game_block.select_one('#title')) if not self.is_found_game(title): self.log_warn(f'Not match game title {title!r}') #", "class=\"nowraps-links\"> # <a href=\"/games?genre=tps\">TPS</a>, # <a href=\"/games?genre=survival-horror\">Survival Horror</a>, # <a href=\"/games?genre=action\">Action</a> # </td>", "if game_block: title = self.get_norm_text(game_block.select_one('#title')) if not self.is_found_game(title): self.log_warn(f'Not match game title {title!r}')", "= self.get_norm_text(game_block.select_one('#title')) if not self.is_found_game(title): self.log_warn(f'Not match game title {title!r}') # <td class=\"nowraps-links\">", "title {title!r}') # <td class=\"nowraps-links\"> # <a href=\"/games?genre=tps\">TPS</a>, # <a href=\"/games?genre=survival-horror\">Survival Horror</a>, #", "# # Search 'Twin Sector'... # Genres: [] # # Search 'Call of", "по имени, вариант return genres self.log_info(f'Not found game {self.game_name!r}') return [] def get_game_genres(game_name:", "Search 'Call of Cthulhu: Dark Corners of the Earth'... 
# Genres: ['Survival Horror']", "for game_block in root.select('#games > .entry'): title = self.get_norm_text(game_block.select_one('.name')) if not self.is_found_game(title): continue", "] # Сойдет первый, совпадающий по имени, вариант return genres self.log_info(f'Not found game", "root.select_one('#page-info') if game_block: title = self.get_norm_text(game_block.select_one('#title')) if not self.is_found_game(title): self.log_warn(f'Not match game title", "Genres: ['Action RPG'] # # Search 'The Incredible Adventures of Van Helsing'... #", "Edition'... # Genres: [] # # Search 'Twin Sector'... # Genres: [] #", "вариант return genres # http://squarefaction.ru/game/dead-space else: self.log_info(f'Parsing of game page') game_block = root.select_one('#page-info')", "import BeautifulSoup from base_parser import BaseParser class SquarefactionRu_Parser(BaseParser): def _parse(self) -> List[str]: url", "self.is_found_game(title): continue # <div class=\"infos\">TPS,Survival Horror,Action</div> genres = self.get_norm_text(game_block.select_one('.infos')).split(',') # Сойдет первый, совпадающий", "class SquarefactionRu_Parser(BaseParser): def _parse(self) -> List[str]: url = f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs = self.send_get(url) root", "имени, вариант return genres # http://squarefaction.ru/game/dead-space else: self.log_info(f'Parsing of game page') game_block =", "page') game_block = root.select_one('#page-info') if game_block: title = self.get_norm_text(game_block.select_one('#title')) if not self.is_found_game(title): self.log_warn(f'Not", "in a['href'] ] # Сойдет первый, совпадающий по имени, вариант return genres self.log_info(f'Not", "a in game_block.select('a') if '?genre=' in a['href'] ] # Сойдет первый, совпадающий по", "f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs = self.send_get(url) root = BeautifulSoup(rs.content, 'html.parser') # http://squarefaction.ru/main/search/games?q=dead+space if 
'/main/search/games' in", "# <td class=\"nowraps-links\"> # <a href=\"/games?genre=tps\">TPS</a>, # <a href=\"/games?genre=survival-horror\">Survival Horror</a>, # <a href=\"/games?genre=action\">Action</a>", "-> List[str]: return SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name) if __name__ == '__main__': from common import _common_test", "Genres: [] # # Search 'Call of Cthulhu: Dark Corners of the Earth'...", "Search 'Dark Souls: Prepare to Die Edition'... # Genres: [] # # Search", "Genres: ['Action RPG'] # # Search 'Dark Souls: Prepare to Die Edition'... #", "-*- __author__ = 'ipetrash' from typing import List from bs4 import BeautifulSoup from", "match game title {title!r}') # <td class=\"nowraps-links\"> # <a href=\"/games?genre=tps\">TPS</a>, # <a href=\"/games?genre=survival-horror\">Survival", "_common_test _common_test(get_game_genres) # Search 'Hellgate: London'... # Genres: ['Action RPG'] # # Search", "in root.select('#games > .entry'): title = self.get_norm_text(game_block.select_one('.name')) if not self.is_found_game(title): continue # <div", "root.select('#games > .entry'): title = self.get_norm_text(game_block.select_one('.name')) if not self.is_found_game(title): continue # <div class=\"infos\">TPS,Survival", "game page') game_block = root.select_one('#page-info') if game_block: title = self.get_norm_text(game_block.select_one('#title')) if not self.is_found_game(title):", "to Die Edition'... # Genres: [] # # Search 'Twin Sector'... 
# Genres:", "self.log_warn(f'Not match game title {title!r}') # <td class=\"nowraps-links\"> # <a href=\"/games?genre=tps\">TPS</a>, # <a", "if '/main/search/games' in rs.url: self.log_info(f'Parsing of game list') for game_block in root.select('#games >", "self.get_norm_text(game_block.select_one('.infos')).split(',') # Сойдет первый, совпадающий по имени, вариант return genres # http://squarefaction.ru/game/dead-space else:", "# <a href=\"/games?genre=tps\">TPS</a>, # <a href=\"/games?genre=survival-horror\">Survival Horror</a>, # <a href=\"/games?genre=action\">Action</a> # </td> genres", "href=\"/games?genre=survival-horror\">Survival Horror</a>, # <a href=\"/games?genre=action\">Action</a> # </td> genres = [ self.get_norm_text(a) for a", "import List from bs4 import BeautifulSoup from base_parser import BaseParser class SquarefactionRu_Parser(BaseParser): def", "'Dark Souls: Prepare to Die Edition'... # Genres: [] # # Search 'Twin", "# http://squarefaction.ru/main/search/games?q=dead+space if '/main/search/games' in rs.url: self.log_info(f'Parsing of game list') for game_block in", "Sector'... 
# Genres: [] # # Search 'Call of Cthulhu: Dark Corners of", "# http://squarefaction.ru/game/dead-space else: self.log_info(f'Parsing of game page') game_block = root.select_one('#page-info') if game_block: title", "**kwargs).get_game_genres(game_name) if __name__ == '__main__': from common import _common_test _common_test(get_game_genres) # Search 'Hellgate:", "# Сойдет первый, совпадающий по имени, вариант return genres # http://squarefaction.ru/game/dead-space else: self.log_info(f'Parsing", "BeautifulSoup(rs.content, 'html.parser') # http://squarefaction.ru/main/search/games?q=dead+space if '/main/search/games' in rs.url: self.log_info(f'Parsing of game list') for", "'ipetrash' from typing import List from bs4 import BeautifulSoup from base_parser import BaseParser", "return [] def get_game_genres(game_name: str, *args, **kwargs) -> List[str]: return SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name) if", "return genres # http://squarefaction.ru/game/dead-space else: self.log_info(f'Parsing of game page') game_block = root.select_one('#page-info') if", "= f'http://squarefaction.ru/main/search/games?q={self.game_name}' rs = self.send_get(url) root = BeautifulSoup(rs.content, 'html.parser') # http://squarefaction.ru/main/search/games?q=dead+space if '/main/search/games'", "Adventures of Van Helsing'... # Genres: ['Action RPG'] # # Search 'Dark Souls:", "game_block = root.select_one('#page-info') if game_block: title = self.get_norm_text(game_block.select_one('#title')) if not self.is_found_game(title): self.log_warn(f'Not match", "Horror,Action</div> genres = self.get_norm_text(game_block.select_one('.infos')).split(',') # Сойдет первый, совпадающий по имени, вариант return genres", "# # Search 'The Incredible Adventures of Van Helsing'... 
# Genres: ['Action RPG']", "typing import List from bs4 import BeautifulSoup from base_parser import BaseParser class SquarefactionRu_Parser(BaseParser):", "{title!r}') # <td class=\"nowraps-links\"> # <a href=\"/games?genre=tps\">TPS</a>, # <a href=\"/games?genre=survival-horror\">Survival Horror</a>, # <a", "self.send_get(url) root = BeautifulSoup(rs.content, 'html.parser') # http://squarefaction.ru/main/search/games?q=dead+space if '/main/search/games' in rs.url: self.log_info(f'Parsing of", "self.log_info(f'Parsing of game page') game_block = root.select_one('#page-info') if game_block: title = self.get_norm_text(game_block.select_one('#title')) if", "List[str]: return SquarefactionRu_Parser(*args, **kwargs).get_game_genres(game_name) if __name__ == '__main__': from common import _common_test _common_test(get_game_genres)", "title = self.get_norm_text(game_block.select_one('.name')) if not self.is_found_game(title): continue # <div class=\"infos\">TPS,Survival Horror,Action</div> genres =", "<a href=\"/games?genre=survival-horror\">Survival Horror</a>, # <a href=\"/games?genre=action\">Action</a> # </td> genres = [ self.get_norm_text(a) for", "self.get_norm_text(a) for a in game_block.select('a') if '?genre=' in a['href'] ] # Сойдет первый,", "game_block.select('a') if '?genre=' in a['href'] ] # Сойдет первый, совпадающий по имени, вариант", "Horror</a>, # <a href=\"/games?genre=action\">Action</a> # </td> genres = [ self.get_norm_text(a) for a in", "= self.get_norm_text(game_block.select_one('.name')) if not self.is_found_game(title): continue # <div class=\"infos\">TPS,Survival Horror,Action</div> genres = self.get_norm_text(game_block.select_one('.infos')).split(',')", "# <a href=\"/games?genre=survival-horror\">Survival Horror</a>, # <a href=\"/games?genre=action\">Action</a> # </td> genres = [ self.get_norm_text(a)", "common import _common_test _common_test(get_game_genres) # Search 'Hellgate: London'... 
# Genres: ['Action RPG'] #", "Search 'Twin Sector'... # Genres: [] # # Search 'Call of Cthulhu: Dark", "Van Helsing'... # Genres: ['Action RPG'] # # Search 'Dark Souls: Prepare to", "# Search 'Call of Cthulhu: Dark Corners of the Earth'... # Genres: ['Survival", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- __author__ = 'ipetrash' from typing import", "if not self.is_found_game(title): continue # <div class=\"infos\">TPS,Survival Horror,Action</div> genres = self.get_norm_text(game_block.select_one('.infos')).split(',') # Сойдет", "Souls: Prepare to Die Edition'... # Genres: [] # # Search 'Twin Sector'...", "# Genres: ['Action RPG'] # # Search 'Dark Souls: Prepare to Die Edition'...", "Сойдет первый, совпадающий по имени, вариант return genres self.log_info(f'Not found game {self.game_name!r}') return", "# Genres: [] # # Search 'Twin Sector'... # Genres: [] # #", "<a href=\"/games?genre=action\">Action</a> # </td> genres = [ self.get_norm_text(a) for a in game_block.select('a') if", "self.get_norm_text(game_block.select_one('.name')) if not self.is_found_game(title): continue # <div class=\"infos\">TPS,Survival Horror,Action</div> genres = self.get_norm_text(game_block.select_one('.infos')).split(',') #" ]
[ "encoding=kwargs.get(\"encoding\", \"utf8\") ).read() setup( name=\"envtool\", version=\"0.1.0\", license=\"BSD\", description=\"A tool for managing envdirs and", "], keywords=[ \"environment\", \"envdir\", \"honcho\", \"foreman\", \"env\", ], install_requires=[ \"future>=0.15.0\", \"click>=4.0.0\", ], extras_require={", "import here, cause outside the eggs aren't loaded import pytest errno = pytest.main(self.pytest_args)", "Language :: Python :: Implementation :: CPython\", # \"Programming Language :: Python ::", "Unix\", \"Operating System :: POSIX\", # \"Operating System :: Microsoft :: Windows\", \"Environment", "# \"Programming Language :: Python :: Implementation :: PyPy\", \"Topic :: Utilities\", ],", "zip_safe=False, classifiers=[ # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development Status :: 2 - Pre-Alpha\",", ":: Python :: 3\", \"Programming Language :: Python :: 3.3\", \"Programming Language ::", "Python :: 3\", \"Programming Language :: Python :: 3.3\", \"Programming Language :: Python", "os.path import join from os.path import splitext import sys from setuptools import setup", "System :: Unix\", \"Operating System :: POSIX\", # \"Operating System :: Microsoft ::", ":: Python :: 3.4\", \"Programming Language :: Python :: Implementation :: CPython\", #", "import test as TestCommand class PyTest(TestCommand): user_options = [('pytest-args=', 'a', \"Arguments to pass", "read(\"CHANGELOG.rst\"))), author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\", py_modules=[splitext(basename(i))[0] for i in glob.glob(\"*.py\")], include_package_data=True, zip_safe=False, classifiers=[ #", "License\", \"Operating System :: Unix\", \"Operating System :: POSIX\", # \"Operating System ::", ":: 3\", \"Programming Language :: Python :: 3.3\", \"Programming Language :: Python ::", "TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): # import here, 
cause", "setuptools.command.test import test as TestCommand class PyTest(TestCommand): user_options = [('pytest-args=', 'a', \"Arguments to", "\"Intended Audience :: Developers\", \"License :: OSI Approved :: BSD License\", \"Operating System", "cause outside the eggs aren't loaded import pytest errno = pytest.main(self.pytest_args) sys.exit(errno) def", "3.3\", \"Programming Language :: Python :: 3.4\", \"Programming Language :: Python :: Implementation", "env files.\", long_description=\"%s\\n%s\" % (read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\", read(\"CHANGELOG.rst\"))), author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\", py_modules=[splitext(basename(i))[0] for", "py.test\")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = [] def finalize_options(self): TestCommand.finalize_options(self) self.test_args = []", "io import re from os.path import basename from os.path import dirname from os.path", "utf-8 -*- import glob import io import re from os.path import basename from", "def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = [] def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite", "install_requires=[ \"future>=0.15.0\", \"click>=4.0.0\", ], extras_require={ # eg: 'rst': [\"docutils>=0.11\"], }, entry_points={ \"console_scripts\": [", ":: System Administrator\", \"Programming Language :: Python\", \"Programming Language :: Python :: 2.6\",", "setup from setuptools.command.test import test as TestCommand class PyTest(TestCommand): user_options = [('pytest-args=', 'a',", "Implementation :: CPython\", # \"Programming Language :: Python :: Implementation :: PyPy\", \"Topic", "Language :: Python :: 2.7\", \"Programming Language :: Python :: 3\", \"Programming Language", "[] def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): 
#", "\"Topic :: Utilities\", ], keywords=[ \"environment\", \"envdir\", \"honcho\", \"foreman\", \"env\", ], install_requires=[ \"future>=0.15.0\",", "\"utf8\") ).read() setup( name=\"envtool\", version=\"0.1.0\", license=\"BSD\", description=\"A tool for managing envdirs and env", "from os.path import join from os.path import splitext import sys from setuptools import", "<gh_stars>1-10 # -*- encoding: utf-8 -*- import glob import io import re from", "for i in glob.glob(\"*.py\")], include_package_data=True, zip_safe=False, classifiers=[ # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development", "\"Operating System :: Unix\", \"Operating System :: POSIX\", # \"Operating System :: Microsoft", "self.pytest_args = [] def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def", "Windows\", \"Environment :: Console\", # \"Intended Audience :: System Administrator\", \"Programming Language ::", "from os.path import basename from os.path import dirname from os.path import join from", "managing envdirs and env files.\", long_description=\"%s\\n%s\" % (read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\", read(\"CHANGELOG.rst\"))), author=\"<NAME>\", author_email=\"<EMAIL>\",", "System :: POSIX\", # \"Operating System :: Microsoft :: Windows\", \"Environment :: Console\",", "setup( name=\"envtool\", version=\"0.1.0\", license=\"BSD\", description=\"A tool for managing envdirs and env files.\", long_description=\"%s\\n%s\"", "re from os.path import basename from os.path import dirname from os.path import join", "(read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\", read(\"CHANGELOG.rst\"))), author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\", py_modules=[splitext(basename(i))[0] for i in glob.glob(\"*.py\")], include_package_data=True,", "\"Programming Language :: Python :: 2.6\", \"Programming Language :: Python :: 2.7\", 
\"Programming", "\"Operating System :: Microsoft :: Windows\", \"Environment :: Console\", # \"Intended Audience ::", "PyPy\", \"Topic :: Utilities\", ], keywords=[ \"environment\", \"envdir\", \"honcho\", \"foreman\", \"env\", ], install_requires=[", "Utilities\", ], keywords=[ \"environment\", \"envdir\", \"honcho\", \"foreman\", \"env\", ], install_requires=[ \"future>=0.15.0\", \"click>=4.0.0\", ],", "\"future>=0.15.0\", \"click>=4.0.0\", ], extras_require={ # eg: 'rst': [\"docutils>=0.11\"], }, entry_points={ \"console_scripts\": [ \"envtool=envtool:main\",", "CPython\", # \"Programming Language :: Python :: Implementation :: PyPy\", \"Topic :: Utilities\",", ":: 2 - Pre-Alpha\", \"Intended Audience :: Developers\", \"License :: OSI Approved ::", "import splitext import sys from setuptools import setup from setuptools.command.test import test as", "\"Programming Language :: Python :: 3.3\", \"Programming Language :: Python :: 3.4\", \"Programming", "Approved :: BSD License\", \"Operating System :: Unix\", \"Operating System :: POSIX\", #", "from os.path import dirname from os.path import join from os.path import splitext import", "Pre-Alpha\", \"Intended Audience :: Developers\", \"License :: OSI Approved :: BSD License\", \"Operating", "import re from os.path import basename from os.path import dirname from os.path import", "class PyTest(TestCommand): user_options = [('pytest-args=', 'a', \"Arguments to pass to py.test\")] def initialize_options(self):", "aren't loaded import pytest errno = pytest.main(self.pytest_args) sys.exit(errno) def read(*names, **kwargs): return io.open(", "complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development Status :: 2 - Pre-Alpha\", \"Intended Audience ::", "POSIX\", # \"Operating System :: Microsoft :: Windows\", \"Environment :: Console\", # \"Intended", "2.7\", \"Programming Language :: Python :: 3\", \"Programming Language :: Python :: 3.3\",", "3\", \"Programming Language :: Python :: 
3.3\", \"Programming Language :: Python :: 3.4\",", "url=\"https://github.com/judy2k/envtool\", py_modules=[splitext(basename(i))[0] for i in glob.glob(\"*.py\")], include_package_data=True, zip_safe=False, classifiers=[ # complete classifier list:", "Language :: Python :: 2.6\", \"Programming Language :: Python :: 2.7\", \"Programming Language", "= [] self.test_suite = True def run_tests(self): # import here, cause outside the", "BSD License\", \"Operating System :: Unix\", \"Operating System :: POSIX\", # \"Operating System", "pass to py.test\")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = [] def finalize_options(self): TestCommand.finalize_options(self) self.test_args", "\"Environment :: Console\", # \"Intended Audience :: System Administrator\", \"Programming Language :: Python\",", "\"Programming Language :: Python :: Implementation :: CPython\", # \"Programming Language :: Python", "\"click>=4.0.0\", ], extras_require={ # eg: 'rst': [\"docutils>=0.11\"], }, entry_points={ \"console_scripts\": [ \"envtool=envtool:main\", ]", "# eg: 'rst': [\"docutils>=0.11\"], }, entry_points={ \"console_scripts\": [ \"envtool=envtool:main\", ] }, cmdclass={'test': PyTest},", ":: Python :: 2.7\", \"Programming Language :: Python :: 3\", \"Programming Language ::", "self.test_suite = True def run_tests(self): # import here, cause outside the eggs aren't", "Audience :: Developers\", \"License :: OSI Approved :: BSD License\", \"Operating System ::", "as TestCommand class PyTest(TestCommand): user_options = [('pytest-args=', 'a', \"Arguments to pass to py.test\")]", "import glob import io import re from os.path import basename from os.path import", "'a', \"Arguments to pass to py.test\")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = [] def", "\"foreman\", \"env\", ], install_requires=[ \"future>=0.15.0\", \"click>=4.0.0\", ], extras_require={ # eg: 'rst': [\"docutils>=0.11\"], },", "glob import io 
import re from os.path import basename from os.path import dirname", ":: POSIX\", # \"Operating System :: Microsoft :: Windows\", \"Environment :: Console\", #", "[\"docutils>=0.11\"], }, entry_points={ \"console_scripts\": [ \"envtool=envtool:main\", ] }, cmdclass={'test': PyTest}, tests_require=[ \"pytest>=2.7.2\", ]", "\"Development Status :: 2 - Pre-Alpha\", \"Intended Audience :: Developers\", \"License :: OSI", "Language :: Python\", \"Programming Language :: Python :: 2.6\", \"Programming Language :: Python", "join from os.path import splitext import sys from setuptools import setup from setuptools.command.test", "pytest.main(self.pytest_args) sys.exit(errno) def read(*names, **kwargs): return io.open( join(dirname(__file__), *names), encoding=kwargs.get(\"encoding\", \"utf8\") ).read() setup(", "\"Operating System :: POSIX\", # \"Operating System :: Microsoft :: Windows\", \"Environment ::", "splitext import sys from setuptools import setup from setuptools.command.test import test as TestCommand", "\"honcho\", \"foreman\", \"env\", ], install_requires=[ \"future>=0.15.0\", \"click>=4.0.0\", ], extras_require={ # eg: 'rst': [\"docutils>=0.11\"],", "license=\"BSD\", description=\"A tool for managing envdirs and env files.\", long_description=\"%s\\n%s\" % (read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\",", "setuptools import setup from setuptools.command.test import test as TestCommand class PyTest(TestCommand): user_options =", "Language :: Python :: 3.3\", \"Programming Language :: Python :: 3.4\", \"Programming Language", "join(dirname(__file__), *names), encoding=kwargs.get(\"encoding\", \"utf8\") ).read() setup( name=\"envtool\", version=\"0.1.0\", license=\"BSD\", description=\"A tool for managing", "version=\"0.1.0\", license=\"BSD\", description=\"A tool for managing envdirs and env files.\", long_description=\"%s\\n%s\" % (read(\"README.rst\"),", "author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\", 
py_modules=[splitext(basename(i))[0] for i in glob.glob(\"*.py\")], include_package_data=True, zip_safe=False, classifiers=[ # complete", "= [('pytest-args=', 'a', \"Arguments to pass to py.test\")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args =", "-*- encoding: utf-8 -*- import glob import io import re from os.path import", ").read() setup( name=\"envtool\", version=\"0.1.0\", license=\"BSD\", description=\"A tool for managing envdirs and env files.\",", "i in glob.glob(\"*.py\")], include_package_data=True, zip_safe=False, classifiers=[ # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development Status", "Implementation :: PyPy\", \"Topic :: Utilities\", ], keywords=[ \"environment\", \"envdir\", \"honcho\", \"foreman\", \"env\",", "-*- import glob import io import re from os.path import basename from os.path", "\"Programming Language :: Python :: Implementation :: PyPy\", \"Topic :: Utilities\", ], keywords=[", ":: Python :: Implementation :: PyPy\", \"Topic :: Utilities\", ], keywords=[ \"environment\", \"envdir\",", "io.open( join(dirname(__file__), *names), encoding=kwargs.get(\"encoding\", \"utf8\") ).read() setup( name=\"envtool\", version=\"0.1.0\", license=\"BSD\", description=\"A tool for", "Python :: 2.6\", \"Programming Language :: Python :: 2.7\", \"Programming Language :: Python", "\"Programming Language :: Python :: 3.4\", \"Programming Language :: Python :: Implementation ::", "loaded import pytest errno = pytest.main(self.pytest_args) sys.exit(errno) def read(*names, **kwargs): return io.open( join(dirname(__file__),", "sys from setuptools import setup from setuptools.command.test import test as TestCommand class PyTest(TestCommand):", "TestCommand class PyTest(TestCommand): user_options = [('pytest-args=', 'a', \"Arguments to pass to py.test\")] def", "import setup from setuptools.command.test import test as TestCommand class PyTest(TestCommand): user_options = 
[('pytest-args=',", "\"Intended Audience :: System Administrator\", \"Programming Language :: Python\", \"Programming Language :: Python", "test as TestCommand class PyTest(TestCommand): user_options = [('pytest-args=', 'a', \"Arguments to pass to", "Python :: 3.4\", \"Programming Language :: Python :: Implementation :: CPython\", # \"Programming", ":: Implementation :: CPython\", # \"Programming Language :: Python :: Implementation :: PyPy\",", "user_options = [('pytest-args=', 'a', \"Arguments to pass to py.test\")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args", "read(*names, **kwargs): return io.open( join(dirname(__file__), *names), encoding=kwargs.get(\"encoding\", \"utf8\") ).read() setup( name=\"envtool\", version=\"0.1.0\", license=\"BSD\",", "Python :: Implementation :: CPython\", # \"Programming Language :: Python :: Implementation ::", "r\"``\\1``\", read(\"CHANGELOG.rst\"))), author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\", py_modules=[splitext(basename(i))[0] for i in glob.glob(\"*.py\")], include_package_data=True, zip_safe=False, classifiers=[", "Administrator\", \"Programming Language :: Python\", \"Programming Language :: Python :: 2.6\", \"Programming Language", ":: Microsoft :: Windows\", \"Environment :: Console\", # \"Intended Audience :: System Administrator\",", "from setuptools import setup from setuptools.command.test import test as TestCommand class PyTest(TestCommand): user_options", ":: PyPy\", \"Topic :: Utilities\", ], keywords=[ \"environment\", \"envdir\", \"honcho\", \"foreman\", \"env\", ],", "*names), encoding=kwargs.get(\"encoding\", \"utf8\") ).read() setup( name=\"envtool\", version=\"0.1.0\", license=\"BSD\", description=\"A tool for managing envdirs", "System :: Microsoft :: Windows\", \"Environment :: Console\", # \"Intended Audience :: System", "errno = pytest.main(self.pytest_args) sys.exit(errno) def read(*names, **kwargs): return io.open( 
join(dirname(__file__), *names), encoding=kwargs.get(\"encoding\", \"utf8\")", "import join from os.path import splitext import sys from setuptools import setup from", "run_tests(self): # import here, cause outside the eggs aren't loaded import pytest errno", "initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = [] def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite =", "files.\", long_description=\"%s\\n%s\" % (read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\", read(\"CHANGELOG.rst\"))), author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\", py_modules=[splitext(basename(i))[0] for i", "py_modules=[splitext(basename(i))[0] for i in glob.glob(\"*.py\")], include_package_data=True, zip_safe=False, classifiers=[ # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers", "Language :: Python :: 3.4\", \"Programming Language :: Python :: Implementation :: CPython\",", "list: http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development Status :: 2 - Pre-Alpha\", \"Intended Audience :: Developers\", \"License", "os.path import basename from os.path import dirname from os.path import join from os.path", ":: 3.4\", \"Programming Language :: Python :: Implementation :: CPython\", # \"Programming Language", "Developers\", \"License :: OSI Approved :: BSD License\", \"Operating System :: Unix\", \"Operating", "here, cause outside the eggs aren't loaded import pytest errno = pytest.main(self.pytest_args) sys.exit(errno)", "\"Arguments to pass to py.test\")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = [] def finalize_options(self):", "= True def run_tests(self): # import here, cause outside the eggs aren't loaded", "import io import re from os.path import basename from os.path import dirname from", "glob.glob(\"*.py\")], include_package_data=True, zip_safe=False, classifiers=[ # 
complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development Status :: 2", "to py.test\")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = [] def finalize_options(self): TestCommand.finalize_options(self) self.test_args =", "= pytest.main(self.pytest_args) sys.exit(errno) def read(*names, **kwargs): return io.open( join(dirname(__file__), *names), encoding=kwargs.get(\"encoding\", \"utf8\") ).read()", "# complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development Status :: 2 - Pre-Alpha\", \"Intended Audience", "\"License :: OSI Approved :: BSD License\", \"Operating System :: Unix\", \"Operating System", ":: CPython\", # \"Programming Language :: Python :: Implementation :: PyPy\", \"Topic ::", "encoding: utf-8 -*- import glob import io import re from os.path import basename", "2.6\", \"Programming Language :: Python :: 2.7\", \"Programming Language :: Python :: 3\",", "], install_requires=[ \"future>=0.15.0\", \"click>=4.0.0\", ], extras_require={ # eg: 'rst': [\"docutils>=0.11\"], }, entry_points={ \"console_scripts\":", "classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development Status :: 2 - Pre-Alpha\", \"Intended Audience :: Developers\",", "- Pre-Alpha\", \"Intended Audience :: Developers\", \"License :: OSI Approved :: BSD License\",", "PyTest(TestCommand): user_options = [('pytest-args=', 'a', \"Arguments to pass to py.test\")] def initialize_options(self): TestCommand.initialize_options(self)", "include_package_data=True, zip_safe=False, classifiers=[ # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development Status :: 2 -", "\"Programming Language :: Python :: 2.7\", \"Programming Language :: Python :: 3\", \"Programming", "import basename from os.path import dirname from os.path import join from os.path import", "extras_require={ # eg: 'rst': [\"docutils>=0.11\"], }, entry_points={ 
\"console_scripts\": [ \"envtool=envtool:main\", ] }, cmdclass={'test':", "def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): # import", ":: Implementation :: PyPy\", \"Topic :: Utilities\", ], keywords=[ \"environment\", \"envdir\", \"honcho\", \"foreman\",", "[] self.test_suite = True def run_tests(self): # import here, cause outside the eggs", "Microsoft :: Windows\", \"Environment :: Console\", # \"Intended Audience :: System Administrator\", \"Programming", "System Administrator\", \"Programming Language :: Python\", \"Programming Language :: Python :: 2.6\", \"Programming", ":: Python\", \"Programming Language :: Python :: 2.6\", \"Programming Language :: Python ::", "and env files.\", long_description=\"%s\\n%s\" % (read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\", read(\"CHANGELOG.rst\"))), author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\", py_modules=[splitext(basename(i))[0]", "from setuptools.command.test import test as TestCommand class PyTest(TestCommand): user_options = [('pytest-args=', 'a', \"Arguments", "# import here, cause outside the eggs aren't loaded import pytest errno =", "Python :: 3.3\", \"Programming Language :: Python :: 3.4\", \"Programming Language :: Python", "os.path import dirname from os.path import join from os.path import splitext import sys", "True def run_tests(self): # import here, cause outside the eggs aren't loaded import", "sys.exit(errno) def read(*names, **kwargs): return io.open( join(dirname(__file__), *names), encoding=kwargs.get(\"encoding\", \"utf8\") ).read() setup( name=\"envtool\",", "import sys from setuptools import setup from setuptools.command.test import test as TestCommand class", ":: Python :: 3.3\", \"Programming Language :: Python :: 3.4\", \"Programming Language ::", "the eggs aren't loaded import pytest errno = pytest.main(self.pytest_args) sys.exit(errno) def read(*names, 
**kwargs):", "pytest errno = pytest.main(self.pytest_args) sys.exit(errno) def read(*names, **kwargs): return io.open( join(dirname(__file__), *names), encoding=kwargs.get(\"encoding\",", "\"Programming Language :: Python :: 3\", \"Programming Language :: Python :: 3.3\", \"Programming", ":: Python :: 2.6\", \"Programming Language :: Python :: 2.7\", \"Programming Language ::", "% (read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\", read(\"CHANGELOG.rst\"))), author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\", py_modules=[splitext(basename(i))[0] for i in glob.glob(\"*.py\")],", "envdirs and env files.\", long_description=\"%s\\n%s\" % (read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\", read(\"CHANGELOG.rst\"))), author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\",", ":: 2.6\", \"Programming Language :: Python :: 2.7\", \"Programming Language :: Python ::", ":: OSI Approved :: BSD License\", \"Operating System :: Unix\", \"Operating System ::", "eggs aren't loaded import pytest errno = pytest.main(self.pytest_args) sys.exit(errno) def read(*names, **kwargs): return", "# -*- encoding: utf-8 -*- import glob import io import re from os.path", "Python :: 2.7\", \"Programming Language :: Python :: 3\", \"Programming Language :: Python", "eg: 'rst': [\"docutils>=0.11\"], }, entry_points={ \"console_scripts\": [ \"envtool=envtool:main\", ] }, cmdclass={'test': PyTest}, tests_require=[", "# \"Operating System :: Microsoft :: Windows\", \"Environment :: Console\", # \"Intended Audience", "outside the eggs aren't loaded import pytest errno = pytest.main(self.pytest_args) sys.exit(errno) def read(*names,", "name=\"envtool\", version=\"0.1.0\", license=\"BSD\", description=\"A tool for managing envdirs and env files.\", long_description=\"%s\\n%s\" %", "return io.open( join(dirname(__file__), *names), encoding=kwargs.get(\"encoding\", \"utf8\") ).read() setup( name=\"envtool\", 
version=\"0.1.0\", license=\"BSD\", description=\"A tool", "TestCommand.initialize_options(self) self.pytest_args = [] def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True", "**kwargs): return io.open( join(dirname(__file__), *names), encoding=kwargs.get(\"encoding\", \"utf8\") ).read() setup( name=\"envtool\", version=\"0.1.0\", license=\"BSD\", description=\"A", "http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development Status :: 2 - Pre-Alpha\", \"Intended Audience :: Developers\", \"License ::", "classifiers=[ # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development Status :: 2 - Pre-Alpha\", \"Intended", "3.4\", \"Programming Language :: Python :: Implementation :: CPython\", # \"Programming Language ::", ":: Utilities\", ], keywords=[ \"environment\", \"envdir\", \"honcho\", \"foreman\", \"env\", ], install_requires=[ \"future>=0.15.0\", \"click>=4.0.0\",", "}, entry_points={ \"console_scripts\": [ \"envtool=envtool:main\", ] }, cmdclass={'test': PyTest}, tests_require=[ \"pytest>=2.7.2\", ] )", "from os.path import splitext import sys from setuptools import setup from setuptools.command.test import", "Audience :: System Administrator\", \"Programming Language :: Python\", \"Programming Language :: Python ::", "description=\"A tool for managing envdirs and env files.\", long_description=\"%s\\n%s\" % (read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\",", "self.test_args = [] self.test_suite = True def run_tests(self): # import here, cause outside", "os.path import splitext import sys from setuptools import setup from setuptools.command.test import test", "= [] def finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self):", ":: BSD License\", \"Operating System :: Unix\", \"Operating System :: POSIX\", # \"Operating", "dirname from os.path import join from os.path import splitext 
import sys from setuptools", "Language :: Python :: Implementation :: PyPy\", \"Topic :: Utilities\", ], keywords=[ \"environment\",", "OSI Approved :: BSD License\", \"Operating System :: Unix\", \"Operating System :: POSIX\",", "Status :: 2 - Pre-Alpha\", \"Intended Audience :: Developers\", \"License :: OSI Approved", "in glob.glob(\"*.py\")], include_package_data=True, zip_safe=False, classifiers=[ # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers \"Development Status ::", "for managing envdirs and env files.\", long_description=\"%s\\n%s\" % (read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\", read(\"CHANGELOG.rst\"))), author=\"<NAME>\",", "def run_tests(self): # import here, cause outside the eggs aren't loaded import pytest", "def read(*names, **kwargs): return io.open( join(dirname(__file__), *names), encoding=kwargs.get(\"encoding\", \"utf8\") ).read() setup( name=\"envtool\", version=\"0.1.0\",", "Python :: Implementation :: PyPy\", \"Topic :: Utilities\", ], keywords=[ \"environment\", \"envdir\", \"honcho\",", "\"envdir\", \"honcho\", \"foreman\", \"env\", ], install_requires=[ \"future>=0.15.0\", \"click>=4.0.0\", ], extras_require={ # eg: 'rst':", "Console\", # \"Intended Audience :: System Administrator\", \"Programming Language :: Python\", \"Programming Language", "tool for managing envdirs and env files.\", long_description=\"%s\\n%s\" % (read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\", read(\"CHANGELOG.rst\"))),", "'rst': [\"docutils>=0.11\"], }, entry_points={ \"console_scripts\": [ \"envtool=envtool:main\", ] }, cmdclass={'test': PyTest}, tests_require=[ \"pytest>=2.7.2\",", ":: Console\", # \"Intended Audience :: System Administrator\", \"Programming Language :: Python\", \"Programming", "keywords=[ \"environment\", \"envdir\", \"honcho\", \"foreman\", \"env\", ], install_requires=[ \"future>=0.15.0\", \"click>=4.0.0\", ], extras_require={ #", "\"Programming Language :: Python\", 
\"Programming Language :: Python :: 2.6\", \"Programming Language ::", "finalize_options(self): TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): # import here,", "Language :: Python :: 3\", \"Programming Language :: Python :: 3.3\", \"Programming Language", "import dirname from os.path import join from os.path import splitext import sys from", ":: Python :: Implementation :: CPython\", # \"Programming Language :: Python :: Implementation", "# \"Intended Audience :: System Administrator\", \"Programming Language :: Python\", \"Programming Language ::", ":: Developers\", \"License :: OSI Approved :: BSD License\", \"Operating System :: Unix\",", "\"environment\", \"envdir\", \"honcho\", \"foreman\", \"env\", ], install_requires=[ \"future>=0.15.0\", \"click>=4.0.0\", ], extras_require={ # eg:", "Python\", \"Programming Language :: Python :: 2.6\", \"Programming Language :: Python :: 2.7\",", "import pytest errno = pytest.main(self.pytest_args) sys.exit(errno) def read(*names, **kwargs): return io.open( join(dirname(__file__), *names),", "], extras_require={ # eg: 'rst': [\"docutils>=0.11\"], }, entry_points={ \"console_scripts\": [ \"envtool=envtool:main\", ] },", "\"env\", ], install_requires=[ \"future>=0.15.0\", \"click>=4.0.0\", ], extras_require={ # eg: 'rst': [\"docutils>=0.11\"], }, entry_points={", "long_description=\"%s\\n%s\" % (read(\"README.rst\"), re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\", read(\"CHANGELOG.rst\"))), author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\", py_modules=[splitext(basename(i))[0] for i in", "2 - Pre-Alpha\", \"Intended Audience :: Developers\", \"License :: OSI Approved :: BSD", "[('pytest-args=', 'a', \"Arguments to pass to py.test\")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = []", ":: Unix\", \"Operating System :: POSIX\", # \"Operating System :: Microsoft :: Windows\",", "basename from os.path import 
dirname from os.path import join from os.path import splitext", ":: Windows\", \"Environment :: Console\", # \"Intended Audience :: System Administrator\", \"Programming Language", ":: 2.7\", \"Programming Language :: Python :: 3\", \"Programming Language :: Python ::", ":: 3.3\", \"Programming Language :: Python :: 3.4\", \"Programming Language :: Python ::", "author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\", py_modules=[splitext(basename(i))[0] for i in glob.glob(\"*.py\")], include_package_data=True, zip_safe=False, classifiers=[ # complete classifier", "re.sub(\":obj:`~?(.*?)`\", r\"``\\1``\", read(\"CHANGELOG.rst\"))), author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/judy2k/envtool\", py_modules=[splitext(basename(i))[0] for i in glob.glob(\"*.py\")], include_package_data=True, zip_safe=False,", "to pass to py.test\")] def initialize_options(self): TestCommand.initialize_options(self) self.pytest_args = [] def finalize_options(self): TestCommand.finalize_options(self)" ]
class TestRIGraph(TestCase):
  """Unit tests for the NetworkX-backed Regional Intersection Graph (RIGraph).

  Covers graph construction, region insertion, intersection insertion, and
  round-tripping through to_dict()/from_dict().
  """

  test_regions: List[Region]

  def setUp(self):
    # Fixture: a mix of overlapping and disjoint regions. Regions 0-2 mutually
    # overlap in part; regions 3-4 sit to the left of the others.
    self.test_regions = [
      Region([0, 0], [5, 5]),
      Region([2, 2], [5, 10]),
      Region([1, 5], [3, 7]),
      Region([-5, 5], [1, 7]),
      Region([-5, 5], [2, 7]),
    ]

  def test_nxgraph_create(self):
    """A freshly built RIGraph exposes an underlying networkx Graph."""
    g = RIGraph(dimension=1)
    self.assertTrue(g.G is not None)
    self.assertTrue(isinstance(g.G, nx.Graph))

  def test_nxgraph_contains(self):
    """`region.id in graph` holds after the region is inserted."""
    dim = self.test_regions[0].dimension
    g = RIGraph(dimension=dim)
    for r in self.test_regions[:3]:
      g.put_region(r)
    self.assertTrue(self.test_regions[0].id in g)

  def test_nxgraph_put_region(self):
    """All inserted regions come back, in order, via graph.regions."""
    dim = self.test_regions[0].dimension
    g = RIGraph(dimension=dim)
    for r in self.test_regions:
      g.put_region(r)
    self.assertEqual(self.test_regions, list(g.regions))

  def test_nxgraph_put_intersect(self):
    """put_intersection records the pairwise overlap of two regions."""
    a, b = self.test_regions[0], self.test_regions[1]
    g = RIGraph(dimension=a.dimension)
    g.put_region(a)
    g.put_region(b)
    g.put_intersection(a, b)
    overlap = a.get_intersection(b)
    self.assertEqual(overlap, list(g.intersections)[0])

  def test_nxgraph_to_dict(self):
    """to_dict() emits the node_link layout with regions as node/link data."""
    a, b = self.test_regions[0], self.test_regions[1]
    dim = a.dimension
    g = RIGraph(dimension=dim)
    g.put_region(a)
    g.put_region(b)
    g.put_intersection(a, b)
    overlap = a.get_intersection(b)
    # Expected serialized form: one node per region, one link per recorded
    # intersection, wrapped in networkx's node_link envelope.
    expected = {
      'id': g.id,
      'dimension': dim,
      'json_graph': 'node_link',
      'graph': {
        'directed': False,
        'multigraph': False,
        'graph': {},
        'nodes': [{'id': r.id, 'region': r} for r in g.regions],
        'links': [{'source': a.id, 'target': b.id, 'region': overlap}],
      },
    }
    self.assertEqual(expected, g.to_dict())

  def test_nxgraph_from_dict(self):
    """from_dict(to_dict(g)) reproduces an equivalent graph (dict round-trip)."""
    a, b = self.test_regions[0], self.test_regions[1]
    g = RIGraph(dimension=a.dimension)
    g.put_region(a)
    g.put_region(b)
    g.put_intersection(a, b)
    self.assertEqual(g.to_dict(), RIGraph.from_dict(g.to_dict()).to_dict())
"for region in self.test_regions[0:3]: graph.put_region(region) self.assertTrue(self.test_regions[0].id in graph) def test_nxgraph_put_region(self): dimension = self.test_regions[0].dimension", "5], [3, 7])) self.test_regions.append(Region([-5, 5], [1, 7])) self.test_regions.append(Region([-5, 5], [2, 7])) def test_nxgraph_create(self):", "'nodes':[{'id':r.id, 'region':r} for r in graph.regions], 'links':[{'source': self.test_regions[0].id, 'target': self.test_regions[1].id, 'region': intersection}] }}", "[5, 10])) self.test_regions.append(Region([1, 5], [3, 7])) self.test_regions.append(Region([-5, 5], [1, 7])) self.test_regions.append(Region([-5, 5], [2,", "5])) self.test_regions.append(Region([2, 2], [5, 10])) self.test_regions.append(Region([1, 5], [3, 7])) self.test_regions.append(Region([-5, 5], [1, 7]))", "None) self.assertTrue(isinstance(graph.G, nx.Graph)) def test_nxgraph_contains(self): dimension = self.test_regions[0].dimension graph = RIGraph(dimension=dimension) for region", "slig.datastructs.rigraph import RIGraph from slig.datastructs.region import Region class TestRIGraph(TestCase): test_regions: List[Region] def setUp(self):", "graph.put_region(self.test_regions[0]) graph.put_region(self.test_regions[1]) graph.put_intersection(self.test_regions[0], self.test_regions[1]) intersection = self.test_regions[0].get_intersection(self.test_regions[1]) graphdict = {'id':graph.id,'dimension':dimension,'json_graph':'node_link', 'graph':{ 'directed': False,", "test_nxgraph_create(self): graph = RIGraph(dimension=1) self.assertTrue(graph.G is not None) self.assertTrue(isinstance(graph.G, nx.Graph)) def test_nxgraph_contains(self): dimension", "for Regional Intersection Graph -- NetworkX - test_nxgraph_create - test_nxgraph_sweepctor - test_nxgraph_mdsweepctor -", "test_nxgraph_sweepctor - test_nxgraph_mdsweepctor - test_nxgraph_sweepctor_graph - test_nxgraph_sweepctor_random \"\"\" from io import StringIO from", "graph.put_region(region) 
self.assertTrue(self.test_regions[0].id in graph) def test_nxgraph_put_region(self): dimension = self.test_regions[0].dimension graph = RIGraph(dimension=dimension) for", "self.assertEqual(graphdict, graph.to_dict()) def test_nxgraph_from_dict(self): dimension = self.test_regions[0].dimension graph = RIGraph(dimension=dimension) graph.put_region(self.test_regions[0]) graph.put_region(self.test_regions[1]) graph.put_intersection(self.test_regions[0],", "def test_nxgraph_create(self): graph = RIGraph(dimension=1) self.assertTrue(graph.G is not None) self.assertTrue(isinstance(graph.G, nx.Graph)) def test_nxgraph_contains(self):", "not None) self.assertTrue(isinstance(graph.G, nx.Graph)) def test_nxgraph_contains(self): dimension = self.test_regions[0].dimension graph = RIGraph(dimension=dimension) for", "Tuple from unittest import TestCase from pprint import pprint from networkx import networkx", "graph = RIGraph(dimension=dimension) graph.put_region(self.test_regions[0]) graph.put_region(self.test_regions[1]) graph.put_intersection(self.test_regions[0], self.test_regions[1]) intersection = self.test_regions[0].get_intersection(self.test_regions[1]) self.assertEqual(intersection, list(graph.intersections)[0]) def", "import StringIO from typing import List, Tuple from unittest import TestCase from pprint", "[5, 5])) self.test_regions.append(Region([2, 2], [5, 10])) self.test_regions.append(Region([1, 5], [3, 7])) self.test_regions.append(Region([-5, 5], [1,", "pprint from networkx import networkx as nx from slig.datastructs.rigraph import RIGraph from slig.datastructs.region", "graph.put_region(region) self.assertEqual(self.test_regions, list(graph.regions)) def test_nxgraph_put_intersect(self): dimension = self.test_regions[0].dimension graph = RIGraph(dimension=dimension) graph.put_region(self.test_regions[0]) graph.put_region(self.test_regions[1])", "- test_nxgraph_sweepctor_graph - test_nxgraph_sweepctor_random \"\"\" from io import StringIO from typing import 
List,", "typing import List, Tuple from unittest import TestCase from pprint import pprint from", "import List, Tuple from unittest import TestCase from pprint import pprint from networkx", "RIGraph(dimension=dimension) graph.put_region(self.test_regions[0]) graph.put_region(self.test_regions[1]) graph.put_intersection(self.test_regions[0], self.test_regions[1]) intersection = self.test_regions[0].get_intersection(self.test_regions[1]) graphdict = {'id':graph.id,'dimension':dimension,'json_graph':'node_link', 'graph':{ 'directed':", "self.assertTrue(self.test_regions[0].id in graph) def test_nxgraph_put_region(self): dimension = self.test_regions[0].dimension graph = RIGraph(dimension=dimension) for region", "setUp(self): self.test_regions = [] self.test_regions.append(Region([0, 0], [5, 5])) self.test_regions.append(Region([2, 2], [5, 10])) self.test_regions.append(Region([1,", "List, Tuple from unittest import TestCase from pprint import pprint from networkx import", "is not None) self.assertTrue(isinstance(graph.G, nx.Graph)) def test_nxgraph_contains(self): dimension = self.test_regions[0].dimension graph = RIGraph(dimension=dimension)", "import Region class TestRIGraph(TestCase): test_regions: List[Region] def setUp(self): self.test_regions = [] self.test_regions.append(Region([0, 0],", "pprint import pprint from networkx import networkx as nx from slig.datastructs.rigraph import RIGraph", "from slig.datastructs.region import Region class TestRIGraph(TestCase): test_regions: List[Region] def setUp(self): self.test_regions = []", "graph = RIGraph(dimension=1) self.assertTrue(graph.G is not None) self.assertTrue(isinstance(graph.G, nx.Graph)) def test_nxgraph_contains(self): dimension =", "10])) self.test_regions.append(Region([1, 5], [3, 7])) self.test_regions.append(Region([-5, 5], [1, 7])) self.test_regions.append(Region([-5, 5], [2, 7]))", "graph) def test_nxgraph_put_region(self): dimension = self.test_regions[0].dimension graph = 
RIGraph(dimension=dimension) for region in self.test_regions:", "List[Region] def setUp(self): self.test_regions = [] self.test_regions.append(Region([0, 0], [5, 5])) self.test_regions.append(Region([2, 2], [5,", "RIGraph(dimension=1) self.assertTrue(graph.G is not None) self.assertTrue(isinstance(graph.G, nx.Graph)) def test_nxgraph_contains(self): dimension = self.test_regions[0].dimension graph", "5], [1, 7])) self.test_regions.append(Region([-5, 5], [2, 7])) def test_nxgraph_create(self): graph = RIGraph(dimension=1) self.assertTrue(graph.G", "'links':[{'source': self.test_regions[0].id, 'target': self.test_regions[1].id, 'region': intersection}] }} self.assertEqual(graphdict, graph.to_dict()) def test_nxgraph_from_dict(self): dimension =", "[1, 7])) self.test_regions.append(Region([-5, 5], [2, 7])) def test_nxgraph_create(self): graph = RIGraph(dimension=1) self.assertTrue(graph.G is", "def test_nxgraph_to_dict(self): dimension = self.test_regions[0].dimension graph = RIGraph(dimension=dimension) graph.put_region(self.test_regions[0]) graph.put_region(self.test_regions[1]) graph.put_intersection(self.test_regions[0], self.test_regions[1]) intersection", "self.test_regions[0].dimension graph = RIGraph(dimension=dimension) graph.put_region(self.test_regions[0]) graph.put_region(self.test_regions[1]) graph.put_intersection(self.test_regions[0], self.test_regions[1]) intersection = self.test_regions[0].get_intersection(self.test_regions[1]) graphdict =", "TestCase from pprint import pprint from networkx import networkx as nx from slig.datastructs.rigraph", "'graph':{}, 'nodes':[{'id':r.id, 'region':r} for r in graph.regions], 'links':[{'source': self.test_regions[0].id, 'target': self.test_regions[1].id, 'region': intersection}]", "-- NetworkX - test_nxgraph_create - test_nxgraph_sweepctor - test_nxgraph_mdsweepctor - test_nxgraph_sweepctor_graph - test_nxgraph_sweepctor_random \"\"\"", "self.test_regions[0].get_intersection(self.test_regions[1]) 
self.assertEqual(intersection, list(graph.intersections)[0]) def test_nxgraph_to_dict(self): dimension = self.test_regions[0].dimension graph = RIGraph(dimension=dimension) graph.put_region(self.test_regions[0]) graph.put_region(self.test_regions[1])" ]
[ "len(cfg.GPUS), shuffle=False, num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY, ) return test_loader, test_dataset def main(): args = parse_args()", "from {}'.format(model_state_file)) if cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file)) else: model.load_state_dict( torch.load(model_state_file, map_location=torch.device('cpu')) ) model = _model_to_gpu(model,", "data loaders test_loader, test_dataset = _make_test_data(cfg, logger) # Evaluate on validation set perf_indicator", "False) logger.info(pprint.pformat(args)) logger.info(cfg) # cudnn related setting cudnn.benchmark = cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC", "# Initialise losses loss_func = _make_loss(cfg) # Initialise data loaders test_loader, test_dataset =", "torch import torch.nn.parallel import torch.backends.cudnn as cudnn import torch.optim import torch.utils.data import torch.utils.data.distributed", "{} {} is {:.2%}'.format( cfg.TEST.THETA_THR, cfg.DATASET.NAME, cfg.DATASET.TEST_SET, perf_indicator ) ) if __name__ ==", "object logger: logging object Returns: test_loader: Data Loader over test dataset test_dataset: test", "map_location=torch.device('cpu')) ) model = _model_to_gpu(model, cfg) # Initialise losses loss_func = _make_loss(cfg) #", "is {:.2%}'.format( cfg.TEST.THETA_THR, cfg.DATASET.NAME, cfg.DATASET.TEST_SET, perf_indicator ) ) if __name__ == '__main__': main()", "= cfg.TEST.MODEL_FILE else: model_state_file = os.path.join(final_output_dir, 'best.pth') logger.info('=> loading model from {}'.format(model_state_file)) if", "model.load_state_dict(torch.load(model_state_file)) else: model.load_state_dict( torch.load(model_state_file, map_location=torch.device('cpu')) ) model = _model_to_gpu(model, cfg) # Initialise losses", "cfg: config object logger: logging object Returns: test_loader: Data Loader over test dataset", "cudnn.benchmark = cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC 
torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED # Initialise models model", "= os.path.join(final_output_dir, 'best.pth') logger.info('=> loading model from {}'.format(model_state_file)) if cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file)) else: model.load_state_dict(", "# -*- coding: utf-8 -*- # Written by <NAME> (<EMAIL>) import os import", "import torch.backends.cudnn as cudnn import torch.optim import torch.utils.data import torch.utils.data.distributed import torchvision.transforms as", "wbia_orientation.config.default import _C as cfg # NOQA from wbia_orientation.config.default import update_config from wbia_orientation.core.function", "torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED # Initialise models model = _make_model(cfg, is_train=False) # Load model", "loaders as per config parameters Input: cfg: config object logger: logging object Returns:", "validate( cfg, test_loader, test_dataset, model, loss_func, cfg.DATASET.TEST_SET, final_output_dir, ) logger.info( 'Final results. Accuracy@{}", "import validate from wbia_orientation.dataset import custom_transforms from wbia_orientation.dataset.animal import AnimalDataset from wbia_orientation.train import", "logger.info('=> loading model from {}'.format(model_state_file)) if cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file)) else: model.load_state_dict( torch.load(model_state_file, map_location=torch.device('cpu')) )", "_make_loss(cfg) # Initialise data loaders test_loader, test_dataset = _make_test_data(cfg, logger) # Evaluate on", "test_loader, test_dataset, model, loss_func, cfg.DATASET.TEST_SET, final_output_dir, ) logger.info( 'Final results. 
Accuracy@{} on {}", "weights if cfg.TEST.MODEL_FILE: model_state_file = cfg.TEST.MODEL_FILE else: model_state_file = os.path.join(final_output_dir, 'best.pth') logger.info('=> loading", "test_transform = transforms.Compose( [ custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(), custom_transforms.Normalize( mean=[0.485, 0.456, 0.406], std=[0.229, 0.224,", "cfg) # Initialise losses loss_func = _make_loss(cfg) # Initialise data loaders test_loader, test_dataset", "model from {}'.format(model_state_file)) if cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file)) else: model.load_state_dict( torch.load(model_state_file, map_location=torch.device('cpu')) ) model =", "-*- coding: utf-8 -*- # Written by <NAME> (<EMAIL>) import os import pprint", "update_config(cfg, args) logger, final_output_dir = create_logger(cfg, args.cfg, 'test', False) logger.info(pprint.pformat(args)) logger.info(cfg) # cudnn", "is_train=False) # Load model weights if cfg.TEST.MODEL_FILE: model_state_file = cfg.TEST.MODEL_FILE else: model_state_file =", "validation set perf_indicator = validate( cfg, test_loader, test_dataset, model, loss_func, cfg.DATASET.TEST_SET, final_output_dir, )", "if cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file)) else: model.load_state_dict( torch.load(model_state_file, map_location=torch.device('cpu')) ) model = _model_to_gpu(model, cfg) #", "] ) test_dataset = AnimalDataset(cfg, cfg.DATASET.TEST_SET, test_transform) test_loader = torch.utils.data.DataLoader( test_dataset, batch_size=cfg.TEST.BS *", "transforms from wbia_orientation.config.default import _C as cfg # NOQA from wbia_orientation.config.default import update_config", "), ] ) test_dataset = AnimalDataset(cfg, cfg.DATASET.TEST_SET, test_transform) test_loader = torch.utils.data.DataLoader( test_dataset, batch_size=cfg.TEST.BS", "dataset test_dataset: test dataset object \"\"\" test_transform = transforms.Compose( [ 
custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(),", "# Written by <NAME> (<EMAIL>) import os import pprint import torch import torch.nn.parallel", "results. Accuracy@{} on {} {} is {:.2%}'.format( cfg.TEST.THETA_THR, cfg.DATASET.NAME, cfg.DATASET.TEST_SET, perf_indicator ) )", "args) logger, final_output_dir = create_logger(cfg, args.cfg, 'test', False) logger.info(pprint.pformat(args)) logger.info(cfg) # cudnn related", "= transforms.Compose( [ custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(), custom_transforms.Normalize( mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225],", "torchvision.transforms as transforms from wbia_orientation.config.default import _C as cfg # NOQA from wbia_orientation.config.default", "object Returns: test_loader: Data Loader over test dataset test_dataset: test dataset object \"\"\"", "import torchvision.transforms as transforms from wbia_orientation.config.default import _C as cfg # NOQA from", "import torch.utils.data.distributed import torchvision.transforms as transforms from wbia_orientation.config.default import _C as cfg #", "# Load model weights if cfg.TEST.MODEL_FILE: model_state_file = cfg.TEST.MODEL_FILE else: model_state_file = os.path.join(final_output_dir,", "logging object Returns: test_loader: Data Loader over test dataset test_dataset: test dataset object", "perf_indicator = validate( cfg, test_loader, test_dataset, model, loss_func, cfg.DATASET.TEST_SET, final_output_dir, ) logger.info( 'Final", "_make_loss from wbia_orientation.utils.utils import create_logger def _make_test_data(cfg, logger): \"\"\"Initialise train and validation loaders", "= _make_model(cfg, is_train=False) # Load model weights if cfg.TEST.MODEL_FILE: model_state_file = cfg.TEST.MODEL_FILE else:", "# Initialise data loaders test_loader, test_dataset = _make_test_data(cfg, logger) # Evaluate on 
validation", "wbia_orientation.config.default import update_config from wbia_orientation.core.function import validate from wbia_orientation.dataset import custom_transforms from wbia_orientation.dataset.animal", ") test_dataset = AnimalDataset(cfg, cfg.DATASET.TEST_SET, test_transform) test_loader = torch.utils.data.DataLoader( test_dataset, batch_size=cfg.TEST.BS * len(cfg.GPUS),", "model = _make_model(cfg, is_train=False) # Load model weights if cfg.TEST.MODEL_FILE: model_state_file = cfg.TEST.MODEL_FILE", "cfg.DATASET.TEST_SET, final_output_dir, ) logger.info( 'Final results. Accuracy@{} on {} {} is {:.2%}'.format( cfg.TEST.THETA_THR,", "std=[0.229, 0.224, 0.225], input_size=cfg.MODEL.IMSIZE[0], ), ] ) test_dataset = AnimalDataset(cfg, cfg.DATASET.TEST_SET, test_transform) test_loader", "# Evaluate on validation set perf_indicator = validate( cfg, test_loader, test_dataset, model, loss_func,", "NOQA from wbia_orientation.config.default import update_config from wbia_orientation.core.function import validate from wbia_orientation.dataset import custom_transforms", "def _make_test_data(cfg, logger): \"\"\"Initialise train and validation loaders as per config parameters Input:", "import _C as cfg # NOQA from wbia_orientation.config.default import update_config from wbia_orientation.core.function import", "else: model.load_state_dict( torch.load(model_state_file, map_location=torch.device('cpu')) ) model = _model_to_gpu(model, cfg) # Initialise losses loss_func", "cudnn import torch.optim import torch.utils.data import torch.utils.data.distributed import torchvision.transforms as transforms from wbia_orientation.config.default", "os.path.join(final_output_dir, 'best.pth') logger.info('=> loading model from {}'.format(model_state_file)) if cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file)) else: model.load_state_dict( torch.load(model_state_file,", "loss_func = _make_loss(cfg) # Initialise data loaders test_loader, test_dataset = _make_test_data(cfg, 
logger) #", "from wbia_orientation.core.function import validate from wbia_orientation.dataset import custom_transforms from wbia_orientation.dataset.animal import AnimalDataset from", "model, loss_func, cfg.DATASET.TEST_SET, final_output_dir, ) logger.info( 'Final results. Accuracy@{} on {} {} is", "test_loader, test_dataset = _make_test_data(cfg, logger) # Evaluate on validation set perf_indicator = validate(", "import AnimalDataset from wbia_orientation.train import parse_args, _make_model, _model_to_gpu, _make_loss from wbia_orientation.utils.utils import create_logger", "Initialise data loaders test_loader, test_dataset = _make_test_data(cfg, logger) # Evaluate on validation set", "_make_test_data(cfg, logger) # Evaluate on validation set perf_indicator = validate( cfg, test_loader, test_dataset,", "wbia_orientation.utils.utils import create_logger def _make_test_data(cfg, logger): \"\"\"Initialise train and validation loaders as per", "cfg.TEST.MODEL_FILE else: model_state_file = os.path.join(final_output_dir, 'best.pth') logger.info('=> loading model from {}'.format(model_state_file)) if cfg.USE_GPU:", "test dataset test_dataset: test dataset object \"\"\" test_transform = transforms.Compose( [ custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE),", "0.456, 0.406], std=[0.229, 0.224, 0.225], input_size=cfg.MODEL.IMSIZE[0], ), ] ) test_dataset = AnimalDataset(cfg, cfg.DATASET.TEST_SET,", "\"\"\"Initialise train and validation loaders as per config parameters Input: cfg: config object", "cfg # NOQA from wbia_orientation.config.default import update_config from wbia_orientation.core.function import validate from wbia_orientation.dataset", "_C as cfg # NOQA from wbia_orientation.config.default import update_config from wbia_orientation.core.function import validate", "= _model_to_gpu(model, cfg) # Initialise losses loss_func = _make_loss(cfg) # Initialise data loaders", "torch.nn.parallel import torch.backends.cudnn as 
cudnn import torch.optim import torch.utils.data import torch.utils.data.distributed import torchvision.transforms", "num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY, ) return test_loader, test_dataset def main(): args = parse_args() update_config(cfg, args)", "from wbia_orientation.utils.utils import create_logger def _make_test_data(cfg, logger): \"\"\"Initialise train and validation loaders as", "custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(), custom_transforms.Normalize( mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], input_size=cfg.MODEL.IMSIZE[0], ), ] )", "AnimalDataset from wbia_orientation.train import parse_args, _make_model, _model_to_gpu, _make_loss from wbia_orientation.utils.utils import create_logger def", "torch.utils.data import torch.utils.data.distributed import torchvision.transforms as transforms from wbia_orientation.config.default import _C as cfg", "= create_logger(cfg, args.cfg, 'test', False) logger.info(pprint.pformat(args)) logger.info(cfg) # cudnn related setting cudnn.benchmark =", "parse_args, _make_model, _model_to_gpu, _make_loss from wbia_orientation.utils.utils import create_logger def _make_test_data(cfg, logger): \"\"\"Initialise train", "# cudnn related setting cudnn.benchmark = cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED", "return test_loader, test_dataset def main(): args = parse_args() update_config(cfg, args) logger, final_output_dir =", "Loader over test dataset test_dataset: test dataset object \"\"\" test_transform = transforms.Compose( [", "wbia_orientation.dataset.animal import AnimalDataset from wbia_orientation.train import parse_args, _make_model, _model_to_gpu, _make_loss from wbia_orientation.utils.utils import", "model_state_file = cfg.TEST.MODEL_FILE else: model_state_file = os.path.join(final_output_dir, 'best.pth') logger.info('=> loading model from {}'.format(model_state_file))", 
"test_dataset: test dataset object \"\"\" test_transform = transforms.Compose( [ custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(), custom_transforms.Normalize(", "[ custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(), custom_transforms.Normalize( mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], input_size=cfg.MODEL.IMSIZE[0], ),", "from wbia_orientation.train import parse_args, _make_model, _model_to_gpu, _make_loss from wbia_orientation.utils.utils import create_logger def _make_test_data(cfg,", "model.load_state_dict( torch.load(model_state_file, map_location=torch.device('cpu')) ) model = _model_to_gpu(model, cfg) # Initialise losses loss_func =", "config parameters Input: cfg: config object logger: logging object Returns: test_loader: Data Loader", "train and validation loaders as per config parameters Input: cfg: config object logger:", "_make_model, _model_to_gpu, _make_loss from wbia_orientation.utils.utils import create_logger def _make_test_data(cfg, logger): \"\"\"Initialise train and", "custom_transforms.Normalize( mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], input_size=cfg.MODEL.IMSIZE[0], ), ] ) test_dataset =", "cfg.CUDNN.ENABLED # Initialise models model = _make_model(cfg, is_train=False) # Load model weights if", "create_logger def _make_test_data(cfg, logger): \"\"\"Initialise train and validation loaders as per config parameters", "import torch.optim import torch.utils.data import torch.utils.data.distributed import torchvision.transforms as transforms from wbia_orientation.config.default import", "and validation loaders as per config parameters Input: cfg: config object logger: logging", "import create_logger def _make_test_data(cfg, logger): \"\"\"Initialise train and validation loaders as per config", "= cfg.CUDNN.DETERMINISTIC torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED # 
Initialise models model = _make_model(cfg, is_train=False) #", ") logger.info( 'Final results. Accuracy@{} on {} {} is {:.2%}'.format( cfg.TEST.THETA_THR, cfg.DATASET.NAME, cfg.DATASET.TEST_SET,", "test_transform) test_loader = torch.utils.data.DataLoader( test_dataset, batch_size=cfg.TEST.BS * len(cfg.GPUS), shuffle=False, num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY, ) return", "logger, final_output_dir = create_logger(cfg, args.cfg, 'test', False) logger.info(pprint.pformat(args)) logger.info(cfg) # cudnn related setting", "# Initialise models model = _make_model(cfg, is_train=False) # Load model weights if cfg.TEST.MODEL_FILE:", "loaders test_loader, test_dataset = _make_test_data(cfg, logger) # Evaluate on validation set perf_indicator =", "model weights if cfg.TEST.MODEL_FILE: model_state_file = cfg.TEST.MODEL_FILE else: model_state_file = os.path.join(final_output_dir, 'best.pth') logger.info('=>", "else: model_state_file = os.path.join(final_output_dir, 'best.pth') logger.info('=> loading model from {}'.format(model_state_file)) if cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file))", "import torch import torch.nn.parallel import torch.backends.cudnn as cudnn import torch.optim import torch.utils.data import", "AnimalDataset(cfg, cfg.DATASET.TEST_SET, test_transform) test_loader = torch.utils.data.DataLoader( test_dataset, batch_size=cfg.TEST.BS * len(cfg.GPUS), shuffle=False, num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY,", "= validate( cfg, test_loader, test_dataset, model, loss_func, cfg.DATASET.TEST_SET, final_output_dir, ) logger.info( 'Final results.", "args.cfg, 'test', False) logger.info(pprint.pformat(args)) logger.info(cfg) # cudnn related setting cudnn.benchmark = cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic", "torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED # Initialise models model = _make_model(cfg, is_train=False)", "torch.utils.data.distributed 
import torchvision.transforms as transforms from wbia_orientation.config.default import _C as cfg # NOQA", "test_dataset def main(): args = parse_args() update_config(cfg, args) logger, final_output_dir = create_logger(cfg, args.cfg,", "Initialise models model = _make_model(cfg, is_train=False) # Load model weights if cfg.TEST.MODEL_FILE: model_state_file", "custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(), custom_transforms.Normalize( mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], input_size=cfg.MODEL.IMSIZE[0], ), ]", "args = parse_args() update_config(cfg, args) logger, final_output_dir = create_logger(cfg, args.cfg, 'test', False) logger.info(pprint.pformat(args))", "final_output_dir = create_logger(cfg, args.cfg, 'test', False) logger.info(pprint.pformat(args)) logger.info(cfg) # cudnn related setting cudnn.benchmark", "= cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED # Initialise models model =", "def main(): args = parse_args() update_config(cfg, args) logger, final_output_dir = create_logger(cfg, args.cfg, 'test',", "= _make_loss(cfg) # Initialise data loaders test_loader, test_dataset = _make_test_data(cfg, logger) # Evaluate", "import update_config from wbia_orientation.core.function import validate from wbia_orientation.dataset import custom_transforms from wbia_orientation.dataset.animal import", "main(): args = parse_args() update_config(cfg, args) logger, final_output_dir = create_logger(cfg, args.cfg, 'test', False)", "= torch.utils.data.DataLoader( test_dataset, batch_size=cfg.TEST.BS * len(cfg.GPUS), shuffle=False, num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY, ) return test_loader, test_dataset", "torch.backends.cudnn as cudnn import torch.optim import torch.utils.data import torch.utils.data.distributed import torchvision.transforms as transforms", "cfg.CUDNN.DETERMINISTIC 
torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED # Initialise models model = _make_model(cfg, is_train=False) # Load", "test_dataset = AnimalDataset(cfg, cfg.DATASET.TEST_SET, test_transform) test_loader = torch.utils.data.DataLoader( test_dataset, batch_size=cfg.TEST.BS * len(cfg.GPUS), shuffle=False,", "as per config parameters Input: cfg: config object logger: logging object Returns: test_loader:", "as cfg # NOQA from wbia_orientation.config.default import update_config from wbia_orientation.core.function import validate from", "mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], input_size=cfg.MODEL.IMSIZE[0], ), ] ) test_dataset = AnimalDataset(cfg,", "0.406], std=[0.229, 0.224, 0.225], input_size=cfg.MODEL.IMSIZE[0], ), ] ) test_dataset = AnimalDataset(cfg, cfg.DATASET.TEST_SET, test_transform)", "shuffle=False, num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY, ) return test_loader, test_dataset def main(): args = parse_args() update_config(cfg,", "transforms.Compose( [ custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(), custom_transforms.Normalize( mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], input_size=cfg.MODEL.IMSIZE[0],", "pin_memory=cfg.PIN_MEMORY, ) return test_loader, test_dataset def main(): args = parse_args() update_config(cfg, args) logger,", "object \"\"\" test_transform = transforms.Compose( [ custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(), custom_transforms.Normalize( mean=[0.485, 0.456, 0.406],", "model_state_file = os.path.join(final_output_dir, 'best.pth') logger.info('=> loading model from {}'.format(model_state_file)) if cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file)) else:", "test_dataset, model, loss_func, cfg.DATASET.TEST_SET, final_output_dir, ) logger.info( 'Final results. 
Accuracy@{} on {} {}", "on validation set perf_indicator = validate( cfg, test_loader, test_dataset, model, loss_func, cfg.DATASET.TEST_SET, final_output_dir,", "cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file)) else: model.load_state_dict( torch.load(model_state_file, map_location=torch.device('cpu')) ) model = _model_to_gpu(model, cfg) # Initialise", "Returns: test_loader: Data Loader over test dataset test_dataset: test dataset object \"\"\" test_transform", "= _make_test_data(cfg, logger) # Evaluate on validation set perf_indicator = validate( cfg, test_loader,", "cfg, test_loader, test_dataset, model, loss_func, cfg.DATASET.TEST_SET, final_output_dir, ) logger.info( 'Final results. Accuracy@{} on", "logger): \"\"\"Initialise train and validation loaders as per config parameters Input: cfg: config", "torch.utils.data.DataLoader( test_dataset, batch_size=cfg.TEST.BS * len(cfg.GPUS), shuffle=False, num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY, ) return test_loader, test_dataset def", "import parse_args, _make_model, _model_to_gpu, _make_loss from wbia_orientation.utils.utils import create_logger def _make_test_data(cfg, logger): \"\"\"Initialise", "losses loss_func = _make_loss(cfg) # Initialise data loaders test_loader, test_dataset = _make_test_data(cfg, logger)", "setting cudnn.benchmark = cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED # Initialise models", "related setting cudnn.benchmark = cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED # Initialise", "import pprint import torch import torch.nn.parallel import torch.backends.cudnn as cudnn import torch.optim import", ") return test_loader, test_dataset def main(): args = parse_args() update_config(cfg, args) logger, final_output_dir", "cfg.DATASET.TEST_SET, test_transform) test_loader = torch.utils.data.DataLoader( 
test_dataset, batch_size=cfg.TEST.BS * len(cfg.GPUS), shuffle=False, num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY, )", "from wbia_orientation.config.default import update_config from wbia_orientation.core.function import validate from wbia_orientation.dataset import custom_transforms from", "loss_func, cfg.DATASET.TEST_SET, final_output_dir, ) logger.info( 'Final results. Accuracy@{} on {} {} is {:.2%}'.format(", "import os import pprint import torch import torch.nn.parallel import torch.backends.cudnn as cudnn import", "create_logger(cfg, args.cfg, 'test', False) logger.info(pprint.pformat(args)) logger.info(cfg) # cudnn related setting cudnn.benchmark = cfg.CUDNN.BENCHMARK", "'Final results. Accuracy@{} on {} {} is {:.2%}'.format( cfg.TEST.THETA_THR, cfg.DATASET.NAME, cfg.DATASET.TEST_SET, perf_indicator )", "wbia_orientation.core.function import validate from wbia_orientation.dataset import custom_transforms from wbia_orientation.dataset.animal import AnimalDataset from wbia_orientation.train", "Accuracy@{} on {} {} is {:.2%}'.format( cfg.TEST.THETA_THR, cfg.DATASET.NAME, cfg.DATASET.TEST_SET, perf_indicator ) ) if", "_make_test_data(cfg, logger): \"\"\"Initialise train and validation loaders as per config parameters Input: cfg:", "test_loader: Data Loader over test dataset test_dataset: test dataset object \"\"\" test_transform =", "Input: cfg: config object logger: logging object Returns: test_loader: Data Loader over test", "cfg.TEST.MODEL_FILE: model_state_file = cfg.TEST.MODEL_FILE else: model_state_file = os.path.join(final_output_dir, 'best.pth') logger.info('=> loading model from", "torch.optim import torch.utils.data import torch.utils.data.distributed import torchvision.transforms as transforms from wbia_orientation.config.default import _C", "parameters Input: cfg: config object logger: logging object Returns: test_loader: Data Loader over", "validation loaders as per config parameters Input: cfg: config object logger: logging object", "models 
model = _make_model(cfg, is_train=False) # Load model weights if cfg.TEST.MODEL_FILE: model_state_file =", "model = _model_to_gpu(model, cfg) # Initialise losses loss_func = _make_loss(cfg) # Initialise data", "{}'.format(model_state_file)) if cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file)) else: model.load_state_dict( torch.load(model_state_file, map_location=torch.device('cpu')) ) model = _model_to_gpu(model, cfg)", "import torch.utils.data import torch.utils.data.distributed import torchvision.transforms as transforms from wbia_orientation.config.default import _C as", "test_loader = torch.utils.data.DataLoader( test_dataset, batch_size=cfg.TEST.BS * len(cfg.GPUS), shuffle=False, num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY, ) return test_loader,", ") model = _model_to_gpu(model, cfg) # Initialise losses loss_func = _make_loss(cfg) # Initialise", "on {} {} is {:.2%}'.format( cfg.TEST.THETA_THR, cfg.DATASET.NAME, cfg.DATASET.TEST_SET, perf_indicator ) ) if __name__", "as transforms from wbia_orientation.config.default import _C as cfg # NOQA from wbia_orientation.config.default import", "(<EMAIL>) import os import pprint import torch import torch.nn.parallel import torch.backends.cudnn as cudnn", "test dataset object \"\"\" test_transform = transforms.Compose( [ custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(), custom_transforms.Normalize( mean=[0.485,", "Initialise losses loss_func = _make_loss(cfg) # Initialise data loaders test_loader, test_dataset = _make_test_data(cfg,", "Evaluate on validation set perf_indicator = validate( cfg, test_loader, test_dataset, model, loss_func, cfg.DATASET.TEST_SET,", "= cfg.CUDNN.ENABLED # Initialise models model = _make_model(cfg, is_train=False) # Load model weights", "validate from wbia_orientation.dataset import custom_transforms from wbia_orientation.dataset.animal import AnimalDataset from wbia_orientation.train import 
parse_args,", "from wbia_orientation.dataset.animal import AnimalDataset from wbia_orientation.train import parse_args, _make_model, _model_to_gpu, _make_loss from wbia_orientation.utils.utils", "\"\"\" test_transform = transforms.Compose( [ custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(), custom_transforms.Normalize( mean=[0.485, 0.456, 0.406], std=[0.229,", "_model_to_gpu(model, cfg) # Initialise losses loss_func = _make_loss(cfg) # Initialise data loaders test_loader,", "if cfg.TEST.MODEL_FILE: model_state_file = cfg.TEST.MODEL_FILE else: model_state_file = os.path.join(final_output_dir, 'best.pth') logger.info('=> loading model", "cudnn related setting cudnn.benchmark = cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED #", "torch.load(model_state_file, map_location=torch.device('cpu')) ) model = _model_to_gpu(model, cfg) # Initialise losses loss_func = _make_loss(cfg)", "per config parameters Input: cfg: config object logger: logging object Returns: test_loader: Data", "logger.info( 'Final results. 
Accuracy@{} on {} {} is {:.2%}'.format( cfg.TEST.THETA_THR, cfg.DATASET.NAME, cfg.DATASET.TEST_SET, perf_indicator", "config object logger: logging object Returns: test_loader: Data Loader over test dataset test_dataset:", "batch_size=cfg.TEST.BS * len(cfg.GPUS), shuffle=False, num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY, ) return test_loader, test_dataset def main(): args", "logger.info(cfg) # cudnn related setting cudnn.benchmark = cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC torch.backends.cudnn.enabled =", "cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC torch.backends.cudnn.enabled = cfg.CUDNN.ENABLED # Initialise models model = _make_model(cfg,", "'best.pth') logger.info('=> loading model from {}'.format(model_state_file)) if cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file)) else: model.load_state_dict( torch.load(model_state_file, map_location=torch.device('cpu'))", "coding: utf-8 -*- # Written by <NAME> (<EMAIL>) import os import pprint import", "custom_transforms from wbia_orientation.dataset.animal import AnimalDataset from wbia_orientation.train import parse_args, _make_model, _model_to_gpu, _make_loss from", "test_dataset = _make_test_data(cfg, logger) # Evaluate on validation set perf_indicator = validate( cfg,", "loading model from {}'.format(model_state_file)) if cfg.USE_GPU: model.load_state_dict(torch.load(model_state_file)) else: model.load_state_dict( torch.load(model_state_file, map_location=torch.device('cpu')) ) model", "logger: logging object Returns: test_loader: Data Loader over test dataset test_dataset: test dataset", "input_size=cfg.MODEL.IMSIZE[0], ), ] ) test_dataset = AnimalDataset(cfg, cfg.DATASET.TEST_SET, test_transform) test_loader = torch.utils.data.DataLoader( test_dataset,", "import torch.nn.parallel import torch.backends.cudnn as cudnn import torch.optim import torch.utils.data import torch.utils.data.distributed import", "by <NAME> 
(<EMAIL>) import os import pprint import torch import torch.nn.parallel import torch.backends.cudnn", "_make_model(cfg, is_train=False) # Load model weights if cfg.TEST.MODEL_FILE: model_state_file = cfg.TEST.MODEL_FILE else: model_state_file", "Load model weights if cfg.TEST.MODEL_FILE: model_state_file = cfg.TEST.MODEL_FILE else: model_state_file = os.path.join(final_output_dir, 'best.pth')", "final_output_dir, ) logger.info( 'Final results. Accuracy@{} on {} {} is {:.2%}'.format( cfg.TEST.THETA_THR, cfg.DATASET.NAME,", "Written by <NAME> (<EMAIL>) import os import pprint import torch import torch.nn.parallel import", "dataset object \"\"\" test_transform = transforms.Compose( [ custom_transforms.CropObjectAlignedArea(noise=0.0), custom_transforms.Resize(cfg.MODEL.IMSIZE), custom_transforms.ToTensor(), custom_transforms.Normalize( mean=[0.485, 0.456,", "= parse_args() update_config(cfg, args) logger, final_output_dir = create_logger(cfg, args.cfg, 'test', False) logger.info(pprint.pformat(args)) logger.info(cfg)", "logger) # Evaluate on validation set perf_indicator = validate( cfg, test_loader, test_dataset, model,", "utf-8 -*- # Written by <NAME> (<EMAIL>) import os import pprint import torch", "-*- # Written by <NAME> (<EMAIL>) import os import pprint import torch import", "update_config from wbia_orientation.core.function import validate from wbia_orientation.dataset import custom_transforms from wbia_orientation.dataset.animal import AnimalDataset", "set perf_indicator = validate( cfg, test_loader, test_dataset, model, loss_func, cfg.DATASET.TEST_SET, final_output_dir, ) logger.info(", "test_dataset, batch_size=cfg.TEST.BS * len(cfg.GPUS), shuffle=False, num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY, ) return test_loader, test_dataset def main():", "over test dataset test_dataset: test dataset object \"\"\" test_transform = transforms.Compose( [ custom_transforms.CropObjectAlignedArea(noise=0.0),", "0.225], input_size=cfg.MODEL.IMSIZE[0], ), ] ) 
test_dataset = AnimalDataset(cfg, cfg.DATASET.TEST_SET, test_transform) test_loader = torch.utils.data.DataLoader(", "'test', False) logger.info(pprint.pformat(args)) logger.info(cfg) # cudnn related setting cudnn.benchmark = cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic =", "test_loader, test_dataset def main(): args = parse_args() update_config(cfg, args) logger, final_output_dir = create_logger(cfg,", "0.224, 0.225], input_size=cfg.MODEL.IMSIZE[0], ), ] ) test_dataset = AnimalDataset(cfg, cfg.DATASET.TEST_SET, test_transform) test_loader =", "os import pprint import torch import torch.nn.parallel import torch.backends.cudnn as cudnn import torch.optim", "import custom_transforms from wbia_orientation.dataset.animal import AnimalDataset from wbia_orientation.train import parse_args, _make_model, _model_to_gpu, _make_loss", "= AnimalDataset(cfg, cfg.DATASET.TEST_SET, test_transform) test_loader = torch.utils.data.DataLoader( test_dataset, batch_size=cfg.TEST.BS * len(cfg.GPUS), shuffle=False, num_workers=cfg.WORKERS,", "_model_to_gpu, _make_loss from wbia_orientation.utils.utils import create_logger def _make_test_data(cfg, logger): \"\"\"Initialise train and validation", "<NAME> (<EMAIL>) import os import pprint import torch import torch.nn.parallel import torch.backends.cudnn as", "pprint import torch import torch.nn.parallel import torch.backends.cudnn as cudnn import torch.optim import torch.utils.data", "wbia_orientation.train import parse_args, _make_model, _model_to_gpu, _make_loss from wbia_orientation.utils.utils import create_logger def _make_test_data(cfg, logger):", "custom_transforms.ToTensor(), custom_transforms.Normalize( mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], input_size=cfg.MODEL.IMSIZE[0], ), ] ) test_dataset", "from wbia_orientation.config.default import _C as cfg # NOQA from wbia_orientation.config.default import update_config from", "wbia_orientation.dataset import custom_transforms from wbia_orientation.dataset.animal 
import AnimalDataset from wbia_orientation.train import parse_args, _make_model, _model_to_gpu,", "parse_args() update_config(cfg, args) logger, final_output_dir = create_logger(cfg, args.cfg, 'test', False) logger.info(pprint.pformat(args)) logger.info(cfg) #", "as cudnn import torch.optim import torch.utils.data import torch.utils.data.distributed import torchvision.transforms as transforms from", "* len(cfg.GPUS), shuffle=False, num_workers=cfg.WORKERS, pin_memory=cfg.PIN_MEMORY, ) return test_loader, test_dataset def main(): args =", "logger.info(pprint.pformat(args)) logger.info(cfg) # cudnn related setting cudnn.benchmark = cfg.CUDNN.BENCHMARK torch.backends.cudnn.deterministic = cfg.CUDNN.DETERMINISTIC torch.backends.cudnn.enabled", "{} is {:.2%}'.format( cfg.TEST.THETA_THR, cfg.DATASET.NAME, cfg.DATASET.TEST_SET, perf_indicator ) ) if __name__ == '__main__':", "Data Loader over test dataset test_dataset: test dataset object \"\"\" test_transform = transforms.Compose(", "# NOQA from wbia_orientation.config.default import update_config from wbia_orientation.core.function import validate from wbia_orientation.dataset import", "from wbia_orientation.dataset import custom_transforms from wbia_orientation.dataset.animal import AnimalDataset from wbia_orientation.train import parse_args, _make_model," ]
[ "@staticmethod def legal_moves(): return [[x, 1] for x in range(-1, 2)] + [[0,", "class pawn(chess_piece): def avaliable_moves(self): raise NotImplementedError @staticmethod def legal_moves(): return [[x, 1] for", "avaliable_moves(self): raise NotImplementedError @staticmethod def legal_moves(): return [[x, 1] for x in range(-1,", "from ._piece import chess_piece class pawn(chess_piece): def avaliable_moves(self): raise NotImplementedError @staticmethod def legal_moves():", "._piece import chess_piece class pawn(chess_piece): def avaliable_moves(self): raise NotImplementedError @staticmethod def legal_moves(): return", "raise NotImplementedError @staticmethod def legal_moves(): return [[x, 1] for x in range(-1, 2)]", "NotImplementedError @staticmethod def legal_moves(): return [[x, 1] for x in range(-1, 2)] +", "def legal_moves(): return [[x, 1] for x in range(-1, 2)] + [[0, 2]]", "import chess_piece class pawn(chess_piece): def avaliable_moves(self): raise NotImplementedError @staticmethod def legal_moves(): return [[x,", "<gh_stars>0 from ._piece import chess_piece class pawn(chess_piece): def avaliable_moves(self): raise NotImplementedError @staticmethod def", "def avaliable_moves(self): raise NotImplementedError @staticmethod def legal_moves(): return [[x, 1] for x in", "pawn(chess_piece): def avaliable_moves(self): raise NotImplementedError @staticmethod def legal_moves(): return [[x, 1] for x", "chess_piece class pawn(chess_piece): def avaliable_moves(self): raise NotImplementedError @staticmethod def legal_moves(): return [[x, 1]" ]
[ "django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ (\"resources\", \"0083_auto_20200804_1634\"),", "), migrations.AlterField( model_name=\"authorization\", name=\"stream\", field=models.ForeignKey( blank=True, help_text=\"The stream for which the editor is", "import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ (\"resources\", \"0083_auto_20200804_1634\"), (\"users\",", "\"0059_auto_20200706_1659\"), ] operations = [ migrations.AlterField( model_name=\"authorization\", name=\"partners\", field=models.ManyToManyField( blank=True, help_text=\"The partner(s) for", "name=\"partners\", field=models.ManyToManyField( blank=True, help_text=\"The partner(s) for which the editor is authorized.\", limit_choices_to=models.Q(status__in=[0, 2]),", "\"0083_auto_20200804_1634\"), (\"users\", \"0059_auto_20200706_1659\"), ] operations = [ migrations.AlterField( model_name=\"authorization\", name=\"partners\", field=models.ManyToManyField( blank=True, help_text=\"The", "(\"users\", \"0059_auto_20200706_1659\"), ] operations = [ migrations.AlterField( model_name=\"authorization\", name=\"partners\", field=models.ManyToManyField( blank=True, help_text=\"The partner(s)", "name=\"stream\", field=models.ForeignKey( blank=True, help_text=\"The stream for which the editor is authorized.\", limit_choices_to=models.Q(partner__status__in=[0, 2]),", "migrations.AlterField( model_name=\"authorization\", name=\"partners\", field=models.ManyToManyField( blank=True, help_text=\"The partner(s) for which the editor is authorized.\",", "[ migrations.AlterField( model_name=\"authorization\", name=\"partners\", field=models.ManyToManyField( blank=True, help_text=\"The partner(s) for which the editor is", "= [ migrations.AlterField( model_name=\"authorization\", name=\"partners\", field=models.ManyToManyField( blank=True, help_text=\"The partner(s) for which the 
editor", "3.0.9 on 2020-08-04 16:34 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "2020-08-04 16:34 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies =", "import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ (\"resources\", \"0083_auto_20200804_1634\"), (\"users\", \"0059_auto_20200706_1659\"), ] operations", "django.db.models.deletion class Migration(migrations.Migration): dependencies = [ (\"resources\", \"0083_auto_20200804_1634\"), (\"users\", \"0059_auto_20200706_1659\"), ] operations =", "the editor is authorized.\", limit_choices_to=models.Q(status__in=[0, 2]), to=\"resources.Partner\", ), ), migrations.AlterField( model_name=\"authorization\", name=\"stream\", field=models.ForeignKey(", "blank=True, help_text=\"The partner(s) for which the editor is authorized.\", limit_choices_to=models.Q(status__in=[0, 2]), to=\"resources.Partner\", ),", "blank=True, help_text=\"The stream for which the editor is authorized.\", limit_choices_to=models.Q(partner__status__in=[0, 2]), null=True, on_delete=django.db.models.deletion.SET_NULL,", "Migration(migrations.Migration): dependencies = [ (\"resources\", \"0083_auto_20200804_1634\"), (\"users\", \"0059_auto_20200706_1659\"), ] operations = [ migrations.AlterField(", "# Generated by Django 3.0.9 on 2020-08-04 16:34 from django.db import migrations, models", "to=\"resources.Partner\", ), ), migrations.AlterField( model_name=\"authorization\", name=\"stream\", field=models.ForeignKey( blank=True, help_text=\"The stream for which the", "field=models.ForeignKey( blank=True, help_text=\"The stream for which the editor is authorized.\", limit_choices_to=models.Q(partner__status__in=[0, 2]), null=True,", "help_text=\"The partner(s) for which the editor is authorized.\", limit_choices_to=models.Q(status__in=[0, 2]), to=\"resources.Partner\", ), ),", 
"which the editor is authorized.\", limit_choices_to=models.Q(partner__status__in=[0, 2]), null=True, on_delete=django.db.models.deletion.SET_NULL, to=\"resources.Stream\", ), ), ]", "for which the editor is authorized.\", limit_choices_to=models.Q(partner__status__in=[0, 2]), null=True, on_delete=django.db.models.deletion.SET_NULL, to=\"resources.Stream\", ), ),", "on 2020-08-04 16:34 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies", "model_name=\"authorization\", name=\"stream\", field=models.ForeignKey( blank=True, help_text=\"The stream for which the editor is authorized.\", limit_choices_to=models.Q(partner__status__in=[0,", "Django 3.0.9 on 2020-08-04 16:34 from django.db import migrations, models import django.db.models.deletion class", "authorized.\", limit_choices_to=models.Q(status__in=[0, 2]), to=\"resources.Partner\", ), ), migrations.AlterField( model_name=\"authorization\", name=\"stream\", field=models.ForeignKey( blank=True, help_text=\"The stream", "2]), to=\"resources.Partner\", ), ), migrations.AlterField( model_name=\"authorization\", name=\"stream\", field=models.ForeignKey( blank=True, help_text=\"The stream for which", "help_text=\"The stream for which the editor is authorized.\", limit_choices_to=models.Q(partner__status__in=[0, 2]), null=True, on_delete=django.db.models.deletion.SET_NULL, to=\"resources.Stream\",", "stream for which the editor is authorized.\", limit_choices_to=models.Q(partner__status__in=[0, 2]), null=True, on_delete=django.db.models.deletion.SET_NULL, to=\"resources.Stream\", ),", "by Django 3.0.9 on 2020-08-04 16:34 from django.db import migrations, models import django.db.models.deletion", "model_name=\"authorization\", name=\"partners\", field=models.ManyToManyField( blank=True, help_text=\"The partner(s) for which the editor is authorized.\", limit_choices_to=models.Q(status__in=[0,", "for which the editor is authorized.\", 
limit_choices_to=models.Q(status__in=[0, 2]), to=\"resources.Partner\", ), ), migrations.AlterField( model_name=\"authorization\",", "which the editor is authorized.\", limit_choices_to=models.Q(status__in=[0, 2]), to=\"resources.Partner\", ), ), migrations.AlterField( model_name=\"authorization\", name=\"stream\",", "from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ (\"resources\",", "editor is authorized.\", limit_choices_to=models.Q(status__in=[0, 2]), to=\"resources.Partner\", ), ), migrations.AlterField( model_name=\"authorization\", name=\"stream\", field=models.ForeignKey( blank=True,", "] operations = [ migrations.AlterField( model_name=\"authorization\", name=\"partners\", field=models.ManyToManyField( blank=True, help_text=\"The partner(s) for which", "operations = [ migrations.AlterField( model_name=\"authorization\", name=\"partners\", field=models.ManyToManyField( blank=True, help_text=\"The partner(s) for which the", "16:34 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [", "field=models.ManyToManyField( blank=True, help_text=\"The partner(s) for which the editor is authorized.\", limit_choices_to=models.Q(status__in=[0, 2]), to=\"resources.Partner\",", "migrations.AlterField( model_name=\"authorization\", name=\"stream\", field=models.ForeignKey( blank=True, help_text=\"The stream for which the editor is authorized.\",", "(\"resources\", \"0083_auto_20200804_1634\"), (\"users\", \"0059_auto_20200706_1659\"), ] operations = [ migrations.AlterField( model_name=\"authorization\", name=\"partners\", field=models.ManyToManyField( blank=True,", "migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ (\"resources\", \"0083_auto_20200804_1634\"), (\"users\", \"0059_auto_20200706_1659\"),", "is authorized.\", limit_choices_to=models.Q(status__in=[0, 
2]), to=\"resources.Partner\", ), ), migrations.AlterField( model_name=\"authorization\", name=\"stream\", field=models.ForeignKey( blank=True, help_text=\"The", "), ), migrations.AlterField( model_name=\"authorization\", name=\"stream\", field=models.ForeignKey( blank=True, help_text=\"The stream for which the editor", "Generated by Django 3.0.9 on 2020-08-04 16:34 from django.db import migrations, models import", "class Migration(migrations.Migration): dependencies = [ (\"resources\", \"0083_auto_20200804_1634\"), (\"users\", \"0059_auto_20200706_1659\"), ] operations = [", "dependencies = [ (\"resources\", \"0083_auto_20200804_1634\"), (\"users\", \"0059_auto_20200706_1659\"), ] operations = [ migrations.AlterField( model_name=\"authorization\",", "models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ (\"resources\", \"0083_auto_20200804_1634\"), (\"users\", \"0059_auto_20200706_1659\"), ]", "partner(s) for which the editor is authorized.\", limit_choices_to=models.Q(status__in=[0, 2]), to=\"resources.Partner\", ), ), migrations.AlterField(", "[ (\"resources\", \"0083_auto_20200804_1634\"), (\"users\", \"0059_auto_20200706_1659\"), ] operations = [ migrations.AlterField( model_name=\"authorization\", name=\"partners\", field=models.ManyToManyField(", "limit_choices_to=models.Q(status__in=[0, 2]), to=\"resources.Partner\", ), ), migrations.AlterField( model_name=\"authorization\", name=\"stream\", field=models.ForeignKey( blank=True, help_text=\"The stream for", "= [ (\"resources\", \"0083_auto_20200804_1634\"), (\"users\", \"0059_auto_20200706_1659\"), ] operations = [ migrations.AlterField( model_name=\"authorization\", name=\"partners\"," ]
[ "1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1,", "= Solution(value=4) # 1 3 0 2 4 test_s1 = Solution(value=2.1) test_s2 =", "1 3 0 2 4 test_s1 = Solution(value=2.1) test_s2 = Solution(value=4.5) test_s3 =", "in range(len(self.__subset)): if x[i] == 1: countw += self.__weight[i] for j in range(len(self.__subset[i])):", "s0, s2, s4] sracos.strategy_wr(iset2, test_s1, 'neg') assert len(iset2) == 5 and iset2[4].get_value() ==", "= 0 countw = 0 for i in range(len(self.__weight)): allweight += self.__weight[i] dims", "dim_size = 20 dim_regs = [[0, 1]] * dim_size dim_tys = [False] *", "test_sracos_performance(self): # continuous dim = 100 # dimension objective = Objective(ackley, Dimension(dim, [[-1,", "dim objective = Objective(ackley, Dimension2(dim_list)) parameter = Parameter(budget=100 * dim, seed=77) solution =", "1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0,", "0, 1, 1, 0]) self.__subset.append([0, 0, 1, 1, 1, 1, 0, 1, 1,", "if x[i] == 1: countw += self.__weight[i] for j in range(len(self.__subset[i])): if self.__subset[i][j]", "Parameter(budget=100 * dim, seed=77) solution = Opt.min(objective, parameter) assert solution.get_value() < 0.2 dim", "0, 0, 0, 1, 1, 0]) self.__subset.append([0, 0, 1, 1, 1, 1, 0,", "0, 0, 1, 0, 1, 0, 1, 0, 0]) self.__subset.append([0, 0, 1, 0,", "parameter)[0] assert solution.get_value() < 0.2 dim = 500 objective = Objective(ackley, Dimension(dim, [[-1,", "1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0,", "2 # discrete # setcover problem = SetCover() dim = problem.dim # the", "0, 0, 0, 1, 0]) self.__subset.append([1, 1, 1, 0, 1, 1, 0, 0,", "objective function parameter = Parameter(budget=budget, seed=777) sol = Opt.min(objective, parameter) assert sol.get_value() <", "1, 0, 0, 0, 0, 1, 0]) self.__subset.append([0, 1, 1, 1, 0, 0,", "class \"\"\" def __init__(self): self.__weight = [0.8356, 0.5495, 0.4444, 0.7269, 0.9960, 0.6633, 0.5062,", "= Opt.min(objective, parameter) sol.print_solution() assert solution.get_value() < 2 # discrete # setcover problem", 
"1e-6) dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) parameter = Parameter(budget=100", "ave_cos = sum([np.cos(2.0 * np.pi * (i - bias)) for i in x])", "range(len(self.__weight)): allweight += self.__weight[i] dims = [] for i in range(len(self.__subset[0])): dims.append(False) for", "0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0,", "0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1,", "value def sphere_discrete_order(solution): \"\"\" Sphere function for integer continuous optimization \"\"\" x =", "s1 = Solution(x=[2.2, 2.2, 2.2], value=1) s2 = Solution(x=[3, 3, 3], value=2) iset", "sracos.binary_search(set, test_s3, 0, 4) == 0 assert sracos.binary_search(set, test_s4, 0, 4) == 3", "value class SetCover: \"\"\" set cover problem for discrete optimization this problem has", "iset[4].get_value() == 3 iset2 = [s1, s3, s0, s2, s4] sracos.strategy_wr(iset2, test_s1, 'neg')", "0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0,", "self.__subset.append([1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1,", "1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0,", "2.1 or iset[2].get_value() == 2.1) def test_sracos_strategy_lm(self): s0 = Solution(x=[1, 1, 1], value=0)", "a Solution object :return: the value of f(x) \"\"\" x = solution.get_x() allweight", "= Objective(sphere_discrete_order, dim) # form up the objective function parameter = Parameter(budget=10000) sol", "1, 1, 1, 1, 1, 0, 0, 0, 0, 1]) self.__subset.append([0, 1, 1,", "dims[j] = True full = True for i in range(len(dims)): if dims[i] is", "1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0,", "1, 0, 0, 0, 1, 0, 1, 1, 0]) self.__subset.append([1, 0, 0, 0,", "np def ackley(solution): \"\"\" Ackley function for continuous optimization \"\"\" x = solution.get_x()", "dimension objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] * dim)) #", "solution): \"\"\" Objective function. 
:param solution: a Solution object :return: the value of", "1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0,", "1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1,", "test_sracos_distance(self): a = [2, 4] b = [5, 8] assert SRacos.distance(a, b) ==", "True full = True for i in range(len(dims)): if dims[i] is False: full", "dims[i] is False: full = False if full is False: countw += allweight", "class objective = Objective(problem.fx, dim) # form up the objective function budget =", "problem as a class \"\"\" def __init__(self): self.__weight = [0.8356, 0.5495, 0.4444, 0.7269,", "iset = [s0, s1, s2, s3, s4] sracos = SRacos() test_s1 = Solution(value=2.1)", "0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1]) self.__subset.append([0,", "0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1,", "0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1,", "2 # discrete # setcover problem = SetCover() dim_size = 20 one_dim =", "objective function parameter = Parameter(budget=budget, parallel=True, server_num=2, seed=777) sol = ExpOpt.min(objective, parameter, repeat=1)[0]", "0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1]) self.__subset.append([1, 0,", "Dimension2(dim_list) # form up the dimension object objective = Objective(sphere_discrete_order, dim) # form", "1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1,", "solution: a Solution object :return: the value of f(x) \"\"\" x = solution.get_x()", "parameter = Parameter(budget=100 * dim, parallel=True, server_num=2, seed=2) # parameter = Parameter(budget=100 *", "= [(one_dim)] * dim_size dim = Dimension2(dim_list) # form up the dimension object", "dim is prepared by the class objective = Objective(problem.fx, dim) # form up", "1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([1, 0, 1, 0, 0,", "s1, s2, s3, s4] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_wr(iset, test_s1, 'pos')", "Parameter(budget=10000, seed=777) solution = Opt.min(objective, parameter) assert solution.get_value() < 1.5 # discrete #", "0, 1, 1, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1,", "0, 0, 1, 0, 
1, 1, 1, 1, 0, 0, 1, 0, 0,", "1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0,", "assert sracos.binary_search(set, test_s1, 0, 2) == 3 assert sracos.binary_search(set, test_s2, 0, 4) ==", "def test_sracos_replace(self): s0 = Solution(x=[0, 0, 0], value=0.5) s1 = Solution(x=[1, 1, 1],", "2.1], value=2.1) sracos.strategy_lm(iset, s0, test_s1) assert iset[2].get_value() == 2.1 def test_sracos_replace(self): s0 =", "\"\"\" x = solution.get_x() value = sum([(i-2)*(i-2) for i in x]) return value", "parameter = Parameter(budget=budget, parallel=True, server_num=2, seed=777) sol = ExpOpt.min(objective, parameter, repeat=1)[0] assert sol.get_value()", "Solution(x=[1, 1, 1], value=1) s2 = Solution(x=[2, 2, 2], value=2) s3 = Solution(x=[3,", "for solution in solution_list: value = solution.get_value() assert value < 0.2 # discrete", "= [s0, s1, s2] sracos = SRacos() test_s1 = Solution(x=[2.1, 2.1, 2.1], value=2.1)", "assert solution.get_value() < 1.5 # discrete # setcover problem = SetCover() dim_size =", "1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,", "s1, s4, s0] x = Solution(x=[2.1, 2.1, 2.1], value=0.1) sracos = SRacos() sracos.replace(pos_set,", "Dimension, Parameter, Opt, ExpOpt, ValueType, Dimension2 import numpy as np def ackley(solution): \"\"\"", "[True] * dim)) # setup objective parameter = Parameter(budget=100 * dim, parallel=True, server_num=2,", "dim_tys = [False] * dim_size return Dimension(dim_size, dim_regs, dim_tys) class TestRacos(object): def test_racos_common_extend(self):", "2.1, 2.1], value=2.1) sracos.strategy_lm(iset, s0, test_s1) assert iset[2].get_value() == 2.1 def test_sracos_replace(self): s0", "parallel=True, server_num=2, seed=777) sol = ExpOpt.min(objective, parameter, repeat=1)[0] assert sol.get_value() < 2 #", "2.1) def test_sracos_strategy_lm(self): s0 = Solution(x=[1, 1, 1], value=0) s1 = Solution(x=[2.2, 2.2,", "s2 = Solution(x=[3, 3, 3], value=2) iset = [s0, s1, s2] sracos =", "1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1,", "def test_sracos_strategy_lm(self): 
s0 = Solution(x=[1, 1, 1], value=0) s1 = Solution(x=[2.2, 2.2, 2.2],", "parameter = Parameter(budget=100 * dim, sequential=False, seed=1) solution = ExpOpt.min(objective, parameter)[0] assert solution.get_value()", "1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0,", "= [True] * dim_size dim = Dimension(dim_size, dim_regs, dim_tys, order=dim_order) # form up", "problem. :return: Dimension instance \"\"\" dim_size = 20 dim_regs = [[0, 1]] *", "dim_tys, order=dim_order) # form up the dimension object objective = Objective(sphere_discrete_order, dim) #", "setup objective parameter = Parameter(budget=100 * dim, sequential=False, seed=1) solution = ExpOpt.min(objective, parameter)[0]", "self.__subset.append([0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1,", "Parameter(budget=budget, sequential=False, seed=777) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 2 #", "import numpy as np def ackley(solution): \"\"\" Ackley function for continuous optimization \"\"\"", "Objective(sphere_discrete_order, dim) # form up the objective function parameter = Parameter(budget=10000) sol =", "1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0,", "# init with init_samples solution_list = ExpOpt.min(objective, parameter, repeat=1) for solution in solution_list:", "with init_samples solution_list = ExpOpt.min(objective, parameter, repeat=1) for solution in solution_list: value =", "3 and pos_set[0].get_value() == 0.1 sracos.replace(neg_set, x, 'neg', 'LM') assert neg_set[3].get_value() == 0.1", "1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1]) self.__subset.append([1,", "SRacos() assert sracos.binary_search(set, test_s1, 0, 4) == 3 assert sracos.binary_search(set, test_s1, 0, 2)", "1: countw += self.__weight[i] for j in range(len(self.__subset[i])): if self.__subset[i][j] == 1: dims[j]", "dim = Dimension2(dim_list) # form up the dimension object objective = Objective(sphere_discrete_order, dim)", "0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0,", "0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0,", "1, 0, 1, 0, 0, 0, 
0, 0, 1, 1, 0, 1, 1,", "allweight = 0 countw = 0 for i in range(len(self.__weight)): allweight += self.__weight[i]", "0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0,", "sequential=False, seed=1) solution = ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2 dim = 500", "# dimension objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] * dim))", "[-1, 1], 1e-6) dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) parameter", "1.5 # discrete # setcover problem = SetCover() dim_size = 20 one_dim =", "parameter, repeat=1)[0] assert sol.get_value() < 2 # sphere dim_size = 100 # dimensions", "1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1,", "sol.get_value() < 2 # sphere dim_size = 100 # dimensions one_dim = (ValueType.DISCRETE,", "s2] sracos = SRacos() test_s1 = Solution(x=[2.1, 2.1, 2.1], value=2.1) sracos.strategy_lm(iset, s0, test_s1)", "Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, seed=777) solution = Opt.min(objective, parameter) assert", "\"\"\" x = solution.get_x() bias = 0.2 ave_seq = sum([(i - bias) *", "0.5884, 0.6289, 0.3008] self.__subset = [] self.__subset.append([0, 1, 0, 0, 0, 1, 0,", "assert sol.get_value() < 200 def test_sracos_performance(self): # continuous dim = 100 # dimension", "s1, s2] sracos = SRacos() test_s1 = Solution(x=[2.1, 2.1, 2.1], value=2.1) sracos.strategy_lm(iset, s0,", "c) is True def test_sracos_distance(self): a = [2, 4] b = [5, 8]", "== 3 iset2 = [s1, s3, s0, s2, s4] sracos.strategy_wr(iset2, test_s1, 'neg') assert", "1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1,", "for continuous optimization \"\"\" x = solution.get_x() bias = 0.2 ave_seq = sum([(i", "s3, s4] neg_set = [s2, s3, s1, s4, s0] x = Solution(x=[2.1, 2.1,", "and pos_set[0].get_value() == 0.1 sracos.replace(neg_set, x, 'neg', 'LM') assert neg_set[3].get_value() == 0.1 def", "0, 1, 0, 0, 0, 1, 0]) self.__subset.append([1, 1, 1, 0, 1, 1,", "1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1,", "True) dim_list = 
[(one_dim)] * dim_size dim = Dimension2(dim_list) # form up the", "0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0,", "parameter = Parameter(budget=10000, parallel=True, server_num=2, uncertain_bits=1, seed=1) sol = ExpOpt.min(objective, parameter)[0] assert sol.get_value()", "optimization \"\"\" x = solution.get_x() bias = 0.2 ave_seq = sum([(i - bias)", "(ValueType.DISCRETE, [-10, 10], True) dim_list = [(one_dim)] * dim_size dim = Dimension2(dim_list) #", "def __init__(self): self.__weight = [0.8356, 0.5495, 0.4444, 0.7269, 0.9960, 0.6633, 0.5062, 0.8429, 0.1293,", "= 100 # dimensions one_dim = (ValueType.DISCRETE, [-10, 10], True) dim_list = [(one_dim)]", "in x]) return value class SetCover: \"\"\" set cover problem for discrete optimization", "s1, s2, s3, s4] neg_set = [s2, s3, s1, s4, s0] x =", "dim) # form up the objective function budget = 100 * dim.get_size() #", "dim_size = 100 # dimensions one_dim = (ValueType.DISCRETE, [-10, 10], True) dim_list =", "sracos.binary_search(set, test_s4, 0, 4) == 3 def test_sracos_strategy_wr(self): s0 = Solution(value=0) s1 =", "== 2.1 def test_sracos_replace(self): s0 = Solution(x=[0, 0, 0], value=0.5) s1 = Solution(x=[1,", "0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1,", ":return: Dimension instance \"\"\" dim_size = 20 dim_regs = [[0, 1]] * dim_size", "(ValueType.DISCRETE, [0, 1], False) dim_list = [(one_dim)] * dim_size dim = Dimension2(dim_list) #", "len(iset2) == 5 and iset2[4].get_value() == 2.1 def test_sracos_strategy_rr(self): s0 = Solution(value=0) s1", "test_s2 = Solution(value=4.5) test_s3 = Solution(value=-1) test_s4 = Solution(value=2) set = [s0, s1,", "1, 0, 1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([1, 0, 1,", "RacosCommon.is_distinct(seti, a) is False and RacosCommon.is_distinct(seti, c) is True def test_sracos_distance(self): a =", "one_dim = (ValueType.DISCRETE, [-10, 10], True) dim_list = [(one_dim)] * dim_size dim =", "objective function parameter = Parameter(budget=budget, sequential=False, seed=777) sol = 
Opt.min(objective, parameter) sol.print_solution() assert", "0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0,", "parameter) sol.print_solution() assert sol.get_value() < 200 def test_sracos_performance(self): # continuous dim = 100", "dim_size # dimension type : integer dim_order = [True] * dim_size dim =", "[(one_dim)] * dim_size dim = Dimension2(dim_list) # form up the dimension object objective", "cover problem. :return: Dimension instance \"\"\" dim_size = 20 dim_regs = [[0, 1]]", "\"\"\" Dimension of set cover problem. :return: Dimension instance \"\"\" dim_size = 20", "= Parameter(budget=10000, sequential=False, seed=77) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200", "1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 1,", "0, 1, 0, 0, 0, 0, 0, 0]) self.__subset.append([0, 1, 0, 0, 1,", "c = Solution(x=[3, 4, 5]) seti = [a, b] assert RacosCommon.is_distinct(seti, a) is", "1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0,", "dim)) # setup objective parameter = Parameter(budget=100 * dim, sequential=False, seed=1) solution =", "* dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=100 *", "to the objective function parameter = Parameter(budget=budget, seed=777) sol = Opt.min(objective, parameter) assert", "0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1,", "* dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, seed=777)", "Solution(x=[2.2, 2.2, 2.2], value=1) s2 = Solution(x=[3, 3, 3], value=2) iset = [s0,", "0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1,", "1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0,", "== 2.1 and iset[4].get_value() == 3 iset2 = [s1, s3, s0, s2, s4]", "0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0])", "1, 0, 0, 0, 1, 1]) self.__subset.append([1, 0, 0, 0, 1, 1, 0,", "0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1,", "0, 1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([1, 0, 1, 0,", "1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1,", "neg_set[3].get_value() == 0.1 def 
test_racos_performance(self): # continuous dim = 100 # dimension objective", "3, 3], value=3) s4 = Solution(x=[4, 4, 4], value=4) pos_set = [s0, s1,", "* dim)) # setup objective parameter = Parameter(budget=100 * dim, sequential=False, seed=1) solution", "seed=77) solution = Opt.min(objective, parameter) assert solution.get_value() < 0.2 dim = 500 one_dim", "500 one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list = [(one_dim)] * dim objective", "1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0]) self.__subset.append([1,", "sracos.binary_search(set, test_s1, 0, 4) == 3 assert sracos.binary_search(set, test_s1, 0, 2) == 3", "0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1,", "0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0,", "sol.print_solution() assert sol.get_value() < 2 # sphere dim_size = 100 # dimensions dim_regs", "3], value=2) iset = [s0, s1, s2] sracos = SRacos() test_s1 = Solution(x=[2.1,", "1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0,", "function parameter = Parameter(budget=10000) sol = Opt.min(objective, parameter) assert sol.get_value() < 200 def", "def ackley(solution): \"\"\" Ackley function for continuous optimization \"\"\" x = solution.get_x() bias", "* dim_size # dimension type : integer dim_order = [True] * dim_size dim", "= Opt.min(objective, parameter) assert sol.get_value() < 200 def test_sracos_performance2(self): # continuous dim =", "[True] * dim)) # setup objective parameter = Parameter(budget=10000, seed=777) solution = Opt.min(objective,", "200 def test_asracos_performance(self): # continuous dim = 100 # dimension objective = Objective(ackley,", "solution.get_x() bias = 0.2 ave_seq = sum([(i - bias) * (i - bias)", "= [s0, s1, s2, s3, s4] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_wr(iset,", "2.2], value=1) s2 = Solution(x=[3, 3, 3], value=2) iset = [s0, s1, s2]", "= solution.get_x() allweight = 0 countw = 0 for i in range(len(self.__weight)): allweight", "== 3 and (iset[0].get_value() == 2.1 or iset[1].get_value() == 2.1 or iset[2].get_value() ==", 
"Solution(value=4.5) test_s3 = Solution(value=-1) test_s4 = Solution(value=2) set = [s0, s1, s2, s3,", "the dim is prepared by the class objective = Objective(problem.fx, dim) # form", "parameter = Parameter(budget=100 * dim, init_samples=[Solution([0] * 100)]) # init with init_samples solution_list", "0.2 dim = 500 one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list = [(one_dim)]", "Solution(value=2.1) test_s2 = Solution(value=4.5) test_s3 = Solution(value=-1) test_s4 = Solution(value=2) set = [s0,", "function parameter = Parameter(budget=10000, parallel=True, server_num=2, uncertain_bits=1, seed=1) sol = ExpOpt.min(objective, parameter)[0] assert", "in solution_list: value = solution.get_value() assert value < 0.2 # discrete # setcover", "Solution(x=[2.1, 2.1, 2.1], value=2.1) sracos.strategy_lm(iset, s0, test_s1) assert iset[2].get_value() == 2.1 def test_sracos_replace(self):", "and iset[1].get_value() == 1 and iset[2].get_value() == 2 \\ and iset[3].get_value() == 2.1", "0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0,", "1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1,", "Objective(problem.fx, dim) # form up the objective function budget = 100 * dim.get_size()", "form up the objective function budget = 100 * dim.get_size() # number of", "2) == 3 assert sracos.binary_search(set, test_s2, 0, 4) == 5 assert sracos.binary_search(set, test_s3,", "5 and iset2[4].get_value() == 2.1 def test_sracos_strategy_rr(self): s0 = Solution(value=0) s1 = Solution(value=1)", "1]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1,", "is False and RacosCommon.is_distinct(seti, c) is True def test_sracos_distance(self): a = [2, 4]", "function parameter = Parameter(budget=budget, sequential=False, seed=777) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value()", "dim_regs, dim_tys, order=dim_order) # form up the dimension object objective = Objective(sphere_discrete_order, dim)", "ackley(solution): \"\"\" Ackley function for continuous optimization \"\"\" x = 
solution.get_x() bias =", "1, 0]) self.__subset.append([1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1,", "dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=100 * dim,", "1, 1, 1, 1]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0,", "dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, seed=777) solution", "sequential=False, seed=777) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 2 # sphere", "0, 1, 0, 0]) self.__subset.append([0, 0, 0, 1, 0, 0, 1, 1, 0,", "function for integer continuous optimization \"\"\" x = solution.get_x() value = sum([(i-2)*(i-2) for", "len(x) ave_cos = sum([np.cos(2.0 * np.pi * (i - bias)) for i in", "= Solution(x=[1, 2, 3]) b = Solution(x=[2, 3, 4]) c = Solution(x=[3, 4,", "s4] neg_set = [s2, s3, s1, s4, s0] x = Solution(x=[2.1, 2.1, 2.1],", "- bias)) for i in x]) / len(x) value = -20 * np.exp(-0.2", "one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list = [(one_dim)] * dim objective =", "dim_size return Dimension(dim_size, dim_regs, dim_tys) class TestRacos(object): def test_racos_common_extend(self): a = [1, 2,", "0, 0, 1, 0, 0, 0, 0, 0, 0]) self.__subset.append([0, 1, 0, 0,", "and iset[3].get_value() == 2.1 and iset[4].get_value() == 3 iset2 = [s1, s3, s0,", "= 20 dim_regs = [[0, 1]] * dim_size dim_tys = [False] * dim_size", "setup objective parameter = Parameter(budget=10000, sequential=False, seed=1) sol = Opt.min(objective, parameter) sol.print_solution() assert", "as np def ackley(solution): \"\"\" Ackley function for continuous optimization \"\"\" x =", "or iset[2].get_value() == 2.1) def test_sracos_strategy_lm(self): s0 = Solution(x=[1, 1, 1], value=0) s1", "4]) c = Solution(x=[3, 4, 5]) seti = [a, b] assert RacosCommon.is_distinct(seti, a)", "0, 1, 0, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 1, 0,", "- bias) for i in x]) / len(x) ave_cos = sum([np.cos(2.0 * np.pi", 
"self.__subset.append([0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0,", "1, 1, 1, 0, 0, 0, 0, 1]) self.__subset.append([0, 1, 1, 0, 1,", "1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0]) self.__subset.append([1,", "dim = 100 # dimension objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim,", "100 * dim.get_size() # number of calls to the objective function parameter =", "0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1,", "# form up the objective function parameter = Parameter(budget=10000) sol = Opt.min(objective, parameter)", "1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0,", "Solution(value=2) iset = [s0, s1, s2] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_rr(iset,", "some extra initialization tasks, thus we define this problem as a class \"\"\"", "1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1,", "[2, 4] b = [5, 8] assert SRacos.distance(a, b) == 5 def test_sracos_binary_search(self):", "seed=777) solution = Opt.min(objective, parameter) assert solution.get_value() < 1.5 # discrete # setcover", "0, 1, 1, 0, 0, 0, 1, 1, 1]) self.__subset.append([1, 0, 0, 1,", "of calls to the objective function parameter = Parameter(budget=budget, seed=777) sol = Opt.min(objective,", "1, 0, 0, 0, 0, 0, 0]) self.__subset.append([0, 1, 0, 0, 1, 0,", "== 0.1 sracos.replace(neg_set, x, 'neg', 'LM') assert neg_set[3].get_value() == 0.1 def test_racos_performance(self): #", "0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0,", "1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1,", "dim) # form up the objective function parameter = Parameter(budget=10000, sequential=False, seed=77) sol", "s4] sracos = SRacos() assert sracos.binary_search(set, test_s1, 0, 4) == 3 assert sracos.binary_search(set,", "0, 4) == 3 def test_sracos_strategy_wr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2", "1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0]) self.__subset.append([0,", "assert sracos.binary_search(set, test_s4, 0, 4) == 3 def test_sracos_strategy_wr(self): s0 = Solution(value=0) s1", "0, 1, 0, 0, 0, 1, 0, 1, 1, 0]) self.__subset.append([1, 0, 
0,", "= solution.get_value() assert value < 0.2 # discrete # setcover problem = SetCover()", "ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2 dim = 500 dim_list = [(one_dim)] *", "0, 1, 0, 0, 0, 1, 0, 1]) self.__subset.append([0, 1, 1, 0, 1,", "# setup objective parameter = Parameter(budget=100 * dim, parallel=True, server_num=2, seed=2) # parameter", "self.__subset.append([0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0,", "1, 0]) self.__subset.append([0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1,", "True for i in range(len(dims)): if dims[i] is False: full = False if", "[0.8356, 0.5495, 0.4444, 0.7269, 0.9960, 0.6633, 0.5062, 0.8429, 0.1293, 0.7355, 0.7979, 0.2814, 0.7962,", "continuous dim = 100 # dimension objective = Objective(ackley, Dimension(dim, [[-1, 1]] *", "parameter) assert sol.get_value() < 200 def test_sracos_performance2(self): # continuous dim = 100 #", "2, 3, 4] def test_racos_common_is_distinct(self): a = Solution(x=[1, 2, 3]) b = Solution(x=[2,", "dim_size = 20 one_dim = (ValueType.DISCRETE, [0, 1], False) dim_list = [(one_dim)] *", "object :return: the value of f(x) \"\"\" x = solution.get_x() allweight = 0", "= Solution(x=[3, 3, 3], value=2) iset = [s0, s1, s2] sracos = SRacos()", "assert iset[2].get_value() == 2.1 def test_sracos_replace(self): s0 = Solution(x=[0, 0, 0], value=0.5) s1", "1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1,", "0, 0, 1, 0]) self.__subset.append([0, 1, 1, 1, 0, 0, 1, 0, 1,", "= 100 * dim.get_size() # number of calls to the objective function parameter", "0, 1, 0, 0, 1, 1, 1, 1]) self.__subset.append([0, 0, 1, 1, 0,", "x, 'pos', 'WR') assert pos_set[4].get_value() == 3 and pos_set[0].get_value() == 0.1 sracos.replace(neg_set, x,", "0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0,", "* dim_size dim = Dimension2(dim_list) # form up the dimension object objective =", "0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1,", "for i in range(len(dims)): if dims[i] is False: full = False if full", "dim, [True] * dim)) # setup objective parameter = 
Parameter(budget=100 * dim, seed=77)", "function. :param solution: a Solution object :return: the value of f(x) \"\"\" x", "up the objective function parameter = Parameter(budget=10000) sol = Opt.min(objective, parameter) assert sol.get_value()", "extra initialization tasks, thus we define this problem as a class \"\"\" def", "tasks, thus we define this problem as a class \"\"\" def __init__(self): self.__weight", "1, 1, 0, 1, 0, 0, 0, 0, 1, 0]) self.__subset.append([0, 1, 1,", "(i - bias) for i in x]) / len(x) ave_cos = sum([np.cos(2.0 *", "1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1,", "server_num=2, seed=777) sol = ExpOpt.min(objective, parameter, repeat=1)[0] assert sol.get_value() < 2 # sphere", "1, 0, 0, 1, 0, 1]) self.__subset.append([1, 0, 0, 0, 1, 0, 0,", "objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=100 * dim, sequential=False,", "= (ValueType.DISCRETE, [-10, 10], True) dim_list = [(one_dim)] * dim_size dim = Dimension2(dim_list)", "[True] * dim)) # setup objective parameter = Parameter(budget=100 * dim, seed=77) solution", "SRacos from zoopt import Solution, Objective, Dimension, Parameter, Opt, ExpOpt, ValueType, Dimension2 import", "Solution(value=4) # 1 3 0 2 4 test_s1 = Solution(value=2.1) test_s2 = Solution(value=4.5)", "3 0 2 4 test_s1 = Solution(value=2.1) test_s2 = Solution(value=4.5) test_s3 = Solution(value=-1)", "s3, s4] sracos = SRacos() assert sracos.binary_search(set, test_s1, 0, 4) == 3 assert", "Parameter(budget=100 * dim, sequential=False, seed=1) solution = ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2", "* dim.get_size() # number of calls to the objective function parameter = Parameter(budget=budget,", "1, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1,", "1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0,", "20.0 + np.e return value def sphere_discrete_order(solution): \"\"\" Sphere function for integer continuous", "0]) self.__subset.append([1, 1, 1, 0, 1, 1, 0, 
0, 0, 0, 1, 0,", "# form up the objective function parameter = Parameter(budget=10000, sequential=False, seed=77) sol =", "1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 0,", "= Dimension2(dim_list) # the dim is prepared by the class objective = Objective(problem.fx,", "and RacosCommon.is_distinct(seti, c) is True def test_sracos_distance(self): a = [2, 4] b =", "100 # dimensions one_dim = (ValueType.DISCRETE, [-10, 10], True) dim_list = [(one_dim)] *", "sequential=False, seed=77) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200 def test_sracos_performance(self):", "Dimension2(dim_list)) parameter = Parameter(budget=100 * dim, seed=77) solution = Opt.min(objective, parameter) assert solution.get_value()", "/ len(x) ave_cos = sum([np.cos(2.0 * np.pi * (i - bias)) for i", "x[i] == 1: countw += self.__weight[i] for j in range(len(self.__subset[i])): if self.__subset[i][j] ==", "0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0,", "4] def test_racos_common_is_distinct(self): a = Solution(x=[1, 2, 3]) b = Solution(x=[2, 3, 4])", "countw += self.__weight[i] for j in range(len(self.__subset[i])): if self.__subset[i][j] == 1: dims[j] =", "5]) seti = [a, b] assert RacosCommon.is_distinct(seti, a) is False and RacosCommon.is_distinct(seti, c)", "1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0,", "the objective function budget = 100 * dim.get_size() # number of calls to", "s3, s4] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_wr(iset, test_s1, 'pos') assert len(iset)", "0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0,", "bias)) for i in x]) / len(x) value = -20 * np.exp(-0.2 *", "0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1,", "TestRacos(object): def test_racos_common_extend(self): a = [1, 2, 3] b = [2, 3, 4]", "1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1,", "sol = ExpOpt.min(objective, parameter, repeat=1)[0] assert sol.get_value() < 2 # sphere dim_size =", "Opt.min(objective, parameter) assert solution.get_value() < 1.5 # 
discrete # setcover problem = SetCover()", "0, 1]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0,", "0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1,", "0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0,", "0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0,", "[(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=100", "= [] self.__subset.append([0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1,", "sol.get_value() < 2 # sphere dim_size = 100 # dimensions dim_regs = [[-10,", "0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1,", "2.1], value=0.1) sracos = SRacos() sracos.replace(pos_set, x, 'pos', 'WR') assert pos_set[4].get_value() == 3", "0.2 # discrete # setcover problem = SetCover() dim = problem.dim # the", "solution = ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2 dim = 500 dim_list =", "s2, s4] sracos.strategy_wr(iset2, test_s1, 'neg') assert len(iset2) == 5 and iset2[4].get_value() == 2.1", "'LM') assert neg_set[3].get_value() == 0.1 def test_racos_performance(self): # continuous dim = 100 #", "0.1786, 0.5884, 0.6289, 0.3008] self.__subset = [] self.__subset.append([0, 1, 0, 0, 0, 1,", "0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0,", "0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1,", "1]] * dim, [True] * dim)) # setup objective parameter = Parameter(budget=10000, seed=777)", "bias) * (i - bias) for i in x]) / len(x) ave_cos =", "3, 4] assert RacosCommon.extend(a, b) == [1, 2, 3, 2, 3, 4] def", "1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0,", "0.9862, 0.1786, 0.5884, 0.6289, 0.3008] self.__subset = [] self.__subset.append([0, 1, 0, 0, 0,", "assert RacosCommon.is_distinct(seti, a) is False and RacosCommon.is_distinct(seti, c) is True def test_sracos_distance(self): a", "1]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1,", "1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0,", "repeat=1)[0] assert sol.get_value() < 2 # sphere dim_size = 100 # dimensions dim_regs", "1]] * dim_size dim_tys = [False] * dim_size return Dimension(dim_size, dim_regs, dim_tys) 
class", "the objective function parameter = Parameter(budget=10000) sol = Opt.min(objective, parameter) assert sol.get_value() <", "1, 0, 0, 0, 1, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1,", "0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1,", "= [2, 3, 4] assert RacosCommon.extend(a, b) == [1, 2, 3, 2, 3,", "solution.get_value() < 0.2 dim = 500 objective = Objective(ackley, Dimension(dim, [[-1, 1]] *", "0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1,", "return countw @property def dim(self): \"\"\" Dimension of set cover problem. :return: Dimension", "init_samples solution_list = ExpOpt.min(objective, parameter, repeat=1) for solution in solution_list: value = solution.get_value()", "1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0]) self.__subset.append([1, 0,", "assert sol.get_value() < 200 def test_racos_performance2(self): # continuous dim = 100 # dimension", "assert sol.get_value() < 200 def test_asracos_performance(self): # continuous dim = 100 # dimension", "4, 5]) seti = [a, b] assert RacosCommon.is_distinct(seti, a) is False and RacosCommon.is_distinct(seti,", "[True] * dim)) # setup objective parameter = Parameter(budget=10000, sequential=False, seed=1) sol =", "'neg', 'LM') assert neg_set[3].get_value() == 0.1 def test_racos_performance(self): # continuous dim = 100", "def test_racos_common_extend(self): a = [1, 2, 3] b = [2, 3, 4] assert", "dim = 500 dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) #", "sracos.binary_search(set, test_s1, 0, 2) == 3 assert sracos.binary_search(set, test_s2, 0, 4) == 5", "value=2) iset = [s0, s1, s2] sracos = SRacos() test_s1 = Solution(x=[2.1, 2.1,", "0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0,", "dim, [True] * dim)) # setup objective parameter = Parameter(budget=100 * dim, sequential=False,", "discrete optimization this problem has some extra initialization tasks, thus we define this", "= SRacos() test_s1 = Solution(value=2.1) sracos.strategy_rr(iset, test_s1) assert len(iset) == 3 and 
(iset[0].get_value()", "0.5495, 0.4444, 0.7269, 0.9960, 0.6633, 0.5062, 0.8429, 0.1293, 0.7355, 0.7979, 0.2814, 0.7962, 0.1754,", "set cover problem. :return: Dimension instance \"\"\" dim_size = 20 dim_regs = [[0,", "solution.get_value() < 1.5 # discrete # setcover problem = SetCover() dim_size = 20", "0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0,", "value=2.1) sracos.strategy_lm(iset, s0, test_s1) assert iset[2].get_value() == 2.1 def test_sracos_replace(self): s0 = Solution(x=[0,", "0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1,", "0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1,", "= Parameter(budget=10000, sequential=False, seed=1) sol = Opt.min(objective, parameter) sol.print_solution() assert solution.get_value() < 2", "0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1,", "s3, s0, s2, s4] sracos.strategy_wr(iset2, test_s1, 'neg') assert len(iset2) == 5 and iset2[4].get_value()", "zoopt import Solution, Objective, Dimension, Parameter, Opt, ExpOpt, ValueType, Dimension2 import numpy as", "3, 2, 3, 4] def test_racos_common_is_distinct(self): a = Solution(x=[1, 2, 3]) b =", "== 2.1 or iset[2].get_value() == 2.1) def test_sracos_strategy_lm(self): s0 = Solution(x=[1, 1, 1],", "* dim_size # dimension range dim_tys = [False] * dim_size # dimension type", "Opt.min(objective, parameter) assert solution.get_value() < 0.2 dim = 500 one_dim = (ValueType.CONTINUOUS, [-1,", "# dimension range dim_tys = [False] * dim_size # dimension type : integer", "a = [1, 2, 3] b = [2, 3, 4] assert RacosCommon.extend(a, b)", "0 assert sracos.binary_search(set, test_s4, 0, 4) == 3 def test_sracos_strategy_wr(self): s0 = Solution(value=0)", "1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0,", "0]) self.__subset.append([0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1,", "= (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list = [(one_dim)] * dim objective = Objective(ackley,", "1, 1, 1, 1, 0, 0]) self.__subset.append([1, 0, 1, 0, 0, 0, 1,", "a) is False and RacosCommon.is_distinct(seti, c) is True def test_sracos_distance(self): a 
= [2,", "[s0, s1, s2, s3, s4] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_wr(iset, test_s1,", "1e-6) dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective", "dim, seed=77) solution = Opt.min(objective, parameter) assert solution.get_value() < 0.2 dim = 500", "dim) # form up the objective function parameter = Parameter(budget=10000) sol = Opt.min(objective,", "0, 1, 0, 0, 0, 1, 0, 0, 1, 1]) def fx(self, solution):", "1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0,", "Parameter(budget=10000) sol = Opt.min(objective, parameter) assert sol.get_value() < 200 def test_asracos_performance(self): # continuous", "1, 1, 1]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0, 0,", "100)]) # init with init_samples solution_list = ExpOpt.min(objective, parameter, repeat=1) for solution in", "test_racos_common_extend(self): a = [1, 2, 3] b = [2, 3, 4] assert RacosCommon.extend(a,", "0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1,", "1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1,", "objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] * dim)) # setup", "np.sqrt(ave_seq)) - np.exp(ave_cos) + 20.0 + np.e return value def sphere_discrete_order(solution): \"\"\" Sphere", "# form up the objective function parameter = Parameter(budget=10000, parallel=True, server_num=2, uncertain_bits=1, seed=1)", "1]] * dim, [True] * dim)) # setup objective parameter = Parameter(budget=10000, sequential=False,", "0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1,", "parameter) assert solution.get_value() < 0.2 dim = 500 one_dim = (ValueType.CONTINUOUS, [-1, 1],", "iset[0].get_value() == 0 and iset[1].get_value() == 1 and iset[2].get_value() == 2 \\ and", "1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0,", "1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0,", "1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0])", "[] self.__subset.append([0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1,", "0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0,", "3, 3], value=2) iset = [s0, s1, 
s2] sracos = SRacos() test_s1 =", "test_s1, 'neg') assert len(iset2) == 5 and iset2[4].get_value() == 2.1 def test_sracos_strategy_rr(self): s0", "# sphere dim_size = 100 # dimensions one_dim = (ValueType.DISCRETE, [-10, 10], True)", "dim)) # setup objective parameter = Parameter(budget=100 * dim, seed=77) solution = Opt.min(objective,", "1, 1, 0]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0, 1,", "1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0,", "test_s4 = Solution(value=2) set = [s0, s1, s2, s3, s4] sracos = SRacos()", "1, 1, 0, 0, 0, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1,", "test_sracos_strategy_wr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) s3 = Solution(value=3)", "problem.dim # the dim is prepared by the class objective = Objective(problem.fx, dim)", "0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0,", "[1, 2, 3, 2, 3, 4] def test_racos_common_is_distinct(self): a = Solution(x=[1, 2, 3])", "0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1,", "value=0) s1 = Solution(x=[2.2, 2.2, 2.2], value=1) s2 = Solution(x=[3, 3, 3], value=2)", "1, 0, 0, 0, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 0,", "0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1,", "dim, [True] * dim)) # setup objective parameter = Parameter(budget=100 * dim, parallel=True,", "0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0,", "0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0,", "1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1]) self.__subset.append([0, 0,", "0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0,", "0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1,", "0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0,", "= Solution(x=[2.2, 2.2, 2.2], value=1) s2 = Solution(x=[3, 3, 3], value=2) iset =", "[s0, s1, s2, s3, s4] neg_set = [s2, s3, s1, s4, s0] x", "0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1,", "0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0,", "= Objective(sphere_discrete_order, dim) # form up the objective function parameter = Parameter(budget=10000, sequential=False,", "object objective = Objective(sphere_discrete_order, dim) # form up the objective function 
parameter =", "0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1,", "solution in solution_list: value = solution.get_value() assert value < 0.2 # discrete #", "seed=77) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200 def test_racos_performance2(self): #", "1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0,", "= Solution(value=1) s2 = Solution(value=2) iset = [s0, s1, s2] sracos = SRacos()", "= Opt.min(objective, parameter) assert solution.get_value() < 0.2 dim = 500 objective = Objective(ackley,", "countw = 0 for i in range(len(self.__weight)): allweight += self.__weight[i] dims = []", "0, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1,", "0, 1, 0, 1, 0, 0, 1]) self.__subset.append([0, 0, 0, 0, 0, 0,", "dim_list = [(one_dim)] * dim_size dim = Dimension2(dim_list) # the dim is prepared", "0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0,", "= ExpOpt.min(objective, parameter, repeat=1)[0] assert sol.get_value() < 2 # sphere dim_size = 100", "value=2) s3 = Solution(x=[3, 3, 3], value=3) s4 = Solution(x=[4, 4, 4], value=4)", "< 0.2 # discrete # setcover problem = SetCover() dim = problem.dim #", "1.5 # discrete # setcover problem = SetCover() dim = problem.dim # the", "form up the objective function parameter = Parameter(budget=10000, sequential=False, seed=77) sol = Opt.min(objective,", "1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1,", "s3 = Solution(value=3) s4 = Solution(value=4) iset = [s0, s1, s2, s3, s4]", "1, 1]) self.__subset.append([1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1,", "form up the dimension object objective = Objective(sphere_discrete_order, dim) # form up the", "dim.get_size() # number of calls to the objective function parameter = Parameter(budget=budget, parallel=True,", "is False: full = False if full is False: countw += allweight return", "0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1]) self.__subset.append([0, 1,", "fx(self, solution): \"\"\" Objective function. 
:param solution: a Solution object :return: the value", "solution_list: value = solution.get_value() assert value < 0.2 # discrete # setcover problem", "s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) iset = [s0, s1,", "1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1])", "for integer continuous optimization \"\"\" x = solution.get_x() value = sum([(i-2)*(i-2) for i", "0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0])", "objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, seed=777) solution =", "0, 1, 1, 0, 0, 1]) self.__subset.append([0, 0, 1, 1, 1, 0, 1,", "numpy as np def ackley(solution): \"\"\" Ackley function for continuous optimization \"\"\" x", "Opt.min(objective, parameter) assert sol.get_value() < 200 def test_sracos_performance2(self): # continuous dim = 100", "0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0, 0,", "parallel=True, server_num=2, seed=2) # parameter = Parameter(budget=100 * dim, init_samples=[Solution([0] * 100)]) #", "0, 0, 0, 1, 0, 0, 1, 1]) def fx(self, solution): \"\"\" Objective", "is False: countw += allweight return countw @property def dim(self): \"\"\" Dimension of", "number of calls to the objective function parameter = Parameter(budget=budget, seed=777) sol =", "0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0]) self.__subset.append([1, 1,", "dim_tys) class TestRacos(object): def test_racos_common_extend(self): a = [1, 2, 3] b = [2,", "2, 3] b = [2, 3, 4] assert RacosCommon.extend(a, b) == [1, 2,", "a = Solution(x=[1, 2, 3]) b = Solution(x=[2, 3, 4]) c = Solution(x=[3,", "= Solution(x=[3, 4, 5]) seti = [a, b] assert RacosCommon.is_distinct(seti, a) is False", "= 20 one_dim = (ValueType.DISCRETE, [0, 1], False) dim_list = [(one_dim)] * dim_size", "Dimension(dim, [[-1, 1]] * dim, [True] * dim)) # setup objective parameter =", "* dim)) # setup objective parameter = Parameter(budget=100 * dim, parallel=True, server_num=2, seed=2)", "0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1,", 
"sol.get_value() < 200 def test_asracos_performance(self): # continuous dim = 100 # dimension objective", "1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0,", "4], value=4) pos_set = [s0, s1, s2, s3, s4] neg_set = [s2, s3,", "* dim, sequential=False, seed=1) solution = ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2 dim", "0.2 ave_seq = sum([(i - bias) * (i - bias) for i in", "1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1,", "0]) self.__subset.append([0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0,", "Dimension(dim_size, dim_regs, dim_tys) class TestRacos(object): def test_racos_common_extend(self): a = [1, 2, 3] b", "0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1,", "Dimension(dim_size, dim_regs, dim_tys, order=dim_order) # form up the dimension object objective = Objective(sphere_discrete_order,", "0, 0, 1, 0, 0, 0, 0, 1, 1, 0]) self.__subset.append([0, 0, 1,", "test_s1 = Solution(value=2.1) test_s2 = Solution(value=4.5) test_s3 = Solution(value=-1) test_s4 = Solution(value=2) set", "2.1 def test_sracos_replace(self): s0 = Solution(x=[0, 0, 0], value=0.5) s1 = Solution(x=[1, 1,", "= SetCover() dim = problem.dim # the dim is prepared by the class", "# dimensions one_dim = (ValueType.DISCRETE, [-10, 10], True) dim_list = [(one_dim)] * dim_size", "dim, parallel=True, server_num=2, seed=2) # parameter = Parameter(budget=100 * dim, init_samples=[Solution([0] * 100)])", "s3 = Solution(x=[3, 3, 3], value=3) s4 = Solution(x=[4, 4, 4], value=4) pos_set", "0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0,", "parameter = Parameter(budget=budget, sequential=False, seed=777) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() <", "0.2814, 0.7962, 0.1754, 0.0267, 0.9862, 0.1786, 0.5884, 0.6289, 0.3008] self.__subset = [] self.__subset.append([0,", "s2 = Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4) iset = [s0, s1,", "= 0 for i in range(len(self.__weight)): allweight += self.__weight[i] dims = [] for", "1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1,", "0, 1, 1, 1, 0, 
1, 1, 0, 0, 1, 1, 1, 1,", "= Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 2 # sphere dim_size = 100", "False) dim_list = [(one_dim)] * dim_size dim = Dimension2(dim_list) # the dim is", "1, 0, 0]) self.__subset.append([1, 0, 1, 0, 0, 0, 1, 0, 1, 1,", "1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1,", "Solution(x=[4, 4, 4], value=4) pos_set = [s0, s1, s2, s3, s4] neg_set =", "0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1])", "0, 4) == 0 assert sracos.binary_search(set, test_s4, 0, 4) == 3 def test_sracos_strategy_wr(self):", "by the class objective = Objective(problem.fx, dim) # form up the objective function", "up the dimension object objective = Objective(sphere_discrete_order, dim) # form up the objective", "s4 = Solution(x=[4, 4, 4], value=4) pos_set = [s0, s1, s2, s3, s4]", "0, 0, 1, 0, 0, 1, 1, 1, 1]) self.__subset.append([0, 0, 1, 1,", "assert solution.get_value() < 2 # discrete # setcover problem = SetCover() dim_size =", "SetCover: \"\"\" set cover problem for discrete optimization this problem has some extra", "x, 'neg', 'LM') assert neg_set[3].get_value() == 0.1 def test_racos_performance(self): # continuous dim =", "setup objective parameter = Parameter(budget=100 * dim, seed=77) solution = Opt.min(objective, parameter) assert", "2.2, 2.2], value=1) s2 = Solution(x=[3, 3, 3], value=2) iset = [s0, s1,", "parameter) assert sol.get_value() < 200 def test_asracos_performance(self): # continuous dim = 100 #", "as a class \"\"\" def __init__(self): self.__weight = [0.8356, 0.5495, 0.4444, 0.7269, 0.9960,", "1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1,", "parameter, repeat=1) for solution in solution_list: value = solution.get_value() assert value < 0.2", "* dim objective = Objective(ackley, Dimension2(dim_list)) parameter = Parameter(budget=100 * dim, seed=77) solution", "Parameter, Opt, ExpOpt, ValueType, Dimension2 import numpy as np def ackley(solution): \"\"\" Ackley", "self.__subset[i][j] == 1: dims[j] = True full = True for i in 
range(len(dims)):", "* dim_size return Dimension(dim_size, dim_regs, dim_tys) class TestRacos(object): def test_racos_common_extend(self): a = [1,", "Opt.min(objective, parameter) assert solution.get_value() < 0.2 dim = 500 objective = Objective(ackley, Dimension(dim,", "0, 1, 0]) self.__subset.append([0, 1, 1, 1, 0, 0, 1, 0, 1, 0,", "s1, s2, s3, s4] sracos = SRacos() assert sracos.binary_search(set, test_s1, 0, 4) ==", "Solution(value=2.1) sracos.strategy_wr(iset, test_s1, 'pos') assert len(iset) == 5 and iset[0].get_value() == 0 and", "0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0,", "1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1,", "1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1,", "0, 0, 0, 0, 1, 0]) self.__subset.append([0, 1, 1, 1, 0, 0, 1,", "0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0,", "1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1,", "< 1.5 # discrete # setcover problem = SetCover() dim = problem.dim #", "Solution(value=1) s2 = Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4) iset = [s0,", "1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1,", "Solution(value=1) s2 = Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4) # 1 3", "* dim, parallel=True, server_num=2, seed=2) # parameter = Parameter(budget=100 * dim, init_samples=[Solution([0] *", "0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1,", "0, 1]) self.__subset.append([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0,", "parameter) assert solution.get_value() < 1.5 # discrete # setcover problem = SetCover() dim_size", "0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1]) self.__subset.append([1,", "0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0,", "0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0,", "for j in range(len(self.__subset[i])): if self.__subset[i][j] == 1: dims[j] = True full =", "1, 1]) self.__subset.append([1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1,", "continuous dim = 100 # dimension one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list", "solution.get_value() assert value < 0.2 # discrete # setcover problem = SetCover() dim", "x = solution.get_x() allweight = 0 countw = 0 for i 
in range(len(self.__weight)):", "1, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0]) self.__subset.append([0,", "0, 0]) self.__subset.append([0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1,", "[[-10, 10]] * dim_size # dimension range dim_tys = [False] * dim_size #", "parameter) assert sol.get_value() < 2 # sphere dim_size = 100 # dimensions dim_regs", "1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([0, 0,", "s4, s0] x = Solution(x=[2.1, 2.1, 2.1], value=0.1) sracos = SRacos() sracos.replace(pos_set, x,", "= Parameter(budget=100 * dim, sequential=False, seed=1) solution = ExpOpt.min(objective, parameter)[0] assert solution.get_value() <", "\"\"\" Ackley function for continuous optimization \"\"\" x = solution.get_x() bias = 0.2", "iset = [s0, s1, s2] sracos = SRacos() test_s1 = Solution(x=[2.1, 2.1, 2.1],", "= 500 dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) # setup", "1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1,", "0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1,", "dims.append(False) for i in range(len(self.__subset)): if x[i] == 1: countw += self.__weight[i] for", "3 assert sracos.binary_search(set, test_s1, 0, 2) == 3 assert sracos.binary_search(set, test_s2, 0, 4)", "sol.print_solution() assert solution.get_value() < 2 # discrete # setcover problem = SetCover() dim_size", "objective parameter = Parameter(budget=100 * dim, seed=77) solution = Opt.min(objective, parameter) assert solution.get_value()", "0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1,", "sracos.strategy_lm(iset, s0, test_s1) assert iset[2].get_value() == 2.1 def test_sracos_replace(self): s0 = Solution(x=[0, 0,", "* np.sqrt(ave_seq)) - np.exp(ave_cos) + 20.0 + np.e return value def sphere_discrete_order(solution): \"\"\"", "a class \"\"\" def __init__(self): self.__weight = [0.8356, 0.5495, 0.4444, 0.7269, 0.9960, 0.6633,", "1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1,", "= [s2, s3, s1, s4, s0] x = Solution(x=[2.1, 2.1, 2.1], value=0.1) sracos", "1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0,", "= 
[1, 2, 3] b = [2, 3, 4] assert RacosCommon.extend(a, b) ==", "2.1 and iset[4].get_value() == 3 iset2 = [s1, s3, s0, s2, s4] sracos.strategy_wr(iset2,", "self.__subset.append([0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1,", "value=1) s2 = Solution(x=[3, 3, 3], value=2) iset = [s0, s1, s2] sracos", "0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0]) self.__subset.append([0,", "* (i - bias)) for i in x]) / len(x) value = -20", "value=4) pos_set = [s0, s1, s2, s3, s4] neg_set = [s2, s3, s1,", "4] b = [5, 8] assert SRacos.distance(a, b) == 5 def test_sracos_binary_search(self): s0", "= 100 # dimensions dim_regs = [[-10, 10]] * dim_size # dimension range", "iset[2].get_value() == 2.1) def test_sracos_strategy_lm(self): s0 = Solution(x=[1, 1, 1], value=0) s1 =", "i in range(len(self.__subset[0])): dims.append(False) for i in range(len(self.__subset)): if x[i] == 1: countw", "1, 0, 1, 1, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1,", "== 2 \\ and iset[3].get_value() == 2.1 and iset[4].get_value() == 3 iset2 =", "[[0, 1]] * dim_size dim_tys = [False] * dim_size return Dimension(dim_size, dim_regs, dim_tys)", "100 # dimension objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] *", "2.1 def test_sracos_strategy_rr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) iset", "1, 1, 1, 1, 0, 0, 0, 0, 1]) self.__subset.append([0, 1, 1, 0,", "test_s1, 0, 4) == 3 assert sracos.binary_search(set, test_s1, 0, 2) == 3 assert", "1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1,", "objective function parameter = Parameter(budget=10000, parallel=True, server_num=2, uncertain_bits=1, seed=1) sol = ExpOpt.min(objective, parameter)[0]", "0, 0, 1, 1]) self.__subset.append([1, 0, 0, 0, 1, 1, 0, 1, 1,", "[False] * dim_size return Dimension(dim_size, dim_regs, dim_tys) class TestRacos(object): def test_racos_common_extend(self): a =", "= Solution(x=[2.1, 2.1, 2.1], value=0.1) sracos = SRacos() sracos.replace(pos_set, x, 'pos', 'WR') assert", "1, 0, 0, 0, 0, 1, 1, 0]) 
self.__subset.append([0, 0, 1, 1, 1,", "0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0,", "assert SRacos.distance(a, b) == 5 def test_sracos_binary_search(self): s0 = Solution(value=0) s1 = Solution(value=1)", "== 2.1 or iset[1].get_value() == 2.1 or iset[2].get_value() == 2.1) def test_sracos_strategy_lm(self): s0", "s2, s3, s4] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_wr(iset, test_s1, 'pos') assert", "sum([(i-2)*(i-2) for i in x]) return value class SetCover: \"\"\" set cover problem", "== 3 and pos_set[0].get_value() == 0.1 sracos.replace(neg_set, x, 'neg', 'LM') assert neg_set[3].get_value() ==", "= Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4) # 1 3 0 2", "np.pi * (i - bias)) for i in x]) / len(x) value =", "3 assert sracos.binary_search(set, test_s2, 0, 4) == 5 assert sracos.binary_search(set, test_s3, 0, 4)", "def test_racos_common_is_distinct(self): a = Solution(x=[1, 2, 3]) b = Solution(x=[2, 3, 4]) c", "s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) s3 = Solution(value=3) s4", "* dim_size dim = Dimension(dim_size, dim_regs, dim_tys, order=dim_order) # form up the dimension", "Parameter(budget=10000, sequential=False, seed=1) sol = Opt.min(objective, parameter) sol.print_solution() assert solution.get_value() < 2 #", "instance \"\"\" dim_size = 20 dim_regs = [[0, 1]] * dim_size dim_tys =", "parameter) assert solution.get_value() < 1.5 # discrete # setcover problem = SetCover() dim", "0, 0, 1]) self.__subset.append([0, 0, 0, 0, 0, 0, 1, 1, 1, 1,", "5 assert sracos.binary_search(set, test_s3, 0, 4) == 0 assert sracos.binary_search(set, test_s4, 0, 4)", "1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0,", "1, 1, 0, 0, 0, 1, 1]) self.__subset.append([1, 0, 0, 0, 1, 1,", "1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0,", "0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1]) self.__subset.append([0, 0,", "def test_sracos_strategy_wr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) s3 =", "s1 = Solution(value=1) s2 = 
Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4) #", "in range(len(self.__subset[i])): if self.__subset[i][j] == 1: dims[j] = True full = True for", "test_sracos_binary_search(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) s3 = Solution(value=3)", "# continuous dim = 100 # dimension objective = Objective(ackley, Dimension(dim, [[-1, 1]]", "0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0])", "= [] for i in range(len(self.__subset[0])): dims.append(False) for i in range(len(self.__subset)): if x[i]", "0], value=0.5) s1 = Solution(x=[1, 1, 1], value=1) s2 = Solution(x=[2, 2, 2],", "= [s0, s1, s2] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_rr(iset, test_s1) assert", "= SetCover() dim_size = 20 one_dim = (ValueType.DISCRETE, [0, 1], False) dim_list =", "= ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2 dim = 500 objective = Objective(ackley,", "1, 0, 1, 0, 0, 0, 0, 1, 0]) self.__subset.append([0, 1, 1, 1,", "x]) return value class SetCover: \"\"\" set cover problem for discrete optimization this", "0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0,", "0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1,", "* dim_size dim = Dimension2(dim_list) # the dim is prepared by the class", "1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0,", "Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=100 * dim, sequential=False, seed=1) solution", "1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0,", "1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 0,", "we define this problem as a class \"\"\" def __init__(self): self.__weight = [0.8356,", "value=3) s4 = Solution(x=[4, 4, 4], value=4) pos_set = [s0, s1, s2, s3,", "pos_set = [s0, s1, s2, s3, s4] neg_set = [s2, s3, s1, s4,", "objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, sequential=False, seed=1) sol", "0, 1]) self.__subset.append([0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1,", "dim_regs = [[-10, 10]] * dim_size # dimension range dim_tys = 
[False] *", "= SRacos() sracos.replace(pos_set, x, 'pos', 'WR') assert pos_set[4].get_value() == 3 and pos_set[0].get_value() ==", "1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1,", "function parameter = Parameter(budget=budget, seed=777) sol = Opt.min(objective, parameter) assert sol.get_value() < 2", "= [s0, s1, s2, s3, s4] neg_set = [s2, s3, s1, s4, s0]", "for i in x]) / len(x) value = -20 * np.exp(-0.2 * np.sqrt(ave_seq))", "__init__(self): self.__weight = [0.8356, 0.5495, 0.4444, 0.7269, 0.9960, 0.6633, 0.5062, 0.8429, 0.1293, 0.7355,", "1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1,", "the objective function parameter = Parameter(budget=10000, parallel=True, server_num=2, uncertain_bits=1, seed=1) sol = ExpOpt.min(objective,", "of f(x) \"\"\" x = solution.get_x() allweight = 0 countw = 0 for", "# number of calls to the objective function parameter = Parameter(budget=budget, seed=777) sol", "= sum([(i-2)*(i-2) for i in x]) return value class SetCover: \"\"\" set cover", "Dimension2 import numpy as np def ackley(solution): \"\"\" Ackley function for continuous optimization", "0, 1, 1, 0]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0,", "0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1,", "1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1,", "dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter", "1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1,", "1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1,", "1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1,", "1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1])", "= 500 one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list = [(one_dim)] * dim", "for i in x]) return value class SetCover: \"\"\" set cover problem for", "Opt.min(objective, parameter) sol.print_solution() assert solution.get_value() < 2 # discrete # setcover problem =", "1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1]) self.__subset.append([0,", "sphere dim_size = 100 # dimensions one_dim = (ValueType.DISCRETE, [-10, 10], True) dim_list", "20 one_dim = (ValueType.DISCRETE, [0, 1], False) 
dim_list = [(one_dim)] * dim_size dim", "1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0,", "1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1,", "def test_sracos_performance(self): # continuous dim = 100 # dimension objective = Objective(ackley, Dimension(dim,", "0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0,", "0, 1, 1, 1, 1]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1,", "value = -20 * np.exp(-0.2 * np.sqrt(ave_seq)) - np.exp(ave_cos) + 20.0 + np.e", "dimension one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list = [(one_dim)] * dim objective", "0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1,", "iset2 = [s1, s3, s0, s2, s4] sracos.strategy_wr(iset2, test_s1, 'neg') assert len(iset2) ==", "0, 2) == 3 assert sracos.binary_search(set, test_s2, 0, 4) == 5 assert sracos.binary_search(set,", "number of calls to the objective function parameter = Parameter(budget=budget, parallel=True, server_num=2, seed=777)", "0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1,", "1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1,", "= (ValueType.DISCRETE, [0, 1], False) dim_list = [(one_dim)] * dim_size dim = Dimension2(dim_list)", "= Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4) iset = [s0, s1, s2,", "3 iset2 = [s1, s3, s0, s2, s4] sracos.strategy_wr(iset2, test_s1, 'neg') assert len(iset2)", "1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0,", "ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2 dim = 500 objective = Objective(ackley, Dimension(dim,", "0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1,", "sol = Opt.min(objective, parameter) sol.print_solution() assert solution.get_value() < 2 # discrete # setcover", "= problem.dim # the dim is prepared by the class objective = Objective(problem.fx,", "0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0,", "Dimension instance \"\"\" dim_size = 20 dim_regs = [[0, 1]] * dim_size dim_tys", "Sphere function for integer continuous optimization \"\"\" x = solution.get_x() value = sum([(i-2)*(i-2)", "1, 0, 1, 0, 0, 1, 0, 0]) self.__subset.append([0, 0, 0, 1, 0,", "0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 
1]) def", "= SRacos() assert sracos.binary_search(set, test_s1, 0, 4) == 3 assert sracos.binary_search(set, test_s1, 0,", "calls to the objective function parameter = Parameter(budget=budget, seed=777) sol = Opt.min(objective, parameter)", "1, 0, 1]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0, 0,", "test_s1) assert iset[2].get_value() == 2.1 def test_sracos_replace(self): s0 = Solution(x=[0, 0, 0], value=0.5)", "0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0,", "Solution(x=[3, 3, 3], value=2) iset = [s0, s1, s2] sracos = SRacos() test_s1", "0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1,", "pos_set[4].get_value() == 3 and pos_set[0].get_value() == 0.1 sracos.replace(neg_set, x, 'neg', 'LM') assert neg_set[3].get_value()", "1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0,", "range(len(self.__subset[i])): if self.__subset[i][j] == 1: dims[j] = True full = True for i", "def dim(self): \"\"\" Dimension of set cover problem. :return: Dimension instance \"\"\" dim_size", "* dim, [True] * dim)) # setup objective parameter = Parameter(budget=10000, seed=777) solution", "0.9960, 0.6633, 0.5062, 0.8429, 0.1293, 0.7355, 0.7979, 0.2814, 0.7962, 0.1754, 0.0267, 0.9862, 0.1786,", "is prepared by the class objective = Objective(problem.fx, dim) # form up the", "from zoopt import Solution, Objective, Dimension, Parameter, Opt, ExpOpt, ValueType, Dimension2 import numpy", "objective function parameter = Parameter(budget=10000, sequential=False, seed=77) sol = Opt.min(objective, parameter) sol.print_solution() assert", "bias) for i in x]) / len(x) ave_cos = sum([np.cos(2.0 * np.pi *", "0, 0, 0, 1, 0, 0, 1, 0, 1]) self.__subset.append([1, 0, 0, 0,", "0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1,", "= [s1, s3, s0, s2, s4] sracos.strategy_wr(iset2, test_s1, 'neg') assert len(iset2) == 5", "dim_size = 100 # dimensions dim_regs = [[-10, 10]] * dim_size # dimension", "1]) self.__subset.append([0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0,", "[True] * dim_size dim = Dimension(dim_size, dim_regs, dim_tys, order=dim_order) # form 
up the", "== 0.1 def test_racos_performance(self): # continuous dim = 100 # dimension objective =", "sol.print_solution() assert sol.get_value() < 200 def test_sracos_performance(self): # continuous dim = 100 #", "0, 0, 1, 0, 0, 1, 1]) def fx(self, solution): \"\"\" Objective function.", "0]) self.__subset.append([0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0,", "0, 0, 0, 0, 1, 0, 0, 1, 0, 1]) self.__subset.append([1, 0, 0,", "0.2 dim = 500 dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list))", "1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0,", "self.__weight[i] dims = [] for i in range(len(self.__subset[0])): dims.append(False) for i in range(len(self.__subset)):", "sracos.replace(neg_set, x, 'neg', 'LM') assert neg_set[3].get_value() == 0.1 def test_racos_performance(self): # continuous dim", "objective function parameter = Parameter(budget=10000) sol = Opt.min(objective, parameter) assert sol.get_value() < 200", "= Solution(x=[2, 2, 2], value=2) s3 = Solution(x=[3, 3, 3], value=3) s4 =", "0, 1, 1, 1, 1, 0, 0]) self.__subset.append([1, 0, 1, 0, 0, 0,", "self.__subset.append([1, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0,", "== 1: dims[j] = True full = True for i in range(len(dims)): if", "0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1]) self.__subset.append([0,", "0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1,", "1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0,", "solution = ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2 dim = 500 objective =", "0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 0,", "0, 1, 0, 0, 1, 0, 0, 0, 1, 0]) self.__subset.append([1, 1, 1,", "'WR') assert pos_set[4].get_value() == 3 and pos_set[0].get_value() == 0.1 sracos.replace(neg_set, x, 'neg', 'LM')", "objective = Objective(sphere_discrete_order, dim) # form up the objective function parameter = Parameter(budget=10000,", "= [[0, 1]] * dim_size dim_tys = [False] * dim_size return Dimension(dim_size, dim_regs,", "1, 1, 0, 0]) self.__subset.append([1, 0, 1, 0, 0, 0, 1, 0, 1,", "1]) self.__subset.append([1, 0, 
0, 1, 0, 1, 1, 1, 1, 1, 1, 1,", "the objective function parameter = Parameter(budget=budget, seed=777) sol = Opt.min(objective, parameter) assert sol.get_value()", "RacosCommon.extend(a, b) == [1, 2, 3, 2, 3, 4] def test_racos_common_is_distinct(self): a =", "1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0,", "0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1,", "1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 0, 1, 0,", "range(len(self.__subset)): if x[i] == 1: countw += self.__weight[i] for j in range(len(self.__subset[i])): if", "Parameter(budget=100 * dim, parallel=True, server_num=2, seed=2) # parameter = Parameter(budget=100 * dim, init_samples=[Solution([0]", "test_s2, 0, 4) == 5 assert sracos.binary_search(set, test_s3, 0, 4) == 0 assert", "full = True for i in range(len(dims)): if dims[i] is False: full =", "False: countw += allweight return countw @property def dim(self): \"\"\" Dimension of set", "s2, s3, s4] neg_set = [s2, s3, s1, s4, s0] x = Solution(x=[2.1,", "0, 1, 0, 0, 0, 0, 1, 1, 0]) self.__subset.append([0, 0, 1, 1,", "# continuous dim = 100 # dimension one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6)", "1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0,", "parameter) sol.print_solution() assert sol.get_value() < 2 # sphere dim_size = 100 # dimensions", "= 100 # dimension one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list = [(one_dim)]", "0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1,", "s2 = Solution(x=[2, 2, 2], value=2) s3 = Solution(x=[3, 3, 3], value=3) s4", "init_samples=[Solution([0] * 100)]) # init with init_samples solution_list = ExpOpt.min(objective, parameter, repeat=1) for", "assert solution.get_value() < 0.2 dim = 500 dim_list = [(one_dim)] * dim objective", "1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0,", "sol.get_value() < 200 def test_sracos_performance2(self): # continuous dim = 100 # dimension one_dim", "- np.exp(ave_cos) + 20.0 + np.e return value def sphere_discrete_order(solution): \"\"\" Sphere function", "0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0,", "0, 0, 1, 0]) 
self.__subset.append([1, 1, 1, 0, 1, 1, 0, 0, 0,", "1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1,", "1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1,", "= Dimension(dim_size, dim_regs, dim_tys, order=dim_order) # form up the dimension object objective =", "test_sracos_strategy_rr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) iset = [s0,", "2 4 test_s1 = Solution(value=2.1) test_s2 = Solution(value=4.5) test_s3 = Solution(value=-1) test_s4 =", "the objective function parameter = Parameter(budget=10000, sequential=False, seed=77) sol = Opt.min(objective, parameter) sol.print_solution()", "set cover problem for discrete optimization this problem has some extra initialization tasks,", "= Solution(value=4) iset = [s0, s1, s2, s3, s4] sracos = SRacos() test_s1", "parameter = Parameter(budget=10000, sequential=False, seed=77) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() <", "sol.print_solution() assert sol.get_value() < 2 # sphere dim_size = 100 # dimensions one_dim", "Opt.min(objective, parameter) assert sol.get_value() < 2 # sphere dim_size = 100 # dimensions", "Dimension of set cover problem. 
:return: Dimension instance \"\"\" dim_size = 20 dim_regs", "1], False) dim_list = [(one_dim)] * dim_size dim = Dimension2(dim_list) # the dim", "0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1,", "1, 1, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0,", "0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0,", "assert sracos.binary_search(set, test_s2, 0, 4) == 5 assert sracos.binary_search(set, test_s3, 0, 4) ==", "'pos', 'WR') assert pos_set[4].get_value() == 3 and pos_set[0].get_value() == 0.1 sracos.replace(neg_set, x, 'neg',", "assert solution.get_value() < 1.5 # discrete # setcover problem = SetCover() dim =", "[s0, s1, s2] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_rr(iset, test_s1) assert len(iset)", "ExpOpt, ValueType, Dimension2 import numpy as np def ackley(solution): \"\"\" Ackley function for", "* dim_size dim_tys = [False] * dim_size return Dimension(dim_size, dim_regs, dim_tys) class TestRacos(object):", "sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200 def test_sracos_performance(self): # continuous", "0, 0, 0, 1, 0, 1, 1, 0]) self.__subset.append([1, 0, 0, 0, 1,", "= Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200 def test_sracos_performance(self): # continuous dim", "2, 2], value=2) s3 = Solution(x=[3, 3, 3], value=3) s4 = Solution(x=[4, 4,", "1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1,", "test_s1, 0, 2) == 3 assert sracos.binary_search(set, test_s2, 0, 4) == 5 assert", "def fx(self, solution): \"\"\" Objective function. 
:param solution: a Solution object :return: the", "0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1,", "def test_asracos_performance(self): # continuous dim = 100 # dimension objective = Objective(ackley, Dimension(dim,", "0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0,", "1, 1, 0, 0, 1]) self.__subset.append([0, 0, 1, 1, 1, 0, 1, 1,", "= Solution(x=[2, 3, 4]) c = Solution(x=[3, 4, 5]) seti = [a, b]", "allweight return countw @property def dim(self): \"\"\" Dimension of set cover problem. :return:", "1, 0, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0,", "class TestRacos(object): def test_racos_common_extend(self): a = [1, 2, 3] b = [2, 3,", "SRacos() sracos.replace(pos_set, x, 'pos', 'WR') assert pos_set[4].get_value() == 3 and pos_set[0].get_value() == 0.1", "0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1,", "0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0,", "0, 0, 1, 1, 0, 0, 0, 1, 1, 1]) self.__subset.append([1, 0, 0,", "0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0,", "<filename>test/test_algos/test_opt_algorithm/test_racos/test_racos.py from zoopt.algos.opt_algorithms.racos.racos_common import RacosCommon from zoopt.algos.opt_algorithms.racos.sracos import SRacos from zoopt import Solution,", "0, 0, 0]) self.__subset.append([0, 1, 0, 0, 1, 0, 0, 0, 0, 1,", "ExpOpt.min(objective, parameter, repeat=1)[0] assert sol.get_value() < 2 # sphere dim_size = 100 #", "0, 0, 0, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 0, 0,", "seed=777) sol = ExpOpt.min(objective, parameter, repeat=1)[0] assert sol.get_value() < 2 # sphere dim_size", "1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1,", "+= allweight return countw @property def dim(self): \"\"\" Dimension of set cover problem.", "0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0])", "2 # sphere dim_size = 100 # dimensions dim_regs = [[-10, 10]] *", "< 2 # discrete # setcover problem = SetCover() dim_size = 20 one_dim", "function parameter = Parameter(budget=budget, parallel=True, server_num=2, seed=777) sol = ExpOpt.min(objective, parameter, repeat=1)[0] assert", 
"test_sracos_replace(self): s0 = Solution(x=[0, 0, 0], value=0.5) s1 = Solution(x=[1, 1, 1], value=1)", "[5, 8] assert SRacos.distance(a, b) == 5 def test_sracos_binary_search(self): s0 = Solution(value=0) s1", "1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1,", "4] assert RacosCommon.extend(a, b) == [1, 2, 3, 2, 3, 4] def test_racos_common_is_distinct(self):", "# dimension one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list = [(one_dim)] * dim", "value = sum([(i-2)*(i-2) for i in x]) return value class SetCover: \"\"\" set", "integer dim_order = [True] * dim_size dim = Dimension(dim_size, dim_regs, dim_tys, order=dim_order) #", "0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1,", "import SRacos from zoopt import Solution, Objective, Dimension, Parameter, Opt, ExpOpt, ValueType, Dimension2", "parameter = Parameter(budget=10000) sol = Opt.min(objective, parameter) assert sol.get_value() < 200 def test_asracos_performance(self):", "0, 1, 1, 0, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1,", "3, 4] def test_racos_common_is_distinct(self): a = Solution(x=[1, 2, 3]) b = Solution(x=[2, 3,", "and iset[4].get_value() == 3 iset2 = [s1, s3, s0, s2, s4] sracos.strategy_wr(iset2, test_s1,", "1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0,", "1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0,", "class SetCover: \"\"\" set cover problem for discrete optimization this problem has some", "Solution(x=[2.1, 2.1, 2.1], value=0.1) sracos = SRacos() sracos.replace(pos_set, x, 'pos', 'WR') assert pos_set[4].get_value()", "thus we define this problem as a class \"\"\" def __init__(self): self.__weight =", "False and RacosCommon.is_distinct(seti, c) is True def test_sracos_distance(self): a = [2, 4] b", "0, 0, 1, 0, 1, 1, 0]) self.__subset.append([1, 0, 0, 0, 1, 0,", "Parameter(budget=10000, sequential=False, seed=77) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200 def", "form up the objective function parameter = Parameter(budget=10000, parallel=True, 
server_num=2, uncertain_bits=1, seed=1) sol", "# discrete # setcover problem = SetCover() dim_size = 20 one_dim = (ValueType.DISCRETE,", "and iset[2].get_value() == 2 \\ and iset[3].get_value() == 2.1 and iset[4].get_value() == 3", "= 0.2 ave_seq = sum([(i - bias) * (i - bias) for i", "= Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, seed=777) solution = Opt.min(objective,", "1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0,", "1, 0, 1, 0, 1, 0, 0, 1]) self.__subset.append([0, 0, 0, 0, 0,", "= [s0, s1, s2, s3, s4] sracos = SRacos() assert sracos.binary_search(set, test_s1, 0,", "len(x) value = -20 * np.exp(-0.2 * np.sqrt(ave_seq)) - np.exp(ave_cos) + 20.0 +", "test_s1 = Solution(value=2.1) sracos.strategy_rr(iset, test_s1) assert len(iset) == 3 and (iset[0].get_value() == 2.1", "* dim, [True] * dim)) # setup objective parameter = Parameter(budget=10000, sequential=False, seed=1)", "seed=1) solution = ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2 dim = 500 objective", ": integer dim_order = [True] * dim_size dim = Dimension(dim_size, dim_regs, dim_tys, order=dim_order)", "\"\"\" set cover problem for discrete optimization this problem has some extra initialization", "s0] x = Solution(x=[2.1, 2.1, 2.1], value=0.1) sracos = SRacos() sracos.replace(pos_set, x, 'pos',", "0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0])", "test_s4, 0, 4) == 3 def test_sracos_strategy_wr(self): s0 = Solution(value=0) s1 = Solution(value=1)", "1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1,", "[a, b] assert RacosCommon.is_distinct(seti, a) is False and RacosCommon.is_distinct(seti, c) is True def", "1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1,", "Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, seed=777) solution = Opt.min(objective, parameter)", "0, 0, 1, 1, 0, 0, 1]) self.__subset.append([0, 0, 1, 1, 1, 0,", "0, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0,", "[s0, s1, s2, s3, s4] sracos = SRacos() assert 
sracos.binary_search(set, test_s1, 0, 4)", "1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0,", "\\ and iset[3].get_value() == 2.1 and iset[4].get_value() == 3 iset2 = [s1, s3,", "0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1,", "if dims[i] is False: full = False if full is False: countw +=", "0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1,", "4) == 0 assert sracos.binary_search(set, test_s4, 0, 4) == 3 def test_sracos_strategy_wr(self): s0", "1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1,", "0, 1, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 1, 1, 0,", "iset2[4].get_value() == 2.1 def test_sracos_strategy_rr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 =", "0.1754, 0.0267, 0.9862, 0.1786, 0.5884, 0.6289, 0.3008] self.__subset = [] self.__subset.append([0, 1, 0,", "number of calls to the objective function parameter = Parameter(budget=budget, sequential=False, seed=777) sol", "= [(one_dim)] * dim_size dim = Dimension2(dim_list) # the dim is prepared by", "s3 = Solution(value=3) s4 = Solution(value=4) # 1 3 0 2 4 test_s1", "0.7979, 0.2814, 0.7962, 0.1754, 0.0267, 0.9862, 0.1786, 0.5884, 0.6289, 0.3008] self.__subset = []", "* dim, [True] * dim)) # setup objective parameter = Parameter(budget=100 * dim,", "0]) self.__subset.append([0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0,", "i in x]) return value class SetCover: \"\"\" set cover problem for discrete", "100 # dimensions dim_regs = [[-10, 10]] * dim_size # dimension range dim_tys", "objective = Objective(sphere_discrete_order, dim) # form up the objective function parameter = Parameter(budget=10000)", "1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1,", "sphere_discrete_order(solution): \"\"\" Sphere function for integer continuous optimization \"\"\" x = solution.get_x() value", "== 3 assert sracos.binary_search(set, test_s1, 0, 2) == 3 assert sracos.binary_search(set, test_s2, 0,", "= [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter =", "len(iset) == 3 and (iset[0].get_value() == 2.1 or iset[1].get_value() 
== 2.1 or iset[2].get_value()", "dimensions one_dim = (ValueType.DISCRETE, [-10, 10], True) dim_list = [(one_dim)] * dim_size dim", "1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 1,", "Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4) # 1 3 0 2 4", "# dimension type : integer dim_order = [True] * dim_size dim = Dimension(dim_size,", "0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1,", "x = solution.get_x() bias = 0.2 ave_seq = sum([(i - bias) * (i", "problem = SetCover() dim_size = 20 one_dim = (ValueType.DISCRETE, [0, 1], False) dim_list", "np.e return value def sphere_discrete_order(solution): \"\"\" Sphere function for integer continuous optimization \"\"\"", "1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1,", "for i in range(len(self.__weight)): allweight += self.__weight[i] dims = [] for i in", "0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0,", "0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1,", "setup objective parameter = Parameter(budget=100 * dim, parallel=True, server_num=2, seed=2) # parameter =", "0, 1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([0, 0, 1, 0,", "1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0,", "1, 0, 0, 0, 1, 1, 1]) self.__subset.append([1, 0, 0, 1, 0, 1,", "= Objective(problem.fx, dim) # form up the objective function budget = 100 *", "RacosCommon from zoopt.algos.opt_algorithms.racos.sracos import SRacos from zoopt import Solution, Objective, Dimension, Parameter, Opt,", "0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0]) self.__subset.append([0, 1,", "= Solution(value=2) set = [s0, s1, s2, s3, s4] sracos = SRacos() assert", "0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0,", "1, 1, 1, 0, 0]) self.__subset.append([1, 0, 1, 0, 0, 0, 1, 0,", "2, 3, 2, 3, 4] def test_racos_common_is_distinct(self): a = Solution(x=[1, 2, 3]) b", "ave_seq = sum([(i - bias) * (i - bias) for i in x])", "= [0.8356, 0.5495, 0.4444, 0.7269, 0.9960, 0.6633, 0.5062, 0.8429, 0.1293, 0.7355, 0.7979, 0.2814,", "200 def test_sracos_performance2(self): # continuous dim = 100 # dimension one_dim = 
(ValueType.CONTINUOUS,", "dim_size dim = Dimension2(dim_list) # the dim is prepared by the class objective", "[(one_dim)] * dim_size dim = Dimension2(dim_list) # the dim is prepared by the", "dim, [True] * dim)) # setup objective parameter = Parameter(budget=10000, sequential=False, seed=1) sol", "objective = Objective(ackley, Dimension2(dim_list)) parameter = Parameter(budget=100 * dim, seed=77) solution = Opt.min(objective,", "1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0,", "0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1,", "'neg') assert len(iset2) == 5 and iset2[4].get_value() == 2.1 def test_sracos_strategy_rr(self): s0 =", "* dim, init_samples=[Solution([0] * 100)]) # init with init_samples solution_list = ExpOpt.min(objective, parameter,", "solution = Opt.min(objective, parameter) assert solution.get_value() < 0.2 dim = 500 objective =", "len(iset) == 5 and iset[0].get_value() == 0 and iset[1].get_value() == 1 and iset[2].get_value()", "iset[2].get_value() == 2 \\ and iset[3].get_value() == 2.1 and iset[4].get_value() == 3 iset2", "# setup objective parameter = Parameter(budget=100 * dim, sequential=False, seed=1) solution = ExpOpt.min(objective,", "Parameter(budget=budget, seed=777) sol = Opt.min(objective, parameter) assert sol.get_value() < 2 # sphere dim_size", "1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0,", "assert sol.get_value() < 200 def test_sracos_performance2(self): # continuous dim = 100 # dimension", "-20 * np.exp(-0.2 * np.sqrt(ave_seq)) - np.exp(ave_cos) + 20.0 + np.e return value", "= SRacos() test_s1 = Solution(value=2.1) sracos.strategy_wr(iset, test_s1, 'pos') assert len(iset) == 5 and", "4) == 3 def test_sracos_strategy_wr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 =", "0, 1, 1]) def fx(self, solution): \"\"\" Objective function. 
:param solution: a Solution", "0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([0,", "Objective(sphere_discrete_order, dim) # form up the objective function parameter = Parameter(budget=10000, parallel=True, server_num=2,", "+ 20.0 + np.e return value def sphere_discrete_order(solution): \"\"\" Sphere function for integer", "# setup objective parameter = Parameter(budget=10000, seed=777) solution = Opt.min(objective, parameter) assert solution.get_value()", "# setcover problem = SetCover() dim_size = 20 one_dim = (ValueType.DISCRETE, [0, 1],", "1], 1e-6) dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) parameter =", "in x]) / len(x) ave_cos = sum([np.cos(2.0 * np.pi * (i - bias))", "1, 1], value=0) s1 = Solution(x=[2.2, 2.2, 2.2], value=1) s2 = Solution(x=[3, 3,", ":return: the value of f(x) \"\"\" x = solution.get_x() allweight = 0 countw", "0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0,", "type : integer dim_order = [True] * dim_size dim = Dimension(dim_size, dim_regs, dim_tys,", "= Solution(x=[1, 1, 1], value=1) s2 = Solution(x=[2, 2, 2], value=2) s3 =", "s0 = Solution(x=[1, 1, 1], value=0) s1 = Solution(x=[2.2, 2.2, 2.2], value=1) s2", "of calls to the objective function parameter = Parameter(budget=budget, sequential=False, seed=777) sol =", "dim(self): \"\"\" Dimension of set cover problem. :return: Dimension instance \"\"\" dim_size =", "= Solution(x=[0, 0, 0], value=0.5) s1 = Solution(x=[1, 1, 1], value=1) s2 =", "0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 0, 1, 0, 1,", "objective parameter = Parameter(budget=10000, seed=777) solution = Opt.min(objective, parameter) assert solution.get_value() < 1.5", "define this problem as a class \"\"\" def __init__(self): self.__weight = [0.8356, 0.5495,", "def test_sracos_binary_search(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) s3 =", "@property def dim(self): \"\"\" Dimension of set cover problem. 
:return: Dimension instance \"\"\"", "iset[2].get_value() == 2.1 def test_sracos_replace(self): s0 = Solution(x=[0, 0, 0], value=0.5) s1 =", "the objective function parameter = Parameter(budget=budget, parallel=True, server_num=2, seed=777) sol = ExpOpt.min(objective, parameter,", "[True] * dim)) # setup objective parameter = Parameter(budget=100 * dim, sequential=False, seed=1)", "< 1.5 # discrete # setcover problem = SetCover() dim_size = 20 one_dim", "0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0,", "Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] * dim)) # setup objective parameter", "\"\"\" x = solution.get_x() allweight = 0 countw = 0 for i in", "def test_racos_performance(self): # continuous dim = 100 # dimension objective = Objective(ackley, Dimension(dim,", "solution.get_value() < 2 # discrete # setcover problem = SetCover() dim = problem.dim", "0.7962, 0.1754, 0.0267, 0.9862, 0.1786, 0.5884, 0.6289, 0.3008] self.__subset = [] self.__subset.append([0, 1,", "dim, sequential=False, seed=1) solution = ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2 dim =", "= Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] * dim)) # setup objective", "order=dim_order) # form up the dimension object objective = Objective(sphere_discrete_order, dim) # form", "1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0,", "sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_wr(iset, test_s1, 'pos') assert len(iset) == 5", "1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1,", "0, 1, 0, 1]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0,", "1, 1, 0]) self.__subset.append([0, 0, 1, 1, 1, 1, 0, 1, 1, 1,", "optimization \"\"\" x = solution.get_x() value = sum([(i-2)*(i-2) for i in x]) return", "countw += allweight return countw @property def dim(self): \"\"\" Dimension of set cover", "0, 1, 1, 1, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 0,", "1: dims[j] = True full = True for i in range(len(dims)): if dims[i]", "+= self.__weight[i] for j in 
range(len(self.__subset[i])): if self.__subset[i][j] == 1: dims[j] = True", "= Parameter(budget=10000, seed=777) solution = Opt.min(objective, parameter) assert solution.get_value() < 1.5 # discrete", "in range(len(self.__subset[0])): dims.append(False) for i in range(len(self.__subset)): if x[i] == 1: countw +=", "* dim)) # setup objective parameter = Parameter(budget=10000, seed=777) solution = Opt.min(objective, parameter)", "pos_set[0].get_value() == 0.1 sracos.replace(neg_set, x, 'neg', 'LM') assert neg_set[3].get_value() == 0.1 def test_racos_performance(self):", "this problem as a class \"\"\" def __init__(self): self.__weight = [0.8356, 0.5495, 0.4444,", "s1 = Solution(x=[1, 1, 1], value=1) s2 = Solution(x=[2, 2, 2], value=2) s3", "parameter) sol.print_solution() assert solution.get_value() < 2 # discrete # setcover problem = SetCover()", "0, 1, 1, 1, 1, 0, 0, 0, 1, 1]) self.__subset.append([1, 0, 0,", "0, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 1, 0, 0, 0,", "== 5 assert sracos.binary_search(set, test_s3, 0, 4) == 0 assert sracos.binary_search(set, test_s4, 0,", "1, 0]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0,", "0, 0, 0, 1, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 1,", "# setcover problem = SetCover() dim = problem.dim # the dim is prepared", "0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0,", "set = [s0, s1, s2, s3, s4] sracos = SRacos() assert sracos.binary_search(set, test_s1,", "1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0,", "0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0]) self.__subset.append([0,", "self.__subset.append([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0,", "0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0,", "assert value < 0.2 # discrete # setcover problem = SetCover() dim =", "is True def test_sracos_distance(self): a = [2, 4] b = [5, 8] assert", "1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0,", "Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 2 # sphere dim_size = 100 #", "1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0,", "range dim_tys = 
[False] * dim_size # dimension type : integer dim_order =", "0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1,", "parameter)[0] assert solution.get_value() < 0.2 dim = 500 dim_list = [(one_dim)] * dim", "self.__subset.append([1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1,", "parameter) assert sol.get_value() < 2 # sphere dim_size = 100 # dimensions one_dim", "self.__subset.append([0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1,", "up the objective function parameter = Parameter(budget=10000, parallel=True, server_num=2, uncertain_bits=1, seed=1) sol =", "= sum([np.cos(2.0 * np.pi * (i - bias)) for i in x]) /", "sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200 def test_racos_performance2(self): # continuous", "0 2 4 test_s1 = Solution(value=2.1) test_s2 = Solution(value=4.5) test_s3 = Solution(value=-1) test_s4", "1]] * dim, [True] * dim)) # setup objective parameter = Parameter(budget=100 *", "[-1, 1], 1e-6) dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) #", "dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) parameter = Parameter(budget=100 *", "up the objective function parameter = Parameter(budget=10000, sequential=False, seed=77) sol = Opt.min(objective, parameter)", "1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0,", "0, 0, 1]) self.__subset.append([0, 0, 1, 1, 1, 0, 1, 1, 0, 0,", "= Parameter(budget=10000) sol = Opt.min(objective, parameter) assert sol.get_value() < 200 def test_asracos_performance(self): #", "1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1,", "solution.get_value() < 0.2 dim = 500 one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list", "solution.get_value() < 2 # discrete # setcover problem = SetCover() dim_size = 20", "0, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1,", "0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0,", "the value of f(x) \"\"\" x = solution.get_x() allweight = 0 countw =", "= [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) parameter = Parameter(budget=100 * dim,", "# 
parameter = Parameter(budget=100 * dim, init_samples=[Solution([0] * 100)]) # init with init_samples", "in x]) / len(x) value = -20 * np.exp(-0.2 * np.sqrt(ave_seq)) - np.exp(ave_cos)", "* np.exp(-0.2 * np.sqrt(ave_seq)) - np.exp(ave_cos) + 20.0 + np.e return value def", "0, 1, 0, 0, 0, 0, 1, 0]) self.__subset.append([0, 1, 1, 1, 0,", "0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1,", "0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 0,", "Opt.min(objective, parameter) assert sol.get_value() < 200 def test_asracos_performance(self): # continuous dim = 100", "test_sracos_performance2(self): # continuous dim = 100 # dimension one_dim = (ValueType.CONTINUOUS, [-1, 1],", "self.__subset.append([0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1,", "== 5 and iset[0].get_value() == 0 and iset[1].get_value() == 1 and iset[2].get_value() ==", "0, 1, 1, 1]) self.__subset.append([1, 0, 0, 1, 0, 1, 1, 1, 1,", "sol.get_value() < 200 def test_racos_performance2(self): # continuous dim = 100 # dimension one_dim", "function budget = 100 * dim.get_size() # number of calls to the objective", "0.6633, 0.5062, 0.8429, 0.1293, 0.7355, 0.7979, 0.2814, 0.7962, 0.1754, 0.0267, 0.9862, 0.1786, 0.5884,", "500 dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective", "iset[1].get_value() == 1 and iset[2].get_value() == 2 \\ and iset[3].get_value() == 2.1 and", "Solution(value=-1) test_s4 = Solution(value=2) set = [s0, s1, s2, s3, s4] sracos =", "b) == 5 def test_sracos_binary_search(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 =", "Solution(value=3) s4 = Solution(value=4) # 1 3 0 2 4 test_s1 = Solution(value=2.1)", "1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0,", "for i in range(len(self.__subset[0])): dims.append(False) for i in range(len(self.__subset)): if x[i] == 1:", "or iset[1].get_value() == 2.1 or iset[2].get_value() == 2.1) def test_sracos_strategy_lm(self): s0 = Solution(x=[1,", "solution.get_value() < 1.5 # discrete # setcover problem = SetCover() dim = problem.dim", 
"test_s1) assert len(iset) == 3 and (iset[0].get_value() == 2.1 or iset[1].get_value() == 2.1", "j in range(len(self.__subset[i])): if self.__subset[i][j] == 1: dims[j] = True full = True", "1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0,", "# setup objective parameter = Parameter(budget=100 * dim, seed=77) solution = Opt.min(objective, parameter)", "1, 0, 1, 1, 0]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1,", "if self.__subset[i][j] == 1: dims[j] = True full = True for i in", "dimension type : integer dim_order = [True] * dim_size dim = Dimension(dim_size, dim_regs,", "has some extra initialization tasks, thus we define this problem as a class", "0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0,", "Objective, Dimension, Parameter, Opt, ExpOpt, ValueType, Dimension2 import numpy as np def ackley(solution):", "1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1])", "1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0,", "parameter = Parameter(budget=10000, sequential=False, seed=1) sol = Opt.min(objective, parameter) sol.print_solution() assert solution.get_value() <", "continuous optimization \"\"\" x = solution.get_x() value = sum([(i-2)*(i-2) for i in x])", "0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([1, 0,", "1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0,", "2.1 or iset[1].get_value() == 2.1 or iset[2].get_value() == 2.1) def test_sracos_strategy_lm(self): s0 =", "# form up the objective function budget = 100 * dim.get_size() # number", "= Parameter(budget=budget, parallel=True, server_num=2, seed=777) sol = ExpOpt.min(objective, parameter, repeat=1)[0] assert sol.get_value() <", "this problem has some extra initialization tasks, thus we define this problem as", "== 5 and iset2[4].get_value() == 2.1 def test_sracos_strategy_rr(self): s0 = Solution(value=0) s1 =", "1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1,", "def test_sracos_strategy_rr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) iset =", "1, 1, 0, 1, 0, 1, 0, 0, 1]) self.__subset.append([0, 0, 0, 0,", "1, 1, 1, 0, 1, 1, 1, 
0, 0, 1, 0, 0, 0,", "1, 0, 0]) self.__subset.append([0, 0, 0, 1, 0, 0, 1, 1, 0, 1,", "self.__subset = [] self.__subset.append([0, 1, 0, 0, 0, 1, 0, 1, 0, 0,", "1, 0, 0, 1]) self.__subset.append([0, 0, 1, 1, 1, 0, 1, 1, 0,", "Solution(value=3) s4 = Solution(value=4) iset = [s0, s1, s2, s3, s4] sracos =", "1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0])", "1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0,", "Solution, Objective, Dimension, Parameter, Opt, ExpOpt, ValueType, Dimension2 import numpy as np def", "1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1,", "0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1,", "dim = Dimension2(dim_list) # the dim is prepared by the class objective =", "== 2.1 def test_sracos_strategy_rr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2)", "iset[1].get_value() == 2.1 or iset[2].get_value() == 2.1) def test_sracos_strategy_lm(self): s0 = Solution(x=[1, 1,", "0.1 sracos.replace(neg_set, x, 'neg', 'LM') assert neg_set[3].get_value() == 0.1 def test_racos_performance(self): # continuous", "1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0,", "1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 1, 0, 0, 0, 0,", "s4] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_wr(iset, test_s1, 'pos') assert len(iset) ==", "0.3008] self.__subset = [] self.__subset.append([0, 1, 0, 0, 0, 1, 0, 1, 0,", "# discrete # setcover problem = SetCover() dim = problem.dim # the dim", "x]) / len(x) ave_cos = sum([np.cos(2.0 * np.pi * (i - bias)) for", "1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0,", "1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0,", "test_s1 = Solution(x=[2.1, 2.1, 2.1], value=2.1) sracos.strategy_lm(iset, s0, test_s1) assert iset[2].get_value() == 2.1", "return value def sphere_discrete_order(solution): \"\"\" Sphere function for integer continuous optimization \"\"\" x", "= Dimension2(dim_list) # form up the dimension object objective = Objective(sphere_discrete_order, dim) #", "objective = Objective(problem.fx, dim) # form up the objective function budget = 100", "1, 1, 0, 
1, 1, 1, 1, 1, 0, 0, 0, 0, 1])", "sum([(i - bias) * (i - bias) for i in x]) / len(x)", "= Solution(value=3) s4 = Solution(value=4) # 1 3 0 2 4 test_s1 =", "0.7355, 0.7979, 0.2814, 0.7962, 0.1754, 0.0267, 0.9862, 0.1786, 0.5884, 0.6289, 0.3008] self.__subset =", "Objective(ackley, Dimension2(dim_list)) parameter = Parameter(budget=100 * dim, seed=77) solution = Opt.min(objective, parameter) assert", "== 3 assert sracos.binary_search(set, test_s2, 0, 4) == 5 assert sracos.binary_search(set, test_s3, 0,", "return value class SetCover: \"\"\" set cover problem for discrete optimization this problem", "1, 1, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 0, 0, 1,", "0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0,", "0, 0, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 0, 0, 0,", "= [5, 8] assert SRacos.distance(a, b) == 5 def test_sracos_binary_search(self): s0 = Solution(value=0)", "3, 4]) c = Solution(x=[3, 4, 5]) seti = [a, b] assert RacosCommon.is_distinct(seti,", "1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1,", "# the dim is prepared by the class objective = Objective(problem.fx, dim) #", "objective function budget = 100 * dim.get_size() # number of calls to the", "i in x]) / len(x) value = -20 * np.exp(-0.2 * np.sqrt(ave_seq)) -", "1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 0, 0, 1,", "\"\"\" Sphere function for integer continuous optimization \"\"\" x = solution.get_x() value =", "= Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200 def test_racos_performance2(self): # continuous dim", "1, 0, 0, 0, 1, 1, 0, 0, 1]) self.__subset.append([0, 0, 1, 1,", "0]) self.__subset.append([0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0,", "0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1,", "1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0,", "1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1,", "2 \\ and iset[3].get_value() == 2.1 and iset[4].get_value() == 3 iset2 = [s1,", "0.2 dim = 500 objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True]", "1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0,", "= 500 objective = 
Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] * dim))", "1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0]) self.__subset.append([0,", "0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1,", "# sphere dim_size = 100 # dimensions dim_regs = [[-10, 10]] * dim_size", "sracos.replace(pos_set, x, 'pos', 'WR') assert pos_set[4].get_value() == 3 and pos_set[0].get_value() == 0.1 sracos.replace(neg_set,", "3 def test_sracos_strategy_wr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) s3", "200 def test_racos_performance2(self): # continuous dim = 100 # dimension one_dim = (ValueType.CONTINUOUS,", "dim.get_size() # number of calls to the objective function parameter = Parameter(budget=budget, sequential=False,", "1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1,", "assert RacosCommon.extend(a, b) == [1, 2, 3, 2, 3, 4] def test_racos_common_is_distinct(self): a", "1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0])", "* dim, seed=77) solution = Opt.min(objective, parameter) assert solution.get_value() < 0.2 dim =", "allweight += self.__weight[i] dims = [] for i in range(len(self.__subset[0])): dims.append(False) for i", "setcover problem = SetCover() dim_size = 20 one_dim = (ValueType.DISCRETE, [0, 1], False)", "0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1,", "= Parameter(budget=100 * dim, init_samples=[Solution([0] * 100)]) # init with init_samples solution_list =", "\"\"\" Objective function. 
:param solution: a Solution object :return: the value of f(x)", "1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0,", "0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1,", "1, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 0, 0, 1, 1,", "0]) self.__subset.append([1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0,", "sphere dim_size = 100 # dimensions dim_regs = [[-10, 10]] * dim_size #", "self.__weight = [0.8356, 0.5495, 0.4444, 0.7269, 0.9960, 0.6633, 0.5062, 0.8429, 0.1293, 0.7355, 0.7979,", "seed=77) solution = Opt.min(objective, parameter) assert solution.get_value() < 0.2 dim = 500 objective", "s2 = Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4) # 1 3 0", "up the objective function budget = 100 * dim.get_size() # number of calls", "s0, test_s1) assert iset[2].get_value() == 2.1 def test_sracos_replace(self): s0 = Solution(x=[0, 0, 0],", "[s1, s3, s0, s2, s4] sracos.strategy_wr(iset2, test_s1, 'neg') assert len(iset2) == 5 and", "1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0,", "assert pos_set[4].get_value() == 3 and pos_set[0].get_value() == 0.1 sracos.replace(neg_set, x, 'neg', 'LM') assert", "1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1,", "20 dim_regs = [[0, 1]] * dim_size dim_tys = [False] * dim_size return", "assert len(iset) == 3 and (iset[0].get_value() == 2.1 or iset[1].get_value() == 2.1 or", "Solution(x=[2, 2, 2], value=2) s3 = Solution(x=[3, 3, 3], value=3) s4 = Solution(x=[4,", "[[-1, 1]] * dim, [True] * dim)) # setup objective parameter = Parameter(budget=100", "0, 0, 1, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 1, 1,", "one_dim = (ValueType.DISCRETE, [0, 1], False) dim_list = [(one_dim)] * dim_size dim =", "1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 0, 0, 1, 1, 0,", "s4 = Solution(value=4) # 1 3 0 2 4 test_s1 = Solution(value=2.1) test_s2", "0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1,", "parameter = Parameter(budget=budget, seed=777) sol = Opt.min(objective, parameter) assert sol.get_value() < 2 #", "4, 4], value=4) pos_set = [s0, s1, s2, s3, s4] neg_set = [s2,", "cover problem 
for discrete optimization this problem has some extra initialization tasks, thus", "Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, sequential=False, seed=1) sol = Opt.min(objective,", "Parameter(budget=10000, parallel=True, server_num=2, uncertain_bits=1, seed=1) sol = ExpOpt.min(objective, parameter)[0] assert sol.get_value() < 10", "1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0,", "Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200 def test_racos_performance2(self): # continuous dim =", "0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0,", "= Objective(ackley, Dimension2(dim_list)) parameter = Parameter(budget=100 * dim, seed=77) solution = Opt.min(objective, parameter)", "< 200 def test_asracos_performance(self): # continuous dim = 100 # dimension objective =", "# number of calls to the objective function parameter = Parameter(budget=budget, parallel=True, server_num=2,", "1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0,", "1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0]) self.__subset.append([0, 0,", "seed=1) sol = Opt.min(objective, parameter) sol.print_solution() assert solution.get_value() < 2 # discrete #", "self.__subset.append([0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1,", "in range(len(dims)): if dims[i] is False: full = False if full is False:", "0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0,", "0, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1])", "1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0,", "1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0,", "1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1])", "1, 0, 0, 1, 1]) def fx(self, solution): \"\"\" Objective function. 
:param solution:", "1, 0, 1, 0, 0, 0, 1, 0, 1]) self.__subset.append([0, 1, 1, 0,", "8] assert SRacos.distance(a, b) == 5 def test_sracos_binary_search(self): s0 = Solution(value=0) s1 =", "= [a, b] assert RacosCommon.is_distinct(seti, a) is False and RacosCommon.is_distinct(seti, c) is True", "0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0,", "= ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2 dim = 500 dim_list = [(one_dim)]", "dim.get_size() # number of calls to the objective function parameter = Parameter(budget=budget, seed=777)", "0 for i in range(len(self.__weight)): allweight += self.__weight[i] dims = [] for i", "parameter) assert solution.get_value() < 0.2 dim = 500 objective = Objective(ackley, Dimension(dim, [[-1,", "0, 4) == 5 assert sracos.binary_search(set, test_s3, 0, 4) == 0 assert sracos.binary_search(set,", "= [[-10, 10]] * dim_size # dimension range dim_tys = [False] * dim_size", "1], value=0) s1 = Solution(x=[2.2, 2.2, 2.2], value=1) s2 = Solution(x=[3, 3, 3],", "1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1,", "1, 1, 1, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 0, 0,", "1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0,", "dim = 500 objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] *", "Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4)", "0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 1,", "1, 1, 0, 1, 1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1,", "1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0,", "the class objective = Objective(problem.fx, dim) # form up the objective function budget", "0, 0, 1, 1, 1, 1]) self.__subset.append([0, 0, 1, 1, 0, 1, 1,", "0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1,", "value = solution.get_value() assert value < 0.2 # discrete # setcover problem =", "1, 0, 1, 0, 0, 1]) self.__subset.append([0, 0, 0, 0, 0, 0, 1,", "if full is False: countw += allweight return countw @property def dim(self): \"\"\"", "0.5062, 0.8429, 0.1293, 0.7355, 0.7979, 0.2814, 0.7962, 
0.1754, 0.0267, 0.9862, 0.1786, 0.5884, 0.6289,", "(iset[0].get_value() == 2.1 or iset[1].get_value() == 2.1 or iset[2].get_value() == 2.1) def test_sracos_strategy_lm(self):", "value < 0.2 # discrete # setcover problem = SetCover() dim = problem.dim", "dim) # form up the objective function parameter = Parameter(budget=10000, parallel=True, server_num=2, uncertain_bits=1,", "dim)) # setup objective parameter = Parameter(budget=100 * dim, parallel=True, server_num=2, seed=2) #", "0, 1, 0, 0, 1]) self.__subset.append([0, 0, 0, 0, 0, 0, 1, 1,", "dim = 500 one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list = [(one_dim)] *", "Solution(x=[3, 3, 3], value=3) s4 = Solution(x=[4, 4, 4], value=4) pos_set = [s0,", "parameter = Parameter(budget=100 * dim, seed=77) solution = Opt.min(objective, parameter) assert solution.get_value() <", "= Parameter(budget=budget, seed=777) sol = Opt.min(objective, parameter) assert sol.get_value() < 2 # sphere", "1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1,", "= Opt.min(objective, parameter) assert sol.get_value() < 2 # sphere dim_size = 100 #", "range(len(dims)): if dims[i] is False: full = False if full is False: countw", "sol.get_value() < 200 def test_sracos_performance(self): # continuous dim = 100 # dimension objective", "1, 0, 0, 1, 0, 0]) self.__subset.append([0, 0, 0, 1, 0, 0, 1,", "[(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000,", "sracos = SRacos() sracos.replace(pos_set, x, 'pos', 'WR') assert pos_set[4].get_value() == 3 and pos_set[0].get_value()", "0, 0, 1, 0, 0]) self.__subset.append([0, 0, 0, 1, 0, 0, 1, 1,", "= -20 * np.exp(-0.2 * np.sqrt(ave_seq)) - np.exp(ave_cos) + 20.0 + np.e return", "test_s3 = Solution(value=-1) test_s4 = Solution(value=2) set = [s0, s1, s2, s3, s4]", "parameter) sol.print_solution() assert sol.get_value() < 200 def test_racos_performance2(self): # continuous dim = 100", "a = [2, 4] b = [5, 8] assert SRacos.distance(a, b) == 5", 
"def test_racos_performance2(self): # continuous dim = 100 # dimension one_dim = (ValueType.CONTINUOUS, [-1,", "= Solution(value=3) s4 = Solution(value=4) iset = [s0, s1, s2, s3, s4] sracos", "1, 0, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 1, 0, 0,", "setup objective parameter = Parameter(budget=10000, seed=777) solution = Opt.min(objective, parameter) assert solution.get_value() <", "1, 1]) def fx(self, solution): \"\"\" Objective function. :param solution: a Solution object", "= Solution(value=1) s2 = Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4) # 1", "function parameter = Parameter(budget=10000, sequential=False, seed=77) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value()", "seed=1) solution = ExpOpt.min(objective, parameter)[0] assert solution.get_value() < 0.2 dim = 500 dim_list", "< 2 # discrete # setcover problem = SetCover() dim = problem.dim #", "assert neg_set[3].get_value() == 0.1 def test_racos_performance(self): # continuous dim = 100 # dimension", "0, 0, 1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([0, 0, 1,", "sracos = SRacos() assert sracos.binary_search(set, test_s1, 0, 4) == 3 assert sracos.binary_search(set, test_s1,", "1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0,", "= [False] * dim_size return Dimension(dim_size, dim_regs, dim_tys) class TestRacos(object): def test_racos_common_extend(self): a", "np.exp(-0.2 * np.sqrt(ave_seq)) - np.exp(ave_cos) + 20.0 + np.e return value def sphere_discrete_order(solution):", "Solution(x=[1, 1, 1], value=0) s1 = Solution(x=[2.2, 2.2, 2.2], value=1) s2 = Solution(x=[3,", "1, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1,", "< 200 def test_racos_performance2(self): # continuous dim = 100 # dimension one_dim =", "0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1,", "assert sol.get_value() < 2 # sphere dim_size = 100 # dimensions one_dim =", "0, 0, 1, 1]) def fx(self, solution): \"\"\" Objective function. 
:param solution: a", "[-10, 10], True) dim_list = [(one_dim)] * dim_size dim = Dimension2(dim_list) # form", "value=0.5) s1 = Solution(x=[1, 1, 1], value=1) s2 = Solution(x=[2, 2, 2], value=2)", "0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1,", "self.__subset.append([0, 1, 0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0,", "dimensions dim_regs = [[-10, 10]] * dim_size # dimension range dim_tys = [False]", "SRacos() test_s1 = Solution(value=2.1) sracos.strategy_wr(iset, test_s1, 'pos') assert len(iset) == 5 and iset[0].get_value()", "= Solution(x=[1, 1, 1], value=0) s1 = Solution(x=[2.2, 2.2, 2.2], value=1) s2 =", "== 3 def test_sracos_strategy_wr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2)", "0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1,", "1, 1]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1,", "0, 0, 1, 0, 1]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1,", "1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1,", "objective parameter = Parameter(budget=100 * dim, sequential=False, seed=1) solution = ExpOpt.min(objective, parameter)[0] assert", "1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1,", "seed=777) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 2 # sphere dim_size", "1, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0])", "Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) iset = [s0, s1, s2] sracos", "0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1,", "0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1,", "False if full is False: countw += allweight return countw @property def dim(self):", "0, 0]) self.__subset.append([0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0,", "for discrete optimization this problem has some extra initialization tasks, thus we define", "= Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) iset = [s0, s1, s2]", "x = Solution(x=[2.1, 2.1, 2.1], value=0.1) sracos = SRacos() sracos.replace(pos_set, x, 'pos', 'WR')", "0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1,", "1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 0, 0,", "= Solution(value=1) s2 = Solution(value=2) 
s3 = Solution(value=3) s4 = Solution(value=4) iset =", "0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0,", "1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0,", "(ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list))", "1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1]) self.__subset.append([0, 1,", "10]] * dim_size # dimension range dim_tys = [False] * dim_size # dimension", "0, 1, 1]) self.__subset.append([1, 0, 0, 0, 1, 1, 0, 1, 1, 1,", "countw @property def dim(self): \"\"\" Dimension of set cover problem. :return: Dimension instance", "= [2, 4] b = [5, 8] assert SRacos.distance(a, b) == 5 def", "b) == [1, 2, 3, 2, 3, 4] def test_racos_common_is_distinct(self): a = Solution(x=[1,", "# setup objective parameter = Parameter(budget=10000, sequential=False, seed=1) sol = Opt.min(objective, parameter) sol.print_solution()", "init with init_samples solution_list = ExpOpt.min(objective, parameter, repeat=1) for solution in solution_list: value", "s1, s2] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_rr(iset, test_s1) assert len(iset) ==", "return Dimension(dim_size, dim_regs, dim_tys) class TestRacos(object): def test_racos_common_extend(self): a = [1, 2, 3]", "and iset[0].get_value() == 0 and iset[1].get_value() == 1 and iset[2].get_value() == 2 \\", "sequential=False, seed=1) sol = Opt.min(objective, parameter) sol.print_solution() assert solution.get_value() < 2 # discrete", "assert sracos.binary_search(set, test_s1, 0, 4) == 3 assert sracos.binary_search(set, test_s1, 0, 2) ==", "0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 0,", "0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1,", "test_sracos_strategy_lm(self): s0 = Solution(x=[1, 1, 1], value=0) s1 = Solution(x=[2.2, 2.2, 2.2], value=1)", "1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 0]) self.__subset.append([0,", "1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0,", "self.__subset.append([0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0,", "ExpOpt.min(objective, parameter, repeat=1) for solution in solution_list: value = 
solution.get_value() assert value <", "for i in x]) / len(x) ave_cos = sum([np.cos(2.0 * np.pi * (i", "the objective function parameter = Parameter(budget=budget, sequential=False, seed=777) sol = Opt.min(objective, parameter) sol.print_solution()", "seti = [a, b] assert RacosCommon.is_distinct(seti, a) is False and RacosCommon.is_distinct(seti, c) is", "= Solution(x=[3, 3, 3], value=3) s4 = Solution(x=[4, 4, 4], value=4) pos_set =", "dim_size dim = Dimension(dim_size, dim_regs, dim_tys, order=dim_order) # form up the dimension object", "self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1,", "sol.print_solution() assert sol.get_value() < 200 def test_racos_performance2(self): # continuous dim = 100 #", "5 and iset[0].get_value() == 0 and iset[1].get_value() == 1 and iset[2].get_value() == 2", "2, 3]) b = Solution(x=[2, 3, 4]) c = Solution(x=[3, 4, 5]) seti", "4 test_s1 = Solution(value=2.1) test_s2 = Solution(value=4.5) test_s3 = Solution(value=-1) test_s4 = Solution(value=2)", "* 100)]) # init with init_samples solution_list = ExpOpt.min(objective, parameter, repeat=1) for solution", "sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 2 # sphere dim_size =", "s3, s1, s4, s0] x = Solution(x=[2.1, 2.1, 2.1], value=0.1) sracos = SRacos()", "0, 1, 0, 1, 0, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0,", "sol = Opt.min(objective, parameter) assert sol.get_value() < 200 def test_sracos_performance2(self): # continuous dim", "1, 1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1]) self.__subset.append([0,", "- bias) * (i - bias) for i in x]) / len(x) ave_cos", "0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0,", "dimension range dim_tys = [False] * dim_size # dimension type : integer dim_order", "0]) self.__subset.append([0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1,", "0, 0, 1, 0, 0, 0, 1, 0]) self.__subset.append([1, 1, 1, 0, 1,", "0.7269, 0.9960, 0.6633, 0.5062, 0.8429, 0.1293, 0.7355, 0.7979, 0.2814, 0.7962, 0.1754, 0.0267, 0.9862,", "sol = Opt.min(objective, 
parameter) assert sol.get_value() < 2 # sphere dim_size = 100", "s4] sracos.strategy_wr(iset2, test_s1, 'neg') assert len(iset2) == 5 and iset2[4].get_value() == 2.1 def", "0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1,", "0.8429, 0.1293, 0.7355, 0.7979, 0.2814, 0.7962, 0.1754, 0.0267, 0.9862, 0.1786, 0.5884, 0.6289, 0.3008]", "sum([np.cos(2.0 * np.pi * (i - bias)) for i in x]) / len(x)", "np.exp(ave_cos) + 20.0 + np.e return value def sphere_discrete_order(solution): \"\"\" Sphere function for", "dim_regs, dim_tys) class TestRacos(object): def test_racos_common_extend(self): a = [1, 2, 3] b =", "to the objective function parameter = Parameter(budget=budget, parallel=True, server_num=2, seed=777) sol = ExpOpt.min(objective,", "neg_set = [s2, s3, s1, s4, s0] x = Solution(x=[2.1, 2.1, 2.1], value=0.1)", "self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0,", "0, 1, 0, 0, 1, 1]) def fx(self, solution): \"\"\" Objective function. :param", "2], value=2) s3 = Solution(x=[3, 3, 3], value=3) s4 = Solution(x=[4, 4, 4],", "0, 0]) self.__subset.append([1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0,", "RacosCommon.is_distinct(seti, c) is True def test_sracos_distance(self): a = [2, 4] b = [5,", "self.__weight[i] for j in range(len(self.__subset[i])): if self.__subset[i][j] == 1: dims[j] = True full", "1, 1, 0, 0, 0, 1, 1, 1]) self.__subset.append([1, 0, 0, 1, 0,", "dims = [] for i in range(len(self.__subset[0])): dims.append(False) for i in range(len(self.__subset)): if", "1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1,", "0, 0, 0, 1, 1, 1]) self.__subset.append([1, 0, 0, 1, 0, 1, 1,", "0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1,", "zoopt.algos.opt_algorithms.racos.sracos import SRacos from zoopt import Solution, Objective, Dimension, Parameter, Opt, ExpOpt, ValueType,", "0 countw = 0 for i in range(len(self.__weight)): allweight += self.__weight[i] dims =", "test_s1 = Solution(value=2.1) sracos.strategy_wr(iset, test_s1, 'pos') assert len(iset) == 5 and iset[0].get_value() ==", "dim_tys = [False] * 
dim_size # dimension type : integer dim_order = [True]", "= Solution(value=2.1) test_s2 = Solution(value=4.5) test_s3 = Solution(value=-1) test_s4 = Solution(value=2) set =", "0, 1, 0, 1, 0, 0, 0, 1, 0, 1]) self.__subset.append([0, 1, 1,", "== 1: countw += self.__weight[i] for j in range(len(self.__subset[i])): if self.__subset[i][j] == 1:", "assert solution.get_value() < 0.2 dim = 500 objective = Objective(ackley, Dimension(dim, [[-1, 1]]", "1, 0, 1, 0, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0, 1,", "SRacos() test_s1 = Solution(value=2.1) sracos.strategy_rr(iset, test_s1) assert len(iset) == 3 and (iset[0].get_value() ==", "SetCover() dim = problem.dim # the dim is prepared by the class objective", "[1, 2, 3] b = [2, 3, 4] assert RacosCommon.extend(a, b) == [1,", "1, 1, 0, 0, 0]) self.__subset.append([0, 0, 1, 1, 0, 1, 0, 1,", "import Solution, Objective, Dimension, Parameter, Opt, ExpOpt, ValueType, Dimension2 import numpy as np", "= Parameter(budget=100 * dim, seed=77) solution = Opt.min(objective, parameter) assert solution.get_value() < 0.2", "objective parameter = Parameter(budget=10000, sequential=False, seed=1) sol = Opt.min(objective, parameter) sol.print_solution() assert solution.get_value()", "continuous optimization \"\"\" x = solution.get_x() bias = 0.2 ave_seq = sum([(i -", "test_racos_performance2(self): # continuous dim = 100 # dimension one_dim = (ValueType.CONTINUOUS, [-1, 1],", "0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 0,", "1, 1], value=1) s2 = Solution(x=[2, 2, 2], value=2) s3 = Solution(x=[3, 3,", "Objective function. 
:param solution: a Solution object :return: the value of f(x) \"\"\"", "== 5 def test_sracos_binary_search(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2)", "SRacos.distance(a, b) == 5 def test_sracos_binary_search(self): s0 = Solution(value=0) s1 = Solution(value=1) s2", "1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1,", "True def test_sracos_distance(self): a = [2, 4] b = [5, 8] assert SRacos.distance(a,", "\"\"\" def __init__(self): self.__weight = [0.8356, 0.5495, 0.4444, 0.7269, 0.9960, 0.6633, 0.5062, 0.8429,", "1, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1,", "Ackley function for continuous optimization \"\"\" x = solution.get_x() bias = 0.2 ave_seq", "0]) self.__subset.append([0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0,", "0, 0, 0, 1, 1]) self.__subset.append([1, 0, 0, 0, 1, 1, 0, 1,", "1, 0, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1,", "of set cover problem. :return: Dimension instance \"\"\" dim_size = 20 dim_regs =", "1, 1, 1, 0, 1, 0, 1, 0, 0, 1]) self.__subset.append([0, 0, 0,", "1, 0, 0, 1, 0, 0, 0, 1, 0]) self.__subset.append([1, 1, 1, 0,", "def sphere_discrete_order(solution): \"\"\" Sphere function for integer continuous optimization \"\"\" x = solution.get_x()", "dim)) # setup objective parameter = Parameter(budget=10000, sequential=False, seed=1) sol = Opt.min(objective, parameter)", "0]) self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0,", "dim = 100 # dimension one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list =", "[0, 1], False) dim_list = [(one_dim)] * dim_size dim = Dimension2(dim_list) # the", "Solution(x=[3, 4, 5]) seti = [a, b] assert RacosCommon.is_distinct(seti, a) is False and", "1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0,", "optimization this problem has some extra initialization tasks, thus we define this problem", "0, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0])", "0, 4) == 3 assert sracos.binary_search(set, test_s1, 0, 2) == 3 assert sracos.binary_search(set,", "x = solution.get_x() value = sum([(i-2)*(i-2) for i in x]) return value 
class", "1, 1, 1, 0, 0, 0, 1, 1]) self.__subset.append([1, 0, 0, 0, 1,", "0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0]) self.__subset.append([0, 0,", "500 objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True] * dim)) #", "* dim)) # setup objective parameter = Parameter(budget=10000, sequential=False, seed=1) sol = Opt.min(objective,", "assert sol.get_value() < 2 # sphere dim_size = 100 # dimensions dim_regs =", "s1 = Solution(value=1) s2 = Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4) iset", "1, 1, 1]) self.__subset.append([1, 0, 0, 1, 0, 1, 1, 1, 1, 1,", "Parameter(budget=10000) sol = Opt.min(objective, parameter) assert sol.get_value() < 200 def test_sracos_performance2(self): # continuous", "to the objective function parameter = Parameter(budget=budget, sequential=False, seed=777) sol = Opt.min(objective, parameter)", "= Parameter(budget=budget, sequential=False, seed=777) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 2", "1, 0, 0, 1, 1, 1, 1]) self.__subset.append([0, 0, 1, 1, 0, 1,", "1, 0, 0, 1]) self.__subset.append([0, 0, 0, 0, 0, 0, 1, 1, 1,", "Solution(x=[2, 3, 4]) c = Solution(x=[3, 4, 5]) seti = [a, b] assert", "= Solution(x=[2.1, 2.1, 2.1], value=2.1) sracos.strategy_lm(iset, s0, test_s1) assert iset[2].get_value() == 2.1 def", "problem = SetCover() dim = problem.dim # the dim is prepared by the", "1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0,", "SRacos() test_s1 = Solution(x=[2.1, 2.1, 2.1], value=2.1) sracos.strategy_lm(iset, s0, test_s1) assert iset[2].get_value() ==", "for i in range(len(self.__subset)): if x[i] == 1: countw += self.__weight[i] for j", "sequential=False, seed=77) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200 def test_racos_performance2(self):", "Dimension2(dim_list) # the dim is prepared by the class objective = Objective(problem.fx, dim)", "[(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) parameter = Parameter(budget=100 
* dim, seed=77)", "1, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1,", "0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1,", "Solution(x=[1, 2, 3]) b = Solution(x=[2, 3, 4]) c = Solution(x=[3, 4, 5])", "= ExpOpt.min(objective, parameter, repeat=1) for solution in solution_list: value = solution.get_value() assert value", "sracos = SRacos() test_s1 = Solution(x=[2.1, 2.1, 2.1], value=2.1) sracos.strategy_lm(iset, s0, test_s1) assert", "1, 0, 0, 1, 0, 0, 1, 1, 1, 1]) self.__subset.append([0, 0, 1,", "test_s1, 'pos') assert len(iset) == 5 and iset[0].get_value() == 0 and iset[1].get_value() ==", "x]) / len(x) value = -20 * np.exp(-0.2 * np.sqrt(ave_seq)) - np.exp(ave_cos) +", "0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0,", "0, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1,", "0.1 def test_racos_performance(self): # continuous dim = 100 # dimension objective = Objective(ackley,", "1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0, 0,", "solution = Opt.min(objective, parameter) assert solution.get_value() < 0.2 dim = 500 one_dim =", "1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0, 0])", "test_racos_performance(self): # continuous dim = 100 # dimension objective = Objective(ackley, Dimension(dim, [[-1,", "< 2 # sphere dim_size = 100 # dimensions dim_regs = [[-10, 10]]", "1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 0, 0,", "1, 1, 1, 1, 0, 0, 0, 1, 1]) self.__subset.append([1, 0, 0, 0,", "1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0,", "[s2, s3, s1, s4, s0] x = Solution(x=[2.1, 2.1, 2.1], value=0.1) sracos =", "1, 0, 0, 0, 1, 0]) self.__subset.append([1, 1, 1, 0, 1, 1, 0,", "0]) self.__subset.append([0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1,", "assert solution.get_value() < 2 # discrete # setcover problem = SetCover() dim =", "= Opt.min(objective, parameter) assert sol.get_value() < 200 def test_asracos_performance(self): # continuous dim =", "1, 0]) self.__subset.append([0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0,", "4) == 5 assert sracos.binary_search(set, test_s3, 0, 4) == 0 assert sracos.binary_search(set, test_s4,", "value=1) s2 = Solution(x=[2, 2, 2], value=2) s3 = 
Solution(x=[3, 3, 3], value=3)", "0, 0, 0, 0, 0]) self.__subset.append([0, 1, 0, 0, 1, 0, 0, 0,", "0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0]) self.__subset.append([0,", "[] for i in range(len(self.__subset[0])): dims.append(False) for i in range(len(self.__subset)): if x[i] ==", "test_asracos_performance(self): # continuous dim = 100 # dimension objective = Objective(ackley, Dimension(dim, [[-1,", "repeat=1) for solution in solution_list: value = solution.get_value() assert value < 0.2 #", "dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, sequential=False, seed=1)", "0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1,", "0, 0, 0, 0]) self.__subset.append([0, 1, 0, 0, 1, 0, 0, 0, 0,", "0, 1, 0, 1, 1, 0]) self.__subset.append([1, 0, 0, 0, 1, 0, 0,", "dim)) # setup objective parameter = Parameter(budget=10000, seed=777) solution = Opt.min(objective, parameter) assert", "1]) self.__subset.append([1, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0,", "1, 0, 0, 0, 1, 0, 0, 1, 1]) def fx(self, solution): \"\"\"", "value=0.1) sracos = SRacos() sracos.replace(pos_set, x, 'pos', 'WR') assert pos_set[4].get_value() == 3 and", "Parameter(budget=100 * dim, init_samples=[Solution([0] * 100)]) # init with init_samples solution_list = ExpOpt.min(objective,", "1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([0, 0, 1, 0, 0,", "0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1,", "from zoopt.algos.opt_algorithms.racos.sracos import SRacos from zoopt import Solution, Objective, Dimension, Parameter, Opt, ExpOpt,", "1, 0, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0,", "0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 1,", "# dimensions dim_regs = [[-10, 10]] * dim_size # dimension range dim_tys =", "assert sracos.binary_search(set, test_s3, 0, 4) == 0 assert sracos.binary_search(set, test_s4, 0, 4) ==", "5 def test_sracos_binary_search(self): s0 = Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) s3", "1, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0,", "Opt, ExpOpt, ValueType, Dimension2 import 
numpy as np def ackley(solution): \"\"\" Ackley function", "dim_size # dimension range dim_tys = [False] * dim_size # dimension type :", "Solution(value=2.1) sracos.strategy_rr(iset, test_s1) assert len(iset) == 3 and (iset[0].get_value() == 2.1 or iset[1].get_value()", "[s0, s1, s2] sracos = SRacos() test_s1 = Solution(x=[2.1, 2.1, 2.1], value=2.1) sracos.strategy_lm(iset,", "= Solution(value=0) s1 = Solution(value=1) s2 = Solution(value=2) s3 = Solution(value=3) s4 =", "seed=777) sol = Opt.min(objective, parameter) assert sol.get_value() < 2 # sphere dim_size =", "200 def test_sracos_performance(self): # continuous dim = 100 # dimension objective = Objective(ackley,", "solution = Opt.min(objective, parameter) assert solution.get_value() < 1.5 # discrete # setcover problem", "budget = 100 * dim.get_size() # number of calls to the objective function", "dim_order = [True] * dim_size dim = Dimension(dim_size, dim_regs, dim_tys, order=dim_order) # form", "b = [2, 3, 4] assert RacosCommon.extend(a, b) == [1, 2, 3, 2,", "== 0 assert sracos.binary_search(set, test_s4, 0, 4) == 3 def test_sracos_strategy_wr(self): s0 =", "= Solution(value=4.5) test_s3 = Solution(value=-1) test_s4 = Solution(value=2) set = [s0, s1, s2,", "1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1]) self.__subset.append([1,", "1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 0, 0,", "0, 0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0,", "== 0 and iset[1].get_value() == 1 and iset[2].get_value() == 2 \\ and iset[3].get_value()", "the dimension object objective = Objective(sphere_discrete_order, dim) # form up the objective function", "3 and (iset[0].get_value() == 2.1 or iset[1].get_value() == 2.1 or iset[2].get_value() == 2.1)", "0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0,", "self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1,", "0, 0, 0, 1, 0]) self.__subset.append([0, 1, 1, 1, 0, 0, 1, 0,", "1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1,", "1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1])", "< 200 def test_sracos_performance(self): # continuous dim = 
100 # dimension objective =", "b] assert RacosCommon.is_distinct(seti, a) is False and RacosCommon.is_distinct(seti, c) is True def test_sracos_distance(self):", "# number of calls to the objective function parameter = Parameter(budget=budget, sequential=False, seed=777)", "/ len(x) value = -20 * np.exp(-0.2 * np.sqrt(ave_seq)) - np.exp(ave_cos) + 20.0", "discrete # setcover problem = SetCover() dim_size = 20 one_dim = (ValueType.DISCRETE, [0,", "= Opt.min(objective, parameter) assert solution.get_value() < 0.2 dim = 500 one_dim = (ValueType.CONTINUOUS,", "= sum([(i - bias) * (i - bias) for i in x]) /", "[False] * dim_size # dimension type : integer dim_order = [True] * dim_size", "0, 0, 1, 1, 0]) self.__subset.append([0, 0, 1, 1, 1, 1, 0, 1,", "setcover problem = SetCover() dim = problem.dim # the dim is prepared by", "0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0,", "0, 1, 1, 0, 1, 0, 1, 0, 1, 1, 0, 1, 1,", "i in range(len(self.__weight)): allweight += self.__weight[i] dims = [] for i in range(len(self.__subset[0])):", "2 # sphere dim_size = 100 # dimensions one_dim = (ValueType.DISCRETE, [-10, 10],", "parameter = Parameter(budget=10000, seed=777) solution = Opt.min(objective, parameter) assert solution.get_value() < 1.5 #", "0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 1,", "b = Solution(x=[2, 3, 4]) c = Solution(x=[3, 4, 5]) seti = [a,", "prepared by the class objective = Objective(problem.fx, dim) # form up the objective", "s2, s3, s4] sracos = SRacos() assert sracos.binary_search(set, test_s1, 0, 4) == 3", "= Solution(x=[4, 4, 4], value=4) pos_set = [s0, s1, s2, s3, s4] neg_set", "in range(len(self.__weight)): allweight += self.__weight[i] dims = [] for i in range(len(self.__subset[0])): dims.append(False)", "* np.pi * (i - bias)) for i in x]) / len(x) value", "bias = 0.2 ave_seq = sum([(i - bias) * (i - bias) for", "* dim)) # setup objective parameter = Parameter(budget=100 * dim, seed=77) solution =", "10], True) dim_list = [(one_dim)] * dim_size dim = Dimension2(dim_list) 
# form up", "0, 0]) self.__subset.append([0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0,", "0, 0, 1, 0, 0, 1, 0, 1]) self.__subset.append([1, 0, 0, 0, 1,", "0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1,", "Dimension2(dim_list)) # setup objective parameter = Parameter(budget=100 * dim, sequential=False, seed=1) solution =", "value of f(x) \"\"\" x = solution.get_x() allweight = 0 countw = 0", "2.1, 2.1], value=0.1) sracos = SRacos() sracos.replace(pos_set, x, 'pos', 'WR') assert pos_set[4].get_value() ==", "0, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0,", "i in range(len(dims)): if dims[i] is False: full = False if full is", "dim_size dim = Dimension2(dim_list) # form up the dimension object objective = Objective(sphere_discrete_order,", "sol.print_solution() assert solution.get_value() < 2 # discrete # setcover problem = SetCover() dim", "[[-1, 1]] * dim, [True] * dim)) # setup objective parameter = Parameter(budget=10000,", "dim = problem.dim # the dim is prepared by the class objective =", "1]) self.__subset.append([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0,", "s0 = Solution(x=[0, 0, 0], value=0.5) s1 = Solution(x=[1, 1, 1], value=1) s2", "function for continuous optimization \"\"\" x = solution.get_x() bias = 0.2 ave_seq =", "1, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1]) self.__subset.append([0,", "0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 0]) self.__subset.append([0, 0,", "0, 0, 0, 1, 0, 1, 0, 1, 0, 0]) self.__subset.append([0, 0, 1,", "3], value=3) s4 = Solution(x=[4, 4, 4], value=4) pos_set = [s0, s1, s2,", "< 0.2 dim = 500 dim_list = [(one_dim)] * dim objective = Objective(ackley,", "= Parameter(budget=10000, parallel=True, server_num=2, uncertain_bits=1, seed=1) sol = ExpOpt.min(objective, parameter)[0] assert sol.get_value() <", "1]) self.__subset.append([0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1,", "1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 0, 1,", "< 2 # sphere dim_size = 100 # dimensions one_dim = (ValueType.DISCRETE, [-10,", "1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0,", ":param solution: a Solution object :return: the value of 
f(x) \"\"\" x =", "1 and iset[2].get_value() == 2 \\ and iset[3].get_value() == 2.1 and iset[4].get_value() ==", "of calls to the objective function parameter = Parameter(budget=budget, parallel=True, server_num=2, seed=777) sol", "= Opt.min(objective, parameter) assert solution.get_value() < 1.5 # discrete # setcover problem =", "0, 0, 0, 0, 1, 1, 0]) self.__subset.append([0, 0, 1, 1, 1, 1,", "1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 1,", "< 0.2 dim = 500 one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list =", "1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0, 1,", "3] b = [2, 3, 4] assert RacosCommon.extend(a, b) == [1, 2, 3,", "1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1]) self.__subset.append([0, 0,", "dim_list = [(one_dim)] * dim_size dim = Dimension2(dim_list) # form up the dimension", "solution.get_x() allweight = 0 countw = 0 for i in range(len(self.__weight)): allweight +=", "= True full = True for i in range(len(dims)): if dims[i] is False:", "# form up the dimension object objective = Objective(sphere_discrete_order, dim) # form up", "0, 0, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 1])", "Solution object :return: the value of f(x) \"\"\" x = solution.get_x() allweight =", "1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0,", "problem has some extra initialization tasks, thus we define this problem as a", "1], value=1) s2 = Solution(x=[2, 2, 2], value=2) s3 = Solution(x=[3, 3, 3],", "= Solution(value=2) iset = [s0, s1, s2] sracos = SRacos() test_s1 = Solution(value=2.1)", "objective parameter = Parameter(budget=100 * dim, parallel=True, server_num=2, seed=2) # parameter = Parameter(budget=100", "sol = Opt.min(objective, parameter) assert sol.get_value() < 200 def test_asracos_performance(self): # continuous dim", "< 200 def test_sracos_performance2(self): # continuous dim = 100 # dimension one_dim =", "= True for i in range(len(dims)): if dims[i] is False: full = False", "1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1,", "iset = [s0, s1, s2] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_rr(iset, 
test_s1)", "ValueType, Dimension2 import numpy as np def ackley(solution): \"\"\" Ackley function for continuous", "initialization tasks, thus we define this problem as a class \"\"\" def __init__(self):", "0, 1, 1, 0, 1, 0, 0, 1, 0, 0]) self.__subset.append([0, 0, 0,", "s2] sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_rr(iset, test_s1) assert len(iset) == 3", "1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0,", "1]) self.__subset.append([0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1,", "= Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, sequential=False, seed=1) sol =", "0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 1,", "== [1, 2, 3, 2, 3, 4] def test_racos_common_is_distinct(self): a = Solution(x=[1, 2,", "+= self.__weight[i] dims = [] for i in range(len(self.__subset[0])): dims.append(False) for i in", "sracos = SRacos() test_s1 = Solution(value=2.1) sracos.strategy_rr(iset, test_s1) assert len(iset) == 3 and", "sracos.strategy_wr(iset, test_s1, 'pos') assert len(iset) == 5 and iset[0].get_value() == 0 and iset[1].get_value()", "test_s3, 0, 4) == 0 assert sracos.binary_search(set, test_s4, 0, 4) == 3 def", "discrete # setcover problem = SetCover() dim = problem.dim # the dim is", "0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1,", "1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1,", "= Parameter(budget=10000) sol = Opt.min(objective, parameter) assert sol.get_value() < 200 def test_sracos_performance2(self): #", "= [False] * dim_size # dimension type : integer dim_order = [True] *", "1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1]) self.__subset.append([1, 0,", "100 # dimension one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6) dim_list = [(one_dim)] *", "1, 0, 0, 1, 0, 0, 0, 0, 0, 0]) self.__subset.append([0, 1, 0,", "Parameter(budget=budget, parallel=True, server_num=2, seed=777) sol = ExpOpt.min(objective, parameter, repeat=1)[0] assert sol.get_value() < 2", "= SRacos() test_s1 = Solution(x=[2.1, 2.1, 2.1], value=2.1) sracos.strategy_lm(iset, s0, test_s1) assert 
iset[2].get_value()", "1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0,", "problem for discrete optimization this problem has some extra initialization tasks, thus we", "0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0,", "0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 0, 1,", "1, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0,", "Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, sequential=False, seed=1) sol = Opt.min(objective, parameter)", "form up the objective function parameter = Parameter(budget=10000) sol = Opt.min(objective, parameter) assert", "0.4444, 0.7269, 0.9960, 0.6633, 0.5062, 0.8429, 0.1293, 0.7355, 0.7979, 0.2814, 0.7962, 0.1754, 0.0267,", "1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1]) def fx(self,", "'pos') assert len(iset) == 5 and iset[0].get_value() == 0 and iset[1].get_value() == 1", "0, 0], value=0.5) s1 = Solution(x=[1, 1, 1], value=1) s2 = Solution(x=[2, 2,", "1, 1, 0, 1, 0, 0, 1, 0, 0]) self.__subset.append([0, 0, 0, 1,", "full = False if full is False: countw += allweight return countw @property", "0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0,", "0, 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0,", "dim_regs = [[0, 1]] * dim_size dim_tys = [False] * dim_size return Dimension(dim_size,", "Solution(value=4) iset = [s0, s1, s2, s3, s4] sracos = SRacos() test_s1 =", "Solution(value=1) s2 = Solution(value=2) iset = [s0, s1, s2] sracos = SRacos() test_s1", "calls to the objective function parameter = Parameter(budget=budget, sequential=False, seed=777) sol = Opt.min(objective,", "and (iset[0].get_value() == 2.1 or iset[1].get_value() == 2.1 or iset[2].get_value() == 2.1) def", "4) == 3 assert sracos.binary_search(set, test_s1, 0, 2) == 3 assert sracos.binary_search(set, test_s2,", "zoopt.algos.opt_algorithms.racos.racos_common import RacosCommon from zoopt.algos.opt_algorithms.racos.sracos import SRacos from zoopt import Solution, Objective, Dimension,", "+ np.e return value def sphere_discrete_order(solution): \"\"\" Sphere function for integer continuous optimization", "= solution.get_x() 
bias = 0.2 ave_seq = sum([(i - bias) * (i -", "0, 0, 0, 1, 1, 0, 0, 1]) self.__subset.append([0, 0, 1, 1, 1,", "dim = Dimension(dim_size, dim_regs, dim_tys, order=dim_order) # form up the dimension object objective", "0, 0, 0, 0, 0, 0]) self.__subset.append([0, 1, 0, 0, 1, 0, 0,", "= Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=100 * dim, sequential=False, seed=1)", "i in range(len(self.__subset)): if x[i] == 1: countw += self.__weight[i] for j in", "0, 0]) self.__subset.append([0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1,", "(i - bias)) for i in x]) / len(x) value = -20 *", "import RacosCommon from zoopt.algos.opt_algorithms.racos.sracos import SRacos from zoopt import Solution, Objective, Dimension, Parameter,", "0, 1, 0]) self.__subset.append([1, 1, 1, 0, 1, 1, 0, 0, 0, 0,", "= 100 # dimension objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim, [True]", "0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1,", "def test_sracos_distance(self): a = [2, 4] b = [5, 8] assert SRacos.distance(a, b)", "assert len(iset2) == 5 and iset2[4].get_value() == 2.1 def test_sracos_strategy_rr(self): s0 = Solution(value=0)", "from zoopt.algos.opt_algorithms.racos.racos_common import RacosCommon from zoopt.algos.opt_algorithms.racos.sracos import SRacos from zoopt import Solution, Objective,", "= False if full is False: countw += allweight return countw @property def", "integer continuous optimization \"\"\" x = solution.get_x() value = sum([(i-2)*(i-2) for i in", "1, 1, 0, 0, 1, 0, 0, 1, 1, 0, 1, 0, 0,", "parameter = Parameter(budget=10000) sol = Opt.min(objective, parameter) assert sol.get_value() < 200 def test_sracos_performance2(self):", "sracos.binary_search(set, test_s2, 0, 4) == 5 assert sracos.binary_search(set, test_s3, 0, 4) == 0", "\"\"\" dim_size = 20 dim_regs = [[0, 1]] * dim_size dim_tys = [False]", "server_num=2, seed=2) # parameter = Parameter(budget=100 * dim, init_samples=[Solution([0] * 100)]) # init", "1, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0]) 
self.__subset.append([0, 1,", "assert len(iset) == 5 and iset[0].get_value() == 0 and iset[1].get_value() == 1 and", "1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1,", "assert solution.get_value() < 0.2 dim = 500 one_dim = (ValueType.CONTINUOUS, [-1, 1], 1e-6)", "0, 0, 1, 1, 1]) self.__subset.append([1, 0, 0, 1, 0, 1, 1, 1,", "0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0,", "iset[3].get_value() == 2.1 and iset[4].get_value() == 3 iset2 = [s1, s3, s0, s2,", "= Objective(sphere_discrete_order, dim) # form up the objective function parameter = Parameter(budget=10000, parallel=True,", "0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0,", "self.__subset.append([0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0,", "dimension object objective = Objective(sphere_discrete_order, dim) # form up the objective function parameter", "= solution.get_x() value = sum([(i-2)*(i-2) for i in x]) return value class SetCover:", "0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0,", "calls to the objective function parameter = Parameter(budget=budget, parallel=True, server_num=2, seed=777) sol =", "1, 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0,", "1, 1, 0, 1, 0, 1, 1, 0, 1, 1, 1, 0, 0,", "solution.get_x() value = sum([(i-2)*(i-2) for i in x]) return value class SetCover: \"\"\"", "1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0,", "1, 1, 0, 0, 0, 1, 1, 0, 0, 1]) self.__subset.append([0, 0, 1,", "s2 = Solution(value=2) iset = [s0, s1, s2] sracos = SRacos() test_s1 =", "sracos.strategy_wr(iset2, test_s1, 'neg') assert len(iset2) == 5 and iset2[4].get_value() == 2.1 def test_sracos_strategy_rr(self):", "sracos.strategy_rr(iset, test_s1) assert len(iset) == 3 and (iset[0].get_value() == 2.1 or iset[1].get_value() ==", "test_racos_common_is_distinct(self): a = Solution(x=[1, 2, 3]) b = Solution(x=[2, 3, 4]) c =", "s4 = Solution(value=4) iset = [s0, s1, s2, s3, s4] sracos = SRacos()", "i in x]) / len(x) ave_cos = sum([np.cos(2.0 * np.pi * (i -", "= Solution(value=2.1) sracos.strategy_rr(iset, test_s1) assert len(iset) == 3 and (iset[0].get_value() == 2.1 or", "1, 
0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0,", "0, 1, 0, 0, 1, 0, 1]) self.__subset.append([1, 0, 0, 0, 1, 0,", "self.__subset.append([0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1,", "b = [5, 8] assert SRacos.distance(a, b) == 5 def test_sracos_binary_search(self): s0 =", "solution.get_value() < 0.2 dim = 500 dim_list = [(one_dim)] * dim objective =", "1, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1]) self.__subset.append([1, 0,", "== 2.1) def test_sracos_strategy_lm(self): s0 = Solution(x=[1, 1, 1], value=0) s1 = Solution(x=[2.2,", "1, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 1, 1, 0, 1,", "0.6289, 0.3008] self.__subset = [] self.__subset.append([0, 1, 0, 0, 0, 1, 0, 1,", "f(x) \"\"\" x = solution.get_x() allweight = 0 countw = 0 for i", "self.__subset.append([1, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1,", "* (i - bias) for i in x]) / len(x) ave_cos = sum([np.cos(2.0", "dim, [True] * dim)) # setup objective parameter = Parameter(budget=10000, seed=777) solution =", "range(len(self.__subset[0])): dims.append(False) for i in range(len(self.__subset)): if x[i] == 1: countw += self.__weight[i]", "solution_list = ExpOpt.min(objective, parameter, repeat=1) for solution in solution_list: value = solution.get_value() assert", "s1 = Solution(value=1) s2 = Solution(value=2) iset = [s0, s1, s2] sracos =", "== 1 and iset[2].get_value() == 2 \\ and iset[3].get_value() == 2.1 and iset[4].get_value()", "and iset2[4].get_value() == 2.1 def test_sracos_strategy_rr(self): s0 = Solution(value=0) s1 = Solution(value=1) s2", "1], 1e-6) dim_list = [(one_dim)] * dim objective = Objective(ackley, Dimension2(dim_list)) # setup", "seed=2) # parameter = Parameter(budget=100 * dim, init_samples=[Solution([0] * 100)]) # init with", "[2, 3, 4] assert RacosCommon.extend(a, b) == [1, 2, 3, 2, 3, 4]", "0.1293, 0.7355, 0.7979, 0.2814, 0.7962, 0.1754, 0.0267, 0.9862, 0.1786, 0.5884, 0.6289, 0.3008] self.__subset", "seed=77) sol = Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200 def 
test_sracos_performance(self): #", "# 1 3 0 2 4 test_s1 = Solution(value=2.1) test_s2 = Solution(value=4.5) test_s3", "0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1,", "Objective(sphere_discrete_order, dim) # form up the objective function parameter = Parameter(budget=10000, sequential=False, seed=77)", "self.__subset.append([1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1,", "= Solution(value=2.1) sracos.strategy_wr(iset, test_s1, 'pos') assert len(iset) == 5 and iset[0].get_value() == 0", "1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 0,", "full is False: countw += allweight return countw @property def dim(self): \"\"\" Dimension", "1]) def fx(self, solution): \"\"\" Objective function. :param solution: a Solution object :return:", "SetCover() dim_size = 20 one_dim = (ValueType.DISCRETE, [0, 1], False) dim_list = [(one_dim)]", "Solution(x=[0, 0, 0], value=0.5) s1 = Solution(x=[1, 1, 1], value=1) s2 = Solution(x=[2,", "= Parameter(budget=100 * dim, parallel=True, server_num=2, seed=2) # parameter = Parameter(budget=100 * dim,", "Opt.min(objective, parameter) sol.print_solution() assert sol.get_value() < 200 def test_sracos_performance(self): # continuous dim =", "0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0,", "dim_size dim_tys = [False] * dim_size return Dimension(dim_size, dim_regs, dim_tys) class TestRacos(object): def", "0.0267, 0.9862, 0.1786, 0.5884, 0.6289, 0.3008] self.__subset = [] self.__subset.append([0, 1, 0, 0,", "1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0,", "dim, init_samples=[Solution([0] * 100)]) # init with init_samples solution_list = ExpOpt.min(objective, parameter, repeat=1)", "0 and iset[1].get_value() == 1 and iset[2].get_value() == 2 \\ and iset[3].get_value() ==", "Solution(value=2) s3 = Solution(value=3) s4 = Solution(value=4) iset = [s0, s1, s2, s3,", "* dim objective = Objective(ackley, Dimension2(dim_list)) # setup objective parameter = Parameter(budget=10000, sequential=False,", "3]) b = Solution(x=[2, 3, 4]) c = Solution(x=[3, 4, 5]) seti =", "def 
test_sracos_performance2(self): # continuous dim = 100 # dimension one_dim = (ValueType.CONTINUOUS, [-1,", "False: full = False if full is False: countw += allweight return countw", "0, 1, 0, 0, 1, 0, 0]) self.__subset.append([0, 0, 0, 1, 0, 0,", "0, 1, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1,", "0, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1,", "0, 0, 1]) self.__subset.append([0, 1, 1, 0, 1, 1, 0, 0, 0, 1,", "= Solution(value=-1) test_s4 = Solution(value=2) set = [s0, s1, s2, s3, s4] sracos", "0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 0]) self.__subset.append([1,", "0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1,", "Solution(value=2) set = [s0, s1, s2, s3, s4] sracos = SRacos() assert sracos.binary_search(set,", "1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1,", "< 0.2 dim = 500 objective = Objective(ackley, Dimension(dim, [[-1, 1]] * dim," ]
[ "criado! embedmsg = embed.createEmbed(title=\"Novo Cargo!\", description= f\"O cargo <@&{new_role.id}> foi criado por <@{author.id}>\",", "role -> CtxRoleConverter mode -> channel, category or role perm -> permission to", "que foi criado! embedmsg = embed.createEmbed(title=\"Novo Cargo!\", description= f\"O cargo <@&{new_role.id}> foi criado", "False) ], img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await msg.channel.send(embed=embedmsg) return @commands.command(pass_context=True) @has_permissions(manage_roles = True, manage_channels = True)", "like! \"\"\" def __init__(self, client): self.client = client # Some good paramters like", "\"\"\"Create a new role with the given name \"\"\" await ctx.message.delete(delay = self.delete_user_message)", "movimentação do mesmo para os arquivos.\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[", "async def permission(self, ctx, *, args: str = \"\"): \"\"\" Arg List: ctx", "This function is used to return a name to a role linked to", "= args.split(' ') if len(splitted_args) < 4 or args == \"\": # Just", ") # TODO: Especificar a mensagem de acordo com o cargo que foi", "-> bool\"\"\", False), (f\"Permissões\", \"\\n\".join([item for item in self.channel_permissions]), False) ], img=\"https://cdn.discordapp.com/emojis/767241157003837460.png?v=1\") await", "and channel.category != None: option = channel.category.name + \" - \" + channel.name", "self.permission_tutorial(ctx) return; can = str2bool(splitted_args[-1]) perm = splitted_args[-2] mode = splitted_args[-3] role_name =", "manage_channels = True) async def permission(self, ctx, *, args: str = \"\"): \"\"\"", "that you like! 
\"\"\" def __init__(self, client): self.client = client # Some good", "color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite .get no", "msg.channel.name elif type.lower() == \"channel\": option = msg.channel.name elif type.lower() == \"category\": option", "# New Role Created! new_role = await guild.create_role(name=role_name, mentionable=True) self.log.info( (f\"New role '{new_role.name}'", "ctx, *, args: str = \"channel\"): \"\"\"Create a new role with the given", "= commands.RoleConverter() # If found it # The role already exists try: r", "@commands.command(aliases=['deletar'], pass_context=True) @has_permissions(manage_roles = True) async def delete(self, ctx, *, role: commands.RoleConverter): await", "perm, can) if mode == 'category': category = ctx.channel.category await category.set_permissions(role, overwrite =", "(f\"Permissão '{perm}'\", f\"Atualizada para {fb}\", False) ], img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await msg.channel.send(embed=embedmsg) return @commands.command(pass_context=True) @has_permissions(manage_roles", "Function to monitor guild channels and delete a role linked to a channel", "], img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await msg.channel.send(embed=embedmsg) return @commands.command(pass_context=True) @has_permissions(manage_roles = True, manage_channels = True) async", "False), (f\"Permissões\", \"\\n\".join([item for item in self.channel_permissions]), False) ], img=\"https://cdn.discordapp.com/emojis/767241157003837460.png?v=1\") await ctx.send(embed=embedmsg) #", "str2bool(splitted_args[-1]) perm = splitted_args[-2] mode = splitted_args[-3] role_name = ' '.join(splitted_args[:-3]) status, role", "= client # Some good paramters like timer and other shits with open(os.path.dirname(os.path.abspath(__file__))", "{new_role.name} e ele será adicionado na sua conta\", False) ], 
img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") await msg.channel.send(embed=embedmsg)", "'{role.name}' apagado do servidor por <@{ctx.author.id}>!\") @delete.error async def delete_error(self, ctx, error): await", "random.randint(0, 255))), fields=[ ], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") # Send that shit await after.send(embed=embedmsg) self.log.debug(f\"Role {role_name}", "r.name == option: role = r await role.delete() self.log.debug(f\"Role '{option}' deleted because linked", "else: # New Role Created! new_role = await guild.create_role(name=role_name, mentionable=True) self.log.info( (f\"New role", "TODO: N ta funcionando await role.edit(permission = overwrite) self.log.debug( (f'Permission {perm} was changed", "from pymongo import MongoClient import logging # ENV from dotenv import dotenv_values ENV", "and delete a role linked to a channel if the channel was moved", "with open(os.path.dirname(os.path.abspath(__file__)) + '/../database/utils.json', 'r') as f: info = json.load(f) # Just to", "e ele será adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") await msg.channel.send(embed=embedmsg) return", "it in a parent class def linked_role(self, ctx, type: str): \"\"\" This function", "\"channel\"): \"\"\"Create a new role with the given name \"\"\" await ctx.message.delete(delay =", ".get {new_role.name} e ele será adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") await", "and msg.channel.category != None: option = msg.channel.category.name + \" - \" + msg.channel.name", "self.guild_preferences_db = self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions = [ \"add_reactions\", \"administrator\", \"attach_files\", \"ban_members\", \"change_nickname\", \"connect\", \"create_instant_invite\",", "bool\"\"\", False), (f\"Permissões\", \"\\n\".join([item for item 
in self.channel_permissions]), False) ], img=\"https://cdn.discordapp.com/emojis/767241157003837460.png?v=1\") await ctx.send(embed=embedmsg)", "before.category != None: role_name = before.category.name + \" - \" + before.name else:", "'{perm}'\", f\"Atualizada para {fb}\", False) ], img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await msg.channel.send(embed=embedmsg) return @commands.command(pass_context=True) @has_permissions(manage_roles =", "255))), fields=[ (f\"Argumentos\", f\"\"\"role -> Role mode -> channel, category or role perm", "None: option = channel.category.name + \" - \" + channel.name # I don't", "role to a chat or just create a role with a name that", "self.delete_system_message) else: self.log.error(f\"{error} - delete role failed\") await ctx.send(error, delete_after = self.delete_system_message) async", "\"add_reactions\", \"administrator\", \"attach_files\", \"ban_members\", \"change_nickname\", \"connect\", \"create_instant_invite\", \"deafen_members\", \"embed_links\", \"external_emojis\", \"kick_members\", \"manage_channels\", \"manage_emojis\",", "already exists try: r = await conv.convert(ctx, role_name) return True, r except commands.RoleNotFound:", "self.delete_system_message) async def _permission(self, ctx, role: CtxRoleConverter, mode: str, perm: str, can: bool):", "excluído devido a movimentação do mesmo para os arquivos.\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255),", "], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") # Send that shit await after.send(embed=embedmsg) self.log.debug(f\"Role {role_name} deleted (Channel moved", "failed\") await ctx.send(error, delete_after = self.delete_system_message) # TODO: Parent class too async def", "Cargo '{role.name}' apagado do servidor por <@{ctx.author.id}>!\") @delete.error async def delete_error(self, ctx, error):", "\"manage_roles\", \"manage_webhooks\", \"mention_everyone\", \"move_members\", \"mute_members\", 
\"priority_speaker\", \"read_message_history\", \"read_messages\", \"request_to_speak\", \"send_messages\", \"send_tts_messages\", \"speak\", \"stream\",", "random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite '.get' e ele será", "current context, return a status and the role, if it exists. \"\"\" conv", "linkado a uma categoria! if before.category != None: role_name = before.category.name + \"", "client # Some good paramters like timer and other shits with open(os.path.dirname(os.path.abspath(__file__)) +", "description= f\"O cargo <@&{role.id}> foi atualizado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0,", "Especificar a mensagem de acordo com o cargo que foi criado! embedmsg =", "cargo '{role_name}' associado ao canal foi excluído devido a movimentação do mesmo para", "reduce import random import json import utils.embed as embed from utils.colors import *", "- delete role failed\") await ctx.send(error, delete_after = self.delete_system_message) async def _permission(self, ctx,", "for r in channel.guild.roles: if r.name == option: role = r await role.delete()", "associado excluído!\", description= f\"O cargo '{role_name}' associado ao canal foi excluído devido a", "a new role with the given name \"\"\" await ctx.message.delete(delay = self.delete_user_message) linked_keys", "delete_after = self.delete_system_message) async def _permission(self, ctx, role: CtxRoleConverter, mode: str, perm: str,", "N ta funcionando await role.edit(permission = overwrite) self.log.debug( (f'Permission {perm} was changed to", "delete_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você", "shit, but i won't change elif channel.type.name.lower() == \"text\": option = channel.name else:", "= [\"channel\", \"category\"] role_name = self.linked_role(ctx, args) if args in linked_keys else 
args", "to {can} in role {role.name} in current category').encode('ascii', 'ignore').decode('ascii') ) fb = 'Permitido'", "msg.channel.send(embed=embedmsg) return @commands.command(pass_context=True) @has_permissions(manage_roles = True, manage_channels = True) async def permission(self, ctx,", "+ \"/../.env\") class RoleManager(commands.Cog): \"\"\" Manager is useful to create and delete roles.", "- \" + before.name else: role_name = before.name # Categoria que devo deletar", "- \" + channel.name # I don't know why i did that shit,", "= self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode criar um cargo!\",", "= msg.channel.name elif type.lower() == \"category\": option = msg.channel.category.name else: raise ValueError(\"\") return", "ele será adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") await msg.channel.send(embed=embedmsg) return @create.error", "True) async def create(self, ctx, *, args: str = \"channel\"): \"\"\"Create a new", "\"create_instant_invite\", \"deafen_members\", \"embed_links\", \"external_emojis\", \"kick_members\", \"manage_channels\", \"manage_emojis\", \"manage_guild\", \"manage_messages\", \"manage_nicknames\", \"manage_permissions\", \"manage_roles\", \"manage_webhooks\",", "\" - \" + channel.name # I don't know why i did that", "channel.name for r in channel.guild.roles: if r.name == option: role = r await", "no servidor, não precisa criar de novo!🍻\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))),", "await self._permission(ctx, role, mode, perm, can) async def permission_tutorial(self, ctx): embedmsg = embed.createEmbed(title=\"Configurações", "async def role_exists(self, ctx, role_name): \"\"\" Method to check if a role exists", "args: str = \"channel\"): \"\"\"Create a new role with the given name \"\"\"", "servidor, não precisa criar de novo!🍻\", color=rgb_to_int((random.randint(0, 
255), random.randint(0, 255), random.randint(0, 255))), fields=[", "was deleted\") break return @commands.command(aliases=['criar'], pass_context=True) @has_permissions(manage_roles = True) async def create(self, ctx,", "or category \"\"\" guild = ctx.guild author = ctx.author msg = ctx.message if", "\"manage_messages\", \"manage_nicknames\", \"manage_permissions\", \"manage_roles\", \"manage_webhooks\", \"mention_everyone\", \"move_members\", \"mute_members\", \"priority_speaker\", \"read_message_history\", \"read_messages\", \"request_to_speak\", \"send_messages\",", "\"\"\" def __init__(self, client): self.client = client # Some good paramters like timer", "import commands, tasks from discord.ext.commands import has_permissions, CheckFailure from utils.converters import CtxRoleConverter from", "255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite .get no chat", "name to a role linked to a channel or category \"\"\" guild =", "self.delete_system_message = info['utils']['delete_system_message'] self.db_client = MongoClient(ENV['MONGODB']) self.guild_preferences_db = self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions = [ \"add_reactions\",", "mode = splitted_args[-3] role_name = ' '.join(splitted_args[:-3]) status, role = await self.role_exists(ctx, role_name)", "class too async def role_exists(self, ctx, role_name): \"\"\" Method to check if a", "await ctx.send(error, delete_after = self.delete_system_message) async def _permission(self, ctx, role: CtxRoleConverter, mode: str,", "de Permissões!\", description= f\"Verifique a lista de argumentos e permissões\", color=rgb_to_int((random.randint(0, 255), random.randint(0,", "pymongo import MongoClient import logging # ENV from dotenv import dotenv_values ENV =", "= channel.name else: option = channel.name for r in channel.guild.roles: if r.name ==", "before.name else: role_name = before.name # Categoria que devo deletar o 
cargo if", "= embed.createEmbed(title=\"CARGO JÁ EXISTE!\", description= f\"O cargo <@&{role.id}> já está no servidor, não", "<@&{new_role.id}> foi criado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como", "await ctx.send(\"**Erro:** Você não pode deletar um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error}", "channel.name # I don't know why i did that shit, but i won't", "role_name) await self._permission(ctx, role, mode, perm, can) async def permission_tutorial(self, ctx): embedmsg =", "img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") # Send that shit await after.send(embed=embedmsg) self.log.debug(f\"Role {role_name} deleted (Channel moved to", "''' Function to monitor guild channels and delete a role linked to a", "like timer and other shits with open(os.path.dirname(os.path.abspath(__file__)) + '/../database/utils.json', 'r') as f: info", "to trash ''' # Mudou de categoria if after.category == None: return elif", "\"read_message_history\", \"read_messages\", \"request_to_speak\", \"send_messages\", \"send_tts_messages\", \"speak\", \"stream\", \"use_external_emojis\", \"use_slash_commands\", \"use_voice_activation\", \"value\", \"view_audit_log\", \"view_channel\",", "os #DB from pymongo import MongoClient import logging # ENV from dotenv import", "cargo <@&{new_role.id}> foi criado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[", "exists try: r = await conv.convert(ctx, role_name) return True, r except commands.RoleNotFound: return", "ctx -> Discord Context role -> CtxRoleConverter mode -> channel, category or role", "para {fb}\", False) ], img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await msg.channel.send(embed=embedmsg) return @commands.command(pass_context=True) @has_permissions(manage_roles = True, manage_channels", "= 
ctx.guild author = ctx.author msg = ctx.message overwrite = discord.PermissionOverwrite() # Fundamental", "if perm not in channel_permissions: self.log.debug( f\"[.permission] Permission {perm} not found!\") return setattr(overwrite,", "f\"Verifique a lista de argumentos e permissões\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))),", "import str2bool from functools import reduce import random import json import utils.embed as", "elif type.lower() == \"channel\": option = msg.channel.name elif type.lower() == \"category\": option =", "chat or just create a role with a name that you like! \"\"\"", "role with the given name \"\"\" await ctx.message.delete(delay = self.delete_user_message) linked_keys = [\"channel\",", "\"stream\", \"use_external_emojis\", \"use_slash_commands\", \"use_voice_activation\", \"value\", \"view_audit_log\", \"view_channel\", \"view_guild_insights\" ] @commands.Cog.listener() async def on_guild_channel_update(self,", "a lista de argumentos e permissões\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[", "for now self.log.debug(\"[.permission] Missing args\") await self.permission_tutorial(ctx) return; can = str2bool(splitted_args[-1]) perm =", "useful variables guild = ctx.guild author = ctx.author msg = ctx.message role_exists, role", "self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode criar um cargo!\", delete_after", "CtxRoleConverter, mode: str, perm: str, can: bool): guild = ctx.guild author = ctx.author", "embedmsg = embed.createEmbed(title=\"Novo Cargo!\", description= f\"O cargo <@&{new_role.id}> foi criado por <@{author.id}>\", color=rgb_to_int((random.randint(0,", "linked_keys = [\"channel\", \"category\"] role_name = self.linked_role(ctx, args) if args in linked_keys else", "img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await msg.channel.send(embed=embedmsg, 
delete_after= self.delete_system_message) else: # New Role Created! new_role = await", "await ctx.message.delete(delay= self.delete_user_message) await role.delete() await ctx.send(f\"**AVISO:** Cargo '{role.name}' apagado do servidor por", "channel if the channel was moved to trash ''' # Mudou de categoria", "category = ctx.channel.category await category.set_permissions(role, overwrite = overwrite) elif mode == 'channel': channel", "ctx.message if type.lower() == \"channel\" and msg.channel.category != None: option = msg.channel.category.name +", "embed.createEmbed(title=\"Permissão alterada!\", description= f\"O cargo <@&{role.id}> foi atualizado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0,", "'attr_name', s) if perm not in channel_permissions: self.log.debug( f\"[.permission] Permission {perm} not found!\")", "self._permission(ctx, role, mode, perm, can) async def permission_tutorial(self, ctx): embedmsg = embed.createEmbed(title=\"Configurações de", "self.client = client # Some good paramters like timer and other shits with", "embed.createEmbed(title=\"Configurações de Permissões!\", description= f\"Verifique a lista de argumentos e permissões\", color=rgb_to_int((random.randint(0, 255),", "shit await after.send(embed=embedmsg) self.log.debug(f\"Role {role_name} deleted (Channel moved to archives)!\") return @commands.Cog.listener() async", "async def on_guild_channel_update(self, before, after): ''' Function to monitor guild channels and delete", "\"use_voice_activation\", \"value\", \"view_audit_log\", \"view_channel\", \"view_guild_insights\" ] @commands.Cog.listener() async def on_guild_channel_update(self, before, after): '''", "<@&{role.id}> foi atualizado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Permissão", "role_exists: embedmsg = embed.createEmbed(title=\"CARGO JÁ EXISTE!\", description= f\"O cargo <@&{role.id}> já está no", "role 
failed\") await ctx.send(error, delete_after = self.delete_system_message) async def _permission(self, ctx, role: CtxRoleConverter,", "TODO: Especificar a mensagem de acordo com o cargo que foi criado! embedmsg", "useful to create and delete roles. You can link a role to a", "= embed.createEmbed(title=\"Configurações de Permissões!\", description= f\"Verifique a lista de argumentos e permissões\", color=rgb_to_int((random.randint(0,", "logging.getLogger(__name__) # TODO: Loading things :P (I want to put it in a", "if role_exists: embedmsg = embed.createEmbed(title=\"CARGO JÁ EXISTE!\", description= f\"O cargo <@&{role.id}> já está", "await ctx.message.delete(delay = self.delete_user_message) linked_keys = [\"channel\", \"category\"] role_name = self.linked_role(ctx, args) if", "ele será adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await msg.channel.send(embed=embedmsg, delete_after= self.delete_system_message)", "Você não pode deletar um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - delete", "== None: return elif (before.category == None and after.category != None) or (before.category.id", "is used to return a name to a role linked to a channel", "else: option = channel.name for r in channel.guild.roles: if r.name == option: role", "ao canal foi excluído devido a movimentação do mesmo para os arquivos.\", color=rgb_to_int((random.randint(0,", "msg.channel.send(embed=embedmsg) return @create.error async def create_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if", "deletar o cargo if after.category.id == info['archives']: for r in guild.roles: if r.name", "in the current context, return a status and the role, if it exists.", "\"view_channel\", \"view_guild_insights\" ] @commands.Cog.listener() async def on_guild_channel_update(self, before, after): ''' Function to monitor", "await self.permission_tutorial(ctx) return; can = 
str2bool(splitted_args[-1]) perm = splitted_args[-2] mode = splitted_args[-3] role_name", "the channel was moved to trash ''' # Mudou de categoria if after.category", "channel or category \"\"\" guild = ctx.guild author = ctx.author msg = ctx.message", "f: info = json.load(f) # Just to log everything :D self.log = logging.getLogger(__name__)", "que devo deletar o cargo if after.category.id == info['archives']: for r in guild.roles:", "a name to a role linked to a channel or category \"\"\" guild", "-> permission to change bool -> bool \"\"\" await ctx.message.delete(delay = self.delete_user_message) splitted_args", "@has_permissions(manage_roles = True) async def delete(self, ctx, *, role: commands.RoleConverter): await ctx.message.delete(delay= self.delete_user_message)", "\"priority_speaker\", \"read_message_history\", \"read_messages\", \"request_to_speak\", \"send_messages\", \"send_tts_messages\", \"speak\", \"stream\", \"use_external_emojis\", \"use_slash_commands\", \"use_voice_activation\", \"value\", \"view_audit_log\",", "mode == 'category': category = ctx.channel.category await category.set_permissions(role, overwrite = overwrite) elif mode", "option: role = r await role.delete() self.log.debug(f\"Role '{option}' deleted because linked channel was", "{role.name} in current category').encode('ascii', 'ignore').decode('ascii') ) fb = 'Permitido' if can else 'Proibido'", "created in guild {guild.name} : {guild.id}\").encode('ascii', 'ignore').decode('ascii') ) # TODO: Especificar a mensagem", "creation of a new role failed\") await ctx.send(error, delete_after = self.delete_system_message) # TODO:", "moved to archives)!\") return @commands.Cog.listener() async def on_guild_channel_delete(self, channel): target_type_channels = [\"text\", \"category\"]", "random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite .get no chat do", "\"\": # Just for now self.log.debug(\"[.permission] Missing args\") await 
self.permission_tutorial(ctx) return; can =", "elif channel.type.name.lower() == \"text\": option = channel.name else: option = channel.name for r", "role_name = before.category.name + \" - \" + before.name else: role_name = before.name", "channel.type.name.lower() not in target_type_channels: return elif channel.type.name.lower() == \"text\" and channel.category != None:", "#DB from pymongo import MongoClient import logging # ENV from dotenv import dotenv_values", "role_name = ' '.join(splitted_args[:-3]) status, role = await self.role_exists(ctx, role_name) await self._permission(ctx, role,", "from utils.converters import CtxRoleConverter from utils.utils import str2bool from functools import reduce import", "after.category == None: return elif (before.category == None and after.category != None) or", "ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode criar um", "await role.edit(permission = overwrite) self.log.debug( (f'Permission {perm} was changed to {can} in role", "option = channel.name for r in channel.guild.roles: if r.name == option: role =", "\"\"\" Arg List: ctx -> Discord Context role -> CtxRoleConverter mode -> channel,", "None and after.category != None) or (before.category.id != after.category.id): guild = after.guild info", "(\"Como pegar?\", f\"Apenas digite '.get' e ele será adicionado na sua conta\", False)", "elif type.lower() == \"category\": option = msg.channel.category.name else: raise ValueError(\"\") return option; @commands.command(aliases=['deletar'],", "= ' '.join(splitted_args[:-3]) status, role = await self.role_exists(ctx, role_name) await self._permission(ctx, role, mode,", "option = msg.channel.name elif type.lower() == \"category\": option = msg.channel.category.name else: raise ValueError(\"\")", "delete roles. You can link a role to a chat or just create", "a name that you like! 
\"\"\" def __init__(self, client): self.client = client #", "é linkado a uma categoria! if before.category != None: role_name = before.category.name +", "in current category').encode('ascii', 'ignore').decode('ascii') ) fb = 'Permitido' if can else 'Proibido' embedmsg", "commands.RoleNotFound: return False, None # TODO: Put it in a parent class def", "def _permission(self, ctx, role: CtxRoleConverter, mode: str, perm: str, can: bool): guild =", "the given name \"\"\" await ctx.message.delete(delay = self.delete_user_message) linked_keys = [\"channel\", \"category\"] role_name", "new role failed\") await ctx.send(error, delete_after = self.delete_system_message) # TODO: Parent class too", "channel.guild.roles: if r.name == option: role = r await role.delete() self.log.debug(f\"Role '{option}' deleted", "random import json import utils.embed as embed from utils.colors import * import os", "import MongoClient import logging # ENV from dotenv import dotenv_values ENV = dotenv_values(os.path.dirname(os.path.abspath(__file__))", "\"request_to_speak\", \"send_messages\", \"send_tts_messages\", \"speak\", \"stream\", \"use_external_emojis\", \"use_slash_commands\", \"use_voice_activation\", \"value\", \"view_audit_log\", \"view_channel\", \"view_guild_insights\" ]", "a role linked to a channel if the channel was moved to trash", "you like! 
\"\"\" def __init__(self, client): self.client = client # Some good paramters", "elif mode == 'channel': channel = ctx.channel await channel.set_permissions(role, overwrite = overwrite) else:", "= overwrite) else: # TODO: N ta funcionando await role.edit(permission = overwrite) self.log.debug(", "because linked channel was deleted\") break return @commands.command(aliases=['criar'], pass_context=True) @has_permissions(manage_roles = True) async", "= overwrite) self.log.debug( (f'Permission {perm} was changed to {can} in role {role.name} in", "in channel.guild.roles: if r.name == option: role = r await role.delete() self.log.debug(f\"Role '{option}'", "x.attr_name = s # setattr(x, 'attr_name', s) if perm not in channel_permissions: self.log.debug(", "try: r = await conv.convert(ctx, role_name) return True, r except commands.RoleNotFound: return False,", "if len(splitted_args) < 4 or args == \"\": # Just for now self.log.debug(\"[.permission]", "255), random.randint(0, 255))), fields=[ (f\"Argumentos\", f\"\"\"role -> Role mode -> channel, category or", "from functools import reduce import random import json import utils.embed as embed from", "def __init__(self, client): self.client = client # Some good paramters like timer and", "perm not in channel_permissions: self.log.debug( f\"[.permission] Permission {perm} not found!\") return setattr(overwrite, perm,", "= \"\"): \"\"\" Arg List: ctx -> Discord Context role -> CtxRoleConverter mode", "channel was moved to trash ''' # Mudou de categoria if after.category ==", "new_role = await guild.create_role(name=role_name, mentionable=True) self.log.info( (f\"New role '{new_role.name}' created in guild {guild.name}", ":P (I want to put it in a parent class, but i'm not", "@has_permissions(manage_roles = True) async def create(self, ctx, *, args: str = \"channel\"): \"\"\"Create", "to a channel if the channel was moved to trash ''' # Mudou", "], img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await 
msg.channel.send(embed=embedmsg, delete_after= self.delete_system_message) else: # New Role Created! new_role =", "role linked to a channel or category \"\"\" guild = ctx.guild author =", "role perm -> permission to change bool -> bool\"\"\", False), (f\"Permissões\", \"\\n\".join([item for", "from dotenv import dotenv_values ENV = dotenv_values(os.path.dirname(os.path.abspath(__file__)) + \"/../.env\") class RoleManager(commands.Cog): \"\"\" Manager", "= info['utils']['delete_user_message'] self.delete_system_message = info['utils']['delete_system_message'] self.db_client = MongoClient(ENV['MONGODB']) self.guild_preferences_db = self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions =", "else: role_name = before.name # Categoria que devo deletar o cargo if after.category.id", "{role_name} deleted (Channel moved to archives)!\") return @commands.Cog.listener() async def on_guild_channel_delete(self, channel): target_type_channels", "guild = ctx.guild author = ctx.author msg = ctx.message overwrite = discord.PermissionOverwrite() #", "fields=[ (f\"Permissão '{perm}'\", f\"Atualizada para {fb}\", False) ], img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await msg.channel.send(embed=embedmsg) return @commands.command(pass_context=True)", "change bool -> bool \"\"\" await ctx.message.delete(delay = self.delete_user_message) splitted_args = args.split(' ')", "r.delete() embedmsg = embed.createEmbed(title=\"Cargo associado excluído!\", description= f\"O cargo '{role_name}' associado ao canal", "role with a name that you like! 
\"\"\" def __init__(self, client): self.client =", "ctx.send(error, delete_after = self.delete_system_message) # TODO: Parent class too async def role_exists(self, ctx,", "with the given name \"\"\" await ctx.message.delete(delay = self.delete_user_message) linked_keys = [\"channel\", \"category\"]", "dotenv import dotenv_values ENV = dotenv_values(os.path.dirname(os.path.abspath(__file__)) + \"/../.env\") class RoleManager(commands.Cog): \"\"\" Manager is", "''' # Mudou de categoria if after.category == None: return elif (before.category ==", "= after.guild info = self.guild_preferences_db.find_one({\"_id\": guild.id}) # Nome criado sempre que um chat", "ENV from dotenv import dotenv_values ENV = dotenv_values(os.path.dirname(os.path.abspath(__file__)) + \"/../.env\") class RoleManager(commands.Cog): \"\"\"", "\" - \" + msg.channel.name elif type.lower() == \"channel\": option = msg.channel.name elif", "random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Argumentos\", f\"\"\"role -> Role mode -> channel, category", "def create(self, ctx, *, args: str = \"channel\"): \"\"\"Create a new role with", "'{option}' deleted because linked channel was deleted\") break return @commands.command(aliases=['criar'], pass_context=True) @has_permissions(manage_roles =", "= embed.createEmbed(title=\"Permissão alterada!\", description= f\"O cargo <@&{role.id}> foi atualizado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255),", "await self.role_exists(ctx, role_name) await self._permission(ctx, role, mode, perm, can) async def permission_tutorial(self, ctx):", "[ \"add_reactions\", \"administrator\", \"attach_files\", \"ban_members\", \"change_nickname\", \"connect\", \"create_instant_invite\", \"deafen_members\", \"embed_links\", \"external_emojis\", \"kick_members\", \"manage_channels\",", "to check if a role exists in the current context, return a status", "+ channel.name # I don't know why i did that shit, but i", "deleted (Channel moved to archives)!\") 
return @commands.Cog.listener() async def on_guild_channel_delete(self, channel): target_type_channels =", "adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await msg.channel.send(embed=embedmsg, delete_after= self.delete_system_message) else: #", "return @create.error async def create_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error,", "conv = commands.RoleConverter() # If found it # The role already exists try:", "de argumentos e permissões\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Argumentos\", f\"\"\"role", "= await conv.convert(ctx, role_name) return True, r except commands.RoleNotFound: return False, None #", "na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await msg.channel.send(embed=embedmsg, delete_after= self.delete_system_message) else: # New", "random.randint(0, 255))), fields=[ (f\"Argumentos\", f\"\"\"role -> Role mode -> channel, category or role", "author = ctx.author msg = ctx.message if type.lower() == \"channel\" and msg.channel.category !=", "# I don't know why i did that shit, but i won't change", "na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") await msg.channel.send(embed=embedmsg) return @create.error async def create_error(self,", "<@{ctx.author.id}>!\") @delete.error async def delete_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error,", "f\"Apenas digite '.get' e ele será adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\")", "commands.RoleConverter() # If found it # The role already exists try: r =", "if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode deletar um cargo!\", delete_after =", "str = \"\"): \"\"\" Arg List: ctx -> Discord 
Context role -> CtxRoleConverter", "categoria! if before.category != None: role_name = before.category.name + \" - \" +", "in a parent class, but i'm not sure at this moment) self.delete_user_message =", "guild = after.guild info = self.guild_preferences_db.find_one({\"_id\": guild.id}) # Nome criado sempre que um", "'Permitido' if can else 'Proibido' embedmsg = embed.createEmbed(title=\"Permissão alterada!\", description= f\"O cargo <@&{role.id}>", "role_name = self.linked_role(ctx, args) if args in linked_keys else args # Defining useful", "perm: str, can: bool): guild = ctx.guild author = ctx.author msg = ctx.message", "*, args: str = \"\"): \"\"\" Arg List: ctx -> Discord Context role", "overwrite = overwrite) elif mode == 'channel': channel = ctx.channel await channel.set_permissions(role, overwrite", "r = await conv.convert(ctx, role_name) return True, r except commands.RoleNotFound: return False, None", "None # TODO: Put it in a parent class def linked_role(self, ctx, type:", "context, return a status and the role, if it exists. \"\"\" conv =", "self.delete_system_message) else: # New Role Created! new_role = await guild.create_role(name=role_name, mentionable=True) self.log.info( (f\"New", "= MongoClient(ENV['MONGODB']) self.guild_preferences_db = self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions = [ \"add_reactions\", \"administrator\", \"attach_files\", \"ban_members\", \"change_nickname\",", "of a new role failed\") await ctx.send(error, delete_after = self.delete_system_message) # TODO: Parent", "mesmo para os arquivos.\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ ], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\")", "and delete roles. 
You can link a role to a chat or just", "\"manage_emojis\", \"manage_guild\", \"manage_messages\", \"manage_nicknames\", \"manage_permissions\", \"manage_roles\", \"manage_webhooks\", \"mention_everyone\", \"move_members\", \"mute_members\", \"priority_speaker\", \"read_message_history\", \"read_messages\",", "{guild.id}\").encode('ascii', 'ignore').decode('ascii') ) # TODO: Especificar a mensagem de acordo com o cargo", "embed.createEmbed(title=\"Novo Cargo!\", description= f\"O cargo <@&{new_role.id}> foi criado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0,", "option; @commands.command(aliases=['deletar'], pass_context=True) @has_permissions(manage_roles = True) async def delete(self, ctx, *, role: commands.RoleConverter):", "if after.category == None: return elif (before.category == None and after.category != None)", "deletar um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - delete role failed\") await", "changed to {can} in role {role.name} in current category').encode('ascii', 'ignore').decode('ascii') ) fb =", "\"change_nickname\", \"connect\", \"create_instant_invite\", \"deafen_members\", \"embed_links\", \"external_emojis\", \"kick_members\", \"manage_channels\", \"manage_emojis\", \"manage_guild\", \"manage_messages\", \"manage_nicknames\", \"manage_permissions\",", "\"connect\", \"create_instant_invite\", \"deafen_members\", \"embed_links\", \"external_emojis\", \"kick_members\", \"manage_channels\", \"manage_emojis\", \"manage_guild\", \"manage_messages\", \"manage_nicknames\", \"manage_permissions\", \"manage_roles\",", "if after.category.id == info['archives']: for r in guild.roles: if r.name == role_name: await", "embedmsg = embed.createEmbed(title=\"Permissão alterada!\", description= f\"O cargo <@&{role.id}> foi atualizado por <@{author.id}>\", color=rgb_to_int((random.randint(0,", "True) async def permission(self, ctx, *, args: str = \"\"): \"\"\" Arg List:", 
"splitted_args[-3] role_name = ' '.join(splitted_args[:-3]) status, role = await self.role_exists(ctx, role_name) await self._permission(ctx,", "TODO: Loading things :P (I want to put it in a parent class,", "a parent class def linked_role(self, ctx, type: str): \"\"\" This function is used", "role already exists try: r = await conv.convert(ctx, role_name) return True, r except", "a channel or category \"\"\" guild = ctx.guild author = ctx.author msg =", "CheckFailure from utils.converters import CtxRoleConverter from utils.utils import str2bool from functools import reduce", "delete_after = self.delete_system_message) else: self.log.error(f\"{error} - delete role failed\") await ctx.send(error, delete_after =", "raise ValueError(\"\") return option; @commands.command(aliases=['deletar'], pass_context=True) @has_permissions(manage_roles = True) async def delete(self, ctx,", "await ctx.message.delete(delay = self.delete_user_message) splitted_args = args.split(' ') if len(splitted_args) < 4 or", "= self.delete_user_message) splitted_args = args.split(' ') if len(splitted_args) < 4 or args ==", "channel_permissions: self.log.debug( f\"[.permission] Permission {perm} not found!\") return setattr(overwrite, perm, can) if mode", "' '.join(splitted_args[:-3]) status, role = await self.role_exists(ctx, role_name) await self._permission(ctx, role, mode, perm,", "= embed.createEmbed(title=\"Cargo associado excluído!\", description= f\"O cargo '{role_name}' associado ao canal foi excluído", "status, role = await self.role_exists(ctx, role_name) await self._permission(ctx, role, mode, perm, can) async", "from utils.utils import str2bool from functools import reduce import random import json import", "channel.type.name.lower() == \"text\": option = channel.name else: option = channel.name for r in", "random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite .get no chat do cargo ou", "# The role already exists try: r = await conv.convert(ctx, role_name) return 
True,", "color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Permissão '{perm}'\", f\"Atualizada para {fb}\", False)", "return False, None # TODO: Put it in a parent class def linked_role(self,", "não pode deletar um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - delete role", "embed.createEmbed(title=\"CARGO JÁ EXISTE!\", description= f\"O cargo <@&{role.id}> já está no servidor, não precisa", "\"mute_members\", \"priority_speaker\", \"read_message_history\", \"read_messages\", \"request_to_speak\", \"send_messages\", \"send_tts_messages\", \"speak\", \"stream\", \"use_external_emojis\", \"use_slash_commands\", \"use_voice_activation\", \"value\",", "<@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite .get", "o cargo que foi criado! embedmsg = embed.createEmbed(title=\"Novo Cargo!\", description= f\"O cargo <@&{new_role.id}>", "ENV = dotenv_values(os.path.dirname(os.path.abspath(__file__)) + \"/../.env\") class RoleManager(commands.Cog): \"\"\" Manager is useful to create", "that shit await after.send(embed=embedmsg) self.log.debug(f\"Role {role_name} deleted (Channel moved to archives)!\") return @commands.Cog.listener()", "<@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Permissão '{perm}'\", f\"Atualizada para {fb}\",", "+ before.name else: role_name = before.name # Categoria que devo deletar o cargo", "de novo!🍻\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite", "category or role perm -> permission to change bool -> bool \"\"\" await", "def permission_tutorial(self, ctx): embedmsg = embed.createEmbed(title=\"Configurações de Permissões!\", description= f\"Verifique a lista de", "Method to check if a role 
exists in the current context, return a", "except commands.RoleNotFound: return False, None # TODO: Put it in a parent class", "argumentos e permissões\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Argumentos\", f\"\"\"role ->", "img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") await msg.channel.send(embed=embedmsg) return @create.error async def create_error(self, ctx, error): await ctx.message.delete(delay =", "fields=[ (f\"Argumentos\", f\"\"\"role -> Role mode -> channel, category or role perm ->", "role_name = before.name # Categoria que devo deletar o cargo if after.category.id ==", "bool \"\"\" await ctx.message.delete(delay = self.delete_user_message) splitted_args = args.split(' ') if len(splitted_args) <", "[\"text\", \"category\"] if channel.type.name.lower() not in target_type_channels: return elif channel.type.name.lower() == \"text\" and", "@delete.error async def delete_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure):", "with a name that you like! 
\"\"\" def __init__(self, client): self.client = client", "args: str = \"\"): \"\"\" Arg List: ctx -> Discord Context role ->", "Defining useful variables guild = ctx.guild author = ctx.author msg = ctx.message role_exists,", "role_exists(self, ctx, role_name): \"\"\" Method to check if a role exists in the", "dotenv_values(os.path.dirname(os.path.abspath(__file__)) + \"/../.env\") class RoleManager(commands.Cog): \"\"\" Manager is useful to create and delete", "to a channel or category \"\"\" guild = ctx.guild author = ctx.author msg", "= ctx.channel.category await category.set_permissions(role, overwrite = overwrite) elif mode == 'channel': channel =", "pegar?\", f\"Apenas digite '.get' e ele será adicionado na sua conta\", False) ],", "I don't know why i did that shit, but i won't change elif", "option = channel.name else: option = channel.name for r in channel.guild.roles: if r.name", "link a role to a chat or just create a role with a", "return; can = str2bool(splitted_args[-1]) perm = splitted_args[-2] mode = splitted_args[-3] role_name = '", "e permissões\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Argumentos\", f\"\"\"role -> Role", "ctx.guild author = ctx.author msg = ctx.message overwrite = discord.PermissionOverwrite() # Fundamental #", "# setattr(x, 'attr_name', s) if perm not in channel_permissions: self.log.debug( f\"[.permission] Permission {perm}", "the current context, return a status and the role, if it exists. \"\"\"", "on_guild_channel_delete(self, channel): target_type_channels = [\"text\", \"category\"] if channel.type.name.lower() not in target_type_channels: return elif", "status and the role, if it exists. 
\"\"\" conv = commands.RoleConverter() # If", "args in linked_keys else args # Defining useful variables guild = ctx.guild author", "- creation of a new role failed\") await ctx.send(error, delete_after = self.delete_system_message) #", "found!\") return setattr(overwrite, perm, can) if mode == 'category': category = ctx.channel.category await", "Context role -> CtxRoleConverter mode -> channel, category or role perm -> permission", "\"manage_webhooks\", \"mention_everyone\", \"move_members\", \"mute_members\", \"priority_speaker\", \"read_message_history\", \"read_messages\", \"request_to_speak\", \"send_messages\", \"send_tts_messages\", \"speak\", \"stream\", \"use_external_emojis\",", "else: self.log.error(f\"{error} - delete role failed\") await ctx.send(error, delete_after = self.delete_system_message) async def", "Fundamental # x.attr_name = s # setattr(x, 'attr_name', s) if perm not in", "for item in self.channel_permissions]), False) ], img=\"https://cdn.discordapp.com/emojis/767241157003837460.png?v=1\") await ctx.send(embed=embedmsg) # Setup def setup(client):", "!= None: role_name = before.category.name + \" - \" + before.name else: role_name", "fields=[ (\"Como pegar?\", f\"Apenas digite .get no chat do cargo ou .get {new_role.name}", "-> channel, category or role perm -> permission to change bool -> bool", "os arquivos.\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ ], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") # Send", "role: commands.RoleConverter): await ctx.message.delete(delay= self.delete_user_message) await role.delete() await ctx.send(f\"**AVISO:** Cargo '{role.name}' apagado do", "timer and other shits with open(os.path.dirname(os.path.abspath(__file__)) + '/../database/utils.json', 'r') as f: info =", "import CtxRoleConverter from utils.utils import str2bool from functools import reduce import random import", "info['utils']['delete_user_message'] 
self.delete_system_message = info['utils']['delete_system_message'] self.db_client = MongoClient(ENV['MONGODB']) self.guild_preferences_db = self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions = [", "return @commands.Cog.listener() async def on_guild_channel_delete(self, channel): target_type_channels = [\"text\", \"category\"] if channel.type.name.lower() not", "linked_keys else args # Defining useful variables guild = ctx.guild author = ctx.author", "else: # TODO: N ta funcionando await role.edit(permission = overwrite) self.log.debug( (f'Permission {perm}", "(before.category == None and after.category != None) or (before.category.id != after.category.id): guild =", "{can} in role {role.name} in current category').encode('ascii', 'ignore').decode('ascii') ) fb = 'Permitido' if", "i did that shit, but i won't change elif channel.type.name.lower() == \"text\": option", "overwrite = discord.PermissionOverwrite() # Fundamental # x.attr_name = s # setattr(x, 'attr_name', s)", "255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Permissão '{perm}'\", f\"Atualizada para {fb}\", False) ],", "= splitted_args[-2] mode = splitted_args[-3] role_name = ' '.join(splitted_args[:-3]) status, role = await", "!= None: option = channel.category.name + \" - \" + channel.name # I", "guild.create_role(name=role_name, mentionable=True) self.log.info( (f\"New role '{new_role.name}' created in guild {guild.name} : {guild.id}\").encode('ascii', 'ignore').decode('ascii')", "msg.channel.name elif type.lower() == \"category\": option = msg.channel.category.name else: raise ValueError(\"\") return option;", "foi criado! embedmsg = embed.createEmbed(title=\"Novo Cargo!\", description= f\"O cargo <@&{new_role.id}> foi criado por", "role, if it exists. 
\"\"\" conv = commands.RoleConverter() # If found it #", "= self.guild_preferences_db.find_one({\"_id\": guild.id}) # Nome criado sempre que um chat é linkado a", "\"\"\" Method to check if a role exists in the current context, return", "= self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode deletar um cargo!\",", "= splitted_args[-3] role_name = ' '.join(splitted_args[:-3]) status, role = await self.role_exists(ctx, role_name) await", "utils.converters import CtxRoleConverter from utils.utils import str2bool from functools import reduce import random", "pode deletar um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - delete role failed\")", "@commands.command(aliases=['criar'], pass_context=True) @has_permissions(manage_roles = True) async def create(self, ctx, *, args: str =", "role_name) if role_exists: embedmsg = embed.createEmbed(title=\"CARGO JÁ EXISTE!\", description= f\"O cargo <@&{role.id}> já", "== \"channel\" and msg.channel.category != None: option = msg.channel.category.name + \" - \"", "# Just for now self.log.debug(\"[.permission] Missing args\") await self.permission_tutorial(ctx) return; can = str2bool(splitted_args[-1])", "channel.type.name.lower() == \"text\" and channel.category != None: option = channel.category.name + \" -", "msg.channel.send(embed=embedmsg, delete_after= self.delete_system_message) else: # New Role Created! new_role = await guild.create_role(name=role_name, mentionable=True)", "que um chat é linkado a uma categoria! if before.category != None: role_name", "sempre que um chat é linkado a uma categoria! 
if before.category != None:", "r await role.delete() self.log.debug(f\"Role '{option}' deleted because linked channel was deleted\") break return", "async def create(self, ctx, *, args: str = \"channel\"): \"\"\"Create a new role", "= str2bool(splitted_args[-1]) perm = splitted_args[-2] mode = splitted_args[-3] role_name = ' '.join(splitted_args[:-3]) status,", "return option; @commands.command(aliases=['deletar'], pass_context=True) @has_permissions(manage_roles = True) async def delete(self, ctx, *, role:", "tasks from discord.ext.commands import has_permissions, CheckFailure from utils.converters import CtxRoleConverter from utils.utils import", "JÁ EXISTE!\", description= f\"O cargo <@&{role.id}> já está no servidor, não precisa criar", "await msg.channel.send(embed=embedmsg) return @commands.command(pass_context=True) @has_permissions(manage_roles = True, manage_channels = True) async def permission(self,", "\"channel\": option = msg.channel.name elif type.lower() == \"category\": option = msg.channel.category.name else: raise", "Manager is useful to create and delete roles. You can link a role", "or just create a role with a name that you like! 
\"\"\" def", "ctx.guild author = ctx.author msg = ctx.message role_exists, role = await self.role_exists(ctx, role_name)", "@create.error async def create_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure):", "# TODO: Parent class too async def role_exists(self, ctx, role_name): \"\"\" Method to", "splitted_args[-2] mode = splitted_args[-3] role_name = ' '.join(splitted_args[:-3]) status, role = await self.role_exists(ctx,", "funcionando await role.edit(permission = overwrite) self.log.debug( (f'Permission {perm} was changed to {can} in", "# TODO: Especificar a mensagem de acordo com o cargo que foi criado!", "f\"O cargo <@&{role.id}> foi atualizado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))),", "+ '/../database/utils.json', 'r') as f: info = json.load(f) # Just to log everything", "Arg List: ctx -> Discord Context role -> CtxRoleConverter mode -> channel, category", "self.channel_permissions = [ \"add_reactions\", \"administrator\", \"attach_files\", \"ban_members\", \"change_nickname\", \"connect\", \"create_instant_invite\", \"deafen_members\", \"embed_links\", \"external_emojis\",", "o cargo if after.category.id == info['archives']: for r in guild.roles: if r.name ==", "role = await self.role_exists(ctx, role_name) await self._permission(ctx, role, mode, perm, can) async def", "apagado do servidor por <@{ctx.author.id}>!\") @delete.error async def delete_error(self, ctx, error): await ctx.message.delete(delay", "= ctx.author msg = ctx.message overwrite = discord.PermissionOverwrite() # Fundamental # x.attr_name =", "def on_guild_channel_delete(self, channel): target_type_channels = [\"text\", \"category\"] if channel.type.name.lower() not in target_type_channels: return", "*, args: str = \"channel\"): \"\"\"Create a new role with the given name", "role: CtxRoleConverter, mode: str, perm: str, can: bool): guild = ctx.guild 
author =", "Cargo!\", description= f\"O cargo <@&{new_role.id}> foi criado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255),", "a role to a chat or just create a role with a name", "deleted\") break return @commands.command(aliases=['criar'], pass_context=True) @has_permissions(manage_roles = True) async def create(self, ctx, *,", "if it exists. \"\"\" conv = commands.RoleConverter() # If found it # The", "\"\"): \"\"\" Arg List: ctx -> Discord Context role -> CtxRoleConverter mode ->", "ctx.send(\"**Erro:** Você não pode criar um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} -", "monitor guild channels and delete a role linked to a channel if the", "category.set_permissions(role, overwrite = overwrite) elif mode == 'channel': channel = ctx.channel await channel.set_permissions(role,", "f\"Atualizada para {fb}\", False) ], img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await msg.channel.send(embed=embedmsg) return @commands.command(pass_context=True) @has_permissions(manage_roles = True,", "before.name # Categoria que devo deletar o cargo if after.category.id == info['archives']: for", "\"ban_members\", \"change_nickname\", \"connect\", \"create_instant_invite\", \"deafen_members\", \"embed_links\", \"external_emojis\", \"kick_members\", \"manage_channels\", \"manage_emojis\", \"manage_guild\", \"manage_messages\", \"manage_nicknames\",", "def on_guild_channel_update(self, before, after): ''' Function to monitor guild channels and delete a", "role_exists, role = await self.role_exists(ctx, role_name) if role_exists: embedmsg = embed.createEmbed(title=\"CARGO JÁ EXISTE!\",", "why i did that shit, but i won't change elif channel.type.name.lower() == \"text\":", "author = ctx.author msg = ctx.message overwrite = discord.PermissionOverwrite() # Fundamental # x.attr_name", "MongoClient import logging # ENV from dotenv import dotenv_values ENV = 
dotenv_values(os.path.dirname(os.path.abspath(__file__)) +", "role_name): \"\"\" Method to check if a role exists in the current context,", "# TODO: Put it in a parent class def linked_role(self, ctx, type: str):", "good paramters like timer and other shits with open(os.path.dirname(os.path.abspath(__file__)) + '/../database/utils.json', 'r') as", "None: role_name = before.category.name + \" - \" + before.name else: role_name =", "= [ \"add_reactions\", \"administrator\", \"attach_files\", \"ban_members\", \"change_nickname\", \"connect\", \"create_instant_invite\", \"deafen_members\", \"embed_links\", \"external_emojis\", \"kick_members\",", "linked to a channel or category \"\"\" guild = ctx.guild author = ctx.author", "delete_after= self.delete_system_message) else: # New Role Created! new_role = await guild.create_role(name=role_name, mentionable=True) self.log.info(", "can) if mode == 'category': category = ctx.channel.category await category.set_permissions(role, overwrite = overwrite)", "\"use_external_emojis\", \"use_slash_commands\", \"use_voice_activation\", \"value\", \"view_audit_log\", \"view_channel\", \"view_guild_insights\" ] @commands.Cog.listener() async def on_guild_channel_update(self, before,", "\"text\" and channel.category != None: option = channel.category.name + \" - \" +", "and other shits with open(os.path.dirname(os.path.abspath(__file__)) + '/../database/utils.json', 'r') as f: info = json.load(f)", "Mudou de categoria if after.category == None: return elif (before.category == None and", "type.lower() == \"channel\" and msg.channel.category != None: option = msg.channel.category.name + \" -", "as f: info = json.load(f) # Just to log everything :D self.log =", "fields=[ ], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") # Send that shit await after.send(embed=embedmsg) self.log.debug(f\"Role {role_name} deleted (Channel", "await ctx.send(\"**Erro:** Você não pode criar um cargo!\", delete_after = 
self.delete_system_message) else: self.log.error(f\"{error}", "\"use_slash_commands\", \"use_voice_activation\", \"value\", \"view_audit_log\", \"view_channel\", \"view_guild_insights\" ] @commands.Cog.listener() async def on_guild_channel_update(self, before, after):", "ctx, *, args: str = \"\"): \"\"\" Arg List: ctx -> Discord Context", "channel, category or role perm -> permission to change bool -> bool\"\"\", False),", "role, mode, perm, can) async def permission_tutorial(self, ctx): embedmsg = embed.createEmbed(title=\"Configurações de Permissões!\",", "at this moment) self.delete_user_message = info['utils']['delete_user_message'] self.delete_system_message = info['utils']['delete_system_message'] self.db_client = MongoClient(ENV['MONGODB']) self.guild_preferences_db", "+ \" - \" + before.name else: role_name = before.name # Categoria que", "= self.delete_system_message) else: self.log.error(f\"{error} - delete role failed\") await ctx.send(error, delete_after = self.delete_system_message)", "info = json.load(f) # Just to log everything :D self.log = logging.getLogger(__name__) #", "bool -> bool \"\"\" await ctx.message.delete(delay = self.delete_user_message) splitted_args = args.split(' ') if", "'.get' e ele será adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await msg.channel.send(embed=embedmsg,", "import logging # ENV from dotenv import dotenv_values ENV = dotenv_values(os.path.dirname(os.path.abspath(__file__)) + \"/../.env\")", "linked channel was deleted\") break return @commands.command(aliases=['criar'], pass_context=True) @has_permissions(manage_roles = True) async def", "not found!\") return setattr(overwrite, perm, can) if mode == 'category': category = ctx.channel.category", "args) if args in linked_keys else args # Defining useful variables guild =", "a role with a name that you like! 
\"\"\" def __init__(self, client): self.client", "== None and after.category != None) or (before.category.id != after.category.id): guild = after.guild", "\"\"\" This function is used to return a name to a role linked", "was changed to {can} in role {role.name} in current category').encode('ascii', 'ignore').decode('ascii') ) fb", "info['utils']['delete_system_message'] self.db_client = MongoClient(ENV['MONGODB']) self.guild_preferences_db = self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions = [ \"add_reactions\", \"administrator\", \"attach_files\",", "\"\"\" await ctx.message.delete(delay = self.delete_user_message) splitted_args = args.split(' ') if len(splitted_args) < 4", "\"send_messages\", \"send_tts_messages\", \"speak\", \"stream\", \"use_external_emojis\", \"use_slash_commands\", \"use_voice_activation\", \"value\", \"view_audit_log\", \"view_channel\", \"view_guild_insights\" ] @commands.Cog.listener()", "(I want to put it in a parent class, but i'm not sure", "\"category\"] role_name = self.linked_role(ctx, args) if args in linked_keys else args # Defining", "before, after): ''' Function to monitor guild channels and delete a role linked", "found it # The role already exists try: r = await conv.convert(ctx, role_name)", "was moved to trash ''' # Mudou de categoria if after.category == None:", "= 'Permitido' if can else 'Proibido' embedmsg = embed.createEmbed(title=\"Permissão alterada!\", description= f\"O cargo", "-> Role mode -> channel, category or role perm -> permission to change", "(Channel moved to archives)!\") return @commands.Cog.listener() async def on_guild_channel_delete(self, channel): target_type_channels = [\"text\",", "def delete_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:**", "True, manage_channels = True) async def permission(self, ctx, *, args: str = \"\"):", "guild = ctx.guild author = 
ctx.author msg = ctx.message if type.lower() == \"channel\"", "r.name == role_name: await r.delete() embedmsg = embed.createEmbed(title=\"Cargo associado excluído!\", description= f\"O cargo", "'/../database/utils.json', 'r') as f: info = json.load(f) # Just to log everything :D", "[\"channel\", \"category\"] role_name = self.linked_role(ctx, args) if args in linked_keys else args #", "error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode", "= self.delete_system_message) else: self.log.error(f\"{error} - creation of a new role failed\") await ctx.send(error,", "else 'Proibido' embedmsg = embed.createEmbed(title=\"Permissão alterada!\", description= f\"O cargo <@&{role.id}> foi atualizado por", "= True, manage_channels = True) async def permission(self, ctx, *, args: str =", "\"kick_members\", \"manage_channels\", \"manage_emojis\", \"manage_guild\", \"manage_messages\", \"manage_nicknames\", \"manage_permissions\", \"manage_roles\", \"manage_webhooks\", \"mention_everyone\", \"move_members\", \"mute_members\", \"priority_speaker\",", "async def delete_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await", "await ctx.send(error, delete_after = self.delete_system_message) # TODO: Parent class too async def role_exists(self,", "don't know why i did that shit, but i won't change elif channel.type.name.lower()", "255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite '.get' e ele", "parent class, but i'm not sure at this moment) self.delete_user_message = info['utils']['delete_user_message'] self.delete_system_message", "embed.createEmbed(title=\"Cargo associado excluído!\", description= f\"O cargo '{role_name}' associado ao canal foi excluído devido", "\"text\": option = channel.name else: option = channel.name for r in channel.guild.roles: if", "guild.roles: if 
r.name == role_name: await r.delete() embedmsg = embed.createEmbed(title=\"Cargo associado excluído!\", description=", "mode, perm, can) async def permission_tutorial(self, ctx): embedmsg = embed.createEmbed(title=\"Configurações de Permissões!\", description=", "para os arquivos.\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ ], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") #", "CtxRoleConverter from utils.utils import str2bool from functools import reduce import random import json", "async def create_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await", "trash ''' # Mudou de categoria if after.category == None: return elif (before.category", "it # The role already exists try: r = await conv.convert(ctx, role_name) return", "= dotenv_values(os.path.dirname(os.path.abspath(__file__)) + \"/../.env\") class RoleManager(commands.Cog): \"\"\" Manager is useful to create and", "\"\"\" await ctx.message.delete(delay = self.delete_user_message) linked_keys = [\"channel\", \"category\"] role_name = self.linked_role(ctx, args)", "too async def role_exists(self, ctx, role_name): \"\"\" Method to check if a role", "return True, r except commands.RoleNotFound: return False, None # TODO: Put it in", "-> bool \"\"\" await ctx.message.delete(delay = self.delete_user_message) splitted_args = args.split(' ') if len(splitted_args)", "color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Argumentos\", f\"\"\"role -> Role mode ->", "not in channel_permissions: self.log.debug( f\"[.permission] Permission {perm} not found!\") return setattr(overwrite, perm, can)", "4 or args == \"\": # Just for now self.log.debug(\"[.permission] Missing args\") await", "None: return elif (before.category == None and after.category != None) or (before.category.id !=", "sua conta\", False) ], 
img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await msg.channel.send(embed=embedmsg, delete_after= self.delete_system_message) else: # New Role", "to put it in a parent class, but i'm not sure at this", "= self.delete_user_message) linked_keys = [\"channel\", \"category\"] role_name = self.linked_role(ctx, args) if args in", "as embed from utils.colors import * import os #DB from pymongo import MongoClient", "'{role_name}' associado ao canal foi excluído devido a movimentação do mesmo para os", "\"move_members\", \"mute_members\", \"priority_speaker\", \"read_message_history\", \"read_messages\", \"request_to_speak\", \"send_messages\", \"send_tts_messages\", \"speak\", \"stream\", \"use_external_emojis\", \"use_slash_commands\", \"use_voice_activation\",", "\"read_messages\", \"request_to_speak\", \"send_messages\", \"send_tts_messages\", \"speak\", \"stream\", \"use_external_emojis\", \"use_slash_commands\", \"use_voice_activation\", \"value\", \"view_audit_log\", \"view_channel\", \"view_guild_insights\"", "@has_permissions(manage_roles = True, manage_channels = True) async def permission(self, ctx, *, args: str", "') if len(splitted_args) < 4 or args == \"\": # Just for now", "channel.category.name + \" - \" + channel.name # I don't know why i", "embedmsg = embed.createEmbed(title=\"CARGO JÁ EXISTE!\", description= f\"O cargo <@&{role.id}> já está no servidor,", ":D self.log = logging.getLogger(__name__) # TODO: Loading things :P (I want to put", "deleted because linked channel was deleted\") break return @commands.command(aliases=['criar'], pass_context=True) @has_permissions(manage_roles = True)", "in linked_keys else args # Defining useful variables guild = ctx.guild author =", "\" - \" + before.name else: role_name = before.name # Categoria que devo", "== role_name: await r.delete() embedmsg = embed.createEmbed(title=\"Cargo associado excluído!\", description= f\"O cargo '{role_name}'", "está no servidor, não precisa criar de 
novo!🍻\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0,", "Put it in a parent class def linked_role(self, ctx, type: str): \"\"\" This", "self.role_exists(ctx, role_name) await self._permission(ctx, role, mode, perm, can) async def permission_tutorial(self, ctx): embedmsg", "f\"O cargo <@&{role.id}> já está no servidor, não precisa criar de novo!🍻\", color=rgb_to_int((random.randint(0,", "ctx.message role_exists, role = await self.role_exists(ctx, role_name) if role_exists: embedmsg = embed.createEmbed(title=\"CARGO JÁ", "parent class def linked_role(self, ctx, type: str): \"\"\" This function is used to", "# TODO: N ta funcionando await role.edit(permission = overwrite) self.log.debug( (f'Permission {perm} was", "role exists in the current context, return a status and the role, if", "json.load(f) # Just to log everything :D self.log = logging.getLogger(__name__) # TODO: Loading", "await role.delete() self.log.debug(f\"Role '{option}' deleted because linked channel was deleted\") break return @commands.command(aliases=['criar'],", "after.category != None) or (before.category.id != after.category.id): guild = after.guild info = self.guild_preferences_db.find_one({\"_id\":", "self.log.debug(f\"Role '{option}' deleted because linked channel was deleted\") break return @commands.command(aliases=['criar'], pass_context=True) @has_permissions(manage_roles", "create(self, ctx, *, args: str = \"channel\"): \"\"\"Create a new role with the", "await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode criar", "não pode criar um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - creation of", "delete_after = self.delete_system_message) else: self.log.error(f\"{error} - creation of a new role failed\") await", "it in a parent class, but i'm not sure at this moment) self.delete_user_message", "\"administrator\", \"attach_files\", 
\"ban_members\", \"change_nickname\", \"connect\", \"create_instant_invite\", \"deafen_members\", \"embed_links\", \"external_emojis\", \"kick_members\", \"manage_channels\", \"manage_emojis\", \"manage_guild\",", "de categoria if after.category == None: return elif (before.category == None and after.category", "# Just to log everything :D self.log = logging.getLogger(__name__) # TODO: Loading things", "= ctx.guild author = ctx.author msg = ctx.message role_exists, role = await self.role_exists(ctx,", "a mensagem de acordo com o cargo que foi criado! embedmsg = embed.createEmbed(title=\"Novo", "cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - delete role failed\") await ctx.send(error, delete_after", "foi criado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\",", "utils.utils import str2bool from functools import reduce import random import json import utils.embed", "self.delete_system_message) else: self.log.error(f\"{error} - creation of a new role failed\") await ctx.send(error, delete_after", "if mode == 'category': category = ctx.channel.category await category.set_permissions(role, overwrite = overwrite) elif", "== info['archives']: for r in guild.roles: if r.name == role_name: await r.delete() embedmsg", "role failed\") await ctx.send(error, delete_after = self.delete_system_message) # TODO: Parent class too async", "msg.channel.category != None: option = msg.channel.category.name + \" - \" + msg.channel.name elif", "else: raise ValueError(\"\") return option; @commands.command(aliases=['deletar'], pass_context=True) @has_permissions(manage_roles = True) async def delete(self,", "cargo <@&{role.id}> já está no servidor, não precisa criar de novo!🍻\", color=rgb_to_int((random.randint(0, 255),", "it exists. 
\"\"\" conv = commands.RoleConverter() # If found it # The role", "ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode deletar um", "category').encode('ascii', 'ignore').decode('ascii') ) fb = 'Permitido' if can else 'Proibido' embedmsg = embed.createEmbed(title=\"Permissão", "perm, can) async def permission_tutorial(self, ctx): embedmsg = embed.createEmbed(title=\"Configurações de Permissões!\", description= f\"Verifique", "discord.ext import commands, tasks from discord.ext.commands import has_permissions, CheckFailure from utils.converters import CtxRoleConverter", "type.lower() == \"channel\": option = msg.channel.name elif type.lower() == \"category\": option = msg.channel.category.name", "= logging.getLogger(__name__) # TODO: Loading things :P (I want to put it in", "guild {guild.name} : {guild.id}\").encode('ascii', 'ignore').decode('ascii') ) # TODO: Especificar a mensagem de acordo", "channel, category or role perm -> permission to change bool -> bool \"\"\"", "random.randint(0, 255), random.randint(0, 255))), fields=[ ], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") # Send that shit await after.send(embed=embedmsg)", "r in channel.guild.roles: if r.name == option: role = r await role.delete() self.log.debug(f\"Role", "async def permission_tutorial(self, ctx): embedmsg = embed.createEmbed(title=\"Configurações de Permissões!\", description= f\"Verifique a lista", "elif (before.category == None and after.category != None) or (before.category.id != after.category.id): guild", "def delete(self, ctx, *, role: commands.RoleConverter): await ctx.message.delete(delay= self.delete_user_message) await role.delete() await ctx.send(f\"**AVISO:**", "self.log.debug( (f'Permission {perm} was changed to {can} in role {role.name} in current category').encode('ascii',", "Categoria que devo deletar o cargo if after.category.id == info['archives']: for r in", "a role exists 
in the current context, return a status and the role,", "import os #DB from pymongo import MongoClient import logging # ENV from dotenv", "255))), fields=[ (f\"Permissão '{perm}'\", f\"Atualizada para {fb}\", False) ], img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await msg.channel.send(embed=embedmsg) return", "ta funcionando await role.edit(permission = overwrite) self.log.debug( (f'Permission {perm} was changed to {can}", "random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Permissão '{perm}'\", f\"Atualizada para {fb}\", False) ], img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\")", "setattr(overwrite, perm, can) if mode == 'category': category = ctx.channel.category await category.set_permissions(role, overwrite", "\"manage_channels\", \"manage_emojis\", \"manage_guild\", \"manage_messages\", \"manage_nicknames\", \"manage_permissions\", \"manage_roles\", \"manage_webhooks\", \"mention_everyone\", \"move_members\", \"mute_members\", \"priority_speaker\", \"read_message_history\",", "role = await self.role_exists(ctx, role_name) if role_exists: embedmsg = embed.createEmbed(title=\"CARGO JÁ EXISTE!\", description=", "overwrite) elif mode == 'channel': channel = ctx.channel await channel.set_permissions(role, overwrite = overwrite)", "random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite '.get' e ele será adicionado na", "that shit, but i won't change elif channel.type.name.lower() == \"text\": option = channel.name", "# If found it # The role already exists try: r = await", "pegar?\", f\"Apenas digite .get no chat do cargo ou .get {new_role.name} e ele", "moment) self.delete_user_message = info['utils']['delete_user_message'] self.delete_system_message = info['utils']['delete_system_message'] self.db_client = MongoClient(ENV['MONGODB']) self.guild_preferences_db = self.db_client[info['mongo']['database']][info['mongo']['collection']]", "ctx.send(f\"**AVISO:** Cargo '{role.name}' apagado do 
servidor por <@{ctx.author.id}>!\") @delete.error async def delete_error(self, ctx,", "ctx, type: str): \"\"\" This function is used to return a name to", "role.delete() await ctx.send(f\"**AVISO:** Cargo '{role.name}' apagado do servidor por <@{ctx.author.id}>!\") @delete.error async def", "= self.delete_system_message) # TODO: Parent class too async def role_exists(self, ctx, role_name): \"\"\"", "(f'Permission {perm} was changed to {can} in role {role.name} in current category').encode('ascii', 'ignore').decode('ascii')", "= msg.channel.category.name + \" - \" + msg.channel.name elif type.lower() == \"channel\": option", "return elif (before.category == None and after.category != None) or (before.category.id != after.category.id):", "= self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions = [ \"add_reactions\", \"administrator\", \"attach_files\", \"ban_members\", \"change_nickname\", \"connect\", \"create_instant_invite\", \"deafen_members\",", "@commands.command(pass_context=True) @has_permissions(manage_roles = True, manage_channels = True) async def permission(self, ctx, *, args:", "RoleManager(commands.Cog): \"\"\" Manager is useful to create and delete roles. You can link", "description= f\"O cargo '{role_name}' associado ao canal foi excluído devido a movimentação do", "failed\") await ctx.send(error, delete_after = self.delete_system_message) async def _permission(self, ctx, role: CtxRoleConverter, mode:", "mode == 'channel': channel = ctx.channel await channel.set_permissions(role, overwrite = overwrite) else: #", "(f\"Argumentos\", f\"\"\"role -> Role mode -> channel, category or role perm -> permission", "255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite .get no chat do cargo", "exists. 
\"\"\" conv = commands.RoleConverter() # If found it # The role already", "self.log.error(f\"{error} - delete role failed\") await ctx.send(error, delete_after = self.delete_system_message) async def _permission(self,", "len(splitted_args) < 4 or args == \"\": # Just for now self.log.debug(\"[.permission] Missing", "args.split(' ') if len(splitted_args) < 4 or args == \"\": # Just for", "= before.name # Categoria que devo deletar o cargo if after.category.id == info['archives']:", "categoria if after.category == None: return elif (before.category == None and after.category !=", "TODO: Parent class too async def role_exists(self, ctx, role_name): \"\"\" Method to check", "= info['utils']['delete_system_message'] self.db_client = MongoClient(ENV['MONGODB']) self.guild_preferences_db = self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions = [ \"add_reactions\", \"administrator\",", ") fb = 'Permitido' if can else 'Proibido' embedmsg = embed.createEmbed(title=\"Permissão alterada!\", description=", "If found it # The role already exists try: r = await conv.convert(ctx,", "Role mode -> channel, category or role perm -> permission to change bool", "description= f\"O cargo <@&{new_role.id}> foi criado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0,", "{fb}\", False) ], img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await msg.channel.send(embed=embedmsg) return @commands.command(pass_context=True) @has_permissions(manage_roles = True, manage_channels =", "embedmsg = embed.createEmbed(title=\"Configurações de Permissões!\", description= f\"Verifique a lista de argumentos e permissões\",", "255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite '.get' e ele será adicionado", "uma categoria! 
if before.category != None: role_name = before.category.name + \" - \"", "True, r except commands.RoleNotFound: return False, None # TODO: Put it in a", "mode: str, perm: str, can: bool): guild = ctx.guild author = ctx.author msg", "do servidor por <@{ctx.author.id}>!\") @delete.error async def delete_error(self, ctx, error): await ctx.message.delete(delay =", "paramters like timer and other shits with open(os.path.dirname(os.path.abspath(__file__)) + '/../database/utils.json', 'r') as f:", "moved to trash ''' # Mudou de categoria if after.category == None: return", "'.join(splitted_args[:-3]) status, role = await self.role_exists(ctx, role_name) await self._permission(ctx, role, mode, perm, can)", "functools import reduce import random import json import utils.embed as embed from utils.colors", "i won't change elif channel.type.name.lower() == \"text\": option = channel.name else: option =", "str = \"channel\"): \"\"\"Create a new role with the given name \"\"\" await", "given name \"\"\" await ctx.message.delete(delay = self.delete_user_message) linked_keys = [\"channel\", \"category\"] role_name =", "category \"\"\" guild = ctx.guild author = ctx.author msg = ctx.message if type.lower()", "args\") await self.permission_tutorial(ctx) return; can = str2bool(splitted_args[-1]) perm = splitted_args[-2] mode = splitted_args[-3]", "\"mention_everyone\", \"move_members\", \"mute_members\", \"priority_speaker\", \"read_message_history\", \"read_messages\", \"request_to_speak\", \"send_messages\", \"send_tts_messages\", \"speak\", \"stream\", \"use_external_emojis\", \"use_slash_commands\",", "_permission(self, ctx, role: CtxRoleConverter, mode: str, perm: str, can: bool): guild = ctx.guild", "255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Argumentos\", f\"\"\"role -> Role mode -> channel,", "def role_exists(self, ctx, role_name): \"\"\" Method to check if a role exists in", "channel = ctx.channel await channel.set_permissions(role, overwrite = 
overwrite) else: # TODO: N ta", "and after.category != None) or (before.category.id != after.category.id): guild = after.guild info =", "# Nome criado sempre que um chat é linkado a uma categoria! if", "role.delete() self.log.debug(f\"Role '{option}' deleted because linked channel was deleted\") break return @commands.command(aliases=['criar'], pass_context=True)", "self.linked_role(ctx, args) if args in linked_keys else args # Defining useful variables guild", "to a role linked to a channel or category \"\"\" guild = ctx.guild", "import utils.embed as embed from utils.colors import * import os #DB from pymongo", "ctx.message.delete(delay = self.delete_user_message) splitted_args = args.split(' ') if len(splitted_args) < 4 or args", "name \"\"\" await ctx.message.delete(delay = self.delete_user_message) linked_keys = [\"channel\", \"category\"] role_name = self.linked_role(ctx,", "can link a role to a chat or just create a role with", "await r.delete() embedmsg = embed.createEmbed(title=\"Cargo associado excluído!\", description= f\"O cargo '{role_name}' associado ao", "* import os #DB from pymongo import MongoClient import logging # ENV from", "f\"Apenas digite .get no chat do cargo ou .get {new_role.name} e ele será", "client): self.client = client # Some good paramters like timer and other shits", "'ignore').decode('ascii') ) # TODO: Especificar a mensagem de acordo com o cargo que", "(\"Como pegar?\", f\"Apenas digite .get no chat do cargo ou .get {new_role.name} e", "def create_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:**", "ctx, role_name): \"\"\" Method to check if a role exists in the current", "\" + channel.name # I don't know why i did that shit, but", "um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - delete role failed\") await ctx.send(error,", "color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), 
random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite '.get' e", "guild = ctx.guild author = ctx.author msg = ctx.message role_exists, role = await", "a status and the role, if it exists. \"\"\" conv = commands.RoleConverter() #", "channels and delete a role linked to a channel if the channel was", "\" + msg.channel.name elif type.lower() == \"channel\": option = msg.channel.name elif type.lower() ==", "self.db_client = MongoClient(ENV['MONGODB']) self.guild_preferences_db = self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions = [ \"add_reactions\", \"administrator\", \"attach_files\", \"ban_members\",", "ctx.message overwrite = discord.PermissionOverwrite() # Fundamental # x.attr_name = s # setattr(x, 'attr_name',", "or role perm -> permission to change bool -> bool \"\"\" await ctx.message.delete(delay", "in a parent class def linked_role(self, ctx, type: str): \"\"\" This function is", "Permissões!\", description= f\"Verifique a lista de argumentos e permissões\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255),", "por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite", "cargo <@&{role.id}> foi atualizado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[", "return a status and the role, if it exists. \"\"\" conv = commands.RoleConverter()", "!= after.category.id): guild = after.guild info = self.guild_preferences_db.find_one({\"_id\": guild.id}) # Nome criado sempre", "target_type_channels: return elif channel.type.name.lower() == \"text\" and channel.category != None: option = channel.category.name", "await msg.channel.send(embed=embedmsg, delete_after= self.delete_system_message) else: # New Role Created! 
new_role = await guild.create_role(name=role_name,", "!= None: option = msg.channel.category.name + \" - \" + msg.channel.name elif type.lower()", "ctx.channel await channel.set_permissions(role, overwrite = overwrite) else: # TODO: N ta funcionando await", "role {role.name} in current category').encode('ascii', 'ignore').decode('ascii') ) fb = 'Permitido' if can else", "= await self.role_exists(ctx, role_name) if role_exists: embedmsg = embed.createEmbed(title=\"CARGO JÁ EXISTE!\", description= f\"O", "'ignore').decode('ascii') ) fb = 'Permitido' if can else 'Proibido' embedmsg = embed.createEmbed(title=\"Permissão alterada!\",", "= overwrite) elif mode == 'channel': channel = ctx.channel await channel.set_permissions(role, overwrite =", "< 4 or args == \"\": # Just for now self.log.debug(\"[.permission] Missing args\")", "criado sempre que um chat é linkado a uma categoria! if before.category !=", "255))), fields=[ (\"Como pegar?\", f\"Apenas digite '.get' e ele será adicionado na sua", "= discord.PermissionOverwrite() # Fundamental # x.attr_name = s # setattr(x, 'attr_name', s) if", "await role.delete() await ctx.send(f\"**AVISO:** Cargo '{role.name}' apagado do servidor por <@{ctx.author.id}>!\") @delete.error async", "{perm} was changed to {can} in role {role.name} in current category').encode('ascii', 'ignore').decode('ascii') )", "] @commands.Cog.listener() async def on_guild_channel_update(self, before, after): ''' Function to monitor guild channels", "Parent class too async def role_exists(self, ctx, role_name): \"\"\" Method to check if", "or role perm -> permission to change bool -> bool\"\"\", False), (f\"Permissões\", \"\\n\".join([item", "# x.attr_name = s # setattr(x, 'attr_name', s) if perm not in channel_permissions:", "author = ctx.author msg = ctx.message role_exists, role = await self.role_exists(ctx, role_name) if", "pode criar um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - creation of a", "just create 
a role with a name that you like! \"\"\" def __init__(self,", "str2bool from functools import reduce import random import json import utils.embed as embed", "\"speak\", \"stream\", \"use_external_emojis\", \"use_slash_commands\", \"use_voice_activation\", \"value\", \"view_audit_log\", \"view_channel\", \"view_guild_insights\" ] @commands.Cog.listener() async def", "conv.convert(ctx, role_name) return True, r except commands.RoleNotFound: return False, None # TODO: Put", "want to put it in a parent class, but i'm not sure at", "Você não pode criar um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - creation", "Loading things :P (I want to put it in a parent class, but", "= channel.category.name + \" - \" + channel.name # I don't know why", "target_type_channels = [\"text\", \"category\"] if channel.type.name.lower() not in target_type_channels: return elif channel.type.name.lower() ==", "to archives)!\") return @commands.Cog.listener() async def on_guild_channel_delete(self, channel): target_type_channels = [\"text\", \"category\"] if", "e ele será adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await msg.channel.send(embed=embedmsg, delete_after=", "self.delete_user_message = info['utils']['delete_user_message'] self.delete_system_message = info['utils']['delete_system_message'] self.db_client = MongoClient(ENV['MONGODB']) self.guild_preferences_db = self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions", "# ENV from dotenv import dotenv_values ENV = dotenv_values(os.path.dirname(os.path.abspath(__file__)) + \"/../.env\") class RoleManager(commands.Cog):", "'r') as f: info = json.load(f) # Just to log everything :D self.log", "será adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await msg.channel.send(embed=embedmsg, delete_after= self.delete_system_message) else:", 
"did that shit, but i won't change elif channel.type.name.lower() == \"text\": option =", "None) or (before.category.id != after.category.id): guild = after.guild info = self.guild_preferences_db.find_one({\"_id\": guild.id}) #", "type: str): \"\"\" This function is used to return a name to a", "change bool -> bool\"\"\", False), (f\"Permissões\", \"\\n\".join([item for item in self.channel_permissions]), False) ],", "await self.role_exists(ctx, role_name) if role_exists: embedmsg = embed.createEmbed(title=\"CARGO JÁ EXISTE!\", description= f\"O cargo", "and the role, if it exists. \"\"\" conv = commands.RoleConverter() # If found", "self.delete_system_message) # TODO: Parent class too async def role_exists(self, ctx, role_name): \"\"\" Method", "-> permission to change bool -> bool\"\"\", False), (f\"Permissões\", \"\\n\".join([item for item in", "return @commands.command(aliases=['criar'], pass_context=True) @has_permissions(manage_roles = True) async def create(self, ctx, *, args: str", "permission to change bool -> bool\"\"\", False), (f\"Permissões\", \"\\n\".join([item for item in self.channel_permissions]),", "EXISTE!\", description= f\"O cargo <@&{role.id}> já está no servidor, não precisa criar de", "\"view_guild_insights\" ] @commands.Cog.listener() async def on_guild_channel_update(self, before, after): ''' Function to monitor guild", "\"category\"] if channel.type.name.lower() not in target_type_channels: return elif channel.type.name.lower() == \"text\" and channel.category", "mensagem de acordo com o cargo que foi criado! 
embedmsg = embed.createEmbed(title=\"Novo Cargo!\",", "ctx.message.delete(delay = self.delete_user_message) linked_keys = [\"channel\", \"category\"] role_name = self.linked_role(ctx, args) if args", "in channel_permissions: self.log.debug( f\"[.permission] Permission {perm} not found!\") return setattr(overwrite, perm, can) if", "\" + before.name else: role_name = before.name # Categoria que devo deletar o", "discord from discord.ext import commands, tasks from discord.ext.commands import has_permissions, CheckFailure from utils.converters", "delete a role linked to a channel if the channel was moved to", "*, role: commands.RoleConverter): await ctx.message.delete(delay= self.delete_user_message) await role.delete() await ctx.send(f\"**AVISO:** Cargo '{role.name}' apagado", "foi excluído devido a movimentação do mesmo para os arquivos.\", color=rgb_to_int((random.randint(0, 255), random.randint(0,", "delete role failed\") await ctx.send(error, delete_after = self.delete_system_message) async def _permission(self, ctx, role:", "msg = ctx.message overwrite = discord.PermissionOverwrite() # Fundamental # x.attr_name = s #", "msg = ctx.message if type.lower() == \"channel\" and msg.channel.category != None: option =", "= ctx.channel await channel.set_permissions(role, overwrite = overwrite) else: # TODO: N ta funcionando", "self.log.debug( f\"[.permission] Permission {perm} not found!\") return setattr(overwrite, perm, can) if mode ==", "já está no servidor, não precisa criar de novo!🍻\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255),", "= self.linked_role(ctx, args) if args in linked_keys else args # Defining useful variables", "has_permissions, CheckFailure from utils.converters import CtxRoleConverter from utils.utils import str2bool from functools import", "discord.ext.commands import has_permissions, CheckFailure from utils.converters import CtxRoleConverter from utils.utils import str2bool from", "args # Defining useful variables guild = 
ctx.guild author = ctx.author msg =", "# Defining useful variables guild = ctx.guild author = ctx.author msg = ctx.message", "= ctx.author msg = ctx.message role_exists, role = await self.role_exists(ctx, role_name) if role_exists:", "precisa criar de novo!🍻\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\",", "= True) async def permission(self, ctx, *, args: str = \"\"): \"\"\" Arg", "MongoClient(ENV['MONGODB']) self.guild_preferences_db = self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions = [ \"add_reactions\", \"administrator\", \"attach_files\", \"ban_members\", \"change_nickname\", \"connect\",", "= msg.channel.category.name else: raise ValueError(\"\") return option; @commands.command(aliases=['deletar'], pass_context=True) @has_permissions(manage_roles = True) async", "role linked to a channel if the channel was moved to trash '''", "== \"text\" and channel.category != None: option = channel.category.name + \" - \"", "Nome criado sempre que um chat é linkado a uma categoria! if before.category", "\"view_audit_log\", \"view_channel\", \"view_guild_insights\" ] @commands.Cog.listener() async def on_guild_channel_update(self, before, after): ''' Function to", "self.role_exists(ctx, role_name) if role_exists: embedmsg = embed.createEmbed(title=\"CARGO JÁ EXISTE!\", description= f\"O cargo <@&{role.id}>", "não precisa criar de novo!🍻\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como", "= await guild.create_role(name=role_name, mentionable=True) self.log.info( (f\"New role '{new_role.name}' created in guild {guild.name} :", "@commands.Cog.listener() async def on_guild_channel_update(self, before, after): ''' Function to monitor guild channels and", "msg.channel.category.name + \" - \" + msg.channel.name elif type.lower() == \"channel\": option =", "create and delete roles. 
You can link a role to a chat or", "ctx.message.delete(delay= self.delete_user_message) await role.delete() await ctx.send(f\"**AVISO:** Cargo '{role.name}' apagado do servidor por <@{ctx.author.id}>!\")", "sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") await msg.channel.send(embed=embedmsg) return @create.error async def create_error(self, ctx,", "current category').encode('ascii', 'ignore').decode('ascii') ) fb = 'Permitido' if can else 'Proibido' embedmsg =", "\"send_tts_messages\", \"speak\", \"stream\", \"use_external_emojis\", \"use_slash_commands\", \"use_voice_activation\", \"value\", \"view_audit_log\", \"view_channel\", \"view_guild_insights\" ] @commands.Cog.listener() async", "List: ctx -> Discord Context role -> CtxRoleConverter mode -> channel, category or", "# Send that shit await after.send(embed=embedmsg) self.log.debug(f\"Role {role_name} deleted (Channel moved to archives)!\")", "ou .get {new_role.name} e ele será adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\")", "f\"\"\"role -> Role mode -> channel, category or role perm -> permission to", "change elif channel.type.name.lower() == \"text\": option = channel.name else: option = channel.name for", "import random import json import utils.embed as embed from utils.colors import * import", "permissões\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Argumentos\", f\"\"\"role -> Role mode", "= r await role.delete() self.log.debug(f\"Role '{option}' deleted because linked channel was deleted\") break", "def linked_role(self, ctx, type: str): \"\"\" This function is used to return a", "dotenv_values ENV = dotenv_values(os.path.dirname(os.path.abspath(__file__)) + \"/../.env\") class RoleManager(commands.Cog): \"\"\" Manager is useful to", "in role {role.name} in current category').encode('ascii', 'ignore').decode('ascii') ) fb = 'Permitido' 
if can", "await guild.create_role(name=role_name, mentionable=True) self.log.info( (f\"New role '{new_role.name}' created in guild {guild.name} : {guild.id}\").encode('ascii',", "channel): target_type_channels = [\"text\", \"category\"] if channel.type.name.lower() not in target_type_channels: return elif channel.type.name.lower()", "to a chat or just create a role with a name that you", "return setattr(overwrite, perm, can) if mode == 'category': category = ctx.channel.category await category.set_permissions(role,", "CtxRoleConverter mode -> channel, category or role perm -> permission to change bool", "color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ ], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") # Send that shit", "Some good paramters like timer and other shits with open(os.path.dirname(os.path.abspath(__file__)) + '/../database/utils.json', 'r')", "description= f\"Verifique a lista de argumentos e permissões\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0,", "{perm} not found!\") return setattr(overwrite, perm, can) if mode == 'category': category =", "'category': category = ctx.channel.category await category.set_permissions(role, overwrite = overwrite) elif mode == 'channel':", "== 'channel': channel = ctx.channel await channel.set_permissions(role, overwrite = overwrite) else: # TODO:", "to change bool -> bool \"\"\" await ctx.message.delete(delay = self.delete_user_message) splitted_args = args.split('", "channel.name else: option = channel.name for r in channel.guild.roles: if r.name == option:", "channel.set_permissions(role, overwrite = overwrite) else: # TODO: N ta funcionando await role.edit(permission =", "(before.category.id != after.category.id): guild = after.guild info = self.guild_preferences_db.find_one({\"_id\": guild.id}) # Nome criado", "is useful to create and delete roles. 
You can link a role to", "# Categoria que devo deletar o cargo if after.category.id == info['archives']: for r", "ctx.author msg = ctx.message overwrite = discord.PermissionOverwrite() # Fundamental # x.attr_name = s", "option = channel.category.name + \" - \" + channel.name # I don't know", "know why i did that shit, but i won't change elif channel.type.name.lower() ==", "shits with open(os.path.dirname(os.path.abspath(__file__)) + '/../database/utils.json', 'r') as f: info = json.load(f) # Just", "do mesmo para os arquivos.\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ ],", "do cargo ou .get {new_role.name} e ele será adicionado na sua conta\", False)", "other shits with open(os.path.dirname(os.path.abspath(__file__)) + '/../database/utils.json', 'r') as f: info = json.load(f) #", "= ctx.guild author = ctx.author msg = ctx.message if type.lower() == \"channel\" and", "discord.PermissionOverwrite() # Fundamental # x.attr_name = s # setattr(x, 'attr_name', s) if perm", "criar um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - creation of a new", "um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - creation of a new role", "exists in the current context, return a status and the role, if it", "can else 'Proibido' embedmsg = embed.createEmbed(title=\"Permissão alterada!\", description= f\"O cargo <@&{role.id}> foi atualizado", "import dotenv_values ENV = dotenv_values(os.path.dirname(os.path.abspath(__file__)) + \"/../.env\") class RoleManager(commands.Cog): \"\"\" Manager is useful", "= [\"text\", \"category\"] if channel.type.name.lower() not in target_type_channels: return elif channel.type.name.lower() == \"text\"", "role_name: await r.delete() embedmsg = embed.createEmbed(title=\"Cargo associado excluído!\", description= f\"O cargo '{role_name}' associado", "json import utils.embed as embed from utils.colors import * import os #DB 
from", "a uma categoria! if before.category != None: role_name = before.category.name + \" -", "com o cargo que foi criado! embedmsg = embed.createEmbed(title=\"Novo Cargo!\", description= f\"O cargo", "but i won't change elif channel.type.name.lower() == \"text\": option = channel.name else: option", "== \"\": # Just for now self.log.debug(\"[.permission] Missing args\") await self.permission_tutorial(ctx) return; can", "'Proibido' embedmsg = embed.createEmbed(title=\"Permissão alterada!\", description= f\"O cargo <@&{role.id}> foi atualizado por <@{author.id}>\",", "isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode deletar um cargo!\", delete_after = self.delete_system_message)", "after.guild info = self.guild_preferences_db.find_one({\"_id\": guild.id}) # Nome criado sempre que um chat é", "self.log.error(f\"{error} - creation of a new role failed\") await ctx.send(error, delete_after = self.delete_system_message)", "to change bool -> bool\"\"\", False), (f\"Permissões\", \"\\n\".join([item for item in self.channel_permissions]), False)", "if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode criar um cargo!\", delete_after =", "for r in guild.roles: if r.name == role_name: await r.delete() embedmsg = embed.createEmbed(title=\"Cargo", "= before.category.name + \" - \" + before.name else: role_name = before.name #", "ctx.channel.category await category.set_permissions(role, overwrite = overwrite) elif mode == 'channel': channel = ctx.channel", "if args in linked_keys else args # Defining useful variables guild = ctx.guild", "description= f\"O cargo <@&{role.id}> já está no servidor, não precisa criar de novo!🍻\",", "\"/../.env\") class RoleManager(commands.Cog): \"\"\" Manager is useful to create and delete roles. 
You", "False, None # TODO: Put it in a parent class def linked_role(self, ctx,", "permission_tutorial(self, ctx): embedmsg = embed.createEmbed(title=\"Configurações de Permissões!\", description= f\"Verifique a lista de argumentos", "info['archives']: for r in guild.roles: if r.name == role_name: await r.delete() embedmsg =", "TODO: Put it in a parent class def linked_role(self, ctx, type: str): \"\"\"", "random.randint(0, 255))), fields=[ (f\"Permissão '{perm}'\", f\"Atualizada para {fb}\", False) ], img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await msg.channel.send(embed=embedmsg)", "channel.category != None: option = channel.category.name + \" - \" + channel.name #", "embedmsg = embed.createEmbed(title=\"Cargo associado excluído!\", description= f\"O cargo '{role_name}' associado ao canal foi", "if channel.type.name.lower() not in target_type_channels: return elif channel.type.name.lower() == \"text\" and channel.category !=", "type.lower() == \"category\": option = msg.channel.category.name else: raise ValueError(\"\") return option; @commands.command(aliases=['deletar'], pass_context=True)", "digite .get no chat do cargo ou .get {new_role.name} e ele será adicionado", "fb = 'Permitido' if can else 'Proibido' embedmsg = embed.createEmbed(title=\"Permissão alterada!\", description= f\"O", "Permission {perm} not found!\") return setattr(overwrite, perm, can) if mode == 'category': category", "# Fundamental # x.attr_name = s # setattr(x, 'attr_name', s) if perm not", "commands, tasks from discord.ext.commands import has_permissions, CheckFailure from utils.converters import CtxRoleConverter from utils.utils", "category or role perm -> permission to change bool -> bool\"\"\", False), (f\"Permissões\",", "utils.colors import * import os #DB from pymongo import MongoClient import logging #", "f\"[.permission] Permission {perm} not found!\") return setattr(overwrite, perm, can) if mode == 'category':", "\"embed_links\", \"external_emojis\", 
\"kick_members\", \"manage_channels\", \"manage_emojis\", \"manage_guild\", \"manage_messages\", \"manage_nicknames\", \"manage_permissions\", \"manage_roles\", \"manage_webhooks\", \"mention_everyone\", \"move_members\",", "False) ], img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await msg.channel.send(embed=embedmsg, delete_after= self.delete_system_message) else: # New Role Created! new_role", "msg.channel.category.name else: raise ValueError(\"\") return option; @commands.command(aliases=['deletar'], pass_context=True) @has_permissions(manage_roles = True) async def", "from utils.colors import * import os #DB from pymongo import MongoClient import logging", "= True) async def delete(self, ctx, *, role: commands.RoleConverter): await ctx.message.delete(delay= self.delete_user_message) await", "canal foi excluído devido a movimentação do mesmo para os arquivos.\", color=rgb_to_int((random.randint(0, 255),", "await after.send(embed=embedmsg) self.log.debug(f\"Role {role_name} deleted (Channel moved to archives)!\") return @commands.Cog.listener() async def", "can: bool): guild = ctx.guild author = ctx.author msg = ctx.message overwrite =", "self.delete_user_message) linked_keys = [\"channel\", \"category\"] role_name = self.linked_role(ctx, args) if args in linked_keys", "\"channel\" and msg.channel.category != None: option = msg.channel.category.name + \" - \" +", "setattr(x, 'attr_name', s) if perm not in channel_permissions: self.log.debug( f\"[.permission] Permission {perm} not", "== \"text\": option = channel.name else: option = channel.name for r in channel.guild.roles:", "create a role with a name that you like! 
\"\"\" def __init__(self, client):", "new role with the given name \"\"\" await ctx.message.delete(delay = self.delete_user_message) linked_keys =", "CheckFailure): await ctx.send(\"**Erro:** Você não pode deletar um cargo!\", delete_after = self.delete_system_message) else:", "in guild.roles: if r.name == role_name: await r.delete() embedmsg = embed.createEmbed(title=\"Cargo associado excluído!\",", "to log everything :D self.log = logging.getLogger(__name__) # TODO: Loading things :P (I", "f\"O cargo '{role_name}' associado ao canal foi excluído devido a movimentação do mesmo", "\"\"\" Manager is useful to create and delete roles. You can link a", "= ctx.message overwrite = discord.PermissionOverwrite() # Fundamental # x.attr_name = s # setattr(x,", "name that you like! \"\"\" def __init__(self, client): self.client = client # Some", "ctx.author msg = ctx.message role_exists, role = await self.role_exists(ctx, role_name) if role_exists: embedmsg", "fields=[ (\"Como pegar?\", f\"Apenas digite '.get' e ele será adicionado na sua conta\",", "the role, if it exists. 
\"\"\" conv = commands.RoleConverter() # If found it", "\"\"\" guild = ctx.guild author = ctx.author msg = ctx.message if type.lower() ==", "# TODO: Loading things :P (I want to put it in a parent", "ctx.author msg = ctx.message if type.lower() == \"channel\" and msg.channel.category != None: option", "self.delete_user_message) await role.delete() await ctx.send(f\"**AVISO:** Cargo '{role.name}' apagado do servidor por <@{ctx.author.id}>!\") @delete.error", "arquivos.\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ ], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") # Send that", "open(os.path.dirname(os.path.abspath(__file__)) + '/../database/utils.json', 'r') as f: info = json.load(f) # Just to log", "ctx.send(\"**Erro:** Você não pode deletar um cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} -", "await msg.channel.send(embed=embedmsg) return @create.error async def create_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message)", "a role linked to a channel or category \"\"\" guild = ctx.guild author", "def permission(self, ctx, *, args: str = \"\"): \"\"\" Arg List: ctx ->", "class, but i'm not sure at this moment) self.delete_user_message = info['utils']['delete_user_message'] self.delete_system_message =", "self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode deletar um cargo!\", delete_after", "excluído!\", description= f\"O cargo '{role_name}' associado ao canal foi excluído devido a movimentação", "linked to a channel if the channel was moved to trash ''' #", "Created! 
new_role = await guild.create_role(name=role_name, mentionable=True) self.log.info( (f\"New role '{new_role.name}' created in guild", "elif channel.type.name.lower() == \"text\" and channel.category != None: option = channel.category.name + \"", "<@&{role.id}> já está no servidor, não precisa criar de novo!🍻\", color=rgb_to_int((random.randint(0, 255), random.randint(0,", "conta\", False) ], img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await msg.channel.send(embed=embedmsg, delete_after= self.delete_system_message) else: # New Role Created!", "== \"channel\": option = msg.channel.name elif type.lower() == \"category\": option = msg.channel.category.name else:", "option = msg.channel.category.name else: raise ValueError(\"\") return option; @commands.command(aliases=['deletar'], pass_context=True) @has_permissions(manage_roles = True)", "devo deletar o cargo if after.category.id == info['archives']: for r in guild.roles: if", "ctx, error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não", "perm = splitted_args[-2] mode = splitted_args[-3] role_name = ' '.join(splitted_args[:-3]) status, role =", "# Some good paramters like timer and other shits with open(os.path.dirname(os.path.abspath(__file__)) + '/../database/utils.json',", "\"value\", \"view_audit_log\", \"view_channel\", \"view_guild_insights\" ] @commands.Cog.listener() async def on_guild_channel_update(self, before, after): ''' Function", "+ \" - \" + msg.channel.name elif type.lower() == \"channel\": option = msg.channel.name", "== option: role = r await role.delete() self.log.debug(f\"Role '{option}' deleted because linked channel", "in target_type_channels: return elif channel.type.name.lower() == \"text\" and channel.category != None: option =", "255), random.randint(0, 255))), fields=[ (f\"Permissão '{perm}'\", f\"Atualizada para {fb}\", False) ], 
img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await", "= ctx.message role_exists, role = await self.role_exists(ctx, role_name) if role_exists: embedmsg = embed.createEmbed(title=\"CARGO", "now self.log.debug(\"[.permission] Missing args\") await self.permission_tutorial(ctx) return; can = str2bool(splitted_args[-1]) perm = splitted_args[-2]", ".get no chat do cargo ou .get {new_role.name} e ele será adicionado na", "255))), fields=[ (\"Como pegar?\", f\"Apenas digite .get no chat do cargo ou .get", ": {guild.id}\").encode('ascii', 'ignore').decode('ascii') ) # TODO: Especificar a mensagem de acordo com o", "if type.lower() == \"channel\" and msg.channel.category != None: option = msg.channel.category.name + \"", "\"manage_permissions\", \"manage_roles\", \"manage_webhooks\", \"mention_everyone\", \"move_members\", \"mute_members\", \"priority_speaker\", \"read_message_history\", \"read_messages\", \"request_to_speak\", \"send_messages\", \"send_tts_messages\", \"speak\",", "role = r await role.delete() self.log.debug(f\"Role '{option}' deleted because linked channel was deleted\")", "await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode deletar", "None: option = msg.channel.category.name + \" - \" + msg.channel.name elif type.lower() ==", "sure at this moment) self.delete_user_message = info['utils']['delete_user_message'] self.delete_system_message = info['utils']['delete_system_message'] self.db_client = MongoClient(ENV['MONGODB'])", "role.edit(permission = overwrite) self.log.debug( (f'Permission {perm} was changed to {can} in role {role.name}", "-> CtxRoleConverter mode -> channel, category or role perm -> permission to change", "role_name) return True, r except commands.RoleNotFound: return False, None # TODO: Put it", "import discord from discord.ext import commands, tasks from discord.ext.commands import has_permissions, CheckFailure from", "import json 
import utils.embed as embed from utils.colors import * import os #DB", "import * import os #DB from pymongo import MongoClient import logging # ENV", "before.category.name + \" - \" + before.name else: role_name = before.name # Categoria", "acordo com o cargo que foi criado! embedmsg = embed.createEmbed(title=\"Novo Cargo!\", description= f\"O", "= self.delete_system_message) async def _permission(self, ctx, role: CtxRoleConverter, mode: str, perm: str, can:", "this moment) self.delete_user_message = info['utils']['delete_user_message'] self.delete_system_message = info['utils']['delete_system_message'] self.db_client = MongoClient(ENV['MONGODB']) self.guild_preferences_db =", "(f\"New role '{new_role.name}' created in guild {guild.name} : {guild.id}\").encode('ascii', 'ignore').decode('ascii') ) # TODO:", "create_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você", "lista de argumentos e permissões\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Argumentos\",", "associado ao canal foi excluído devido a movimentação do mesmo para os arquivos.\",", "digite '.get' e ele será adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/814010519022600192.png?v=1\") await", "put it in a parent class, but i'm not sure at this moment)", "s) if perm not in channel_permissions: self.log.debug( f\"[.permission] Permission {perm} not found!\") return", "await channel.set_permissions(role, overwrite = overwrite) else: # TODO: N ta funcionando await role.edit(permission", "Just to log everything :D self.log = logging.getLogger(__name__) # TODO: Loading things :P", "permission to change bool -> bool \"\"\" await ctx.message.delete(delay = self.delete_user_message) splitted_args =", "linked_role(self, ctx, type: str): \"\"\" This function is used to return a name", "ctx.guild author = ctx.author msg = 
ctx.message if type.lower() == \"channel\" and msg.channel.category", "i'm not sure at this moment) self.delete_user_message = info['utils']['delete_user_message'] self.delete_system_message = info['utils']['delete_system_message'] self.db_client", "async def on_guild_channel_delete(self, channel): target_type_channels = [\"text\", \"category\"] if channel.type.name.lower() not in target_type_channels:", "import has_permissions, CheckFailure from utils.converters import CtxRoleConverter from utils.utils import str2bool from functools", "role '{new_role.name}' created in guild {guild.name} : {guild.id}\").encode('ascii', 'ignore').decode('ascii') ) # TODO: Especificar", "r in guild.roles: if r.name == role_name: await r.delete() embedmsg = embed.createEmbed(title=\"Cargo associado", "but i'm not sure at this moment) self.delete_user_message = info['utils']['delete_user_message'] self.delete_system_message = info['utils']['delete_system_message']", "You can link a role to a chat or just create a role", "chat do cargo ou .get {new_role.name} e ele será adicionado na sua conta\",", "= s # setattr(x, 'attr_name', s) if perm not in channel_permissions: self.log.debug( f\"[.permission]", "por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Permissão '{perm}'\", f\"Atualizada para", "won't change elif channel.type.name.lower() == \"text\": option = channel.name else: option = channel.name", "check if a role exists in the current context, return a status and", "a channel if the channel was moved to trash ''' # Mudou de", "Send that shit await after.send(embed=embedmsg) self.log.debug(f\"Role {role_name} deleted (Channel moved to archives)!\") return", "= channel.name for r in channel.guild.roles: if r.name == option: role = r", "a new role failed\") await ctx.send(error, delete_after = self.delete_system_message) # TODO: Parent class", "import reduce import random import json import utils.embed as 
embed from utils.colors import", "= True) async def create(self, ctx, *, args: str = \"channel\"): \"\"\"Create a", "return @commands.command(pass_context=True) @has_permissions(manage_roles = True, manage_channels = True) async def permission(self, ctx, *,", "on_guild_channel_update(self, before, after): ''' Function to monitor guild channels and delete a role", "True) async def delete(self, ctx, *, role: commands.RoleConverter): await ctx.message.delete(delay= self.delete_user_message) await role.delete()", "no chat do cargo ou .get {new_role.name} e ele será adicionado na sua", "logging # ENV from dotenv import dotenv_values ENV = dotenv_values(os.path.dirname(os.path.abspath(__file__)) + \"/../.env\") class", "self.delete_user_message) splitted_args = args.split(' ') if len(splitted_args) < 4 or args == \"\":", "\"\\n\".join([item for item in self.channel_permissions]), False) ], img=\"https://cdn.discordapp.com/emojis/767241157003837460.png?v=1\") await ctx.send(embed=embedmsg) # Setup def", "perm -> permission to change bool -> bool \"\"\" await ctx.message.delete(delay = self.delete_user_message)", "log everything :D self.log = logging.getLogger(__name__) # TODO: Loading things :P (I want", "servidor por <@{ctx.author.id}>!\") @delete.error async def delete_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message)", "in guild {guild.name} : {guild.id}\").encode('ascii', 'ignore').decode('ascii') ) # TODO: Especificar a mensagem de", "ctx, *, role: commands.RoleConverter): await ctx.message.delete(delay= self.delete_user_message) await role.delete() await ctx.send(f\"**AVISO:** Cargo '{role.name}'", "= ctx.author msg = ctx.message if type.lower() == \"channel\" and msg.channel.category != None:", "a parent class, but i'm not sure at this moment) self.delete_user_message = info['utils']['delete_user_message']", "Role Created! 
new_role = await guild.create_role(name=role_name, mentionable=True) self.log.info( (f\"New role '{new_role.name}' created in", "= await self.role_exists(ctx, role_name) await self._permission(ctx, role, mode, perm, can) async def permission_tutorial(self,", "!= None) or (before.category.id != after.category.id): guild = after.guild info = self.guild_preferences_db.find_one({\"_id\": guild.id})", "\"\"\" conv = commands.RoleConverter() # If found it # The role already exists", "self.log.debug(f\"Role {role_name} deleted (Channel moved to archives)!\") return @commands.Cog.listener() async def on_guild_channel_delete(self, channel):", "Discord Context role -> CtxRoleConverter mode -> channel, category or role perm ->", "cargo if after.category.id == info['archives']: for r in guild.roles: if r.name == role_name:", "overwrite) self.log.debug( (f'Permission {perm} was changed to {can} in role {role.name} in current", "after.category.id == info['archives']: for r in guild.roles: if r.name == role_name: await r.delete()", "from discord.ext.commands import has_permissions, CheckFailure from utils.converters import CtxRoleConverter from utils.utils import str2bool", "novo!🍻\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas digite '.get'", "\"external_emojis\", \"kick_members\", \"manage_channels\", \"manage_emojis\", \"manage_guild\", \"manage_messages\", \"manage_nicknames\", \"manage_permissions\", \"manage_roles\", \"manage_webhooks\", \"mention_everyone\", \"move_members\", \"mute_members\",", "foi atualizado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Permissão '{perm}'\",", "msg = ctx.message role_exists, role = await self.role_exists(ctx, role_name) if role_exists: embedmsg =", "a chat or just create a role with a name that you like!", "not sure at this moment) self.delete_user_message = 
info['utils']['delete_user_message'] self.delete_system_message = info['utils']['delete_system_message'] self.db_client =", "self.db_client[info['mongo']['database']][info['mongo']['collection']] self.channel_permissions = [ \"add_reactions\", \"administrator\", \"attach_files\", \"ban_members\", \"change_nickname\", \"connect\", \"create_instant_invite\", \"deafen_members\", \"embed_links\",", "\"attach_files\", \"ban_members\", \"change_nickname\", \"connect\", \"create_instant_invite\", \"deafen_members\", \"embed_links\", \"external_emojis\", \"kick_members\", \"manage_channels\", \"manage_emojis\", \"manage_guild\", \"manage_messages\",", "self.log.debug(\"[.permission] Missing args\") await self.permission_tutorial(ctx) return; can = str2bool(splitted_args[-1]) perm = splitted_args[-2] mode", "if before.category != None: role_name = before.category.name + \" - \" + before.name", "option = msg.channel.category.name + \" - \" + msg.channel.name elif type.lower() == \"channel\":", "adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") await msg.channel.send(embed=embedmsg) return @create.error async def", "{guild.name} : {guild.id}\").encode('ascii', 'ignore').decode('ascii') ) # TODO: Especificar a mensagem de acordo com", "r except commands.RoleNotFound: return False, None # TODO: Put it in a parent", "item in self.channel_permissions]), False) ], img=\"https://cdn.discordapp.com/emojis/767241157003837460.png?v=1\") await ctx.send(embed=embedmsg) # Setup def setup(client): client.add_cog(RoleManager(client))", "\"manage_nicknames\", \"manage_permissions\", \"manage_roles\", \"manage_webhooks\", \"mention_everyone\", \"move_members\", \"mute_members\", \"priority_speaker\", \"read_message_history\", \"read_messages\", \"request_to_speak\", \"send_messages\", \"send_tts_messages\",", "function is used to return a name to a role linked to a", "args == \"\": # Just for now self.log.debug(\"[.permission] Missing 
args\") await self.permission_tutorial(ctx) return;", "a movimentação do mesmo para os arquivos.\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))),", "-> channel, category or role perm -> permission to change bool -> bool\"\"\",", "= \"channel\"): \"\"\"Create a new role with the given name \"\"\" await ctx.message.delete(delay", "Missing args\") await self.permission_tutorial(ctx) return; can = str2bool(splitted_args[-1]) perm = splitted_args[-2] mode =", "utils.embed as embed from utils.colors import * import os #DB from pymongo import", "class def linked_role(self, ctx, type: str): \"\"\" This function is used to return", "from discord.ext import commands, tasks from discord.ext.commands import has_permissions, CheckFailure from utils.converters import", "guild.id}) # Nome criado sempre que um chat é linkado a uma categoria!", "The role already exists try: r = await conv.convert(ctx, role_name) return True, r", "cargo!\", delete_after = self.delete_system_message) else: self.log.error(f\"{error} - creation of a new role failed\")", "CheckFailure): await ctx.send(\"**Erro:** Você não pode criar um cargo!\", delete_after = self.delete_system_message) else:", "delete_after = self.delete_system_message) # TODO: Parent class too async def role_exists(self, ctx, role_name):", "archives)!\") return @commands.Cog.listener() async def on_guild_channel_delete(self, channel): target_type_channels = [\"text\", \"category\"] if channel.type.name.lower()", "break return @commands.command(aliases=['criar'], pass_context=True) @has_permissions(manage_roles = True) async def create(self, ctx, *, args:", "== \"category\": option = msg.channel.category.name else: raise ValueError(\"\") return option; @commands.command(aliases=['deletar'], pass_context=True) @has_permissions(manage_roles", "ctx.send(error, delete_after = self.delete_system_message) async def _permission(self, ctx, role: CtxRoleConverter, mode: str, perm:", "ctx): embedmsg 
= embed.createEmbed(title=\"Configurações de Permissões!\", description= f\"Verifique a lista de argumentos e", "commands.RoleConverter): await ctx.message.delete(delay= self.delete_user_message) await role.delete() await ctx.send(f\"**AVISO:** Cargo '{role.name}' apagado do servidor", "things :P (I want to put it in a parent class, but i'm", "= json.load(f) # Just to log everything :D self.log = logging.getLogger(__name__) # TODO:", "255), random.randint(0, 255))), fields=[ ], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") # Send that shit await after.send(embed=embedmsg) self.log.debug(f\"Role", "perm -> permission to change bool -> bool\"\"\", False), (f\"Permissões\", \"\\n\".join([item for item", "embed from utils.colors import * import os #DB from pymongo import MongoClient import", "criado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas", "+ msg.channel.name elif type.lower() == \"channel\": option = msg.channel.name elif type.lower() == \"category\":", "= embed.createEmbed(title=\"Novo Cargo!\", description= f\"O cargo <@&{new_role.id}> foi criado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255),", "\"deafen_members\", \"embed_links\", \"external_emojis\", \"kick_members\", \"manage_channels\", \"manage_emojis\", \"manage_guild\", \"manage_messages\", \"manage_nicknames\", \"manage_permissions\", \"manage_roles\", \"manage_webhooks\", \"mention_everyone\",", "if the channel was moved to trash ''' # Mudou de categoria if", "cargo ou .get {new_role.name} e ele será adicionado na sua conta\", False) ],", "overwrite) else: # TODO: N ta funcionando await role.edit(permission = overwrite) self.log.debug( (f'Permission", "str): \"\"\" This function is used to return a name to a role", "if a role exists in the current context, return a status and the", "after.category.id): guild = after.guild info = 
self.guild_preferences_db.find_one({\"_id\": guild.id}) # Nome criado sempre que", "atualizado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (f\"Permissão '{perm}'\", f\"Atualizada", "mode -> channel, category or role perm -> permission to change bool ->", "splitted_args = args.split(' ') if len(splitted_args) < 4 or args == \"\": #", "class RoleManager(commands.Cog): \"\"\" Manager is useful to create and delete roles. You can", "devido a movimentação do mesmo para os arquivos.\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0,", "str, can: bool): guild = ctx.guild author = ctx.author msg = ctx.message overwrite", "async def _permission(self, ctx, role: CtxRoleConverter, mode: str, perm: str, can: bool): guild", "= ctx.message if type.lower() == \"channel\" and msg.channel.category != None: option = msg.channel.category.name", "pass_context=True) @has_permissions(manage_roles = True) async def delete(self, ctx, *, role: commands.RoleConverter): await ctx.message.delete(delay=", "can = str2bool(splitted_args[-1]) perm = splitted_args[-2] mode = splitted_args[-3] role_name = ' '.join(splitted_args[:-3])", "de acordo com o cargo que foi criado! 
embedmsg = embed.createEmbed(title=\"Novo Cargo!\", description=", "if r.name == option: role = r await role.delete() self.log.debug(f\"Role '{option}' deleted because", "or args == \"\": # Just for now self.log.debug(\"[.permission] Missing args\") await self.permission_tutorial(ctx)", "'{new_role.name}' created in guild {guild.name} : {guild.id}\").encode('ascii', 'ignore').decode('ascii') ) # TODO: Especificar a", "permission(self, ctx, *, args: str = \"\"): \"\"\" Arg List: ctx -> Discord", "== 'category': category = ctx.channel.category await category.set_permissions(role, overwrite = overwrite) elif mode ==", "\"category\": option = msg.channel.category.name else: raise ValueError(\"\") return option; @commands.command(aliases=['deletar'], pass_context=True) @has_permissions(manage_roles =", "(f\"Permissões\", \"\\n\".join([item for item in self.channel_permissions]), False) ], img=\"https://cdn.discordapp.com/emojis/767241157003837460.png?v=1\") await ctx.send(embed=embedmsg) # Setup", "role perm -> permission to change bool -> bool \"\"\" await ctx.message.delete(delay =", "if can else 'Proibido' embedmsg = embed.createEmbed(title=\"Permissão alterada!\", description= f\"O cargo <@&{role.id}> foi", "await ctx.send(f\"**AVISO:** Cargo '{role.name}' apagado do servidor por <@{ctx.author.id}>!\") @delete.error async def delete_error(self,", "cargo que foi criado! embedmsg = embed.createEmbed(title=\"Novo Cargo!\", description= f\"O cargo <@&{new_role.id}> foi", "channel was deleted\") break return @commands.command(aliases=['criar'], pass_context=True) @has_permissions(manage_roles = True) async def create(self,", "not in target_type_channels: return elif channel.type.name.lower() == \"text\" and channel.category != None: option", "overwrite = overwrite) else: # TODO: N ta funcionando await role.edit(permission = overwrite)", "+ \" - \" + channel.name # I don't know why i did", "roles. 
You can link a role to a chat or just create a", "criar de novo!🍻\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))), fields=[ (\"Como pegar?\", f\"Apenas", "-> Discord Context role -> CtxRoleConverter mode -> channel, category or role perm", "after): ''' Function to monitor guild channels and delete a role linked to", "await category.set_permissions(role, overwrite = overwrite) elif mode == 'channel': channel = ctx.channel await", "return a name to a role linked to a channel or category \"\"\"", "por <@{ctx.author.id}>!\") @delete.error async def delete_error(self, ctx, error): await ctx.message.delete(delay = self.delete_user_message) if", "self.log = logging.getLogger(__name__) # TODO: Loading things :P (I want to put it", "guild channels and delete a role linked to a channel if the channel", "can) async def permission_tutorial(self, ctx): embedmsg = embed.createEmbed(title=\"Configurações de Permissões!\", description= f\"Verifique a", "async def delete(self, ctx, *, role: commands.RoleConverter): await ctx.message.delete(delay= self.delete_user_message) await role.delete() await", "255), random.randint(0, 255), random.randint(0, 255))), fields=[ ], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") # Send that shit await", "variables guild = ctx.guild author = ctx.author msg = ctx.message role_exists, role =", "return elif channel.type.name.lower() == \"text\" and channel.category != None: option = channel.category.name +", "pass_context=True) @has_permissions(manage_roles = True) async def create(self, ctx, *, args: str = \"channel\"):", "self.guild_preferences_db.find_one({\"_id\": guild.id}) # Nome criado sempre que um chat é linkado a uma", "será adicionado na sua conta\", False) ], img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") await msg.channel.send(embed=embedmsg) return @create.error async", "], img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") 
await msg.channel.send(embed=embedmsg) return @create.error async def create_error(self, ctx, error): await ctx.message.delete(delay", "isinstance(error, CheckFailure): await ctx.send(\"**Erro:** Você não pode criar um cargo!\", delete_after = self.delete_system_message)", "bool): guild = ctx.guild author = ctx.author msg = ctx.message overwrite = discord.PermissionOverwrite()", "alterada!\", description= f\"O cargo <@&{role.id}> foi atualizado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255),", "self.log.info( (f\"New role '{new_role.name}' created in guild {guild.name} : {guild.id}\").encode('ascii', 'ignore').decode('ascii') ) #", "f\"O cargo <@&{new_role.id}> foi criado por <@{author.id}>\", color=rgb_to_int((random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))),", "delete(self, ctx, *, role: commands.RoleConverter): await ctx.message.delete(delay= self.delete_user_message) await role.delete() await ctx.send(f\"**AVISO:** Cargo", "after.send(embed=embedmsg) self.log.debug(f\"Role {role_name} deleted (Channel moved to archives)!\") return @commands.Cog.listener() async def on_guild_channel_delete(self,", "s # setattr(x, 'attr_name', s) if perm not in channel_permissions: self.log.debug( f\"[.permission] Permission", "if r.name == role_name: await r.delete() embedmsg = embed.createEmbed(title=\"Cargo associado excluído!\", description= f\"O", "img=\"https://cdn.discordapp.com/emojis/765969524897218594.png?v=1\") await msg.channel.send(embed=embedmsg) return @commands.command(pass_context=True) @has_permissions(manage_roles = True, manage_channels = True) async def", "else args # Defining useful variables guild = ctx.guild author = ctx.author msg", "to monitor guild channels and delete a role linked to a channel if", "to return a name to a role linked to a channel or category", "to create and delete roles. 
You can link a role to a chat", "everything :D self.log = logging.getLogger(__name__) # TODO: Loading things :P (I want to", "um chat é linkado a uma categoria! if before.category != None: role_name =", "Just for now self.log.debug(\"[.permission] Missing args\") await self.permission_tutorial(ctx) return; can = str2bool(splitted_args[-1]) perm", "255))), fields=[ ], img=\"https://cdn.discordapp.com/emojis/753575574546415656.png?v=1\") # Send that shit await after.send(embed=embedmsg) self.log.debug(f\"Role {role_name} deleted", "- \" + msg.channel.name elif type.lower() == \"channel\": option = msg.channel.name elif type.lower()", "str, perm: str, can: bool): guild = ctx.guild author = ctx.author msg =", "\"manage_guild\", \"manage_messages\", \"manage_nicknames\", \"manage_permissions\", \"manage_roles\", \"manage_webhooks\", \"mention_everyone\", \"move_members\", \"mute_members\", \"priority_speaker\", \"read_message_history\", \"read_messages\", \"request_to_speak\",", "'channel': channel = ctx.channel await channel.set_permissions(role, overwrite = overwrite) else: # TODO: N", "conta\", False) ], img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") await msg.channel.send(embed=embedmsg) return @create.error async def create_error(self, ctx, error):", "info = self.guild_preferences_db.find_one({\"_id\": guild.id}) # Nome criado sempre que um chat é linkado", "mentionable=True) self.log.info( (f\"New role '{new_role.name}' created in guild {guild.name} : {guild.id}\").encode('ascii', 'ignore').decode('ascii') )", "# Mudou de categoria if after.category == None: return elif (before.category == None", "await conv.convert(ctx, role_name) return True, r except commands.RoleNotFound: return False, None # TODO:", "New Role Created! 
new_role = await guild.create_role(name=role_name, mentionable=True) self.log.info( (f\"New role '{new_role.name}' created", "else: self.log.error(f\"{error} - creation of a new role failed\") await ctx.send(error, delete_after =", "__init__(self, client): self.client = client # Some good paramters like timer and other", "bool -> bool\"\"\", False), (f\"Permissões\", \"\\n\".join([item for item in self.channel_permissions]), False) ], img=\"https://cdn.discordapp.com/emojis/767241157003837460.png?v=1\")", "chat é linkado a uma categoria! if before.category != None: role_name = before.category.name", "or (before.category.id != after.category.id): guild = after.guild info = self.guild_preferences_db.find_one({\"_id\": guild.id}) # Nome", "used to return a name to a role linked to a channel or", "False) ], img=\"https://cdn.discordapp.com/emojis/859150737509580800.gif?v=1\") await msg.channel.send(embed=embedmsg) return @create.error async def create_error(self, ctx, error): await", "ValueError(\"\") return option; @commands.command(aliases=['deletar'], pass_context=True) @has_permissions(manage_roles = True) async def delete(self, ctx, *,", "@commands.Cog.listener() async def on_guild_channel_delete(self, channel): target_type_channels = [\"text\", \"category\"] if channel.type.name.lower() not in", "ctx, role: CtxRoleConverter, mode: str, perm: str, can: bool): guild = ctx.guild author" ]
[ "'polymorphic_identity': 'attribute', 'polymorphic_on': type } class BooleanAttribute(Attribute): __tablename__ = \"boolean_attribute\" id = Column(Integer,", "'polymorphic_on': type } class BooleanAttribute(Attribute): __tablename__ = \"boolean_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True)", "= Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Float) __mapper_args__ = { 'polymorphic_identity': float.__name__ }", "\"string_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(String(length=4096)) __mapper_args__ = { 'polymorphic_identity':", "Column(Float) __mapper_args__ = { 'polymorphic_identity': float.__name__ } class StringAttribute(Attribute): __tablename__ = \"string_attribute\" id", "= \"string_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(String(length=4096)) __mapper_args__ = {", "Float from ..db import Base class Attribute(Base): __tablename__ = \"attribute\" id = Column(Integer,", "type } class BooleanAttribute(Attribute): __tablename__ = \"boolean_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value", "unique=True) __mapper_args__ = { 'polymorphic_identity': 'attribute', 'polymorphic_on': type } class BooleanAttribute(Attribute): __tablename__ =", "= { 'polymorphic_identity': float.__name__ } class StringAttribute(Attribute): __tablename__ = \"string_attribute\" id = Column(Integer,", "= Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(String(length=4096)) __mapper_args__ = { 'polymorphic_identity': str.__name__ }", "\"integer_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Integer) __mapper_args__ = { 'polymorphic_identity':", "BooleanAttribute(Attribute): __tablename__ = \"boolean_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Boolean) __mapper_args__", 
"__tablename__ = \"boolean_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Boolean) __mapper_args__ =", "ForeignKey('attribute.id'), primary_key=True) value = Column(Integer) __mapper_args__ = { 'polymorphic_identity': int.__name__ } class FloatAttribute(Attribute):", "Column(String(length=256), nullable=False) remote_reference = Column(String(256), nullable=False) key = Column(String(length=256), unique=True) __mapper_args__ = {", "'polymorphic_identity': float.__name__ } class StringAttribute(Attribute): __tablename__ = \"string_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True)", "id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Boolean) __mapper_args__ = { 'polymorphic_identity': bool.__name__", "= { 'polymorphic_identity': int.__name__ } class FloatAttribute(Attribute): __tablename__ = \"float_attribute\" id = Column(Integer,", "{ 'polymorphic_identity': 'attribute', 'polymorphic_on': type } class BooleanAttribute(Attribute): __tablename__ = \"boolean_attribute\" id =", "bool.__name__ } class IntegerAttribute(Attribute): __tablename__ = \"integer_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value", "value = Column(Float) __mapper_args__ = { 'polymorphic_identity': float.__name__ } class StringAttribute(Attribute): __tablename__ =", "{ 'polymorphic_identity': bool.__name__ } class IntegerAttribute(Attribute): __tablename__ = \"integer_attribute\" id = Column(Integer, ForeignKey('attribute.id'),", "= Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Integer) __mapper_args__ = { 'polymorphic_identity': int.__name__ }", "} class IntegerAttribute(Attribute): __tablename__ = \"integer_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value =", "'polymorphic_identity': bool.__name__ } class IntegerAttribute(Attribute): __tablename__ = \"integer_attribute\" id = 
Column(Integer, ForeignKey('attribute.id'), primary_key=True)", "'attribute', 'polymorphic_on': type } class BooleanAttribute(Attribute): __tablename__ = \"boolean_attribute\" id = Column(Integer, ForeignKey('attribute.id'),", "class StringAttribute(Attribute): __tablename__ = \"string_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(String(length=4096))", "primary_key=True) value = Column(Float) __mapper_args__ = { 'polymorphic_identity': float.__name__ } class StringAttribute(Attribute): __tablename__", "__tablename__ = \"attribute\" id = Column(Integer, autoincrement=True, primary_key=True, unique=True, nullable=False) type = Column(String(length=256),", "autoincrement=True, primary_key=True, unique=True, nullable=False) type = Column(String(length=256), nullable=False) remote_reference = Column(String(256), nullable=False) key", "float.__name__ } class StringAttribute(Attribute): __tablename__ = \"string_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value", "Boolean, ForeignKey, Integer, Float from ..db import Base class Attribute(Base): __tablename__ = \"attribute\"", "\"boolean_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Boolean) __mapper_args__ = { 'polymorphic_identity':", "Integer, Float from ..db import Base class Attribute(Base): __tablename__ = \"attribute\" id =", "id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Float) __mapper_args__ = { 'polymorphic_identity': float.__name__", "String, Boolean, ForeignKey, Integer, Float from ..db import Base class Attribute(Base): __tablename__ =", "nullable=False) key = Column(String(length=256), unique=True) __mapper_args__ = { 'polymorphic_identity': 'attribute', 'polymorphic_on': type }", "Column(Integer, autoincrement=True, primary_key=True, unique=True, nullable=False) type = Column(String(length=256), nullable=False) remote_reference = 
Column(String(256), nullable=False)", "} class StringAttribute(Attribute): __tablename__ = \"string_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value =", "= Column(Float) __mapper_args__ = { 'polymorphic_identity': float.__name__ } class StringAttribute(Attribute): __tablename__ = \"string_attribute\"", "primary_key=True, unique=True, nullable=False) type = Column(String(length=256), nullable=False) remote_reference = Column(String(256), nullable=False) key =", "= { 'polymorphic_identity': bool.__name__ } class IntegerAttribute(Attribute): __tablename__ = \"integer_attribute\" id = Column(Integer,", "__mapper_args__ = { 'polymorphic_identity': float.__name__ } class StringAttribute(Attribute): __tablename__ = \"string_attribute\" id =", "sqlalchemy import Column, String, Boolean, ForeignKey, Integer, Float from ..db import Base class", "class Attribute(Base): __tablename__ = \"attribute\" id = Column(Integer, autoincrement=True, primary_key=True, unique=True, nullable=False) type", "ForeignKey('attribute.id'), primary_key=True) value = Column(Boolean) __mapper_args__ = { 'polymorphic_identity': bool.__name__ } class IntegerAttribute(Attribute):", "Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Float) __mapper_args__ = { 'polymorphic_identity': float.__name__ } class", "{ 'polymorphic_identity': float.__name__ } class StringAttribute(Attribute): __tablename__ = \"string_attribute\" id = Column(Integer, ForeignKey('attribute.id'),", "__tablename__ = \"float_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Float) __mapper_args__ =", "id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Integer) __mapper_args__ = { 'polymorphic_identity': int.__name__", "ForeignKey('attribute.id'), primary_key=True) value = Column(Float) __mapper_args__ = { 'polymorphic_identity': float.__name__ } class StringAttribute(Attribute):", "= 
\"boolean_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Boolean) __mapper_args__ = {", "import Column, String, Boolean, ForeignKey, Integer, Float from ..db import Base class Attribute(Base):", "nullable=False) type = Column(String(length=256), nullable=False) remote_reference = Column(String(256), nullable=False) key = Column(String(length=256), unique=True)", "int.__name__ } class FloatAttribute(Attribute): __tablename__ = \"float_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value", "= Column(Integer, autoincrement=True, primary_key=True, unique=True, nullable=False) type = Column(String(length=256), nullable=False) remote_reference = Column(String(256),", "Column(Boolean) __mapper_args__ = { 'polymorphic_identity': bool.__name__ } class IntegerAttribute(Attribute): __tablename__ = \"integer_attribute\" id", "= Column(String(length=256), unique=True) __mapper_args__ = { 'polymorphic_identity': 'attribute', 'polymorphic_on': type } class BooleanAttribute(Attribute):", "StringAttribute(Attribute): __tablename__ = \"string_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(String(length=4096)) __mapper_args__", "'polymorphic_identity': int.__name__ } class FloatAttribute(Attribute): __tablename__ = \"float_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True)", "id = Column(Integer, autoincrement=True, primary_key=True, unique=True, nullable=False) type = Column(String(length=256), nullable=False) remote_reference =", "\"float_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Float) __mapper_args__ = { 'polymorphic_identity':", "= Column(String(length=256), nullable=False) remote_reference = Column(String(256), nullable=False) key = Column(String(length=256), unique=True) __mapper_args__ =", "Base class Attribute(Base): __tablename__ = \"attribute\" id = Column(Integer, 
autoincrement=True, primary_key=True, unique=True, nullable=False)", "__mapper_args__ = { 'polymorphic_identity': bool.__name__ } class IntegerAttribute(Attribute): __tablename__ = \"integer_attribute\" id =", "unique=True, nullable=False) type = Column(String(length=256), nullable=False) remote_reference = Column(String(256), nullable=False) key = Column(String(length=256),", "id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(String(length=4096)) __mapper_args__ = { 'polymorphic_identity': str.__name__", "{ 'polymorphic_identity': int.__name__ } class FloatAttribute(Attribute): __tablename__ = \"float_attribute\" id = Column(Integer, ForeignKey('attribute.id'),", "primary_key=True) value = Column(Boolean) __mapper_args__ = { 'polymorphic_identity': bool.__name__ } class IntegerAttribute(Attribute): __tablename__", "Column, String, Boolean, ForeignKey, Integer, Float from ..db import Base class Attribute(Base): __tablename__", "value = Column(Boolean) __mapper_args__ = { 'polymorphic_identity': bool.__name__ } class IntegerAttribute(Attribute): __tablename__ =", "Column(String(256), nullable=False) key = Column(String(length=256), unique=True) __mapper_args__ = { 'polymorphic_identity': 'attribute', 'polymorphic_on': type", "} class FloatAttribute(Attribute): __tablename__ = \"float_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value =", "Column(String(length=256), unique=True) __mapper_args__ = { 'polymorphic_identity': 'attribute', 'polymorphic_on': type } class BooleanAttribute(Attribute): __tablename__", "import Base class Attribute(Base): __tablename__ = \"attribute\" id = Column(Integer, autoincrement=True, primary_key=True, unique=True,", "type = Column(String(length=256), nullable=False) remote_reference = Column(String(256), nullable=False) key = Column(String(length=256), unique=True) __mapper_args__", "class BooleanAttribute(Attribute): __tablename__ = \"boolean_attribute\" id = 
Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Boolean)", "IntegerAttribute(Attribute): __tablename__ = \"integer_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Integer) __mapper_args__", "= Column(Integer) __mapper_args__ = { 'polymorphic_identity': int.__name__ } class FloatAttribute(Attribute): __tablename__ = \"float_attribute\"", "= Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Boolean) __mapper_args__ = { 'polymorphic_identity': bool.__name__ }", "= Column(Boolean) __mapper_args__ = { 'polymorphic_identity': bool.__name__ } class IntegerAttribute(Attribute): __tablename__ = \"integer_attribute\"", "= { 'polymorphic_identity': 'attribute', 'polymorphic_on': type } class BooleanAttribute(Attribute): __tablename__ = \"boolean_attribute\" id", "class FloatAttribute(Attribute): __tablename__ = \"float_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Float)", "\"attribute\" id = Column(Integer, autoincrement=True, primary_key=True, unique=True, nullable=False) type = Column(String(length=256), nullable=False) remote_reference", "Column(Integer) __mapper_args__ = { 'polymorphic_identity': int.__name__ } class FloatAttribute(Attribute): __tablename__ = \"float_attribute\" id", "= Column(String(256), nullable=False) key = Column(String(length=256), unique=True) __mapper_args__ = { 'polymorphic_identity': 'attribute', 'polymorphic_on':", "key = Column(String(length=256), unique=True) __mapper_args__ = { 'polymorphic_identity': 'attribute', 'polymorphic_on': type } class", "Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Integer) __mapper_args__ = { 'polymorphic_identity': int.__name__ } class", "Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Boolean) __mapper_args__ = { 'polymorphic_identity': bool.__name__ } class", "nullable=False) remote_reference = 
Column(String(256), nullable=False) key = Column(String(length=256), unique=True) __mapper_args__ = { 'polymorphic_identity':", "__tablename__ = \"string_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(String(length=4096)) __mapper_args__ =", "from sqlalchemy import Column, String, Boolean, ForeignKey, Integer, Float from ..db import Base", "= \"float_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Float) __mapper_args__ = {", "from ..db import Base class Attribute(Base): __tablename__ = \"attribute\" id = Column(Integer, autoincrement=True,", "__tablename__ = \"integer_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Integer) __mapper_args__ =", "Attribute(Base): __tablename__ = \"attribute\" id = Column(Integer, autoincrement=True, primary_key=True, unique=True, nullable=False) type =", "..db import Base class Attribute(Base): __tablename__ = \"attribute\" id = Column(Integer, autoincrement=True, primary_key=True,", "remote_reference = Column(String(256), nullable=False) key = Column(String(length=256), unique=True) __mapper_args__ = { 'polymorphic_identity': 'attribute',", "FloatAttribute(Attribute): __tablename__ = \"float_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Float) __mapper_args__", "__mapper_args__ = { 'polymorphic_identity': 'attribute', 'polymorphic_on': type } class BooleanAttribute(Attribute): __tablename__ = \"boolean_attribute\"", "ForeignKey, Integer, Float from ..db import Base class Attribute(Base): __tablename__ = \"attribute\" id", "__mapper_args__ = { 'polymorphic_identity': int.__name__ } class FloatAttribute(Attribute): __tablename__ = \"float_attribute\" id =", "value = Column(Integer) __mapper_args__ = { 'polymorphic_identity': int.__name__ } class FloatAttribute(Attribute): __tablename__ =", "= \"integer_attribute\" id = Column(Integer, 
ForeignKey('attribute.id'), primary_key=True) value = Column(Integer) __mapper_args__ = {", "class IntegerAttribute(Attribute): __tablename__ = \"integer_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value = Column(Integer)", "primary_key=True) value = Column(Integer) __mapper_args__ = { 'polymorphic_identity': int.__name__ } class FloatAttribute(Attribute): __tablename__", "} class BooleanAttribute(Attribute): __tablename__ = \"boolean_attribute\" id = Column(Integer, ForeignKey('attribute.id'), primary_key=True) value =", "= \"attribute\" id = Column(Integer, autoincrement=True, primary_key=True, unique=True, nullable=False) type = Column(String(length=256), nullable=False)" ]
[ "is None else filename ) out_path = os.path.join(destination, filename) with open(out_path, \"wb\") as", "return self._get( urllib.parse.urljoin(\"https://doi.org\", doi), headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"}, ).json() def _generate_file_name(self, doi): paper_meta = self._get_paper_meta(doi)", "new url \"\"\" if not self._available_base_url_list: raise ValueError(\"Ran out of valid sci-hub urls\")", "Query for a paper hosted by sci-hub \"\"\" response = self._post( self._base_url, data={\"request\":", "script.string ] return doi def query(self, query): \"\"\" Query for a paper hosted", "\"https://sci-hub.tw\" def __init__(self, proxy=None, fallback_base_url=FALLBACK_BASE_URL): self._sess = requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url = fallback_base_url self._available_base_url_list", "\"User-Agent\": \"Mozilla/5.0 (X11; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0\", } SCIHUB_NOW_URL = \"https://sci-hub.now.sh\" FALLBACK_BASE_URL", "LOG.addHandler(logging.NullHandler()) class SciHubClient: \"\"\" Client for accessing SciHub \"\"\" DEFAULT_HEADERS = { \"User-Agent\":", "def _get_available_scihub_urls(self): response = self._get(self.SCIHUB_NOW_URL, raise_for_status=False) try: response.raise_for_status() except requests.exceptions.HTTPError: LOG.debug(\"falling back to", "return { \"doi\": self._get_doi(parsed_response), \"pdf_url\": cleaned_url, } def _download_pdf(self, url): result = self._get(url)", "= self._get_paper_meta(doi) # date = \"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0])) ((year, _, _),) = paper_meta[\"published-print\"][\"date-parts\"] title", "self._sess.proxies = { \"http\": proxy, \"https\": proxy, } def _set_base_url(self): \"\"\" Pick a", "= self._get_available_scihub_urls() self._set_base_url() if proxy is not None: self._set_proxy(proxy) def _get(self, url, raise_for_status=True,", "{ \"doi\": self._get_doi(parsed_response), \"pdf_url\": 
cleaned_url, } def _download_pdf(self, url): result = self._get(url) if", "\"\"\" SciHub client \"\"\" import logging import os import random import urllib import", "headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}, ) parsed_response = BeautifulSoup(response.content, \"html.parser\") if parsed_response.find(\"div\").text.endswith(\"article not found\"): raise ValueError(f\"Article", "line in script.string.split(\"\\n\") if \"var doi\" in line ] for script in parsed_response.find_all(\"script\")", "{ \"User-Agent\": \"Mozilla/5.0 (X11; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0\", } SCIHUB_NOW_URL = \"https://sci-hub.now.sh\"", "\"https\": proxy, } def _set_base_url(self): \"\"\" Pick a random url from the available", "not a pdf\") return result.content def _get_paper_meta(self, doi): return self._get( urllib.parse.urljoin(\"https://doi.org\", doi), headers={\"Accept\":", "_set_base_url(self): \"\"\" Pick a random url from the available scihub urls set the", "url, raise_for_status=True, **kwargs): response = self._sess.get(url, **kwargs) if raise_for_status is True: response.raise_for_status() return", "**kwargs): response = self._sess.get(url, **kwargs) if raise_for_status is True: response.raise_for_status() return response def", "= a_tag[\"href\"] if ( \"sci-hub\" in link # pylint: disable=C0330 and link.startswith(\"https\") #", "BeautifulSoup LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class SciHubClient: \"\"\" Client for accessing SciHub \"\"\"", "BeautifulSoup(response.content, \"html.parser\") if parsed_response.find(\"div\").text.endswith(\"article not found\"): raise ValueError(f\"Article not found: {query}\") cleaned_url =", "self._get( urllib.parse.urljoin(\"https://doi.org\", doi), headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"}, ).json() def _generate_file_name(self, doi): paper_meta = self._get_paper_meta(doi) #", "LOG.debug(\"falling back to %s\", self._fallback_base_url) return 
[self._fallback_base_url] parsed_content = BeautifulSoup(response.content, \"html.parser\") urls =", "_set_proxy(self, proxy): self._sess.proxies = { \"http\": proxy, \"https\": proxy, } def _set_base_url(self): \"\"\"", "_get(self, url, raise_for_status=True, **kwargs): response = self._sess.get(url, **kwargs) if raise_for_status is True: response.raise_for_status()", "= self._download_pdf(query_result[\"pdf_url\"]) filename = ( self._generate_file_name(query_result[\"doi\"]) if filename is None else filename )", "a_tag in parsed_content.find_all(\"a\", href=True): link = a_tag[\"href\"] if ( \"sci-hub\" in link #", "None else filename ) out_path = os.path.join(destination, filename) with open(out_path, \"wb\") as out_fp:", "self._base_url = base_url LOG.debug(\"url changing to %s\", self._base_url) @staticmethod def _get_doi(parsed_response): ((doi,),) =", "rv:77.0) Gecko/20100101 Firefox/77.0\", } SCIHUB_NOW_URL = \"https://sci-hub.now.sh\" FALLBACK_BASE_URL = \"https://sci-hub.tw\" def __init__(self, proxy=None,", "response def _get_available_scihub_urls(self): response = self._get(self.SCIHUB_NOW_URL, raise_for_status=False) try: response.raise_for_status() except requests.exceptions.HTTPError: LOG.debug(\"falling back", "_post(self, url, raise_for_status=True, **kwargs): response = self._sess.post(url, **kwargs) if raise_for_status is True: response.raise_for_status()", "\"\"\" Query for a paper hosted by sci-hub \"\"\" response = self._post( self._base_url,", "doi), headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"}, ).json() def _generate_file_name(self, doi): paper_meta = self._get_paper_meta(doi) # date =", "response.raise_for_status() return response def _get_available_scihub_urls(self): response = self._get(self.SCIHUB_NOW_URL, raise_for_status=False) try: response.raise_for_status() except requests.exceptions.HTTPError:", "the available scihub urls set the current base url to the new url", "= urllib.parse.urlparse( 
urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\", ).geturl() return { \"doi\": self._get_doi(parsed_response), \"pdf_url\": cleaned_url, } def", "in parsed_content.find_all(\"a\", href=True): link = a_tag[\"href\"] if ( \"sci-hub\" in link # pylint:", "if raise_for_status is True: response.raise_for_status() return response def _get_available_scihub_urls(self): response = self._get(self.SCIHUB_NOW_URL, raise_for_status=False)", "= os.path.join(destination, filename) with open(out_path, \"wb\") as out_fp: out_fp.write(pdf_string) return {\"out_path\": out_path, **query_result}", "= self._sess.get(url, **kwargs) if raise_for_status is True: response.raise_for_status() return response def _post(self, url,", "= random.sample(self._get_available_scihub_urls(), 1) self._base_url = base_url LOG.debug(\"url changing to %s\", self._base_url) @staticmethod def", "proxy=None, fallback_base_url=FALLBACK_BASE_URL): self._sess = requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url = fallback_base_url self._available_base_url_list = self._get_available_scihub_urls() self._set_base_url()", "[] for a_tag in parsed_content.find_all(\"a\", href=True): link = a_tag[\"href\"] if ( \"sci-hub\" in", "= \"https://sci-hub.now.sh\" FALLBACK_BASE_URL = \"https://sci-hub.tw\" def __init__(self, proxy=None, fallback_base_url=FALLBACK_BASE_URL): self._sess = requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS)", "urllib.parse.urlparse( urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\", ).geturl() return { \"doi\": self._get_doi(parsed_response), \"pdf_url\": cleaned_url, } def _download_pdf(self,", "!= self.SCIHUB_NOW_URL # pylint: disable=C0330 ): urls.append(a_tag[\"href\"]) return urls def _set_proxy(self, proxy): self._sess.proxies", "of valid sci-hub urls\") (base_url,) = random.sample(self._get_available_scihub_urls(), 1) self._base_url = base_url 
LOG.debug(\"url changing", "proxy is not None: self._set_proxy(proxy) def _get(self, url, raise_for_status=True, **kwargs): response = self._sess.get(url,", "SciHubClient: \"\"\" Client for accessing SciHub \"\"\" DEFAULT_HEADERS = { \"User-Agent\": \"Mozilla/5.0 (X11;", "def _get_paper_meta(self, doi): return self._get( urllib.parse.urljoin(\"https://doi.org\", doi), headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"}, ).json() def _generate_file_name(self, doi):", "ValueError(\"File is not a pdf\") return result.content def _get_paper_meta(self, doi): return self._get( urllib.parse.urljoin(\"https://doi.org\",", "paper_meta[\"published-print\"][\"date-parts\"] title = paper_meta[\"title\"] # return f\"({date}) {title}.pdf\" return f\"({year}) {title}.pdf\" def download(self,", "class SciHubClient: \"\"\" Client for accessing SciHub \"\"\" DEFAULT_HEADERS = { \"User-Agent\": \"Mozilla/5.0", "set the current base url to the new url \"\"\" if not self._available_base_url_list:", "import urllib import requests from bs4 import BeautifulSoup LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class", "paper from sci-hub \"\"\" query_result = self.query(query) pdf_string = self._download_pdf(query_result[\"pdf_url\"]) filename = (", "out_path = os.path.join(destination, filename) with open(out_path, \"wb\") as out_fp: out_fp.write(pdf_string) return {\"out_path\": out_path,", "parsed_response = BeautifulSoup(response.content, \"html.parser\") if parsed_response.find(\"div\").text.endswith(\"article not found\"): raise ValueError(f\"Article not found: {query}\")", "result.content def _get_paper_meta(self, doi): return self._get( urllib.parse.urljoin(\"https://doi.org\", doi), headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"}, ).json() def _generate_file_name(self,", "proxy, } def _set_base_url(self): \"\"\" Pick a random url from the available scihub", "raise_for_status=True, **kwargs): response = self._sess.get(url, 
**kwargs) if raise_for_status is True: response.raise_for_status() return response", "the current base url to the new url \"\"\" if not self._available_base_url_list: raise", "result = self._get(url) if result.headers[\"Content-Type\"] != \"application/pdf\": raise ValueError(\"File is not a pdf\")", "= self._sess.post(url, **kwargs) if raise_for_status is True: response.raise_for_status() return response def _get_available_scihub_urls(self): response", "_download_pdf(self, url): result = self._get(url) if result.headers[\"Content-Type\"] != \"application/pdf\": raise ValueError(\"File is not", "} def _set_base_url(self): \"\"\" Pick a random url from the available scihub urls", ").json() def _generate_file_name(self, doi): paper_meta = self._get_paper_meta(doi) # date = \"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0])) ((year,", "not self._available_base_url_list: raise ValueError(\"Ran out of valid sci-hub urls\") (base_url,) = random.sample(self._get_available_scihub_urls(), 1)", "urls.append(a_tag[\"href\"]) return urls def _set_proxy(self, proxy): self._sess.proxies = { \"http\": proxy, \"https\": proxy,", "self._sess = requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url = fallback_base_url self._available_base_url_list = self._get_available_scihub_urls() self._set_base_url() if proxy", "raise ValueError(f\"Article not found: {query}\") cleaned_url = urllib.parse.urlparse( urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\", ).geturl() return {", "available scihub urls set the current base url to the new url \"\"\"", "to %s\", self._fallback_base_url) return [self._fallback_base_url] parsed_content = BeautifulSoup(response.content, \"html.parser\") urls = [] for", "raise_for_status=False) try: response.raise_for_status() except requests.exceptions.HTTPError: LOG.debug(\"falling back to %s\", self._fallback_base_url) return [self._fallback_base_url] parsed_content", 
"\"html.parser\") urls = [] for a_tag in parsed_content.find_all(\"a\", href=True): link = a_tag[\"href\"] if", "import BeautifulSoup LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class SciHubClient: \"\"\" Client for accessing SciHub", "proxy, \"https\": proxy, } def _set_base_url(self): \"\"\" Pick a random url from the", "the new url \"\"\" if not self._available_base_url_list: raise ValueError(\"Ran out of valid sci-hub", "= logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class SciHubClient: \"\"\" Client for accessing SciHub \"\"\" DEFAULT_HEADERS =", "def _generate_file_name(self, doi): paper_meta = self._get_paper_meta(doi) # date = \"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0])) ((year, _,", "pylint: disable=C0330 and link != self.SCIHUB_NOW_URL # pylint: disable=C0330 ): urls.append(a_tag[\"href\"]) return urls", "script.string and \"var doi\" in script.string ] return doi def query(self, query): \"\"\"", "is True: response.raise_for_status() return response def _post(self, url, raise_for_status=True, **kwargs): response = self._sess.post(url,", "def _get(self, url, raise_for_status=True, **kwargs): response = self._sess.get(url, **kwargs) if raise_for_status is True:", "logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class SciHubClient: \"\"\" Client for accessing SciHub \"\"\" DEFAULT_HEADERS = {", "} SCIHUB_NOW_URL = \"https://sci-hub.now.sh\" FALLBACK_BASE_URL = \"https://sci-hub.tw\" def __init__(self, proxy=None, fallback_base_url=FALLBACK_BASE_URL): self._sess =", "{query}\") cleaned_url = urllib.parse.urlparse( urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\", ).geturl() return { \"doi\": self._get_doi(parsed_response), \"pdf_url\": cleaned_url,", "os import random import urllib import requests from bs4 import BeautifulSoup LOG =", "BeautifulSoup(response.content, \"html.parser\") urls = [] for a_tag in 
parsed_content.find_all(\"a\", href=True): link = a_tag[\"href\"]", "download(self, query, destination=\"\", filename=None): \"\"\" Download paper from sci-hub \"\"\" query_result = self.query(query)", "self._available_base_url_list: raise ValueError(\"Ran out of valid sci-hub urls\") (base_url,) = random.sample(self._get_available_scihub_urls(), 1) self._base_url", "SCIHUB_NOW_URL = \"https://sci-hub.now.sh\" FALLBACK_BASE_URL = \"https://sci-hub.tw\" def __init__(self, proxy=None, fallback_base_url=FALLBACK_BASE_URL): self._sess = requests.Session()", "url from the available scihub urls set the current base url to the", "changing to %s\", self._base_url) @staticmethod def _get_doi(parsed_response): ((doi,),) = [ [ line.strip().split(\"'\")[1] for", "try: response.raise_for_status() except requests.exceptions.HTTPError: LOG.debug(\"falling back to %s\", self._fallback_base_url) return [self._fallback_base_url] parsed_content =", "disable=C0330 ): urls.append(a_tag[\"href\"]) return urls def _set_proxy(self, proxy): self._sess.proxies = { \"http\": proxy,", "self._base_url, data={\"request\": query}, headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}, ) parsed_response = BeautifulSoup(response.content, \"html.parser\") if parsed_response.find(\"div\").text.endswith(\"article not", "url, raise_for_status=True, **kwargs): response = self._sess.post(url, **kwargs) if raise_for_status is True: response.raise_for_status() return", "data={\"request\": query}, headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}, ) parsed_response = BeautifulSoup(response.content, \"html.parser\") if parsed_response.find(\"div\").text.endswith(\"article not found\"):", "query(self, query): \"\"\" Query for a paper hosted by sci-hub \"\"\" response =", "self._get_paper_meta(doi) # date = \"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0])) ((year, _, _),) = paper_meta[\"published-print\"][\"date-parts\"] title =", "None: self._set_proxy(proxy) def 
_get(self, url, raise_for_status=True, **kwargs): response = self._sess.get(url, **kwargs) if raise_for_status", "in line ] for script in parsed_response.find_all(\"script\") if script.string and \"var doi\" in", "\"application/pdf\": raise ValueError(\"File is not a pdf\") return result.content def _get_paper_meta(self, doi): return", "filename = ( self._generate_file_name(query_result[\"doi\"]) if filename is None else filename ) out_path =", "self._download_pdf(query_result[\"pdf_url\"]) filename = ( self._generate_file_name(query_result[\"doi\"]) if filename is None else filename ) out_path", "back to %s\", self._fallback_base_url) return [self._fallback_base_url] parsed_content = BeautifulSoup(response.content, \"html.parser\") urls = []", "True: response.raise_for_status() return response def _post(self, url, raise_for_status=True, **kwargs): response = self._sess.post(url, **kwargs)", "random import urllib import requests from bs4 import BeautifulSoup LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler())", "response.raise_for_status() return response def _post(self, url, raise_for_status=True, **kwargs): response = self._sess.post(url, **kwargs) if", "[ [ line.strip().split(\"'\")[1] for line in script.string.split(\"\\n\") if \"var doi\" in line ]", "_, _),) = paper_meta[\"published-print\"][\"date-parts\"] title = paper_meta[\"title\"] # return f\"({date}) {title}.pdf\" return f\"({year})", "\"html.parser\") if parsed_response.find(\"div\").text.endswith(\"article not found\"): raise ValueError(f\"Article not found: {query}\") cleaned_url = urllib.parse.urlparse(", "= fallback_base_url self._available_base_url_list = self._get_available_scihub_urls() self._set_base_url() if proxy is not None: self._set_proxy(proxy) def", "self._set_base_url() if proxy is not None: self._set_proxy(proxy) def _get(self, url, raise_for_status=True, **kwargs): response", ").geturl() return { \"doi\": self._get_doi(parsed_response), \"pdf_url\": cleaned_url, } 
def _download_pdf(self, url): result =", "headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"}, ).json() def _generate_file_name(self, doi): paper_meta = self._get_paper_meta(doi) # date = \"-\".join(map(str,", "random url from the available scihub urls set the current base url to", "a_tag[\"href\"] if ( \"sci-hub\" in link # pylint: disable=C0330 and link.startswith(\"https\") # pylint:", "disable=C0330 and link.startswith(\"https\") # pylint: disable=C0330 and link != self.SCIHUB_NOW_URL # pylint: disable=C0330", "out of valid sci-hub urls\") (base_url,) = random.sample(self._get_available_scihub_urls(), 1) self._base_url = base_url LOG.debug(\"url", "def query(self, query): \"\"\" Query for a paper hosted by sci-hub \"\"\" response", "a random url from the available scihub urls set the current base url", "raise_for_status is True: response.raise_for_status() return response def _post(self, url, raise_for_status=True, **kwargs): response =", "script in parsed_response.find_all(\"script\") if script.string and \"var doi\" in script.string ] return doi", ") out_path = os.path.join(destination, filename) with open(out_path, \"wb\") as out_fp: out_fp.write(pdf_string) return {\"out_path\":", "SciHub client \"\"\" import logging import os import random import urllib import requests", "self._available_base_url_list = self._get_available_scihub_urls() self._set_base_url() if proxy is not None: self._set_proxy(proxy) def _get(self, url,", "result.headers[\"Content-Type\"] != \"application/pdf\": raise ValueError(\"File is not a pdf\") return result.content def _get_paper_meta(self,", "cleaned_url = urllib.parse.urlparse( urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\", ).geturl() return { \"doi\": self._get_doi(parsed_response), \"pdf_url\": cleaned_url, }", "if result.headers[\"Content-Type\"] != \"application/pdf\": raise ValueError(\"File is not a pdf\") return result.content def", "def _post(self, url, 
raise_for_status=True, **kwargs): response = self._sess.post(url, **kwargs) if raise_for_status is True:", "Client for accessing SciHub \"\"\" DEFAULT_HEADERS = { \"User-Agent\": \"Mozilla/5.0 (X11; Linux x86_64;", "Firefox/77.0\", } SCIHUB_NOW_URL = \"https://sci-hub.now.sh\" FALLBACK_BASE_URL = \"https://sci-hub.tw\" def __init__(self, proxy=None, fallback_base_url=FALLBACK_BASE_URL): self._sess", "except requests.exceptions.HTTPError: LOG.debug(\"falling back to %s\", self._fallback_base_url) return [self._fallback_base_url] parsed_content = BeautifulSoup(response.content, \"html.parser\")", "urls = [] for a_tag in parsed_content.find_all(\"a\", href=True): link = a_tag[\"href\"] if (", "= self.query(query) pdf_string = self._download_pdf(query_result[\"pdf_url\"]) filename = ( self._generate_file_name(query_result[\"doi\"]) if filename is None", "line.strip().split(\"'\")[1] for line in script.string.split(\"\\n\") if \"var doi\" in line ] for script", "response def _post(self, url, raise_for_status=True, **kwargs): response = self._sess.post(url, **kwargs) if raise_for_status is", "accessing SciHub \"\"\" DEFAULT_HEADERS = { \"User-Agent\": \"Mozilla/5.0 (X11; Linux x86_64; rv:77.0) Gecko/20100101", "\"application/x-www-form-urlencoded\"}, ) parsed_response = BeautifulSoup(response.content, \"html.parser\") if parsed_response.find(\"div\").text.endswith(\"article not found\"): raise ValueError(f\"Article not", "= \"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0])) ((year, _, _),) = paper_meta[\"published-print\"][\"date-parts\"] title = paper_meta[\"title\"] # return", "urls\") (base_url,) = random.sample(self._get_available_scihub_urls(), 1) self._base_url = base_url LOG.debug(\"url changing to %s\", self._base_url)", "= self._get(url) if result.headers[\"Content-Type\"] != \"application/pdf\": raise ValueError(\"File is not a pdf\") return", "return response def _get_available_scihub_urls(self): response = self._get(self.SCIHUB_NOW_URL, 
raise_for_status=False) try: response.raise_for_status() except requests.exceptions.HTTPError: LOG.debug(\"falling", "{ \"http\": proxy, \"https\": proxy, } def _set_base_url(self): \"\"\" Pick a random url", "and \"var doi\" in script.string ] return doi def query(self, query): \"\"\" Query", "in link # pylint: disable=C0330 and link.startswith(\"https\") # pylint: disable=C0330 and link !=", "_),) = paper_meta[\"published-print\"][\"date-parts\"] title = paper_meta[\"title\"] # return f\"({date}) {title}.pdf\" return f\"({year}) {title}.pdf\"", "\"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0])) ((year, _, _),) = paper_meta[\"published-print\"][\"date-parts\"] title = paper_meta[\"title\"] # return f\"({date})", "raise_for_status is True: response.raise_for_status() return response def _get_available_scihub_urls(self): response = self._get(self.SCIHUB_NOW_URL, raise_for_status=False) try:", "self._sess.get(url, **kwargs) if raise_for_status is True: response.raise_for_status() return response def _post(self, url, raise_for_status=True,", "LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class SciHubClient: \"\"\" Client for accessing SciHub \"\"\" DEFAULT_HEADERS", "fallback_base_url=FALLBACK_BASE_URL): self._sess = requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url = fallback_base_url self._available_base_url_list = self._get_available_scihub_urls() self._set_base_url() if", "requests from bs4 import BeautifulSoup LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class SciHubClient: \"\"\" Client", "base_url LOG.debug(\"url changing to %s\", self._base_url) @staticmethod def _get_doi(parsed_response): ((doi,),) = [ [", "pylint: disable=C0330 and link.startswith(\"https\") # pylint: disable=C0330 and link != self.SCIHUB_NOW_URL # pylint:", "!= \"application/pdf\": raise ValueError(\"File is not a pdf\") return result.content def _get_paper_meta(self, doi):", "= 
base_url LOG.debug(\"url changing to %s\", self._base_url) @staticmethod def _get_doi(parsed_response): ((doi,),) = [", "True: response.raise_for_status() return response def _get_available_scihub_urls(self): response = self._get(self.SCIHUB_NOW_URL, raise_for_status=False) try: response.raise_for_status() except", "Gecko/20100101 Firefox/77.0\", } SCIHUB_NOW_URL = \"https://sci-hub.now.sh\" FALLBACK_BASE_URL = \"https://sci-hub.tw\" def __init__(self, proxy=None, fallback_base_url=FALLBACK_BASE_URL):", "\"https://sci-hub.now.sh\" FALLBACK_BASE_URL = \"https://sci-hub.tw\" def __init__(self, proxy=None, fallback_base_url=FALLBACK_BASE_URL): self._sess = requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url", "logging import os import random import urllib import requests from bs4 import BeautifulSoup", "pylint: disable=C0330 ): urls.append(a_tag[\"href\"]) return urls def _set_proxy(self, proxy): self._sess.proxies = { \"http\":", "not found\"): raise ValueError(f\"Article not found: {query}\") cleaned_url = urllib.parse.urlparse( urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\", ).geturl()", "\"Mozilla/5.0 (X11; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0\", } SCIHUB_NOW_URL = \"https://sci-hub.now.sh\" FALLBACK_BASE_URL =", "client \"\"\" import logging import os import random import urllib import requests from", "\"doi\": self._get_doi(parsed_response), \"pdf_url\": cleaned_url, } def _download_pdf(self, url): result = self._get(url) if result.headers[\"Content-Type\"]", "return response def _post(self, url, raise_for_status=True, **kwargs): response = self._sess.post(url, **kwargs) if raise_for_status", "href=True): link = a_tag[\"href\"] if ( \"sci-hub\" in link # pylint: disable=C0330 and", "] for script in parsed_response.find_all(\"script\") if script.string and \"var doi\" in script.string ]", "FALLBACK_BASE_URL = \"https://sci-hub.tw\" def __init__(self, proxy=None, 
fallback_base_url=FALLBACK_BASE_URL): self._sess = requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url =", "paper hosted by sci-hub \"\"\" response = self._post( self._base_url, data={\"request\": query}, headers={\"Content-Type\": \"application/x-www-form-urlencoded\"},", "\"\"\" Client for accessing SciHub \"\"\" DEFAULT_HEADERS = { \"User-Agent\": \"Mozilla/5.0 (X11; Linux", "self._sess.post(url, **kwargs) if raise_for_status is True: response.raise_for_status() return response def _get_available_scihub_urls(self): response =", "-*- coding: utf-8 -*- \"\"\" SciHub client \"\"\" import logging import os import", "response.raise_for_status() except requests.exceptions.HTTPError: LOG.debug(\"falling back to %s\", self._fallback_base_url) return [self._fallback_base_url] parsed_content = BeautifulSoup(response.content,", "bs4 import BeautifulSoup LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class SciHubClient: \"\"\" Client for accessing", "title = paper_meta[\"title\"] # return f\"({date}) {title}.pdf\" return f\"({year}) {title}.pdf\" def download(self, query,", "from sci-hub \"\"\" query_result = self.query(query) pdf_string = self._download_pdf(query_result[\"pdf_url\"]) filename = ( self._generate_file_name(query_result[\"doi\"])", "requests.exceptions.HTTPError: LOG.debug(\"falling back to %s\", self._fallback_base_url) return [self._fallback_base_url] parsed_content = BeautifulSoup(response.content, \"html.parser\") urls", "is not None: self._set_proxy(proxy) def _get(self, url, raise_for_status=True, **kwargs): response = self._sess.get(url, **kwargs)", "def _download_pdf(self, url): result = self._get(url) if result.headers[\"Content-Type\"] != \"application/pdf\": raise ValueError(\"File is", "from bs4 import BeautifulSoup LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class SciHubClient: \"\"\" Client for", "return f\"({date}) {title}.pdf\" return f\"({year}) 
{title}.pdf\" def download(self, query, destination=\"\", filename=None): \"\"\" Download", "utf-8 -*- \"\"\" SciHub client \"\"\" import logging import os import random import", "= self._post( self._base_url, data={\"request\": query}, headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}, ) parsed_response = BeautifulSoup(response.content, \"html.parser\") if", "query): \"\"\" Query for a paper hosted by sci-hub \"\"\" response = self._post(", "urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\", ).geturl() return { \"doi\": self._get_doi(parsed_response), \"pdf_url\": cleaned_url, } def _download_pdf(self, url):", "query_result = self.query(query) pdf_string = self._download_pdf(query_result[\"pdf_url\"]) filename = ( self._generate_file_name(query_result[\"doi\"]) if filename is", "# date = \"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0])) ((year, _, _),) = paper_meta[\"published-print\"][\"date-parts\"] title = paper_meta[\"title\"]", "\"\"\" DEFAULT_HEADERS = { \"User-Agent\": \"Mozilla/5.0 (X11; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0\", }", "\"\"\" Pick a random url from the available scihub urls set the current", "for a_tag in parsed_content.find_all(\"a\", href=True): link = a_tag[\"href\"] if ( \"sci-hub\" in link", "((doi,),) = [ [ line.strip().split(\"'\")[1] for line in script.string.split(\"\\n\") if \"var doi\" in", "\"http\": proxy, \"https\": proxy, } def _set_base_url(self): \"\"\" Pick a random url from", "_get_doi(parsed_response): ((doi,),) = [ [ line.strip().split(\"'\")[1] for line in script.string.split(\"\\n\") if \"var doi\"", "filename=None): \"\"\" Download paper from sci-hub \"\"\" query_result = self.query(query) pdf_string = self._download_pdf(query_result[\"pdf_url\"])", "self.SCIHUB_NOW_URL # pylint: disable=C0330 ): urls.append(a_tag[\"href\"]) return urls def _set_proxy(self, proxy): self._sess.proxies =", "DEFAULT_HEADERS = { \"User-Agent\": 
\"Mozilla/5.0 (X11; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0\", } SCIHUB_NOW_URL", "import requests from bs4 import BeautifulSoup LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class SciHubClient: \"\"\"", "1) self._base_url = base_url LOG.debug(\"url changing to %s\", self._base_url) @staticmethod def _get_doi(parsed_response): ((doi,),)", "= requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url = fallback_base_url self._available_base_url_list = self._get_available_scihub_urls() self._set_base_url() if proxy is", "import random import urllib import requests from bs4 import BeautifulSoup LOG = logging.getLogger(__name__)", "a paper hosted by sci-hub \"\"\" response = self._post( self._base_url, data={\"request\": query}, headers={\"Content-Type\":", "\"sci-hub\" in link # pylint: disable=C0330 and link.startswith(\"https\") # pylint: disable=C0330 and link", "response = self._get(self.SCIHUB_NOW_URL, raise_for_status=False) try: response.raise_for_status() except requests.exceptions.HTTPError: LOG.debug(\"falling back to %s\", self._fallback_base_url)", "to %s\", self._base_url) @staticmethod def _get_doi(parsed_response): ((doi,),) = [ [ line.strip().split(\"'\")[1] for line", "self._get(self.SCIHUB_NOW_URL, raise_for_status=False) try: response.raise_for_status() except requests.exceptions.HTTPError: LOG.debug(\"falling back to %s\", self._fallback_base_url) return [self._fallback_base_url]", "\"\"\" Download paper from sci-hub \"\"\" query_result = self.query(query) pdf_string = self._download_pdf(query_result[\"pdf_url\"]) filename", "ValueError(\"Ran out of valid sci-hub urls\") (base_url,) = random.sample(self._get_available_scihub_urls(), 1) self._base_url = base_url", "sci-hub urls\") (base_url,) = random.sample(self._get_available_scihub_urls(), 1) self._base_url = base_url LOG.debug(\"url changing to %s\",", "in script.string.split(\"\\n\") if \"var doi\" in line ] for script in 
parsed_response.find_all(\"script\") if", "Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0\", } SCIHUB_NOW_URL = \"https://sci-hub.now.sh\" FALLBACK_BASE_URL = \"https://sci-hub.tw\" def", "# -*- coding: utf-8 -*- \"\"\" SciHub client \"\"\" import logging import os", "by sci-hub \"\"\" response = self._post( self._base_url, data={\"request\": query}, headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}, ) parsed_response", "urllib import requests from bs4 import BeautifulSoup LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) class SciHubClient:", "self._fallback_base_url = fallback_base_url self._available_base_url_list = self._get_available_scihub_urls() self._set_base_url() if proxy is not None: self._set_proxy(proxy)", "url): result = self._get(url) if result.headers[\"Content-Type\"] != \"application/pdf\": raise ValueError(\"File is not a", "(base_url,) = random.sample(self._get_available_scihub_urls(), 1) self._base_url = base_url LOG.debug(\"url changing to %s\", self._base_url) @staticmethod", "= ( self._generate_file_name(query_result[\"doi\"]) if filename is None else filename ) out_path = os.path.join(destination,", "base url to the new url \"\"\" if not self._available_base_url_list: raise ValueError(\"Ran out", "valid sci-hub urls\") (base_url,) = random.sample(self._get_available_scihub_urls(), 1) self._base_url = base_url LOG.debug(\"url changing to", "pdf\") return result.content def _get_paper_meta(self, doi): return self._get( urllib.parse.urljoin(\"https://doi.org\", doi), headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"}, ).json()", "scheme=\"https\", ).geturl() return { \"doi\": self._get_doi(parsed_response), \"pdf_url\": cleaned_url, } def _download_pdf(self, url): result", "random.sample(self._get_available_scihub_urls(), 1) self._base_url = base_url LOG.debug(\"url changing to %s\", self._base_url) @staticmethod def _get_doi(parsed_response):", "filename ) out_path = 
os.path.join(destination, filename) with open(out_path, \"wb\") as out_fp: out_fp.write(pdf_string) return", "is not a pdf\") return result.content def _get_paper_meta(self, doi): return self._get( urllib.parse.urljoin(\"https://doi.org\", doi),", "# pylint: disable=C0330 ): urls.append(a_tag[\"href\"]) return urls def _set_proxy(self, proxy): self._sess.proxies = {", "self._base_url) @staticmethod def _get_doi(parsed_response): ((doi,),) = [ [ line.strip().split(\"'\")[1] for line in script.string.split(\"\\n\")", "parsed_response.find_all(\"script\") if script.string and \"var doi\" in script.string ] return doi def query(self,", "hosted by sci-hub \"\"\" response = self._post( self._base_url, data={\"request\": query}, headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}, )", "return f\"({year}) {title}.pdf\" def download(self, query, destination=\"\", filename=None): \"\"\" Download paper from sci-hub", "filename is None else filename ) out_path = os.path.join(destination, filename) with open(out_path, \"wb\")", "def _get_doi(parsed_response): ((doi,),) = [ [ line.strip().split(\"'\")[1] for line in script.string.split(\"\\n\") if \"var", "else filename ) out_path = os.path.join(destination, filename) with open(out_path, \"wb\") as out_fp: out_fp.write(pdf_string)", "[ line.strip().split(\"'\")[1] for line in script.string.split(\"\\n\") if \"var doi\" in line ] for", "def download(self, query, destination=\"\", filename=None): \"\"\" Download paper from sci-hub \"\"\" query_result =", "((year, _, _),) = paper_meta[\"published-print\"][\"date-parts\"] title = paper_meta[\"title\"] # return f\"({date}) {title}.pdf\" return", "(X11; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0\", } SCIHUB_NOW_URL = \"https://sci-hub.now.sh\" FALLBACK_BASE_URL = \"https://sci-hub.tw\"", "in script.string ] return doi def query(self, query): \"\"\" Query for a paper", "f\"({year}) {title}.pdf\" def download(self, query, destination=\"\", filename=None): \"\"\" 
Download paper from sci-hub \"\"\"", "parsed_response.find(\"div\").text.endswith(\"article not found\"): raise ValueError(f\"Article not found: {query}\") cleaned_url = urllib.parse.urlparse( urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\",", "[self._fallback_base_url] parsed_content = BeautifulSoup(response.content, \"html.parser\") urls = [] for a_tag in parsed_content.find_all(\"a\", href=True):", "if \"var doi\" in line ] for script in parsed_response.find_all(\"script\") if script.string and", "not found: {query}\") cleaned_url = urllib.parse.urlparse( urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\", ).geturl() return { \"doi\": self._get_doi(parsed_response),", "if raise_for_status is True: response.raise_for_status() return response def _post(self, url, raise_for_status=True, **kwargs): response", "raise_for_status=True, **kwargs): response = self._sess.post(url, **kwargs) if raise_for_status is True: response.raise_for_status() return response", "_get_paper_meta(self, doi): return self._get( urllib.parse.urljoin(\"https://doi.org\", doi), headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"}, ).json() def _generate_file_name(self, doi): paper_meta", "url to the new url \"\"\" if not self._available_base_url_list: raise ValueError(\"Ran out of", "_generate_file_name(self, doi): paper_meta = self._get_paper_meta(doi) # date = \"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0])) ((year, _, _),)", "= { \"User-Agent\": \"Mozilla/5.0 (X11; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0\", } SCIHUB_NOW_URL =", "= \"https://sci-hub.tw\" def __init__(self, proxy=None, fallback_base_url=FALLBACK_BASE_URL): self._sess = requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url = fallback_base_url", "found: {query}\") cleaned_url = urllib.parse.urlparse( 
urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\", ).geturl() return { \"doi\": self._get_doi(parsed_response), \"pdf_url\":", "cleaned_url, } def _download_pdf(self, url): result = self._get(url) if result.headers[\"Content-Type\"] != \"application/pdf\": raise", "link = a_tag[\"href\"] if ( \"sci-hub\" in link # pylint: disable=C0330 and link.startswith(\"https\")", "doi\" in script.string ] return doi def query(self, query): \"\"\" Query for a", "-*- \"\"\" SciHub client \"\"\" import logging import os import random import urllib", "raise ValueError(\"File is not a pdf\") return result.content def _get_paper_meta(self, doi): return self._get(", "**kwargs) if raise_for_status is True: response.raise_for_status() return response def _get_available_scihub_urls(self): response = self._get(self.SCIHUB_NOW_URL,", "): urls.append(a_tag[\"href\"]) return urls def _set_proxy(self, proxy): self._sess.proxies = { \"http\": proxy, \"https\":", "parsed_content = BeautifulSoup(response.content, \"html.parser\") urls = [] for a_tag in parsed_content.find_all(\"a\", href=True): link", "paper_meta[\"indexed\"][\"date-parts\"][0])) ((year, _, _),) = paper_meta[\"published-print\"][\"date-parts\"] title = paper_meta[\"title\"] # return f\"({date}) {title}.pdf\"", "\"\"\" import logging import os import random import urllib import requests from bs4", "ValueError(f\"Article not found: {query}\") cleaned_url = urllib.parse.urlparse( urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\", ).geturl() return { \"doi\":", "urls set the current base url to the new url \"\"\" if not", "url \"\"\" if not self._available_base_url_list: raise ValueError(\"Ran out of valid sci-hub urls\") (base_url,)", "for line in script.string.split(\"\\n\") if \"var doi\" in line ] for script in", "link # pylint: disable=C0330 and link.startswith(\"https\") # pylint: disable=C0330 and link != self.SCIHUB_NOW_URL", 
"script.string.split(\"\\n\") if \"var doi\" in line ] for script in parsed_response.find_all(\"script\") if script.string", "for script in parsed_response.find_all(\"script\") if script.string and \"var doi\" in script.string ] return", "\"var doi\" in script.string ] return doi def query(self, query): \"\"\" Query for", "raise ValueError(\"Ran out of valid sci-hub urls\") (base_url,) = random.sample(self._get_available_scihub_urls(), 1) self._base_url =", "if proxy is not None: self._set_proxy(proxy) def _get(self, url, raise_for_status=True, **kwargs): response =", "doi): paper_meta = self._get_paper_meta(doi) # date = \"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0])) ((year, _, _),) =", "urllib.parse.urljoin(\"https://doi.org\", doi), headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"}, ).json() def _generate_file_name(self, doi): paper_meta = self._get_paper_meta(doi) # date", "return doi def query(self, query): \"\"\" Query for a paper hosted by sci-hub", "response = self._sess.post(url, **kwargs) if raise_for_status is True: response.raise_for_status() return response def _get_available_scihub_urls(self):", "( \"sci-hub\" in link # pylint: disable=C0330 and link.startswith(\"https\") # pylint: disable=C0330 and", "return [self._fallback_base_url] parsed_content = BeautifulSoup(response.content, \"html.parser\") urls = [] for a_tag in parsed_content.find_all(\"a\",", "= paper_meta[\"title\"] # return f\"({date}) {title}.pdf\" return f\"({year}) {title}.pdf\" def download(self, query, destination=\"\",", "doi def query(self, query): \"\"\" Query for a paper hosted by sci-hub \"\"\"", "import os import random import urllib import requests from bs4 import BeautifulSoup LOG", "**kwargs) if raise_for_status is True: response.raise_for_status() return response def _post(self, url, raise_for_status=True, **kwargs):", "{title}.pdf\" def download(self, query, destination=\"\", filename=None): \"\"\" Download paper from sci-hub \"\"\" 
query_result", "] return doi def query(self, query): \"\"\" Query for a paper hosted by", "doi): return self._get( urllib.parse.urljoin(\"https://doi.org\", doi), headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"}, ).json() def _generate_file_name(self, doi): paper_meta =", "%s\", self._fallback_base_url) return [self._fallback_base_url] parsed_content = BeautifulSoup(response.content, \"html.parser\") urls = [] for a_tag", "paper_meta[\"title\"] # return f\"({date}) {title}.pdf\" return f\"({year}) {title}.pdf\" def download(self, query, destination=\"\", filename=None):", "response = self._sess.get(url, **kwargs) if raise_for_status is True: response.raise_for_status() return response def _post(self,", "for accessing SciHub \"\"\" DEFAULT_HEADERS = { \"User-Agent\": \"Mozilla/5.0 (X11; Linux x86_64; rv:77.0)", "\"\"\" if not self._available_base_url_list: raise ValueError(\"Ran out of valid sci-hub urls\") (base_url,) =", "if parsed_response.find(\"div\").text.endswith(\"article not found\"): raise ValueError(f\"Article not found: {query}\") cleaned_url = urllib.parse.urlparse( urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url,", "fallback_base_url self._available_base_url_list = self._get_available_scihub_urls() self._set_base_url() if proxy is not None: self._set_proxy(proxy) def _get(self,", "self._get_available_scihub_urls() self._set_base_url() if proxy is not None: self._set_proxy(proxy) def _get(self, url, raise_for_status=True, **kwargs):", "Pick a random url from the available scihub urls set the current base", "disable=C0330 and link != self.SCIHUB_NOW_URL # pylint: disable=C0330 ): urls.append(a_tag[\"href\"]) return urls def", "requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url = fallback_base_url self._available_base_url_list = self._get_available_scihub_urls() self._set_base_url() if proxy is not", "self._generate_file_name(query_result[\"doi\"]) if filename is None else 
filename ) out_path = os.path.join(destination, filename) with", "f\"({date}) {title}.pdf\" return f\"({year}) {title}.pdf\" def download(self, query, destination=\"\", filename=None): \"\"\" Download paper", "__init__(self, proxy=None, fallback_base_url=FALLBACK_BASE_URL): self._sess = requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url = fallback_base_url self._available_base_url_list = self._get_available_scihub_urls()", "def __init__(self, proxy=None, fallback_base_url=FALLBACK_BASE_URL): self._sess = requests.Session() self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url = fallback_base_url self._available_base_url_list =", "not None: self._set_proxy(proxy) def _get(self, url, raise_for_status=True, **kwargs): response = self._sess.get(url, **kwargs) if", "self._sess.headers.update(self.DEFAULT_HEADERS) self._fallback_base_url = fallback_base_url self._available_base_url_list = self._get_available_scihub_urls() self._set_base_url() if proxy is not None:", "to the new url \"\"\" if not self._available_base_url_list: raise ValueError(\"Ran out of valid", "= BeautifulSoup(response.content, \"html.parser\") urls = [] for a_tag in parsed_content.find_all(\"a\", href=True): link =", "LOG.debug(\"url changing to %s\", self._base_url) @staticmethod def _get_doi(parsed_response): ((doi,),) = [ [ line.strip().split(\"'\")[1]", "self._fallback_base_url) return [self._fallback_base_url] parsed_content = BeautifulSoup(response.content, \"html.parser\") urls = [] for a_tag in", "# pylint: disable=C0330 and link.startswith(\"https\") # pylint: disable=C0330 and link != self.SCIHUB_NOW_URL #", "is True: response.raise_for_status() return response def _get_available_scihub_urls(self): response = self._get(self.SCIHUB_NOW_URL, raise_for_status=False) try: response.raise_for_status()", "self._get(url) if result.headers[\"Content-Type\"] != \"application/pdf\": raise ValueError(\"File is not a pdf\") return 
result.content", "and link.startswith(\"https\") # pylint: disable=C0330 and link != self.SCIHUB_NOW_URL # pylint: disable=C0330 ):", "if filename is None else filename ) out_path = os.path.join(destination, filename) with open(out_path,", "# pylint: disable=C0330 and link != self.SCIHUB_NOW_URL # pylint: disable=C0330 ): urls.append(a_tag[\"href\"]) return", "if script.string and \"var doi\" in script.string ] return doi def query(self, query):", "doi\" in line ] for script in parsed_response.find_all(\"script\") if script.string and \"var doi\"", "= paper_meta[\"published-print\"][\"date-parts\"] title = paper_meta[\"title\"] # return f\"({date}) {title}.pdf\" return f\"({year}) {title}.pdf\" def", "sci-hub \"\"\" query_result = self.query(query) pdf_string = self._download_pdf(query_result[\"pdf_url\"]) filename = ( self._generate_file_name(query_result[\"doi\"]) if", ") parsed_response = BeautifulSoup(response.content, \"html.parser\") if parsed_response.find(\"div\").text.endswith(\"article not found\"): raise ValueError(f\"Article not found:", "a pdf\") return result.content def _get_paper_meta(self, doi): return self._get( urllib.parse.urljoin(\"https://doi.org\", doi), headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"},", "self._post( self._base_url, data={\"request\": query}, headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}, ) parsed_response = BeautifulSoup(response.content, \"html.parser\") if parsed_response.find(\"div\").text.endswith(\"article", "\"var doi\" in line ] for script in parsed_response.find_all(\"script\") if script.string and \"var", "\"pdf_url\": cleaned_url, } def _download_pdf(self, url): result = self._get(url) if result.headers[\"Content-Type\"] != \"application/pdf\":", "sci-hub \"\"\" response = self._post( self._base_url, data={\"request\": query}, headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}, ) parsed_response =", "pdf_string = self._download_pdf(query_result[\"pdf_url\"]) 
filename = ( self._generate_file_name(query_result[\"doi\"]) if filename is None else filename", "line ] for script in parsed_response.find_all(\"script\") if script.string and \"var doi\" in script.string", "( self._generate_file_name(query_result[\"doi\"]) if filename is None else filename ) out_path = os.path.join(destination, filename)", "\"\"\" response = self._post( self._base_url, data={\"request\": query}, headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}, ) parsed_response = BeautifulSoup(response.content,", "query}, headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}, ) parsed_response = BeautifulSoup(response.content, \"html.parser\") if parsed_response.find(\"div\").text.endswith(\"article not found\"): raise", "link != self.SCIHUB_NOW_URL # pylint: disable=C0330 ): urls.append(a_tag[\"href\"]) return urls def _set_proxy(self, proxy):", "self._get_doi(parsed_response), \"pdf_url\": cleaned_url, } def _download_pdf(self, url): result = self._get(url) if result.headers[\"Content-Type\"] !=", "from the available scihub urls set the current base url to the new", "# return f\"({date}) {title}.pdf\" return f\"({year}) {title}.pdf\" def download(self, query, destination=\"\", filename=None): \"\"\"", "coding: utf-8 -*- \"\"\" SciHub client \"\"\" import logging import os import random", "response = self._post( self._base_url, data={\"request\": query}, headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}, ) parsed_response = BeautifulSoup(response.content, \"html.parser\")", "in parsed_response.find_all(\"script\") if script.string and \"var doi\" in script.string ] return doi def", "= self._get(self.SCIHUB_NOW_URL, raise_for_status=False) try: response.raise_for_status() except requests.exceptions.HTTPError: LOG.debug(\"falling back to %s\", self._fallback_base_url) return", "date = \"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0])) ((year, _, _),) = paper_meta[\"published-print\"][\"date-parts\"] title 
= paper_meta[\"title\"] #", "link.startswith(\"https\") # pylint: disable=C0330 and link != self.SCIHUB_NOW_URL # pylint: disable=C0330 ): urls.append(a_tag[\"href\"])", "def _set_proxy(self, proxy): self._sess.proxies = { \"http\": proxy, \"https\": proxy, } def _set_base_url(self):", "@staticmethod def _get_doi(parsed_response): ((doi,),) = [ [ line.strip().split(\"'\")[1] for line in script.string.split(\"\\n\") if", "self._set_proxy(proxy) def _get(self, url, raise_for_status=True, **kwargs): response = self._sess.get(url, **kwargs) if raise_for_status is", "self.query(query) pdf_string = self._download_pdf(query_result[\"pdf_url\"]) filename = ( self._generate_file_name(query_result[\"doi\"]) if filename is None else", "**kwargs): response = self._sess.post(url, **kwargs) if raise_for_status is True: response.raise_for_status() return response def", "import logging import os import random import urllib import requests from bs4 import", "destination=\"\", filename=None): \"\"\" Download paper from sci-hub \"\"\" query_result = self.query(query) pdf_string =", "= [ [ line.strip().split(\"'\")[1] for line in script.string.split(\"\\n\") if \"var doi\" in line", "if ( \"sci-hub\" in link # pylint: disable=C0330 and link.startswith(\"https\") # pylint: disable=C0330", "= [] for a_tag in parsed_content.find_all(\"a\", href=True): link = a_tag[\"href\"] if ( \"sci-hub\"", "= { \"http\": proxy, \"https\": proxy, } def _set_base_url(self): \"\"\" Pick a random", "def _set_base_url(self): \"\"\" Pick a random url from the available scihub urls set", "\"application/vnd.citationstyles.csl+json\"}, ).json() def _generate_file_name(self, doi): paper_meta = self._get_paper_meta(doi) # date = \"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0]))", "scihub urls set the current base url to the new url \"\"\" if", "_get_available_scihub_urls(self): response = self._get(self.SCIHUB_NOW_URL, raise_for_status=False) try: response.raise_for_status() except 
requests.exceptions.HTTPError: LOG.debug(\"falling back to %s\",", "} def _download_pdf(self, url): result = self._get(url) if result.headers[\"Content-Type\"] != \"application/pdf\": raise ValueError(\"File", "SciHub \"\"\" DEFAULT_HEADERS = { \"User-Agent\": \"Mozilla/5.0 (X11; Linux x86_64; rv:77.0) Gecko/20100101 Firefox/77.0\",", "for a paper hosted by sci-hub \"\"\" response = self._post( self._base_url, data={\"request\": query},", "urls def _set_proxy(self, proxy): self._sess.proxies = { \"http\": proxy, \"https\": proxy, } def", "and link != self.SCIHUB_NOW_URL # pylint: disable=C0330 ): urls.append(a_tag[\"href\"]) return urls def _set_proxy(self,", "x86_64; rv:77.0) Gecko/20100101 Firefox/77.0\", } SCIHUB_NOW_URL = \"https://sci-hub.now.sh\" FALLBACK_BASE_URL = \"https://sci-hub.tw\" def __init__(self,", "= BeautifulSoup(response.content, \"html.parser\") if parsed_response.find(\"div\").text.endswith(\"article not found\"): raise ValueError(f\"Article not found: {query}\") cleaned_url", "query, destination=\"\", filename=None): \"\"\" Download paper from sci-hub \"\"\" query_result = self.query(query) pdf_string", "{title}.pdf\" return f\"({year}) {title}.pdf\" def download(self, query, destination=\"\", filename=None): \"\"\" Download paper from", "if not self._available_base_url_list: raise ValueError(\"Ran out of valid sci-hub urls\") (base_url,) = random.sample(self._get_available_scihub_urls(),", "proxy): self._sess.proxies = { \"http\": proxy, \"https\": proxy, } def _set_base_url(self): \"\"\" Pick", "return result.content def _get_paper_meta(self, doi): return self._get( urllib.parse.urljoin(\"https://doi.org\", doi), headers={\"Accept\": \"application/vnd.citationstyles.csl+json\"}, ).json() def", "current base url to the new url \"\"\" if not self._available_base_url_list: raise ValueError(\"Ran", "return urls def _set_proxy(self, proxy): self._sess.proxies = { \"http\": proxy, \"https\": proxy, }", "paper_meta = self._get_paper_meta(doi) # 
date = \"-\".join(map(str, paper_meta[\"indexed\"][\"date-parts\"][0])) ((year, _, _),) = paper_meta[\"published-print\"][\"date-parts\"]", "found\"): raise ValueError(f\"Article not found: {query}\") cleaned_url = urllib.parse.urlparse( urllib.parse.urldefrag(parsed_response.find(\"iframe\").get(\"src\")).url, scheme=\"https\", ).geturl() return", "parsed_content.find_all(\"a\", href=True): link = a_tag[\"href\"] if ( \"sci-hub\" in link # pylint: disable=C0330", "Download paper from sci-hub \"\"\" query_result = self.query(query) pdf_string = self._download_pdf(query_result[\"pdf_url\"]) filename =", "\"\"\" query_result = self.query(query) pdf_string = self._download_pdf(query_result[\"pdf_url\"]) filename = ( self._generate_file_name(query_result[\"doi\"]) if filename", "%s\", self._base_url) @staticmethod def _get_doi(parsed_response): ((doi,),) = [ [ line.strip().split(\"'\")[1] for line in" ]
[ "`media_size`, must not be `None`\") # noqa: E501 self._media_size = param @property def", "typing import List, Dict # noqa: F401 from jobbing.models.base_model_ import Model from jobbing", "E501 self.swagger_types = { 'media_id': int, 'media_status_id': int, 'media_data': str, 'media_link': str, 'media_title':", "return self._media_data @media_data.setter def media_data(self, param): if param is None: raise ValueError(\"Invalid value", "return util.deserialize_model(dikt, cls) @property def media_id(self) -> int: return self._media_id @media_id.setter def media_id(self,", "param): if param is None: raise ValueError(\"Invalid value for `media_content_updated_date`, must not be", "def media_title(self) -> str: return self._media_title @media_title.setter def media_title(self, param): if param is", "param is None: raise ValueError(\"Invalid value for `media_size`, must not be `None`\") #", "absolute_import from datetime import date, datetime # noqa: F401 from typing import List,", "media_size(self, param): if param is None: raise ValueError(\"Invalid value for `media_size`, must not", "media_description(self, param): if param is None: raise ValueError(\"Invalid value for `media_description`, must not", "if param is None: raise ValueError(\"Invalid value for `media_id`, must not be `None`\")", "@media_link.setter def media_link(self, param): if param is None: raise ValueError(\"Invalid value for `media_link`,", "`None`\") # noqa: E501 self._media_size = param @property def media_content_upload_date(self) -> str: return", "= media_content_updated_date @classmethod def from_dict(cls, dikt) -> 'Media': return util.deserialize_model(dikt, cls) @property def", "# noqa: F401 from typing import List, Dict # noqa: F401 from jobbing.models.base_model_", "= param @property def media_content_upload_date(self) -> str: return self._media_content_upload_date @media_content_upload_date.setter def media_content_upload_date(self, param):", "self._media_id = param @property def 
media_status_id(self) -> int: return self._media_status_id @media_status_id.setter def media_status_id(self,", "param is None: raise ValueError(\"Invalid value for `media_data`, must not be `None`\") #", "ValueError(\"Invalid value for `media_size`, must not be `None`\") # noqa: E501 self._media_size =", "ValueError(\"Invalid value for `media_link`, must not be `None`\") # noqa: E501 self._media_link =", "'media_data': 'media_data', 'media_link': 'media_link', 'media_title': 'media_title', 'media_description': 'media_description', 'media_size': 'media_size', 'media_content_upload_date': 'media_content_upload_date', 'media_content_updated_date':", "media_content_updated_date(self, param): if param is None: raise ValueError(\"Invalid value for `media_content_updated_date`, must not", "media_status_id:int = None, media_data:str = None, media_link:str = None, media_title:str = None, media_description:str", "value for `media_link`, must not be `None`\") # noqa: E501 self._media_link = param", "'media_link', 'media_title': 'media_title', 'media_description': 'media_description', 'media_size': 'media_size', 'media_content_upload_date': 'media_content_upload_date', 'media_content_updated_date': 'media_content_updated_date' } self._media_id", "str: return self._media_title @media_title.setter def media_title(self, param): if param is None: raise ValueError(\"Invalid", "def media_id(self, param): if param is None: raise ValueError(\"Invalid value for `media_id`, must", "media_content_updated_date:str = None): # noqa: E501 self.swagger_types = { 'media_id': int, 'media_status_id': int,", "ValueError(\"Invalid value for `media_content_upload_date`, must not be `None`\") # noqa: E501 self._media_content_upload_date =", "# coding: utf-8 from __future__ import absolute_import from datetime import date, datetime #", "@media_size.setter def media_size(self, param): if param is None: raise ValueError(\"Invalid value for `media_size`,", "must not be `None`\") # noqa: E501 
self._media_id = param @property def media_status_id(self)", "E501 self._media_id = param @property def media_status_id(self) -> int: return self._media_status_id @media_status_id.setter def", "@property def media_status_id(self) -> int: return self._media_status_id @media_status_id.setter def media_status_id(self, param): if param", "`media_title`, must not be `None`\") # noqa: E501 self._media_title = param @property def", "must not be `None`\") # noqa: E501 self._media_data = param @property def media_link(self)", "`None`\") # noqa: E501 self._media_link = param @property def media_title(self) -> str: return", "# noqa: E501 self._media_id = param @property def media_status_id(self) -> int: return self._media_status_id", "'media_content_updated_date': str } self.attribute_map = { 'media_id': 'media_id', 'media_status_id': 'media_status_id', 'media_data': 'media_data', 'media_link':", "noqa: E501 self._media_description = param @property def media_size(self) -> float: return self._media_size @media_size.setter", "is None: raise ValueError(\"Invalid value for `media_status_id`, must not be `None`\") # noqa:", "@media_data.setter def media_data(self, param): if param is None: raise ValueError(\"Invalid value for `media_data`,", "return self._media_title @media_title.setter def media_title(self, param): if param is None: raise ValueError(\"Invalid value", "datetime # noqa: F401 from typing import List, Dict # noqa: F401 from", "'media_data': str, 'media_link': str, 'media_title': str, 'media_description': str, 'media_size': float, 'media_content_upload_date': str, 'media_content_updated_date':", "is None: raise ValueError(\"Invalid value for `media_data`, must not be `None`\") # noqa:", "= None, media_description:str = None, media_size:float = None, media_content_upload_date:str = None, media_content_updated_date:str =", "from typing import List, Dict # noqa: F401 from jobbing.models.base_model_ import Model from", "= media_content_upload_date 
self._media_content_updated_date = media_content_updated_date @classmethod def from_dict(cls, dikt) -> 'Media': return util.deserialize_model(dikt,", "@classmethod def from_dict(cls, dikt) -> 'Media': return util.deserialize_model(dikt, cls) @property def media_id(self) ->", "@media_content_updated_date.setter def media_content_updated_date(self, param): if param is None: raise ValueError(\"Invalid value for `media_content_updated_date`,", "noqa: E501 self._media_status_id = param @property def media_data(self) -> str: return self._media_data @media_data.setter", "@property def media_id(self) -> int: return self._media_id @media_id.setter def media_id(self, param): if param", "'media_description', 'media_size': 'media_size', 'media_content_upload_date': 'media_content_upload_date', 'media_content_updated_date': 'media_content_updated_date' } self._media_id = media_id self._media_status_id =", "def media_content_updated_date(self) -> str: return self._media_content_updated_date @media_content_updated_date.setter def media_content_updated_date(self, param): if param is", "self.attribute_map = { 'media_id': 'media_id', 'media_status_id': 'media_status_id', 'media_data': 'media_data', 'media_link': 'media_link', 'media_title': 'media_title',", "self.swagger_types = { 'media_id': int, 'media_status_id': int, 'media_data': str, 'media_link': str, 'media_title': str,", "@property def media_content_upload_date(self) -> str: return self._media_content_upload_date @media_content_upload_date.setter def media_content_upload_date(self, param): if param", "noqa: F401 from jobbing.models.base_model_ import Model from jobbing import util class Media(Model): def", "is None: raise ValueError(\"Invalid value for `media_content_upload_date`, must not be `None`\") # noqa:", "media_status_id(self, param): if param is None: raise ValueError(\"Invalid value for `media_status_id`, must not", "self._media_data @media_data.setter def media_data(self, param): if param is None: raise 
ValueError(\"Invalid value for", "-> str: return self._media_content_updated_date @media_content_updated_date.setter def media_content_updated_date(self, param): if param is None: raise", "`media_id`, must not be `None`\") # noqa: E501 self._media_id = param @property def", "ValueError(\"Invalid value for `media_description`, must not be `None`\") # noqa: E501 self._media_description =", "value for `media_data`, must not be `None`\") # noqa: E501 self._media_data = param", "= param @property def media_description(self) -> str: return self._media_description @media_description.setter def media_description(self, param):", "{ 'media_id': 'media_id', 'media_status_id': 'media_status_id', 'media_data': 'media_data', 'media_link': 'media_link', 'media_title': 'media_title', 'media_description': 'media_description',", "-> 'Media': return util.deserialize_model(dikt, cls) @property def media_id(self) -> int: return self._media_id @media_id.setter", "= media_title self._media_description = media_description self._media_size = media_size self._media_content_upload_date = media_content_upload_date self._media_content_updated_date =", "for `media_title`, must not be `None`\") # noqa: E501 self._media_title = param @property", "not be `None`\") # noqa: E501 self._media_data = param @property def media_link(self) ->", "@property def media_description(self) -> str: return self._media_description @media_description.setter def media_description(self, param): if param", "str, 'media_description': str, 'media_size': float, 'media_content_upload_date': str, 'media_content_updated_date': str } self.attribute_map = {", "`media_description`, must not be `None`\") # noqa: E501 self._media_description = param @property def", "= None, media_link:str = None, media_title:str = None, media_description:str = None, media_size:float =", "media_status_id(self) -> int: return self._media_status_id @media_status_id.setter def media_status_id(self, param): if param is None:", "def media_data(self, 
param): if param is None: raise ValueError(\"Invalid value for `media_data`, must", "for `media_status_id`, must not be `None`\") # noqa: E501 self._media_status_id = param @property", "@property def media_title(self) -> str: return self._media_title @media_title.setter def media_title(self, param): if param", "E501 self._media_content_upload_date = param @property def media_content_updated_date(self) -> str: return self._media_content_updated_date @media_content_updated_date.setter def", "`media_content_upload_date`, must not be `None`\") # noqa: E501 self._media_content_upload_date = param @property def", "@media_description.setter def media_description(self, param): if param is None: raise ValueError(\"Invalid value for `media_description`,", "if param is None: raise ValueError(\"Invalid value for `media_content_updated_date`, must not be `None`\")", "E501 self._media_description = param @property def media_size(self) -> float: return self._media_size @media_size.setter def", "<gh_stars>0 # coding: utf-8 from __future__ import absolute_import from datetime import date, datetime", "} self.attribute_map = { 'media_id': 'media_id', 'media_status_id': 'media_status_id', 'media_data': 'media_data', 'media_link': 'media_link', 'media_title':", "media_size self._media_content_upload_date = media_content_upload_date self._media_content_updated_date = media_content_updated_date @classmethod def from_dict(cls, dikt) -> 'Media':", "util class Media(Model): def __init__(self, media_id:int = None, media_status_id:int = None, media_data:str =", "'media_id', 'media_status_id': 'media_status_id', 'media_data': 'media_data', 'media_link': 'media_link', 'media_title': 'media_title', 'media_description': 'media_description', 'media_size': 'media_size',", "media_id(self, param): if param is None: raise ValueError(\"Invalid value for `media_id`, must not", "param): if param is None: raise ValueError(\"Invalid value for `media_title`, must not be", "for `media_description`, must not 
be `None`\") # noqa: E501 self._media_description = param @property", "param is None: raise ValueError(\"Invalid value for `media_id`, must not be `None`\") #", "is None: raise ValueError(\"Invalid value for `media_content_updated_date`, must not be `None`\") # noqa:", "param @property def media_content_updated_date(self) -> str: return self._media_content_updated_date @media_content_updated_date.setter def media_content_updated_date(self, param): if", "self._media_link = param @property def media_title(self) -> str: return self._media_title @media_title.setter def media_title(self,", "class Media(Model): def __init__(self, media_id:int = None, media_status_id:int = None, media_data:str = None,", "'media_size', 'media_content_upload_date': 'media_content_upload_date', 'media_content_updated_date': 'media_content_updated_date' } self._media_id = media_id self._media_status_id = media_status_id self._media_data", "raise ValueError(\"Invalid value for `media_content_upload_date`, must not be `None`\") # noqa: E501 self._media_content_upload_date", "'media_content_upload_date': str, 'media_content_updated_date': str } self.attribute_map = { 'media_id': 'media_id', 'media_status_id': 'media_status_id', 'media_data':", "datetime import date, datetime # noqa: F401 from typing import List, Dict #", "`media_status_id`, must not be `None`\") # noqa: E501 self._media_status_id = param @property def", "= None): # noqa: E501 self.swagger_types = { 'media_id': int, 'media_status_id': int, 'media_data':", "self._media_id @media_id.setter def media_id(self, param): if param is None: raise ValueError(\"Invalid value for", "# noqa: E501 self.swagger_types = { 'media_id': int, 'media_status_id': int, 'media_data': str, 'media_link':", "dikt) -> 'Media': return util.deserialize_model(dikt, cls) @property def media_id(self) -> int: return self._media_id", "`media_data`, must not be `None`\") # noqa: E501 self._media_data = param @property def", "be `None`\") # noqa: E501 
self._media_size = param @property def media_content_upload_date(self) -> str:", "param is None: raise ValueError(\"Invalid value for `media_status_id`, must not be `None`\") #", "E501 self._media_status_id = param @property def media_data(self) -> str: return self._media_data @media_data.setter def", "def media_content_upload_date(self) -> str: return self._media_content_upload_date @media_content_upload_date.setter def media_content_upload_date(self, param): if param is", "raise ValueError(\"Invalid value for `media_title`, must not be `None`\") # noqa: E501 self._media_title", "{ 'media_id': int, 'media_status_id': int, 'media_data': str, 'media_link': str, 'media_title': str, 'media_description': str,", "raise ValueError(\"Invalid value for `media_link`, must not be `None`\") # noqa: E501 self._media_link", "'media_description': str, 'media_size': float, 'media_content_upload_date': str, 'media_content_updated_date': str } self.attribute_map = { 'media_id':", "raise ValueError(\"Invalid value for `media_data`, must not be `None`\") # noqa: E501 self._media_data", "for `media_link`, must not be `None`\") # noqa: E501 self._media_link = param @property", "self._media_content_updated_date @media_content_updated_date.setter def media_content_updated_date(self, param): if param is None: raise ValueError(\"Invalid value for", "F401 from jobbing.models.base_model_ import Model from jobbing import util class Media(Model): def __init__(self,", "media_status_id self._media_data = media_data self._media_link = media_link self._media_title = media_title self._media_description = media_description", "media_title(self) -> str: return self._media_title @media_title.setter def media_title(self, param): if param is None:", "str: return self._media_description @media_description.setter def media_description(self, param): if param is None: raise ValueError(\"Invalid", "None, media_content_updated_date:str = None): # noqa: E501 self.swagger_types = { 'media_id': int, 
'media_status_id':", "__init__(self, media_id:int = None, media_status_id:int = None, media_data:str = None, media_link:str = None,", "-> str: return self._media_description @media_description.setter def media_description(self, param): if param is None: raise", "self._media_description = media_description self._media_size = media_size self._media_content_upload_date = media_content_upload_date self._media_content_updated_date = media_content_updated_date @classmethod", "self._media_size = media_size self._media_content_upload_date = media_content_upload_date self._media_content_updated_date = media_content_updated_date @classmethod def from_dict(cls, dikt)", "@property def media_size(self) -> float: return self._media_size @media_size.setter def media_size(self, param): if param", "media_size:float = None, media_content_upload_date:str = None, media_content_updated_date:str = None): # noqa: E501 self.swagger_types", "'media_size': float, 'media_content_upload_date': str, 'media_content_updated_date': str } self.attribute_map = { 'media_id': 'media_id', 'media_status_id':", "str: return self._media_content_updated_date @media_content_updated_date.setter def media_content_updated_date(self, param): if param is None: raise ValueError(\"Invalid", "int: return self._media_status_id @media_status_id.setter def media_status_id(self, param): if param is None: raise ValueError(\"Invalid", "def media_size(self) -> float: return self._media_size @media_size.setter def media_size(self, param): if param is", "param is None: raise ValueError(\"Invalid value for `media_description`, must not be `None`\") #", "param is None: raise ValueError(\"Invalid value for `media_title`, must not be `None`\") #", "None: raise ValueError(\"Invalid value for `media_size`, must not be `None`\") # noqa: E501", "be `None`\") # noqa: E501 self._media_content_upload_date = param @property def media_content_updated_date(self) -> str:", "param): if param is None: raise ValueError(\"Invalid value for 
`media_id`, must not be", "media_content_upload_date self._media_content_updated_date = media_content_updated_date @classmethod def from_dict(cls, dikt) -> 'Media': return util.deserialize_model(dikt, cls)", "value for `media_content_upload_date`, must not be `None`\") # noqa: E501 self._media_content_upload_date = param", "param): if param is None: raise ValueError(\"Invalid value for `media_status_id`, must not be", "date, datetime # noqa: F401 from typing import List, Dict # noqa: F401", "= media_description self._media_size = media_size self._media_content_upload_date = media_content_upload_date self._media_content_updated_date = media_content_updated_date @classmethod def", "None: raise ValueError(\"Invalid value for `media_status_id`, must not be `None`\") # noqa: E501", "if param is None: raise ValueError(\"Invalid value for `media_content_upload_date`, must not be `None`\")", "value for `media_title`, must not be `None`\") # noqa: E501 self._media_title = param", "raise ValueError(\"Invalid value for `media_id`, must not be `None`\") # noqa: E501 self._media_id", "raise ValueError(\"Invalid value for `media_status_id`, must not be `None`\") # noqa: E501 self._media_status_id", "must not be `None`\") # noqa: E501 self._media_description = param @property def media_size(self)", "media_data(self) -> str: return self._media_data @media_data.setter def media_data(self, param): if param is None:", "# noqa: E501 self._media_size = param @property def media_content_upload_date(self) -> str: return self._media_content_upload_date", "value for `media_description`, must not be `None`\") # noqa: E501 self._media_description = param", "media_content_upload_date(self, param): if param is None: raise ValueError(\"Invalid value for `media_content_upload_date`, must not", "= media_link self._media_title = media_title self._media_description = media_description self._media_size = media_size self._media_content_upload_date =", "def media_id(self) -> int: return self._media_id 
@media_id.setter def media_id(self, param): if param is", "if param is None: raise ValueError(\"Invalid value for `media_size`, must not be `None`\")", "be `None`\") # noqa: E501 self._media_title = param @property def media_description(self) -> str:", "'media_data', 'media_link': 'media_link', 'media_title': 'media_title', 'media_description': 'media_description', 'media_size': 'media_size', 'media_content_upload_date': 'media_content_upload_date', 'media_content_updated_date': 'media_content_updated_date'", "from_dict(cls, dikt) -> 'Media': return util.deserialize_model(dikt, cls) @property def media_id(self) -> int: return", "'media_id': 'media_id', 'media_status_id': 'media_status_id', 'media_data': 'media_data', 'media_link': 'media_link', 'media_title': 'media_title', 'media_description': 'media_description', 'media_size':", "@media_id.setter def media_id(self, param): if param is None: raise ValueError(\"Invalid value for `media_id`,", "for `media_data`, must not be `None`\") # noqa: E501 self._media_data = param @property", "util.deserialize_model(dikt, cls) @property def media_id(self) -> int: return self._media_id @media_id.setter def media_id(self, param):", "cls) @property def media_id(self) -> int: return self._media_id @media_id.setter def media_id(self, param): if", "'media_status_id': 'media_status_id', 'media_data': 'media_data', 'media_link': 'media_link', 'media_title': 'media_title', 'media_description': 'media_description', 'media_size': 'media_size', 'media_content_upload_date':", "import util class Media(Model): def __init__(self, media_id:int = None, media_status_id:int = None, media_data:str", "Dict # noqa: F401 from jobbing.models.base_model_ import Model from jobbing import util class", "def from_dict(cls, dikt) -> 'Media': return util.deserialize_model(dikt, cls) @property def media_id(self) -> int:", "None: raise ValueError(\"Invalid value for `media_data`, must not be `None`\") # noqa: E501", "must not be `None`\") # noqa: E501 
self._media_status_id = param @property def media_data(self)", "= { 'media_id': int, 'media_status_id': int, 'media_data': str, 'media_link': str, 'media_title': str, 'media_description':", "# noqa: E501 self._media_data = param @property def media_link(self) -> str: return self._media_link", "def media_status_id(self, param): if param is None: raise ValueError(\"Invalid value for `media_status_id`, must", "self._media_link @media_link.setter def media_link(self, param): if param is None: raise ValueError(\"Invalid value for", "str } self.attribute_map = { 'media_id': 'media_id', 'media_status_id': 'media_status_id', 'media_data': 'media_data', 'media_link': 'media_link',", "= param @property def media_link(self) -> str: return self._media_link @media_link.setter def media_link(self, param):", "self._media_size @media_size.setter def media_size(self, param): if param is None: raise ValueError(\"Invalid value for", "} self._media_id = media_id self._media_status_id = media_status_id self._media_data = media_data self._media_link = media_link", "self._media_title = param @property def media_description(self) -> str: return self._media_description @media_description.setter def media_description(self,", "param is None: raise ValueError(\"Invalid value for `media_content_upload_date`, must not be `None`\") #", "self._media_status_id = param @property def media_data(self) -> str: return self._media_data @media_data.setter def media_data(self,", "media_id self._media_status_id = media_status_id self._media_data = media_data self._media_link = media_link self._media_title = media_title", "def media_title(self, param): if param is None: raise ValueError(\"Invalid value for `media_title`, must", "'media_content_upload_date', 'media_content_updated_date': 'media_content_updated_date' } self._media_id = media_id self._media_status_id = media_status_id self._media_data = media_data", "'media_link': 'media_link', 'media_title': 'media_title', 'media_description': 
'media_description', 'media_size': 'media_size', 'media_content_upload_date': 'media_content_upload_date', 'media_content_updated_date': 'media_content_updated_date' }", "'media_title': str, 'media_description': str, 'media_size': float, 'media_content_upload_date': str, 'media_content_updated_date': str } self.attribute_map =", "noqa: E501 self._media_link = param @property def media_title(self) -> str: return self._media_title @media_title.setter", "jobbing.models.base_model_ import Model from jobbing import util class Media(Model): def __init__(self, media_id:int =", "ValueError(\"Invalid value for `media_content_updated_date`, must not be `None`\") # noqa: E501 self._media_content_updated_date =", "value for `media_id`, must not be `None`\") # noqa: E501 self._media_id = param", "self._media_content_upload_date = media_content_upload_date self._media_content_updated_date = media_content_updated_date @classmethod def from_dict(cls, dikt) -> 'Media': return", "@media_status_id.setter def media_status_id(self, param): if param is None: raise ValueError(\"Invalid value for `media_status_id`,", "self._media_status_id @media_status_id.setter def media_status_id(self, param): if param is None: raise ValueError(\"Invalid value for", "media_content_updated_date @classmethod def from_dict(cls, dikt) -> 'Media': return util.deserialize_model(dikt, cls) @property def media_id(self)", "int, 'media_status_id': int, 'media_data': str, 'media_link': str, 'media_title': str, 'media_description': str, 'media_size': float,", "float, 'media_content_upload_date': str, 'media_content_updated_date': str } self.attribute_map = { 'media_id': 'media_id', 'media_status_id': 'media_status_id',", "-> float: return self._media_size @media_size.setter def media_size(self, param): if param is None: raise", "= param @property def media_title(self) -> str: return self._media_title @media_title.setter def media_title(self, param):", "must not be `None`\") # noqa: E501 self._media_title = param 
@property def media_description(self)", "`None`\") # noqa: E501 self._media_title = param @property def media_description(self) -> str: return", "`None`\") # noqa: E501 self._media_description = param @property def media_size(self) -> float: return", "@media_content_upload_date.setter def media_content_upload_date(self, param): if param is None: raise ValueError(\"Invalid value for `media_content_upload_date`,", "return self._media_description @media_description.setter def media_description(self, param): if param is None: raise ValueError(\"Invalid value", "noqa: E501 self._media_size = param @property def media_content_upload_date(self) -> str: return self._media_content_upload_date @media_content_upload_date.setter", "`None`\") # noqa: E501 self._media_content_upload_date = param @property def media_content_updated_date(self) -> str: return", "None: raise ValueError(\"Invalid value for `media_id`, must not be `None`\") # noqa: E501", "self._media_id = media_id self._media_status_id = media_status_id self._media_data = media_data self._media_link = media_link self._media_title", "# noqa: E501 self._media_title = param @property def media_description(self) -> str: return self._media_description", "import List, Dict # noqa: F401 from jobbing.models.base_model_ import Model from jobbing import", "str, 'media_title': str, 'media_description': str, 'media_size': float, 'media_content_upload_date': str, 'media_content_updated_date': str } self.attribute_map", "media_data(self, param): if param is None: raise ValueError(\"Invalid value for `media_data`, must not", "None, media_status_id:int = None, media_data:str = None, media_link:str = None, media_title:str = None,", "return self._media_content_upload_date @media_content_upload_date.setter def media_content_upload_date(self, param): if param is None: raise ValueError(\"Invalid value", "import date, datetime # noqa: F401 from typing import List, Dict # noqa:", "self._media_content_upload_date 
@media_content_upload_date.setter def media_content_upload_date(self, param): if param is None: raise ValueError(\"Invalid value for", "if param is None: raise ValueError(\"Invalid value for `media_status_id`, must not be `None`\")", "return self._media_content_updated_date @media_content_updated_date.setter def media_content_updated_date(self, param): if param is None: raise ValueError(\"Invalid value", "noqa: E501 self._media_data = param @property def media_link(self) -> str: return self._media_link @media_link.setter", "'media_content_updated_date': 'media_content_updated_date' } self._media_id = media_id self._media_status_id = media_status_id self._media_data = media_data self._media_link", "'media_description': 'media_description', 'media_size': 'media_size', 'media_content_upload_date': 'media_content_upload_date', 'media_content_updated_date': 'media_content_updated_date' } self._media_id = media_id self._media_status_id", "for `media_size`, must not be `None`\") # noqa: E501 self._media_size = param @property", "def media_content_upload_date(self, param): if param is None: raise ValueError(\"Invalid value for `media_content_upload_date`, must", "E501 self._media_data = param @property def media_link(self) -> str: return self._media_link @media_link.setter def", "'media_status_id', 'media_data': 'media_data', 'media_link': 'media_link', 'media_title': 'media_title', 'media_description': 'media_description', 'media_size': 'media_size', 'media_content_upload_date': 'media_content_upload_date',", "media_link(self, param): if param is None: raise ValueError(\"Invalid value for `media_link`, must not", "List, Dict # noqa: F401 from jobbing.models.base_model_ import Model from jobbing import util", "param @property def media_status_id(self) -> int: return self._media_status_id @media_status_id.setter def media_status_id(self, param): if", "param @property def media_link(self) -> str: return self._media_link @media_link.setter def media_link(self, param): if", 
"E501 self._media_size = param @property def media_content_upload_date(self) -> str: return self._media_content_upload_date @media_content_upload_date.setter def", "for `media_content_upload_date`, must not be `None`\") # noqa: E501 self._media_content_upload_date = param @property", "media_title:str = None, media_description:str = None, media_size:float = None, media_content_upload_date:str = None, media_content_updated_date:str", "param @property def media_size(self) -> float: return self._media_size @media_size.setter def media_size(self, param): if", "def media_status_id(self) -> int: return self._media_status_id @media_status_id.setter def media_status_id(self, param): if param is", "media_content_upload_date(self) -> str: return self._media_content_upload_date @media_content_upload_date.setter def media_content_upload_date(self, param): if param is None:", "# noqa: F401 from jobbing.models.base_model_ import Model from jobbing import util class Media(Model):", "'media_content_upload_date': 'media_content_upload_date', 'media_content_updated_date': 'media_content_updated_date' } self._media_id = media_id self._media_status_id = media_status_id self._media_data =", "self._media_content_upload_date = param @property def media_content_updated_date(self) -> str: return self._media_content_updated_date @media_content_updated_date.setter def media_content_updated_date(self,", "value for `media_status_id`, must not be `None`\") # noqa: E501 self._media_status_id = param", "media_size(self) -> float: return self._media_size @media_size.setter def media_size(self, param): if param is None:", "is None: raise ValueError(\"Invalid value for `media_link`, must not be `None`\") # noqa:", "None, media_description:str = None, media_size:float = None, media_content_upload_date:str = None, media_content_updated_date:str = None):", "'media_id': int, 'media_status_id': int, 'media_data': str, 'media_link': str, 'media_title': str, 'media_description': str, 'media_size':", "param 
is None: raise ValueError(\"Invalid value for `media_content_updated_date`, must not be `None`\") #", "def media_description(self, param): if param is None: raise ValueError(\"Invalid value for `media_description`, must", "be `None`\") # noqa: E501 self._media_link = param @property def media_title(self) -> str:", "media_link:str = None, media_title:str = None, media_description:str = None, media_size:float = None, media_content_upload_date:str", "import absolute_import from datetime import date, datetime # noqa: F401 from typing import", "'media_title': 'media_title', 'media_description': 'media_description', 'media_size': 'media_size', 'media_content_upload_date': 'media_content_upload_date', 'media_content_updated_date': 'media_content_updated_date' } self._media_id =", "media_link self._media_title = media_title self._media_description = media_description self._media_size = media_size self._media_content_upload_date = media_content_upload_date", "media_data:str = None, media_link:str = None, media_title:str = None, media_description:str = None, media_size:float", "noqa: E501 self.swagger_types = { 'media_id': int, 'media_status_id': int, 'media_data': str, 'media_link': str,", "media_title self._media_description = media_description self._media_size = media_size self._media_content_upload_date = media_content_upload_date self._media_content_updated_date = media_content_updated_date", "not be `None`\") # noqa: E501 self._media_content_upload_date = param @property def media_content_updated_date(self) ->", "None, media_title:str = None, media_description:str = None, media_size:float = None, media_content_upload_date:str = None,", "`None`\") # noqa: E501 self._media_status_id = param @property def media_data(self) -> str: return", "= None, media_content_updated_date:str = None): # noqa: E501 self.swagger_types = { 'media_id': int,", "`None`\") # noqa: E501 self._media_id = param @property def media_status_id(self) -> int: return", "media_id:int = None, 
media_status_id:int = None, media_data:str = None, media_link:str = None, media_title:str", "'media_title', 'media_description': 'media_description', 'media_size': 'media_size', 'media_content_upload_date': 'media_content_upload_date', 'media_content_updated_date': 'media_content_updated_date' } self._media_id = media_id", "= media_status_id self._media_data = media_data self._media_link = media_link self._media_title = media_title self._media_description =", "def media_description(self) -> str: return self._media_description @media_description.setter def media_description(self, param): if param is", "= None, media_data:str = None, media_link:str = None, media_title:str = None, media_description:str =", "None, media_size:float = None, media_content_upload_date:str = None, media_content_updated_date:str = None): # noqa: E501", "__future__ import absolute_import from datetime import date, datetime # noqa: F401 from typing", "param @property def media_data(self) -> str: return self._media_data @media_data.setter def media_data(self, param): if", "# noqa: E501 self._media_content_upload_date = param @property def media_content_updated_date(self) -> str: return self._media_content_updated_date", "from datetime import date, datetime # noqa: F401 from typing import List, Dict", "is None: raise ValueError(\"Invalid value for `media_description`, must not be `None`\") # noqa:", "noqa: F401 from typing import List, Dict # noqa: F401 from jobbing.models.base_model_ import", "media_content_upload_date:str = None, media_content_updated_date:str = None): # noqa: E501 self.swagger_types = { 'media_id':", "raise ValueError(\"Invalid value for `media_content_updated_date`, must not be `None`\") # noqa: E501 self._media_content_updated_date", "str, 'media_link': str, 'media_title': str, 'media_description': str, 'media_size': float, 'media_content_upload_date': str, 'media_content_updated_date': str", "# noqa: E501 self._media_status_id = param @property def media_data(self) -> str: 
return self._media_data", "param): if param is None: raise ValueError(\"Invalid value for `media_data`, must not be", "self._media_title = media_title self._media_description = media_description self._media_size = media_size self._media_content_upload_date = media_content_upload_date self._media_content_updated_date", "not be `None`\") # noqa: E501 self._media_status_id = param @property def media_data(self) ->", "media_content_updated_date(self) -> str: return self._media_content_updated_date @media_content_updated_date.setter def media_content_updated_date(self, param): if param is None:", "F401 from typing import List, Dict # noqa: F401 from jobbing.models.base_model_ import Model", "return self._media_size @media_size.setter def media_size(self, param): if param is None: raise ValueError(\"Invalid value", "media_description(self) -> str: return self._media_description @media_description.setter def media_description(self, param): if param is None:", "be `None`\") # noqa: E501 self._media_data = param @property def media_link(self) -> str:", "None, media_content_upload_date:str = None, media_content_updated_date:str = None): # noqa: E501 self.swagger_types = {", "self._media_description @media_description.setter def media_description(self, param): if param is None: raise ValueError(\"Invalid value for", "raise ValueError(\"Invalid value for `media_size`, must not be `None`\") # noqa: E501 self._media_size", "= None, media_content_upload_date:str = None, media_content_updated_date:str = None): # noqa: E501 self.swagger_types =", "utf-8 from __future__ import absolute_import from datetime import date, datetime # noqa: F401", "import Model from jobbing import util class Media(Model): def __init__(self, media_id:int = None,", "= None, media_size:float = None, media_content_upload_date:str = None, media_content_updated_date:str = None): # noqa:", "@property def media_content_updated_date(self) -> str: return self._media_content_updated_date 
@media_content_updated_date.setter def media_content_updated_date(self, param): if param", "def media_link(self, param): if param is None: raise ValueError(\"Invalid value for `media_link`, must", "ValueError(\"Invalid value for `media_title`, must not be `None`\") # noqa: E501 self._media_title =", "= param @property def media_content_updated_date(self) -> str: return self._media_content_updated_date @media_content_updated_date.setter def media_content_updated_date(self, param):", "if param is None: raise ValueError(\"Invalid value for `media_data`, must not be `None`\")", "ValueError(\"Invalid value for `media_id`, must not be `None`\") # noqa: E501 self._media_id =", "self._media_data = media_data self._media_link = media_link self._media_title = media_title self._media_description = media_description self._media_size", "not be `None`\") # noqa: E501 self._media_link = param @property def media_title(self) ->", "param @property def media_content_upload_date(self) -> str: return self._media_content_upload_date @media_content_upload_date.setter def media_content_upload_date(self, param): if", "value for `media_content_updated_date`, must not be `None`\") # noqa: E501 self._media_content_updated_date = param", "'media_link': str, 'media_title': str, 'media_description': str, 'media_size': float, 'media_content_upload_date': str, 'media_content_updated_date': str }", "self._media_description = param @property def media_size(self) -> float: return self._media_size @media_size.setter def media_size(self,", "ValueError(\"Invalid value for `media_status_id`, must not be `None`\") # noqa: E501 self._media_status_id =", "@media_title.setter def media_title(self, param): if param is None: raise ValueError(\"Invalid value for `media_title`,", "from jobbing.models.base_model_ import Model from jobbing import util class Media(Model): def __init__(self, media_id:int", "None: raise ValueError(\"Invalid value for `media_description`, must not be `None`\") # noqa: E501", "raise 
ValueError(\"Invalid value for `media_description`, must not be `None`\") # noqa: E501 self._media_description", "None): # noqa: E501 self.swagger_types = { 'media_id': int, 'media_status_id': int, 'media_data': str,", "def media_link(self) -> str: return self._media_link @media_link.setter def media_link(self, param): if param is", "= media_id self._media_status_id = media_status_id self._media_data = media_data self._media_link = media_link self._media_title =", "-> str: return self._media_data @media_data.setter def media_data(self, param): if param is None: raise", "value for `media_size`, must not be `None`\") # noqa: E501 self._media_size = param", "param @property def media_description(self) -> str: return self._media_description @media_description.setter def media_description(self, param): if", "-> int: return self._media_id @media_id.setter def media_id(self, param): if param is None: raise", "noqa: E501 self._media_title = param @property def media_description(self) -> str: return self._media_description @media_description.setter", "param is None: raise ValueError(\"Invalid value for `media_link`, must not be `None`\") #", "= param @property def media_data(self) -> str: return self._media_data @media_data.setter def media_data(self, param):", "is None: raise ValueError(\"Invalid value for `media_size`, must not be `None`\") # noqa:", "int: return self._media_id @media_id.setter def media_id(self, param): if param is None: raise ValueError(\"Invalid", "def media_data(self) -> str: return self._media_data @media_data.setter def media_data(self, param): if param is", "self._media_link = media_link self._media_title = media_title self._media_description = media_description self._media_size = media_size self._media_content_upload_date", "media_title(self, param): if param is None: raise ValueError(\"Invalid value for `media_title`, must not", "= media_size self._media_content_upload_date = media_content_upload_date self._media_content_updated_date = 
media_content_updated_date @classmethod def from_dict(cls, dikt) ->", "param): if param is None: raise ValueError(\"Invalid value for `media_size`, must not be", "`None`\") # noqa: E501 self._media_data = param @property def media_link(self) -> str: return", "ValueError(\"Invalid value for `media_data`, must not be `None`\") # noqa: E501 self._media_data =", "str: return self._media_data @media_data.setter def media_data(self, param): if param is None: raise ValueError(\"Invalid", "str: return self._media_content_upload_date @media_content_upload_date.setter def media_content_upload_date(self, param): if param is None: raise ValueError(\"Invalid", "media_description:str = None, media_size:float = None, media_content_upload_date:str = None, media_content_updated_date:str = None): #", "coding: utf-8 from __future__ import absolute_import from datetime import date, datetime # noqa:", "return self._media_link @media_link.setter def media_link(self, param): if param is None: raise ValueError(\"Invalid value", "self._media_status_id = media_status_id self._media_data = media_data self._media_link = media_link self._media_title = media_title self._media_description", "be `None`\") # noqa: E501 self._media_status_id = param @property def media_data(self) -> str:", "-> str: return self._media_link @media_link.setter def media_link(self, param): if param is None: raise", "int, 'media_data': str, 'media_link': str, 'media_title': str, 'media_description': str, 'media_size': float, 'media_content_upload_date': str,", "def __init__(self, media_id:int = None, media_status_id:int = None, media_data:str = None, media_link:str =", "param @property def media_title(self) -> str: return self._media_title @media_title.setter def media_title(self, param): if", "E501 self._media_title = param @property def media_description(self) -> str: return self._media_description @media_description.setter def", "= { 'media_id': 'media_id', 'media_status_id': 'media_status_id', 'media_data': 
'media_data', 'media_link': 'media_link', 'media_title': 'media_title', 'media_description':", "be `None`\") # noqa: E501 self._media_description = param @property def media_size(self) -> float:", "self._media_size = param @property def media_content_upload_date(self) -> str: return self._media_content_upload_date @media_content_upload_date.setter def media_content_upload_date(self,", "float: return self._media_size @media_size.setter def media_size(self, param): if param is None: raise ValueError(\"Invalid", "is None: raise ValueError(\"Invalid value for `media_id`, must not be `None`\") # noqa:", "= None, media_title:str = None, media_description:str = None, media_size:float = None, media_content_upload_date:str =", "return self._media_id @media_id.setter def media_id(self, param): if param is None: raise ValueError(\"Invalid value", "be `None`\") # noqa: E501 self._media_id = param @property def media_status_id(self) -> int:", "E501 self._media_link = param @property def media_title(self) -> str: return self._media_title @media_title.setter def", "not be `None`\") # noqa: E501 self._media_description = param @property def media_size(self) ->", "self._media_title @media_title.setter def media_title(self, param): if param is None: raise ValueError(\"Invalid value for", "self._media_content_updated_date = media_content_updated_date @classmethod def from_dict(cls, dikt) -> 'Media': return util.deserialize_model(dikt, cls) @property", "None: raise ValueError(\"Invalid value for `media_content_updated_date`, must not be `None`\") # noqa: E501", "Media(Model): def __init__(self, media_id:int = None, media_status_id:int = None, media_data:str = None, media_link:str", "if param is None: raise ValueError(\"Invalid value for `media_description`, must not be `None`\")", "-> str: return self._media_title @media_title.setter def media_title(self, param): if param is None: raise", "= param @property def media_size(self) -> float: return self._media_size @media_size.setter def 
media_size(self, param):", "return self._media_status_id @media_status_id.setter def media_status_id(self, param): if param is None: raise ValueError(\"Invalid value", "str, 'media_size': float, 'media_content_upload_date': str, 'media_content_updated_date': str } self.attribute_map = { 'media_id': 'media_id',", "str: return self._media_link @media_link.setter def media_link(self, param): if param is None: raise ValueError(\"Invalid", "not be `None`\") # noqa: E501 self._media_title = param @property def media_description(self) ->", "for `media_id`, must not be `None`\") # noqa: E501 self._media_id = param @property", "'media_status_id': int, 'media_data': str, 'media_link': str, 'media_title': str, 'media_description': str, 'media_size': float, 'media_content_upload_date':", "media_data self._media_link = media_link self._media_title = media_title self._media_description = media_description self._media_size = media_size", "@property def media_data(self) -> str: return self._media_data @media_data.setter def media_data(self, param): if param", "must not be `None`\") # noqa: E501 self._media_size = param @property def media_content_upload_date(self)", "is None: raise ValueError(\"Invalid value for `media_title`, must not be `None`\") # noqa:", "None: raise ValueError(\"Invalid value for `media_title`, must not be `None`\") # noqa: E501", "'media_size': 'media_size', 'media_content_upload_date': 'media_content_upload_date', 'media_content_updated_date': 'media_content_updated_date' } self._media_id = media_id self._media_status_id = media_status_id", "= media_data self._media_link = media_link self._media_title = media_title self._media_description = media_description self._media_size =", "not be `None`\") # noqa: E501 self._media_id = param @property def media_status_id(self) ->", "Model from jobbing import util class Media(Model): def __init__(self, media_id:int = None, media_status_id:int", "param): if param is None: raise ValueError(\"Invalid value for 
`media_link`, must not be", "# noqa: E501 self._media_description = param @property def media_size(self) -> float: return self._media_size", "media_link(self) -> str: return self._media_link @media_link.setter def media_link(self, param): if param is None:", "param): if param is None: raise ValueError(\"Invalid value for `media_description`, must not be", "# noqa: E501 self._media_link = param @property def media_title(self) -> str: return self._media_title", "must not be `None`\") # noqa: E501 self._media_link = param @property def media_title(self)", "None: raise ValueError(\"Invalid value for `media_content_upload_date`, must not be `None`\") # noqa: E501", "'media_content_updated_date' } self._media_id = media_id self._media_status_id = media_status_id self._media_data = media_data self._media_link =", "= param @property def media_status_id(self) -> int: return self._media_status_id @media_status_id.setter def media_status_id(self, param):", "@property def media_link(self) -> str: return self._media_link @media_link.setter def media_link(self, param): if param", "must not be `None`\") # noqa: E501 self._media_content_upload_date = param @property def media_content_updated_date(self)", "None, media_data:str = None, media_link:str = None, media_title:str = None, media_description:str = None,", "from jobbing import util class Media(Model): def __init__(self, media_id:int = None, media_status_id:int =", "def media_content_updated_date(self, param): if param is None: raise ValueError(\"Invalid value for `media_content_updated_date`, must", "def media_size(self, param): if param is None: raise ValueError(\"Invalid value for `media_size`, must", "-> int: return self._media_status_id @media_status_id.setter def media_status_id(self, param): if param is None: raise", "if param is None: raise ValueError(\"Invalid value for `media_title`, must not be `None`\")", "None: raise ValueError(\"Invalid value for `media_link`, must not be `None`\") # noqa: E501", "-> str: return 
self._media_content_upload_date @media_content_upload_date.setter def media_content_upload_date(self, param): if param is None: raise", "from __future__ import absolute_import from datetime import date, datetime # noqa: F401 from", "`media_link`, must not be `None`\") # noqa: E501 self._media_link = param @property def", "str, 'media_content_updated_date': str } self.attribute_map = { 'media_id': 'media_id', 'media_status_id': 'media_status_id', 'media_data': 'media_data',", "media_description self._media_size = media_size self._media_content_upload_date = media_content_upload_date self._media_content_updated_date = media_content_updated_date @classmethod def from_dict(cls,", "= None, media_status_id:int = None, media_data:str = None, media_link:str = None, media_title:str =", "jobbing import util class Media(Model): def __init__(self, media_id:int = None, media_status_id:int = None,", "'Media': return util.deserialize_model(dikt, cls) @property def media_id(self) -> int: return self._media_id @media_id.setter def", "if param is None: raise ValueError(\"Invalid value for `media_link`, must not be `None`\")", "media_id(self) -> int: return self._media_id @media_id.setter def media_id(self, param): if param is None:", "noqa: E501 self._media_id = param @property def media_status_id(self) -> int: return self._media_status_id @media_status_id.setter", "not be `None`\") # noqa: E501 self._media_size = param @property def media_content_upload_date(self) ->", "param): if param is None: raise ValueError(\"Invalid value for `media_content_upload_date`, must not be", "noqa: E501 self._media_content_upload_date = param @property def media_content_updated_date(self) -> str: return self._media_content_updated_date @media_content_updated_date.setter", "None, media_link:str = None, media_title:str = None, media_description:str = None, media_size:float = None,", "self._media_data = param @property def media_link(self) -> str: return self._media_link @media_link.setter def 
media_link(self," ]
[ "@dataclass class FocusedMode: a_focused: bool cache_file: CacheFile cache_tables: CacheTables = field(default_factory=CacheTables) def update_file(self,", "fil: Optional[YAMLFile] = None old_key : Optional[str] = None def update_file(self, fil): self.fil", "self.current_table = 0 @dataclass class CacheFile: filename: str typ: str update : Optional[str]", "= field(default_factory=CacheTables) def update_file(self, config, fil, tables): self.cache_file.update_file(fil) self.cache_tables.update_tables(config, tables) @dataclass class Config:", "import dataclass, field from file import YAMLFile @dataclass class CacheTables: tables: Dict[str, list]", "0: self.current_table = 0 @dataclass class CacheFile: filename: str typ: str update :", "class Config: a_filename: str b_filename: str a_type: str b_type: str a_only: bool b_only:", "self.cache_tables.update_tables(config, tables) @dataclass class FocusedMode: a_focused: bool cache_file: CacheFile cache_tables: CacheTables = field(default_factory=CacheTables)", "FocusedMode: a_focused: bool cache_file: CacheFile cache_tables: CacheTables = field(default_factory=CacheTables) def update_file(self, config, fil,", "CacheTables = field(default_factory=CacheTables) def update_file(self, config, fil, tables): self.cache_file.update_file(fil) self.cache_tables.update_tables(config, tables) @dataclass class", "list] = field(default_factory=dict) table_names: List[str] = field(default_factory=list) current_table: Optional[int] = 0 def update_tables(self,", "fil, tables): self.cache_file.update_file(fil) self.cache_tables.update_tables(config, tables) @dataclass class Config: a_filename: str b_filename: str a_type:", "filename: str typ: str update : Optional[str] = None fil: Optional[YAMLFile] = None", "= field(default_factory=CacheTables) def update_files(self, config, a_file, b_file, tables): self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config, tables) @dataclass", 
"<gh_stars>0 from typing import Dict, List, Optional from dataclasses import dataclass, field from", "self.current_table < 0: self.current_table = 0 @dataclass class CacheFile: filename: str typ: str", "Dict, List, Optional from dataclasses import dataclass, field from file import YAMLFile @dataclass", "bool table_names: List[str] similarity_threshold: float max_entries: int ignore_suffix: List[str] a_updated: bool b_updated: bool", "YAMLFile @dataclass class CacheTables: tables: Dict[str, list] = field(default_factory=dict) table_names: List[str] = field(default_factory=list)", "self.old_key = None @dataclass class DiffMode: a_cache_file : CacheFile b_cache_file : CacheFile cache_tables", "cache_tables : CacheTables = field(default_factory=CacheTables) def update_files(self, config, a_file, b_file, tables): self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file)", "a_type: str b_type: str a_only: bool b_only: bool table_names: List[str] similarity_threshold: float max_entries:", "b_file, tables): self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config, tables) @dataclass class FocusedMode: a_focused: bool cache_file: CacheFile", "b_filename: str a_type: str b_type: str a_only: bool b_only: bool table_names: List[str] similarity_threshold:", "old_key : Optional[str] = None def update_file(self, fil): self.fil = fil self.old_key =", "List[str] similarity_threshold: float max_entries: int ignore_suffix: List[str] a_updated: bool b_updated: bool a_update: Optional[str]", "1 if self.current_table < 0: self.current_table = 0 @dataclass class CacheFile: filename: str", "typing import Dict, List, Optional from dataclasses import dataclass, field from file import", "DiffMode: a_cache_file : CacheFile b_cache_file : CacheFile cache_tables : CacheTables = field(default_factory=CacheTables) def", "field(default_factory=list) current_table: Optional[int] = 0 def update_tables(self, config, tables): 
self.tables = tables if", "update : Optional[str] = None fil: Optional[YAMLFile] = None old_key : Optional[str] =", "= field(default_factory=dict) table_names: List[str] = field(default_factory=list) current_table: Optional[int] = 0 def update_tables(self, config,", "field(default_factory=CacheTables) def update_files(self, config, a_file, b_file, tables): self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config, tables) @dataclass class", "field from file import YAMLFile @dataclass class CacheTables: tables: Dict[str, list] = field(default_factory=dict)", "= None @dataclass class DiffMode: a_cache_file : CacheFile b_cache_file : CacheFile cache_tables :", "Config: a_filename: str b_filename: str a_type: str b_type: str a_only: bool b_only: bool", "self.current_table = len(config.table_names) - 1 if self.current_table < 0: self.current_table = 0 @dataclass", ">= len(config.table_names): self.current_table = len(config.table_names) - 1 if self.current_table < 0: self.current_table =", "import Dict, List, Optional from dataclasses import dataclass, field from file import YAMLFile", "bool cache_file: CacheFile cache_tables: CacheTables = field(default_factory=CacheTables) def update_file(self, config, fil, tables): self.cache_file.update_file(fil)", "fil self.old_key = None @dataclass class DiffMode: a_cache_file : CacheFile b_cache_file : CacheFile", "tables): self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config, tables) @dataclass class FocusedMode: a_focused: bool cache_file: CacheFile cache_tables:", "Optional[int] = 0 def update_tables(self, config, tables): self.tables = tables if self.current_table >=", "update_file(self, config, fil, tables): self.cache_file.update_file(fil) self.cache_tables.update_tables(config, tables) @dataclass class Config: a_filename: str b_filename:", "- 1 if self.current_table < 0: self.current_table = 0 @dataclass 
class CacheFile: filename:", "str update : Optional[str] = None fil: Optional[YAMLFile] = None old_key : Optional[str]", ": CacheTables = field(default_factory=CacheTables) def update_files(self, config, a_file, b_file, tables): self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config,", "config, tables): self.tables = tables if self.current_table >= len(config.table_names): self.current_table = len(config.table_names) -", ": Optional[str] = None fil: Optional[YAMLFile] = None old_key : Optional[str] = None", "None @dataclass class DiffMode: a_cache_file : CacheFile b_cache_file : CacheFile cache_tables : CacheTables", "Optional from dataclasses import dataclass, field from file import YAMLFile @dataclass class CacheTables:", "b_type: str a_only: bool b_only: bool table_names: List[str] similarity_threshold: float max_entries: int ignore_suffix:", "fil): self.fil = fil self.old_key = None @dataclass class DiffMode: a_cache_file : CacheFile", "table_names: List[str] similarity_threshold: float max_entries: int ignore_suffix: List[str] a_updated: bool b_updated: bool a_update:", "CacheFile cache_tables : CacheTables = field(default_factory=CacheTables) def update_files(self, config, a_file, b_file, tables): self.a_cache_file.update_file(a_file)", "update_file(self, fil): self.fil = fil self.old_key = None @dataclass class DiffMode: a_cache_file :", "update_files(self, config, a_file, b_file, tables): self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config, tables) @dataclass class FocusedMode: a_focused:", "str b_filename: str a_type: str b_type: str a_only: bool b_only: bool table_names: List[str]", "tables if self.current_table >= len(config.table_names): self.current_table = len(config.table_names) - 1 if self.current_table <", "None fil: Optional[YAMLFile] = None old_key : Optional[str] = None def update_file(self, fil):", "Optional[str] = None def 
update_file(self, fil): self.fil = fil self.old_key = None @dataclass", ": CacheFile cache_tables : CacheTables = field(default_factory=CacheTables) def update_files(self, config, a_file, b_file, tables):", "field(default_factory=CacheTables) def update_file(self, config, fil, tables): self.cache_file.update_file(fil) self.cache_tables.update_tables(config, tables) @dataclass class Config: a_filename:", "str a_only: bool b_only: bool table_names: List[str] similarity_threshold: float max_entries: int ignore_suffix: List[str]", "str b_type: str a_only: bool b_only: bool table_names: List[str] similarity_threshold: float max_entries: int", ": Optional[str] = None def update_file(self, fil): self.fil = fil self.old_key = None", "= field(default_factory=list) current_table: Optional[int] = 0 def update_tables(self, config, tables): self.tables = tables", "self.current_table >= len(config.table_names): self.current_table = len(config.table_names) - 1 if self.current_table < 0: self.current_table", "b_cache_file : CacheFile cache_tables : CacheTables = field(default_factory=CacheTables) def update_files(self, config, a_file, b_file,", "self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config, tables) @dataclass class FocusedMode: a_focused: bool cache_file: CacheFile cache_tables: CacheTables", "0 def update_tables(self, config, tables): self.tables = tables if self.current_table >= len(config.table_names): self.current_table", "List, Optional from dataclasses import dataclass, field from file import YAMLFile @dataclass class", "str a_type: str b_type: str a_only: bool b_only: bool table_names: List[str] similarity_threshold: float", "b_only: bool table_names: List[str] similarity_threshold: float max_entries: int ignore_suffix: List[str] a_updated: bool b_updated:", "def update_file(self, config, fil, tables): self.cache_file.update_file(fil) self.cache_tables.update_tables(config, tables) @dataclass class 
Config: a_filename: str", "= None fil: Optional[YAMLFile] = None old_key : Optional[str] = None def update_file(self,", "len(config.table_names): self.current_table = len(config.table_names) - 1 if self.current_table < 0: self.current_table = 0", "typ: str update : Optional[str] = None fil: Optional[YAMLFile] = None old_key :", "Dict[str, list] = field(default_factory=dict) table_names: List[str] = field(default_factory=list) current_table: Optional[int] = 0 def", "CacheTables = field(default_factory=CacheTables) def update_files(self, config, a_file, b_file, tables): self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config, tables)", "@dataclass class CacheTables: tables: Dict[str, list] = field(default_factory=dict) table_names: List[str] = field(default_factory=list) current_table:", "class CacheTables: tables: Dict[str, list] = field(default_factory=dict) table_names: List[str] = field(default_factory=list) current_table: Optional[int]", "config, a_file, b_file, tables): self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config, tables) @dataclass class FocusedMode: a_focused: bool", "tables) @dataclass class FocusedMode: a_focused: bool cache_file: CacheFile cache_tables: CacheTables = field(default_factory=CacheTables) def", "float max_entries: int ignore_suffix: List[str] a_updated: bool b_updated: bool a_update: Optional[str] b_update: Optional[str]", "def update_tables(self, config, tables): self.tables = tables if self.current_table >= len(config.table_names): self.current_table =", "def update_files(self, config, a_file, b_file, tables): self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config, tables) @dataclass class FocusedMode:", "config, fil, tables): self.cache_file.update_file(fil) self.cache_tables.update_tables(config, tables) @dataclass class Config: a_filename: str 
b_filename: str", "tables): self.cache_file.update_file(fil) self.cache_tables.update_tables(config, tables) @dataclass class Config: a_filename: str b_filename: str a_type: str", "self.cache_file.update_file(fil) self.cache_tables.update_tables(config, tables) @dataclass class Config: a_filename: str b_filename: str a_type: str b_type:", "< 0: self.current_table = 0 @dataclass class CacheFile: filename: str typ: str update", ": CacheFile b_cache_file : CacheFile cache_tables : CacheTables = field(default_factory=CacheTables) def update_files(self, config,", "def update_file(self, fil): self.fil = fil self.old_key = None @dataclass class DiffMode: a_cache_file", "class FocusedMode: a_focused: bool cache_file: CacheFile cache_tables: CacheTables = field(default_factory=CacheTables) def update_file(self, config,", "dataclasses import dataclass, field from file import YAMLFile @dataclass class CacheTables: tables: Dict[str,", "from dataclasses import dataclass, field from file import YAMLFile @dataclass class CacheTables: tables:", "= None old_key : Optional[str] = None def update_file(self, fil): self.fil = fil", "a_filename: str b_filename: str a_type: str b_type: str a_only: bool b_only: bool table_names:", "similarity_threshold: float max_entries: int ignore_suffix: List[str] a_updated: bool b_updated: bool a_update: Optional[str] b_update:", "field(default_factory=dict) table_names: List[str] = field(default_factory=list) current_table: Optional[int] = 0 def update_tables(self, config, tables):", "bool b_only: bool table_names: List[str] similarity_threshold: float max_entries: int ignore_suffix: List[str] a_updated: bool", "from typing import Dict, List, Optional from dataclasses import dataclass, field from file", "= len(config.table_names) - 1 if self.current_table < 0: self.current_table = 0 @dataclass class", "= 0 def update_tables(self, config, tables): self.tables = tables if self.current_table >= len(config.table_names):", "a_file, b_file, tables): 
self.a_cache_file.update_file(a_file) self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config, tables) @dataclass class FocusedMode: a_focused: bool cache_file:", "current_table: Optional[int] = 0 def update_tables(self, config, tables): self.tables = tables if self.current_table", "Optional[YAMLFile] = None old_key : Optional[str] = None def update_file(self, fil): self.fil =", "file import YAMLFile @dataclass class CacheTables: tables: Dict[str, list] = field(default_factory=dict) table_names: List[str]", "self.cache_tables.update_tables(config, tables) @dataclass class Config: a_filename: str b_filename: str a_type: str b_type: str", "= 0 @dataclass class CacheFile: filename: str typ: str update : Optional[str] =", "cache_file: CacheFile cache_tables: CacheTables = field(default_factory=CacheTables) def update_file(self, config, fil, tables): self.cache_file.update_file(fil) self.cache_tables.update_tables(config,", "a_only: bool b_only: bool table_names: List[str] similarity_threshold: float max_entries: int ignore_suffix: List[str] a_updated:", "0 @dataclass class CacheFile: filename: str typ: str update : Optional[str] = None", "if self.current_table >= len(config.table_names): self.current_table = len(config.table_names) - 1 if self.current_table < 0:", "from file import YAMLFile @dataclass class CacheTables: tables: Dict[str, list] = field(default_factory=dict) table_names:", "self.tables = tables if self.current_table >= len(config.table_names): self.current_table = len(config.table_names) - 1 if", "CacheFile b_cache_file : CacheFile cache_tables : CacheTables = field(default_factory=CacheTables) def update_files(self, config, a_file,", "cache_tables: CacheTables = field(default_factory=CacheTables) def update_file(self, config, fil, tables): self.cache_file.update_file(fil) self.cache_tables.update_tables(config, tables) @dataclass", "a_focused: bool cache_file: CacheFile cache_tables: CacheTables = field(default_factory=CacheTables) 
def update_file(self, config, fil, tables):", "Optional[str] = None fil: Optional[YAMLFile] = None old_key : Optional[str] = None def", "self.b_cache_file.update_file(b_file) self.cache_tables.update_tables(config, tables) @dataclass class FocusedMode: a_focused: bool cache_file: CacheFile cache_tables: CacheTables =", "update_tables(self, config, tables): self.tables = tables if self.current_table >= len(config.table_names): self.current_table = len(config.table_names)", "= tables if self.current_table >= len(config.table_names): self.current_table = len(config.table_names) - 1 if self.current_table", "CacheFile cache_tables: CacheTables = field(default_factory=CacheTables) def update_file(self, config, fil, tables): self.cache_file.update_file(fil) self.cache_tables.update_tables(config, tables)", "@dataclass class CacheFile: filename: str typ: str update : Optional[str] = None fil:", "if self.current_table < 0: self.current_table = 0 @dataclass class CacheFile: filename: str typ:", "dataclass, field from file import YAMLFile @dataclass class CacheTables: tables: Dict[str, list] =", "tables) @dataclass class Config: a_filename: str b_filename: str a_type: str b_type: str a_only:", "CacheFile: filename: str typ: str update : Optional[str] = None fil: Optional[YAMLFile] =", "CacheTables: tables: Dict[str, list] = field(default_factory=dict) table_names: List[str] = field(default_factory=list) current_table: Optional[int] =", "= None def update_file(self, fil): self.fil = fil self.old_key = None @dataclass class", "a_cache_file : CacheFile b_cache_file : CacheFile cache_tables : CacheTables = field(default_factory=CacheTables) def update_files(self,", "str typ: str update : Optional[str] = None fil: Optional[YAMLFile] = None old_key", "class DiffMode: a_cache_file : CacheFile b_cache_file : CacheFile cache_tables : CacheTables = field(default_factory=CacheTables)", "len(config.table_names) - 1 if self.current_table < 0: self.current_table = 0 @dataclass class 
CacheFile:", "class CacheFile: filename: str typ: str update : Optional[str] = None fil: Optional[YAMLFile]", "None def update_file(self, fil): self.fil = fil self.old_key = None @dataclass class DiffMode:", "table_names: List[str] = field(default_factory=list) current_table: Optional[int] = 0 def update_tables(self, config, tables): self.tables", "None old_key : Optional[str] = None def update_file(self, fil): self.fil = fil self.old_key", "List[str] = field(default_factory=list) current_table: Optional[int] = 0 def update_tables(self, config, tables): self.tables =", "self.fil = fil self.old_key = None @dataclass class DiffMode: a_cache_file : CacheFile b_cache_file", "import YAMLFile @dataclass class CacheTables: tables: Dict[str, list] = field(default_factory=dict) table_names: List[str] =", "@dataclass class Config: a_filename: str b_filename: str a_type: str b_type: str a_only: bool", "= fil self.old_key = None @dataclass class DiffMode: a_cache_file : CacheFile b_cache_file :", "tables: Dict[str, list] = field(default_factory=dict) table_names: List[str] = field(default_factory=list) current_table: Optional[int] = 0", "@dataclass class DiffMode: a_cache_file : CacheFile b_cache_file : CacheFile cache_tables : CacheTables =", "tables): self.tables = tables if self.current_table >= len(config.table_names): self.current_table = len(config.table_names) - 1" ]
[ "Kassimali uses. self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ - 1), 0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX - 1), 0.05) self.assertLess(abs(frame.Nodes['B'].RxnFZ['Combo", "1. Member loads aligned in global directions. 2. A member internal hinge. 3.", "the midspan of the beam SimpleBeam.add_member_pt_load(\"M1\", \"Fy\", 5, 7 * 12) # Analyze", "35.45 # The reactions were compared manually to Kassimali's solution and the shears", "finite element method is a little more accurate than the simplified method #", "\"\"\" import unittest from PyNite import FEModel3D import math import sys from io", "= SimpleBeam.Nodes[node_name].RxnFY['Combo 1'] # Two decimal place accuracy requires +/-0.5% accuracy # one", "the model frame.analyze() # subTest context manager prints which portion fails, if any", "normal sys.stdout = sys.__stdout__ def test_XY_gravity_load(self): # A First Course in the Finite", "frame.add_member_dist_load('ED', 'FX', -1.5, -1.5) # from PyNite.Visualization import render_model # render_model(frame, text_height=0.5, case='Case", "# Check displacements at N3 and N4 correct_displacements = [('N3', {'DY': -6.666757, 'RX':", "# one decimal place requires +/-5% self.assertAlmostEqual(node.DY['Combo 1']/values['DY'], 1.0, 2) self.assertAlmostEqual(node.RX['Combo 1']/values['RX'], 1.0,", "io import StringIO class Test_2D_Frame(unittest.TestCase): ''' Tests of analyzing 2D frames. 
''' def", "Iy, Iz, J, A) frame.add_member('M3', 'N4', 'N3', E, G, Iy, Iz, J, A)", "frame.add_node('N2', 0, 7.667, 0) # ft frame.add_node('N3', 7.75, 7.667, 0) # ft frame.add_node('N4',", "(14 ft = 168 in apart) SimpleBeam.add_node(\"N1\", 0, 0, 0) SimpleBeam.add_node(\"N2\", 0, 0,", "of the beam correct_reactions = [('N1', -2.5), ('N2', -2.5)] for node_name, rxn in", "# Check reactions at N1 and N6 correct_reactions = [('N1', {'RxnFZ': 11.6877, 'RxnFY':", "frame.analyze() AZ = -8.63 AX = 15.46 BZ = -11.37 BX = 35.45", "loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4', 'FY', -30) # Analyze the model frame.analyze() #", "\"\"\" MIT License Copyright (c) 2020 <NAME>, SE; tamalone1 \"\"\" import unittest from", "2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'], 1.0, 2) def test_XY_member_ptload(self): frame =", "# Create members (all members will have the same properties in this example)", "0) # ft frame.add_node('N4', 7.75, 0, 0) # ft # Add supports frame.def_support('N1',", "FEModel3D() # Add nodes (14 ft = 168 in apart) SimpleBeam.add_node(\"N1\", 0, 0,", "internal hinge. 3. A point load at the end of a member. The", "big to be a rounding error alone. 
# Likely the finite element method", "= 29000*12**2 G = 11200*12**2 Iy = 17.3/12**4 Iz = 204/12**4 J =", "# A First Course in the Finite Element Method, 4th Edition # <NAME>", "frame.add_node('N5', 50*12, 30*12, 0) frame.add_node('N6', 50*12, 0, 0) # Define the supports frame.def_support('N1',", "frame.add_member('M3', 'N3', 'N4', E, G, Iy, Iz, J, A) frame.add_member('M4', 'N4', 'N5', E,", "= 12 frame.add_member('M1', 'N1', 'N2', E, G, Iz, Iy, J, A) frame.add_member('M2', 'N2',", "AZ = -8.63 AX = 15.46 BZ = -11.37 BX = 35.45 #", "A) # Provide simple supports SimpleBeam.def_support(\"N1\", True, True, True, False, False, True) SimpleBeam.def_support(\"N2\",", "= 29000 G = 11400 Iy = 100 Iz = 150 J =", "and N6 correct_reactions = [('N1', {'RxnFZ': 11.6877, 'RxnFY': 30, 'RxnMX': 1810.0745}), ('N6', {'RxnFZ':", "Iy, Iz, J, A) frame.add_member('CE', 'C', 'E', E, G, Iy, Iz, J, A)", "1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'], 1.0, 2) # Check displacements", "A First Course in the Finite Element Method, 4th Edition # <NAME> #", "def test_XY_member_ptload(self): frame = FEModel3D() # Add nodes frame.add_node('N1', 0, 0, 0) #", "to change things up a bit. \"\"\" frame = FEModel3D() frame.add_node('A', 0, 0,", "= FEModel3D() # Define the nodes frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0, 30*12,", "the end of a member. 
The example will be run in the XZ", "support_DY=True, support_DZ=True) frame.def_support('B', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('E', support_DY=True) frame.def_releases('CE', Rzj=True) frame.add_member_pt_load('AC', 'FZ', 20,", "= 250 Iy = 250 Iz = 200 E = 30000 G =", "to normal sys.stdout = sys.__stdout__ def test_XY_gravity_load(self): # A First Course in the", "Iz, J, A) frame.add_member('M3', 'N4', 'N3', E, G, Iy, Iz, J, A) #", "self.assertAlmostEqual(node.RX['Combo 1']/values['RX'], 1.0, 2) def test_XZ_ptload(self): # A simply supported beam with a", "up a bit. \"\"\" frame = FEModel3D() frame.add_node('A', 0, 0, 0) frame.add_node('B', 0,", "0, 24) frame.add_node('E', 24, 0, 12) E = 29000*12**2 G = 11200*12**2 Iy", "# A simply supported beam with a point load. # Units used in", "in correct_reactions: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place accuracy requires", "requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.DY['Combo 1']/values['DY'], 1.0, 2)", "= 30000 G = 250 A = 12 frame.add_member('M1', 'N1', 'N2', E, G,", "168 in apart) SimpleBeam.add_node(\"N1\", 0, 0, 0) SimpleBeam.add_node(\"N2\", 0, 0, 168) # Add", "0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX - 1), 0.05) self.assertLess(abs(frame.Nodes['B'].RxnFZ['Combo 1']/BZ - 1), 0.7) self.assertLess(abs(frame.Nodes['B'].RxnFX['Combo 1']/BX", "-1.5, -1.5) # from PyNite.Visualization import render_model # render_model(frame, text_height=0.5, case='Case 1') frame.analyze()", "7.667, 0) # ft frame.add_node('N4', 7.75, 0, 0) # ft # Add supports", "Copyright (c) 2020 <NAME>, SE; tamalone1 \"\"\" import unittest from PyNite import FEModel3D", "for node_name, rxn in correct_reactions: with self.subTest(node=node_name): calculated_reaction = SimpleBeam.Nodes[node_name].RxnFY['Combo 1'] # Two", "Define the nodes frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 
0,", "each end of the beam correct_reactions = [('N1', -2.5), ('N2', -2.5)] for node_name,", "frame.add_node('C', 12, 0, 0) frame.add_node('D', 12, 0, 24) frame.add_node('E', 24, 0, 12) E", "import StringIO class Test_2D_Frame(unittest.TestCase): ''' Tests of analyzing 2D frames. ''' def setUp(self):", "True, True, True, True) frame.def_support('N6', True, True, True, True, True, True) # Create", "frame.add_node('N3', 15*12, 40*12, 0) frame.add_node('N4', 35*12, 40*12, 0) frame.add_node('N5', 50*12, 30*12, 0) frame.add_node('N6',", "Iz, J, A) frame.add_member('M4', 'N4', 'N5', E, G, Iy, Iz, J, A) frame.add_member('M5',", "= 250 SimpleBeam.add_member(\"M1\", \"N1\", \"N2\", E, G, Iy, Iz, J, A) # Provide", "'N2', 'N3', E, G, Iy, Iz, J, A) frame.add_member('M3', 'N4', 'N3', E, G,", "Iy, Iz, J, A) frame.add_member('M4', 'N4', 'N5', E, G, Iy, Iz, J, A)", "0, 168) # Add a beam with the following properties: A = 20", "# Print reactions at each end of the beam correct_reactions = [('N1', -2.5),", "''' Tests of analyzing 2D frames. ''' def setUp(self): # Suppress printed output", "ft^4 J = 0.346/12**4 # ft^4 A = 5.26/12**2 # in^2 # Define", "2) def test_XY_member_ptload(self): frame = FEModel3D() # Add nodes frame.add_node('N1', 0, 0, 0)", "one decimal place requires +/-5% self.assertAlmostEqual(calculated_reaction/rxn, 1.0, 2) def test_Kassimali_3_35(self): \"\"\" Tests against", "to be a rounding error alone. # Likely the finite element method is", "29000 G = 11400 Iy = 100 Iz = 150 J = 250", "7.75, 7.667, 0) # ft frame.add_node('N4', 7.75, 0, 0) # ft # Add", "A simply supported beam with a point load. 
# Units used in this", "Provide simple supports SimpleBeam.def_support(\"N1\", True, True, True, False, False, True) SimpleBeam.def_support(\"N2\", True, True,", "True, True, True, False, False, False) # Add a point load of 5", "Iy, J, A) # Add nodal loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4', 'FY', -30)", "SimpleBeam.def_support(\"N2\", True, True, True, False, False, False) # Add a point load of", "frame.add_node('N3', 7.75, 7.667, 0) # ft frame.add_node('N4', 7.75, 0, 0) # ft #", "1'] # Update the expected value to an appropriate precision expected_RZ = 0.00022794540510395617", "True, True, True, True, False) # Define material and section properties for a", "correct_reactions = [('N1', -2.5), ('N2', -2.5)] for node_name, rxn in correct_reactions: with self.subTest(node=node_name):", "-0.032})] for name, values in correct_displacements: with self.subTest(node=name): node = frame.Nodes[name] # Two", "# ft^4 A = 5.26/12**2 # in^2 # Define members frame.add_member('M1', 'N1', 'N2',", "= [('N1', {'RxnFX': 11.6877, 'RxnFY': 30, 'RxnMZ': -1810.0745}), ('N6', {'RxnFX': -11.6877, 'RxnFY': 30,", "MIT License Copyright (c) 2020 <NAME>, SE; tamalone1 \"\"\" import unittest from PyNite", "frame.def_support('N1', True, True, True, True, True, False) frame.def_support('N4', True, True, True, True, True,", "Element Method, 4th Edition # <NAME> # Problem 5.30 # Units for this", "requires +/-5% self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'], 1.0,", "A First Course in the Finite Element Method, 4th Edition # Daryl <NAME>", "G = 250 A = 12 frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz,", "G, Iy, Iz, J, A) # Add nodal loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4',", "Rzj=True) frame.add_member_pt_load('AC', 'FZ', 20, 12) frame.add_member_dist_load('CE', 'FX', -1.5, -1.5) 
frame.add_member_dist_load('ED', 'FX', -1.5, -1.5)", "# Analyze the model frame.analyze() # subTest context manager prints which portion fails,", "SimpleBeam.add_node(\"N1\", 0, 0, 0) SimpleBeam.add_node(\"N2\", 0, 0, 168) # Add a beam with", "following features: 1. Member loads aligned in global directions. 2. A member internal", "Reset the print function to normal sys.stdout = sys.__stdout__ def test_XY_gravity_load(self): # A", "0) frame.add_node('N5', 50*12, 30*12, 0) frame.add_node('N6', 50*12, 0, 0) # Define the supports", "Iz = 200 E = 30000 G = 250 A = 12 frame.add_member('M1',", "test_XY_member_ptload(self): frame = FEModel3D() # Add nodes frame.add_node('N1', 0, 0, 0) # ft", "0) # ft frame.add_node('N3', 7.75, 7.667, 0) # ft frame.add_node('N4', 7.75, 0, 0)", "requires +/-5% self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'], 1.0,", "2) self.assertAlmostEqual(node.RX['Combo 1']/values['RX'], 1.0, 2) def test_XZ_ptload(self): # A simply supported beam with", "- 1), 0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX - 1), 0.05) self.assertLess(abs(frame.Nodes['B'].RxnFZ['Combo 1']/BZ - 1), 0.7)", "supports SimpleBeam.def_support(\"N1\", True, True, True, False, False, True) SimpleBeam.def_support(\"N2\", True, True, True, False,", "apart) SimpleBeam.add_node(\"N1\", 0, 0, 0) SimpleBeam.add_node(\"N2\", 0, 0, 168) # Add a beam", "Iy, Iz, J, A) # Add loads to the frame frame.add_member_pt_load('M2', 'Fy', -5,", "Add nodal loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4', 'FY', -30) # Analyze the model", "Check displacements at N3 and N4 correct_displacements = [('N3', {'DY': -6.666757, 'RX': 0.032}),", "17.3/12**4 Iz = 204/12**4 J = 0.3/12**4 A = 7.65/12**2 frame.add_member('AC', 'A', 'C',", "SimpleBeam.add_member_pt_load(\"M1\", \"Fy\", 5, 7 * 12) # Analyze the beam 
SimpleBeam.analyze(False) # Print", "render_model(frame, text_height=0.5, case='Case 1') frame.analyze() AZ = -8.63 AX = 15.46 BZ =", "True, True, True) frame.def_support('N6', True, True, True, True, True, True) # Create members", "sys from io import StringIO class Test_2D_Frame(unittest.TestCase): ''' Tests of analyzing 2D frames.", "1.0, 2) # Check displacements at N3 and N4 correct_displacements = [('N3', {'DY':", "J, A) # Add nodal loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4', 'FY', -30) #", "-0.024) # W8x24 self-weight # Analyze the frame frame.analyze() calculated_RZ = frame.Nodes['N1'].RZ['Combo 1']", "J, A) frame.add_member('BD', 'B', 'D', E, G, Iy, Iz, J, A) frame.add_member('CE', 'C',", "A) # Add loads to the frame frame.add_member_pt_load('M2', 'Fy', -5, 7.75/2) # 5", "global directions. 2. A member internal hinge. 3. A point load at the", "= StringIO() def tearDown(self): # Reset the print function to normal sys.stdout =", "rounding error alone. # Likely the finite element method is a little more", "ft = 168 in apart) SimpleBeam.add_node(\"N1\", 0, 0, 0) SimpleBeam.add_node(\"N2\", 0, 0, 168)", "-30) # Analyze the model frame.analyze() # subTest context manager prints which portion", "change things up a bit. \"\"\" frame = FEModel3D() frame.add_node('A', 0, 0, 0)", "sys.stdout = sys.__stdout__ def test_XY_gravity_load(self): # A First Course in the Finite Element", "frame.add_member('M3', 'N4', 'N3', E, G, Iy, Iz, J, A) # Add loads to", "BX = 35.45 # The reactions were compared manually to Kassimali's solution and", "'N1', 'N2', E, G, Iy, Iz, J, A) frame.add_member('M2', 'N2', 'N3', E, G,", "of 5 kips at the midspan of the beam SimpleBeam.add_member_pt_load(\"M1\", \"Fy\", 5, 7", "the finite element method is a little more accurate than the simplified method", "are kips and inches frame = FEModel3D() # Define the nodes frame.add_node('N1', 0,", "than the simplified method # Kassimali uses. 
self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ - 1), 0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo", "J, A) frame.add_member('M3', 'N4', 'N3', E, G, Iy, Iz, J, A) # Add", "0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 0, 40*12, 15*12) frame.add_node('N4', 0, 40*12, 35*12)", "J, A) frame.add_member('M4', 'N4', 'N5', E, G, Iy, Iz, J, A) frame.add_member('M5', 'N5',", "frame.add_member('M5', 'N5', 'N6', E, G, Iy, Iz, J, A) # Add nodal loads", "E, G, Iy, Iz, J, A) frame.add_member('CE', 'C', 'E', E, G, Iy, Iz,", "[('N1', -2.5), ('N2', -2.5)] for node_name, rxn in correct_reactions: with self.subTest(node=node_name): calculated_reaction =", "SE; tamalone1 \"\"\" import unittest from PyNite import FEModel3D import math import sys", "# Define the nodes frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3',", "250 A = 12 frame.add_member('M1', 'N1', 'N2', E, G, Iz, Iy, J, A)", "in the XZ plane to change things up a bit. \"\"\" frame =", "appropriate precision expected_RZ = 0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0, 2) def test_YZ_gravity_load(self): # A First", "requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'], 1.0, 2)", "24) frame.add_node('C', 12, 0, 0) frame.add_node('D', 12, 0, 24) frame.add_node('E', 24, 0, 12)", "Analyze the model frame.analyze() # subTest context manager prints which portion fails, if", "'C', E, G, Iy, Iz, J, A) frame.add_member('BD', 'B', 'D', E, G, Iy,", "ft frame.add_node('N2', 0, 7.667, 0) # ft frame.add_node('N3', 7.75, 7.667, 0) # ft", "model frame.analyze() # subTest context manager prints which portion fails, if any #", "FEModel3D() frame.add_node('A', 0, 0, 0) frame.add_node('B', 0, 0, 24) frame.add_node('C', 12, 0, 0)", "E = 29000*12**2 # ksf G = 1111200*12**2 # ksf Iy = 18.3/12**4", "def test_XZ_ptload(self): # A simply supported beam with a point load. 
# Units", "True, True, True, False) # Define material and section properties for a W8x24", "License Copyright (c) 2020 <NAME>, SE; tamalone1 \"\"\" import unittest from PyNite import", "subTest context manager prints which portion fails, if any correct_values = [('N1', {'RxnFX':", "= 35.45 # The reactions were compared manually to Kassimali's solution and the", "accuracy # one decimal place requires +/-5% self.assertAlmostEqual(calculated_reaction/rxn, 1.0, 2) def test_Kassimali_3_35(self): \"\"\"", "def setUp(self): # Suppress printed output temporarily sys.stdout = StringIO() def tearDown(self): #", "Course in the Finite Element Method, 4th Edition # Daryl <NAME> # Problem", "a little big to be a rounding error alone. # Likely the finite", "self.assertAlmostEqual(node.DY['Combo 1']/values['DY'], 1.0, 2) self.assertAlmostEqual(node.RX['Combo 1']/values['RX'], 1.0, 2) def test_XZ_ptload(self): # A simply", "correct_values = [('N1', {'RxnFX': 11.6877, 'RxnFY': 30, 'RxnMZ': -1810.0745}), ('N6', {'RxnFX': -11.6877, 'RxnFY':", "properties for a W8x24 E = 29000*12**2 # ksf G = 1111200*12**2 #", "ksf G = 1111200*12**2 # ksf Iy = 18.3/12**4 # ft^4 Iz =", "frame.add_member('M1', 'N1', 'N2', E, G, Iz, Iy, J, A) frame.add_member('M2', 'N2', 'N3', E,", "E, G, Iy, Iz, J, A) frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz,", "it's a little big to be a rounding error alone. # Likely the", "features: 1. Member loads aligned in global directions. 2. A member internal hinge.", "Tests of analyzing 2D frames. ''' def setUp(self): # Suppress printed output temporarily", "in global directions. 2. A member internal hinge. 3. 
A point load at", "'N5', 'N6', E, G, Iz, Iy, J, A) # Add nodal loads frame.add_node_load('N3',", "Print reactions at each end of the beam correct_reactions = [('N1', -2.5), ('N2',", "Check reactions at N1 and N6 correct_reactions = [('N1', {'RxnFZ': 11.6877, 'RxnFY': 30,", "Iy = 18.3/12**4 # ft^4 Iz = 82.7/12**4 # ft^4 J = 0.346/12**4", "= 250 A = 12 frame.add_member('M1', 'N1', 'N2', E, G, Iz, Iy, J,", "'RxnFY': 30, 'RxnMX': -1810.0745})] for name, values in correct_reactions: with self.subTest(node=name): node =", "kips SimpleBeam = FEModel3D() # Add nodes (14 ft = 168 in apart)", "# Units used in this example are inches, and kips SimpleBeam = FEModel3D()", "Add a point load of 5 kips at the midspan of the beam", "\"\"\" frame = FEModel3D() frame.add_node('A', 0, 0, 0) frame.add_node('B', 0, 0, 24) frame.add_node('C',", "will have the same properties in this example) J = 250 Iy =", "context manager prints which portion fails, if any # Check reactions at N1", "correct_displacements: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place accuracy requires +/-0.5%", "frame.add_node('B', 0, 0, 24) frame.add_node('C', 12, 0, 0) frame.add_node('D', 12, 0, 24) frame.add_node('E',", "nodes (14 ft = 168 in apart) SimpleBeam.add_node(\"N1\", 0, 0, 0) SimpleBeam.add_node(\"N2\", 0,", "0, 0) frame.add_node('B', 0, 0, 24) frame.add_node('C', 12, 0, 0) frame.add_node('D', 12, 0,", "requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(calculated_reaction/rxn, 1.0, 2) def", "sys.__stdout__ def test_XY_gravity_load(self): # A First Course in the Finite Element Method, 4th", "18.3/12**4 # ft^4 Iz = 82.7/12**4 # ft^4 J = 0.346/12**4 # ft^4", "J, A) frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz, J, A) frame.add_member('M3', 'N4',", "Daryl <NAME> # Problem 5.30 # Units for this model are kips and", "frame.add_member('BD', 'B', 'D', E, G, Iy, Iz, J, A) frame.add_member('CE', 'C', 'E', E,", "frame = FEModel3D() # Add nodes 
frame.add_node('N1', 0, 0, 0) # ft frame.add_node('N2',", "inches frame = FEModel3D() # Define the nodes frame.add_node('N1', 0, 0, 0) frame.add_node('N2',", "True, False, False, False) # Add a point load of 5 kips at", "E = 29000 G = 11400 Iy = 100 Iz = 150 J", "for a W8x24 E = 29000*12**2 # ksf G = 1111200*12**2 # ksf", "PyNite import FEModel3D import math import sys from io import StringIO class Test_2D_Frame(unittest.TestCase):", "0, 0) frame.add_node('D', 12, 0, 24) frame.add_node('E', 24, 0, 12) E = 29000*12**2", "from io import StringIO class Test_2D_Frame(unittest.TestCase): ''' Tests of analyzing 2D frames. '''", "calculated_RZ = frame.Nodes['N1'].RZ['Combo 1'] # Update the expected value to an appropriate precision", "values in correct_displacements: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place accuracy", "J, A) frame.add_member('CE', 'C', 'E', E, G, Iy, Iz, J, A) frame.add_member('ED', 'E',", "# subTest context manager prints which portion fails, if any correct_values = [('N1',", "frame.add_node('N6', 0, 0, 50*12) # Define the supports frame.def_support('N1', True, True, True, True,", "True, False, False, True) SimpleBeam.def_support(\"N2\", True, True, True, False, False, False) # Add", "1']/values['DY'], 1.0, 2) self.assertAlmostEqual(node.RX['Combo 1']/values['RX'], 1.0, 2) def test_XZ_ptload(self): # A simply supported", "beam correct_reactions = [('N1', -2.5), ('N2', -2.5)] for node_name, rxn in correct_reactions: with", "support_DY=True, support_DZ=True) frame.def_support('E', support_DY=True) frame.def_releases('CE', Rzj=True) frame.add_member_pt_load('AC', 'FZ', 20, 12) frame.add_member_dist_load('CE', 'FX', -1.5,", "one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2)", "12 frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz, J, A) frame.add_member('M2', 'N2', 'N3',", "this example are inches, and kips 
SimpleBeam = FEModel3D() # Add nodes (14", "-5, 7.75/2) # 5 kips @ midspan frame.add_member_dist_load('M2', 'Fy', -0.024, -0.024) # W8x24", "accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'],", "import sys from io import StringIO class Test_2D_Frame(unittest.TestCase): ''' Tests of analyzing 2D", "place requires +/-5% self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'],", "directions. 2. A member internal hinge. 3. A point load at the end", "('N2', -2.5)] for node_name, rxn in correct_reactions: with self.subTest(node=node_name): calculated_reaction = SimpleBeam.Nodes[node_name].RxnFY['Combo 1']", "and section properties for a W8x24 E = 29000*12**2 # ksf G =", "False, False, False) # Add a point load of 5 kips at the", "'E', E, G, Iy, Iz, J, A) frame.add_member('ED', 'E', 'D', E, G, Iy,", "place requires +/-5% self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'],", "frames. 
''' def setUp(self): # Suppress printed output temporarily sys.stdout = StringIO() def", "{'RxnFX': -11.6877, 'RxnFY': 30, 'RxnMZ': 1810.0745})] for name, values in correct_values: with self.subTest(node=name):", "24, 0, 12) E = 29000*12**2 G = 11200*12**2 Iy = 17.3/12**4 Iz", "G, Iy, Iz, J, A) frame.def_support('A', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('B', support_DX=True, support_DY=True, support_DZ=True)", "place accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(calculated_reaction/rxn, 1.0,", "'N6', E, G, Iz, Iy, J, A) # Add nodal loads frame.add_node_load('N3', 'FY',", "1']/values['RX'], 1.0, 2) def test_XZ_ptload(self): # A simply supported beam with a point", "3. A point load at the end of a member. The example will", "frame.add_node('N4', 35*12, 40*12, 0) frame.add_node('N5', 50*12, 30*12, 0) frame.add_node('N6', 50*12, 0, 0) #", "# Define members frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz, J, A) frame.add_member('M2',", "'RxnFY': 30, 'RxnMX': 1810.0745}), ('N6', {'RxnFZ': -11.6877, 'RxnFY': 30, 'RxnMX': -1810.0745})] for name,", "FEModel3D import math import sys from io import StringIO class Test_2D_Frame(unittest.TestCase): ''' Tests", "= 0.3/12**4 A = 7.65/12**2 frame.add_member('AC', 'A', 'C', E, G, Iy, Iz, J,", "displacements at N3 and N4 correct_displacements = [('N3', {'DY': -6.666757, 'RX': 0.032}), ('N4',", "for this model are kips and inches frame = FEModel3D() # Define the", "self.subTest(node=name): node = frame.Nodes[name] # Two decimal place accuracy requires +/-0.5% accuracy #", "{'RxnFZ': -11.6877, 'RxnFY': 30, 'RxnMX': -1810.0745})] for name, values in correct_reactions: with self.subTest(node=name):", "<NAME> # Problem 5.30 # Units for this model are kips and inches", "any correct_values = [('N1', {'RxnFX': 11.6877, 'RxnFY': 30, 'RxnMZ': -1810.0745}), ('N6', {'RxnFX': -11.6877,", "# The reactions were compared manually to Kassimali's solution and the shears 
were", "Iz, J, A) frame.add_member('ED', 'E', 'D', E, G, Iy, Iz, J, A) frame.def_support('A',", "FEModel3D() # Define the nodes frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0, 30*12, 0)", "Likely the finite element method is a little more accurate than the simplified", "accurate than the simplified method # Kassimali uses. self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ - 1), 0.1)", "<NAME>, SE; tamalone1 \"\"\" import unittest from PyNite import FEModel3D import math import", "Add supports frame.def_support('N1', True, True, True, True, True, False) frame.def_support('N4', True, True, True,", "this example) J = 250 Iy = 250 Iz = 200 E =", "used in this example are inches, and kips SimpleBeam = FEModel3D() # Add", "self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ - 1), 0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX - 1), 0.05) self.assertLess(abs(frame.Nodes['B'].RxnFZ['Combo 1']/BZ -", "correct_reactions: with self.subTest(node=node_name): calculated_reaction = SimpleBeam.Nodes[node_name].RxnFY['Combo 1'] # Two decimal place accuracy requires", "a member. 
The example will be run in the XZ plane to change", "0, 12) E = 29000*12**2 G = 11200*12**2 Iy = 17.3/12**4 Iz =", "1']/AX - 1), 0.05) self.assertLess(abs(frame.Nodes['B'].RxnFZ['Combo 1']/BZ - 1), 0.7) self.assertLess(abs(frame.Nodes['B'].RxnFX['Combo 1']/BX - 1),", "calculated_reaction = SimpleBeam.Nodes[node_name].RxnFY['Combo 1'] # Two decimal place accuracy requires +/-0.5% accuracy #", "1.0, 2) def test_XY_member_ptload(self): frame = FEModel3D() # Add nodes frame.add_node('N1', 0, 0,", "frame.add_member('M5', 'N5', 'N6', E, G, Iz, Iy, J, A) # Add nodal loads", "ksf Iy = 18.3/12**4 # ft^4 Iz = 82.7/12**4 # ft^4 J =", "Iz, J, A) frame.def_support('A', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('B', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('E', support_DY=True)", "requires +/-5% self.assertAlmostEqual(node.DY['Combo 1']/values['DY'], 1.0, 2) self.assertAlmostEqual(node.RX['Combo 1']/values['RX'], 1.0, 2) def test_XZ_ptload(self): #", "+/-5% self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'], 1.0, 2)", "0, 0, 0) # ft frame.add_node('N2', 0, 7.667, 0) # ft frame.add_node('N3', 7.75,", "35*12) frame.add_node('N5', 0, 30*12, 50*12) frame.add_node('N6', 0, 0, 50*12) # Define the supports", "G, Iz, Iy, J, A) # Add nodal loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4',", "values in correct_reactions: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place accuracy", "test_XZ_ptload(self): # A simply supported beam with a point load. # Units used", "the shears were within # 10% and 7% respectively. 
That seems like it's", "support_DZ=True) frame.def_support('B', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('E', support_DY=True) frame.def_releases('CE', Rzj=True) frame.add_member_pt_load('AC', 'FZ', 20, 12)", "frame.analyze() calculated_RZ = frame.Nodes['N1'].RZ['Combo 1'] # Update the expected value to an appropriate", "= 250 A = 12 frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz, J,", "import math import sys from io import StringIO class Test_2D_Frame(unittest.TestCase): ''' Tests of", "0) # ft frame.add_node('N2', 0, 7.667, 0) # ft frame.add_node('N3', 7.75, 7.667, 0)", "a little more accurate than the simplified method # Kassimali uses. self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ", "the Finite Element Method, 4th Edition # Daryl <NAME> # Problem 5.30 #", "True, True, True, True) # Create members (all members will have the same", "Iz, J, A) # Add loads to the frame frame.add_member_pt_load('M2', 'Fy', -5, 7.75/2)", "Iy, Iz, J, A) frame.add_member('ED', 'E', 'D', E, G, Iy, Iz, J, A)", "simple supports SimpleBeam.def_support(\"N1\", True, True, True, False, False, True) SimpleBeam.def_support(\"N2\", True, True, True,", "24) frame.add_node('E', 24, 0, 12) E = 29000*12**2 G = 11200*12**2 Iy =", "'FZ', 20, 12) frame.add_member_dist_load('CE', 'FX', -1.5, -1.5) frame.add_member_dist_load('ED', 'FX', -1.5, -1.5) # from", "\"N1\", \"N2\", E, G, Iy, Iz, J, A) # Provide simple supports SimpleBeam.def_support(\"N1\",", "G, Iy, Iz, J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iy, Iz, J,", "E, G, Iy, Iz, J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iz, Iy,", "G = 11200*12**2 Iy = 17.3/12**4 Iz = 204/12**4 J = 0.3/12**4 A", "self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'], 1.0, 2) # Check displacements at N3", "from PyNite import FEModel3D import math import sys from io import StringIO class", "5 kips at the midspan of the beam 
SimpleBeam.add_member_pt_load(\"M1\", \"Fy\", 5, 7 *", "2D frames. ''' def setUp(self): # Suppress printed output temporarily sys.stdout = StringIO()", "(all members will have the same properties in this example) J = 250", "Tests against Kassimali example 3.35. This example was selected because it allows us", "0, 0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 0, 40*12, 15*12) frame.add_node('N4', 0,", "run in the XZ plane to change things up a bit. \"\"\" frame", "A member internal hinge. 3. A point load at the end of a", "self-weight # Analyze the frame frame.analyze() calculated_RZ = frame.Nodes['N1'].RZ['Combo 1'] # Update the", "ft # Add supports frame.def_support('N1', True, True, True, True, True, False) frame.def_support('N4', True,", "frame.def_support('E', support_DY=True) frame.def_releases('CE', Rzj=True) frame.add_member_pt_load('AC', 'FZ', 20, 12) frame.add_member_dist_load('CE', 'FX', -1.5, -1.5) frame.add_member_dist_load('ED',", "1.0, 2) self.assertAlmostEqual(node.RX['Combo 1']/values['RX'], 1.0, 2) def test_XZ_ptload(self): # A simply supported beam", "reactions were compared manually to Kassimali's solution and the shears were within #", "kips at the midspan of the beam SimpleBeam.add_member_pt_load(\"M1\", \"Fy\", 5, 7 * 12)", "True, True, False, False, True) SimpleBeam.def_support(\"N2\", True, True, True, False, False, False) #", "= [('N3', {'DY': -6.666757, 'RX': 0.032}), ('N4', {'DY': -6.666757, 'RX': -0.032})] for name,", "StringIO() def tearDown(self): # Reset the print function to normal sys.stdout = sys.__stdout__", "1']/values['RxnMX'], 1.0, 2) # Check displacements at N3 and N4 correct_displacements = [('N3',", "place accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.DY['Combo 1']/values['DY'],", "Element Method, 4th Edition # Daryl <NAME> # Problem 5.30 # Units for", "'N5', E, G, Iy, Iz, J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iy,", "'RxnMX': -1810.0745})] for name, 
values in correct_reactions: with self.subTest(node=name): node = frame.Nodes[name] #", "True) frame.def_support('N6', True, True, True, True, True, True) # Create members (all members", "setUp(self): # Suppress printed output temporarily sys.stdout = StringIO() def tearDown(self): # Reset", "one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2)", "accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.DY['Combo 1']/values['DY'], 1.0, 2) self.assertAlmostEqual(node.RX['Combo 1']/values['RX'],", "place requires +/-5% self.assertAlmostEqual(node.DY['Combo 1']/values['DY'], 1.0, 2) self.assertAlmostEqual(node.RX['Combo 1']/values['RX'], 1.0, 2) def test_XZ_ptload(self):", "# A First Course in the Finite Element Method, 4th Edition # Daryl", "20, 12) frame.add_member_dist_load('CE', 'FX', -1.5, -1.5) frame.add_member_dist_load('ED', 'FX', -1.5, -1.5) # from PyNite.Visualization", "things up a bit. 
\"\"\" frame = FEModel3D() frame.add_node('A', 0, 0, 0) frame.add_node('B',", "0, 50*12) # Define the supports frame.def_support('N1', True, True, True, True, True, True)", "False) # Define material and section properties for a W8x24 E = 29000*12**2", "the beam SimpleBeam.analyze(False) # Print reactions at each end of the beam correct_reactions", "# one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0,", "Method, 4th Edition # Daryl <NAME> # Problem 5.30 # Units for this", "J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iy, Iz, J, A) # Add", "= 1111200*12**2 # ksf Iy = 18.3/12**4 # ft^4 Iz = 82.7/12**4 #", "# Add loads to the frame frame.add_member_pt_load('M2', 'Fy', -5, 7.75/2) # 5 kips", "and kips SimpleBeam = FEModel3D() # Add nodes (14 ft = 168 in", "= [('N1', -2.5), ('N2', -2.5)] for node_name, rxn in correct_reactions: with self.subTest(node=node_name): calculated_reaction", "in this example) J = 250 Iy = 250 Iz = 200 E", "hinge. 3. A point load at the end of a member. The example", "requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'], 1.0, 2)", "import FEModel3D import math import sys from io import StringIO class Test_2D_Frame(unittest.TestCase): '''", "nodes frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 15*12, 40*12, 0)", "# Add supports frame.def_support('N1', True, True, True, True, True, False) frame.def_support('N4', True, True,", "frame.add_member_pt_load('M2', 'Fy', -5, 7.75/2) # 5 kips @ midspan frame.add_member_dist_load('M2', 'Fy', -0.024, -0.024)", "0) frame.add_node('N3', 0, 40*12, 15*12) frame.add_node('N4', 0, 40*12, 35*12) frame.add_node('N5', 0, 30*12, 50*12)", "name, values in correct_displacements: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place", "member. 
The example will be run in the XZ plane to change things", "frame.analyze() # subTest context manager prints which portion fails, if any # Check", "{'DY': -6.666757, 'RX': 0.032}), ('N4', {'DY': -6.666757, 'RX': -0.032})] for name, values in", "12) frame.add_member_dist_load('CE', 'FX', -1.5, -1.5) frame.add_member_dist_load('ED', 'FX', -1.5, -1.5) # from PyNite.Visualization import", "# ft # Add supports frame.def_support('N1', True, True, True, True, True, False) frame.def_support('N4',", "midspan frame.add_member_dist_load('M2', 'Fy', -0.024, -0.024) # W8x24 self-weight # Analyze the frame frame.analyze()", "30000 G = 250 A = 12 frame.add_member('M1', 'N1', 'N2', E, G, Iy,", "decimal place accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.DY['Combo", "frame.Nodes['N1'].RZ['Combo 1'] # Update the expected value to an appropriate precision expected_RZ =", "in the Finite Element Method, 4th Edition # <NAME> # Problem 5.30 #", "frame.def_support('N4', True, True, True, True, True, False) # Define material and section properties", "J, A) frame.def_support('A', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('B', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('E', support_DY=True) frame.def_releases('CE',", "-8.63 AX = 15.46 BZ = -11.37 BX = 35.45 # The reactions", "accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.DY['Combo 1']/values['DY'], 1.0,", "15.46 BZ = -11.37 BX = 35.45 # The reactions were compared manually", "= 11400 Iy = 100 Iz = 150 J = 250 SimpleBeam.add_member(\"M1\", \"N1\",", "with a point load. # Units used in this example are inches, and", "frame.Nodes[name] # Two decimal place accuracy requires +/-0.5% accuracy # one decimal place", "0) # Define the supports frame.def_support('N1', True, True, True, True, True, True) frame.def_support('N6',", "a point load. 
# Units used in this example are inches, and kips", "1']/values['RxnFX'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'], 1.0, 2) def test_XY_member_ptload(self):", "30000 G = 250 A = 12 frame.add_member('M1', 'N1', 'N2', E, G, Iz,", "the XZ plane to change things up a bit. \"\"\" frame = FEModel3D()", "0) frame.add_node('D', 12, 0, 24) frame.add_node('E', 24, 0, 12) E = 29000*12**2 G", "250 SimpleBeam.add_member(\"M1\", \"N1\", \"N2\", E, G, Iy, Iz, J, A) # Provide simple", "frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz, J, A) frame.add_member('M3', 'N3', 'N4', E,", "\"N2\", E, G, Iy, Iz, J, A) # Provide simple supports SimpleBeam.def_support(\"N1\", True,", "frame.add_member_pt_load('AC', 'FZ', 20, 12) frame.add_member_dist_load('CE', 'FX', -1.5, -1.5) frame.add_member_dist_load('ED', 'FX', -1.5, -1.5) #", "have the same properties in this example) J = 250 Iy = 250", "J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iz, Iy, J, A) # Add", "False, False) # Add a point load of 5 kips at the midspan", "for name, values in correct_displacements: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal", "{'RxnFX': 11.6877, 'RxnFY': 30, 'RxnMZ': -1810.0745}), ('N6', {'RxnFX': -11.6877, 'RxnFY': 30, 'RxnMZ': 1810.0745})]", "= 168 in apart) SimpleBeam.add_node(\"N1\", 0, 0, 0) SimpleBeam.add_node(\"N2\", 0, 0, 168) #", "loads aligned in global directions. 2. A member internal hinge. 3. 
A point", "with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place accuracy requires +/-0.5% accuracy", "- 1), 0.05) self.assertLess(abs(frame.Nodes['B'].RxnFZ['Combo 1']/BZ - 1), 0.7) self.assertLess(abs(frame.Nodes['B'].RxnFX['Combo 1']/BX - 1), 0.05)", "'N2', E, G, Iz, Iy, J, A) frame.add_member('M2', 'N2', 'N3', E, G, Iy,", "= 5.26/12**2 # in^2 # Define members frame.add_member('M1', 'N1', 'N2', E, G, Iy,", "40*12, 15*12) frame.add_node('N4', 0, 40*12, 35*12) frame.add_node('N5', 0, 30*12, 50*12) frame.add_node('N6', 0, 0,", "12) # Analyze the beam SimpleBeam.analyze(False) # Print reactions at each end of", "W8x24 E = 29000*12**2 # ksf G = 1111200*12**2 # ksf Iy =", "will be run in the XZ plane to change things up a bit.", "SimpleBeam.Nodes[node_name].RxnFY['Combo 1'] # Two decimal place accuracy requires +/-0.5% accuracy # one decimal", "error alone. # Likely the finite element method is a little more accurate", "StringIO class Test_2D_Frame(unittest.TestCase): ''' Tests of analyzing 2D frames. 
''' def setUp(self): #", "1']/AZ - 1), 0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX - 1), 0.05) self.assertLess(abs(frame.Nodes['B'].RxnFZ['Combo 1']/BZ - 1),", "decimal place requires +/-5% self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMX['Combo", "and inches frame = FEModel3D() # Define the nodes frame.add_node('N1', 0, 0, 0)", "Iz = 150 J = 250 SimpleBeam.add_member(\"M1\", \"N1\", \"N2\", E, G, Iy, Iz,", "False) frame.def_support('N4', True, True, True, True, True, False) # Define material and section", "+/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(calculated_reaction/rxn, 1.0, 2) def test_Kassimali_3_35(self):", "node = frame.Nodes[name] # Two decimal place accuracy requires +/-0.5% accuracy # one", "def test_XY_gravity_load(self): # A First Course in the Finite Element Method, 4th Edition", "E = 30000 G = 250 A = 12 frame.add_member('M1', 'N1', 'N2', E,", "frame.add_member('M4', 'N4', 'N5', E, G, Iy, Iz, J, A) frame.add_member('M5', 'N5', 'N6', E,", "frame frame.analyze() calculated_RZ = frame.Nodes['N1'].RZ['Combo 1'] # Update the expected value to an", "The reactions were compared manually to Kassimali's solution and the shears were within", "E, G, Iy, Iz, J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iy, Iz,", "of analyzing 2D frames. 
''' def setUp(self): # Suppress printed output temporarily sys.stdout", "A) frame.add_member('M5', 'N5', 'N6', E, G, Iy, Iz, J, A) # Add nodal", "7.75, 0, 0) # ft # Add supports frame.def_support('N1', True, True, True, True,", "# 5 kips @ midspan frame.add_member_dist_load('M2', 'Fy', -0.024, -0.024) # W8x24 self-weight #", "0, 7.667, 0) # ft frame.add_node('N3', 7.75, 7.667, 0) # ft frame.add_node('N4', 7.75,", "Iz, J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iy, Iz, J, A) #", "0, 0) # ft frame.add_node('N2', 0, 7.667, 0) # ft frame.add_node('N3', 7.75, 7.667,", "Iz = 82.7/12**4 # ft^4 J = 0.346/12**4 # ft^4 A = 5.26/12**2", "# Provide simple supports SimpleBeam.def_support(\"N1\", True, True, True, False, False, True) SimpleBeam.def_support(\"N2\", True,", "end of a member. The example will be run in the XZ plane", "frame.def_support('A', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('B', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('E', support_DY=True) frame.def_releases('CE', Rzj=True) frame.add_member_pt_load('AC',", "True) # Create members (all members will have the same properties in this", "manager prints which portion fails, if any correct_values = [('N1', {'RxnFX': 11.6877, 'RxnFY':", "30, 'RxnMX': 1810.0745}), ('N6', {'RxnFZ': -11.6877, 'RxnFY': 30, 'RxnMX': -1810.0745})] for name, values", "# Add nodal loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4', 'FY', -30) # Analyze the", "simply supported beam with a point load. 
# Units used in this example", "A = 20 E = 29000 G = 11400 Iy = 100 Iz", "= 7.65/12**2 frame.add_member('AC', 'A', 'C', E, G, Iy, Iz, J, A) frame.add_member('BD', 'B',", "100 Iz = 150 J = 250 SimpleBeam.add_member(\"M1\", \"N1\", \"N2\", E, G, Iy,", "'A', 'C', E, G, Iy, Iz, J, A) frame.add_member('BD', 'B', 'D', E, G,", "Iy, Iz, J, A) frame.add_member('BD', 'B', 'D', E, G, Iy, Iz, J, A)", "30*12, 0) frame.add_node('N3', 15*12, 40*12, 0) frame.add_node('N4', 35*12, 40*12, 0) frame.add_node('N5', 50*12, 30*12,", "+/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo", "frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz, J, A) frame.add_member('M2', 'N2', 'N3', E,", "this model are kips and inches frame = FEModel3D() # Define the nodes", "the nodes frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 0, 40*12,", "frame.add_node('N4', 0, 40*12, 35*12) frame.add_node('N5', 0, 30*12, 50*12) frame.add_node('N6', 0, 0, 50*12) #", "self.assertAlmostEqual(calculated_reaction/rxn, 1.0, 2) def test_Kassimali_3_35(self): \"\"\" Tests against Kassimali example 3.35. This example", "was selected because it allows us to check the following features: 1. Member", "frame.add_member_dist_load('CE', 'FX', -1.5, -1.5) frame.add_member_dist_load('ED', 'FX', -1.5, -1.5) # from PyNite.Visualization import render_model", "True, True, False) # Define material and section properties for a W8x24 E", "self.subTest(node=node_name): calculated_reaction = SimpleBeam.Nodes[node_name].RxnFY['Combo 1'] # Two decimal place accuracy requires +/-0.5% accuracy", "Add loads to the frame frame.add_member_pt_load('M2', 'Fy', -5, 7.75/2) # 5 kips @", "True, True) frame.def_support('N6', True, True, True, True, True, True) # Create members (all", "True, True, True, True, True, False) frame.def_support('N4', True, True, True, True, True, False)", "analyzing 2D frames. 
''' def setUp(self): # Suppress printed output temporarily sys.stdout =", "30*12, 0) frame.add_node('N3', 0, 40*12, 15*12) frame.add_node('N4', 0, 40*12, 35*12) frame.add_node('N5', 0, 30*12,", "frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 0, 40*12, 15*12) frame.add_node('N4',", "0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 15*12, 40*12, 0) frame.add_node('N4', 35*12, 40*12, 0)", "Iy = 17.3/12**4 Iz = 204/12**4 J = 0.3/12**4 A = 7.65/12**2 frame.add_member('AC',", "= sys.__stdout__ def test_XY_gravity_load(self): # A First Course in the Finite Element Method,", "# Daryl <NAME> # Problem 5.30 # Units for this model are kips", "BZ = -11.37 BX = 35.45 # The reactions were compared manually to", "5.26/12**2 # in^2 # Define members frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz,", "1']/values['RxnMZ'], 1.0, 2) def test_XY_member_ptload(self): frame = FEModel3D() # Add nodes frame.add_node('N1', 0,", "were compared manually to Kassimali's solution and the shears were within # 10%", "1.0, 2) def test_Kassimali_3_35(self): \"\"\" Tests against Kassimali example 3.35. This example was", "beam SimpleBeam.analyze(False) # Print reactions at each end of the beam correct_reactions =", "decimal place requires +/-5% self.assertAlmostEqual(node.DY['Combo 1']/values['DY'], 1.0, 2) self.assertAlmostEqual(node.RX['Combo 1']/values['RX'], 1.0, 2) def", "frame.def_releases('CE', Rzj=True) frame.add_member_pt_load('AC', 'FZ', 20, 12) frame.add_member_dist_load('CE', 'FX', -1.5, -1.5) frame.add_member_dist_load('ED', 'FX', -1.5,", "Update the expected value to an appropriate precision expected_RZ = 0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0,", "uses. 
self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ - 1), 0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX - 1), 0.05) self.assertLess(abs(frame.Nodes['B'].RxnFZ['Combo 1']/BZ", "in this example are inches, and kips SimpleBeam = FEModel3D() # Add nodes", "the beam correct_reactions = [('N1', -2.5), ('N2', -2.5)] for node_name, rxn in correct_reactions:", "Define members frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz, J, A) frame.add_member('M2', 'N2',", "# Two decimal place accuracy requires +/-0.5% accuracy # one decimal place requires", "True, True, True, True, True) frame.def_support('N6', True, True, True, True, True, True) #", "compared manually to Kassimali's solution and the shears were within # 10% and", "0.032}), ('N4', {'DY': -6.666757, 'RX': -0.032})] for name, values in correct_displacements: with self.subTest(node=name):", "the Finite Element Method, 4th Edition # <NAME> # Problem 5.30 # Units", "A) frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz, J, A) frame.add_member('M3', 'N4', 'N3',", "E, G, Iz, Iy, J, A) # Add nodal loads frame.add_node_load('N3', 'FY', -30)", "-1810.0745}), ('N6', {'RxnFX': -11.6877, 'RxnFY': 30, 'RxnMZ': 1810.0745})] for name, values in correct_values:", "kips @ midspan frame.add_member_dist_load('M2', 'Fy', -0.024, -0.024) # W8x24 self-weight # Analyze the", "1.0, 2) self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'], 1.0, 2) # Check displacements at N3 and N4", "# ft frame.add_node('N3', 7.75, 7.667, 0) # ft frame.add_node('N4', 7.75, 0, 0) #", "support_DY=True) frame.def_releases('CE', Rzj=True) frame.add_member_pt_load('AC', 'FZ', 20, 12) frame.add_member_dist_load('CE', 'FX', -1.5, -1.5) frame.add_member_dist_load('ED', 'FX',", "Analyze the frame frame.analyze() calculated_RZ = frame.Nodes['N1'].RZ['Combo 1'] # Update the expected value", "0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 15*12, 40*12, 0) frame.add_node('N4', 35*12, 40*12,", "are inches, and kips SimpleBeam = 
FEModel3D() # Add nodes (14 ft =", "# Analyze the frame frame.analyze() calculated_RZ = frame.Nodes['N1'].RZ['Combo 1'] # Update the expected", "7.75/2) # 5 kips @ midspan frame.add_member_dist_load('M2', 'Fy', -0.024, -0.024) # W8x24 self-weight", "AX = 15.46 BZ = -11.37 BX = 35.45 # The reactions were", "2) self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'], 1.0, 2) # Check displacements at N3 and N4 correct_displacements", "support_DZ=True) frame.def_support('E', support_DY=True) frame.def_releases('CE', Rzj=True) frame.add_member_pt_load('AC', 'FZ', 20, 12) frame.add_member_dist_load('CE', 'FX', -1.5, -1.5)", "# ft frame.add_node('N2', 0, 7.667, 0) # ft frame.add_node('N3', 7.75, 7.667, 0) #", "members frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz, J, A) frame.add_member('M2', 'N2', 'N3',", "material and section properties for a W8x24 E = 29000*12**2 # ksf G", "# <NAME> # Problem 5.30 # Units for this model are kips and", "-6.666757, 'RX': -0.032})] for name, values in correct_displacements: with self.subTest(node=name): node = frame.Nodes[name]", "= 100 Iz = 150 J = 250 SimpleBeam.add_member(\"M1\", \"N1\", \"N2\", E, G,", "any # Check reactions at N1 and N6 correct_reactions = [('N1', {'RxnFZ': 11.6877,", "G, Iy, Iz, J, A) frame.add_member('BD', 'B', 'D', E, G, Iy, Iz, J,", "example will be run in the XZ plane to change things up a", "30, 'RxnMX': -1810.0745})] for name, values in correct_reactions: with self.subTest(node=name): node = frame.Nodes[name]", "SimpleBeam.add_member(\"M1\", \"N1\", \"N2\", E, G, Iy, Iz, J, A) # Provide simple supports", "nodal loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4', 'FY', -30) # Analyze the model frame.analyze()", "= 20 E = 29000 G = 11400 Iy = 100 Iz =", "W8x24 self-weight # Analyze the frame frame.analyze() calculated_RZ = frame.Nodes['N1'].RZ['Combo 1'] # Update", "frame.add_member('ED', 'E', 'D', E, G, Iy, Iz, J, A) frame.def_support('A', support_DX=True, support_DY=True, 
support_DZ=True)", "one decimal place requires +/-5% self.assertAlmostEqual(node.DY['Combo 1']/values['DY'], 1.0, 2) self.assertAlmostEqual(node.RX['Combo 1']/values['RX'], 1.0, 2)", "bit. \"\"\" frame = FEModel3D() frame.add_node('A', 0, 0, 0) frame.add_node('B', 0, 0, 24)", "A = 12 frame.add_member('M1', 'N1', 'N2', E, G, Iz, Iy, J, A) frame.add_member('M2',", "which portion fails, if any # Check reactions at N1 and N6 correct_reactions", "N6 correct_reactions = [('N1', {'RxnFZ': 11.6877, 'RxnFY': 30, 'RxnMX': 1810.0745}), ('N6', {'RxnFZ': -11.6877,", "it allows us to check the following features: 1. Member loads aligned in", "2) def test_XZ_ptload(self): # A simply supported beam with a point load. #", "seems like it's a little big to be a rounding error alone. #", "A) frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz, J, A) frame.add_member('M3', 'N3', 'N4',", "test_Kassimali_3_35(self): \"\"\" Tests against Kassimali example 3.35. This example was selected because it", "True, False) # Define material and section properties for a W8x24 E =", "The example will be run in the XZ plane to change things up", "supported beam with a point load. # Units used in this example are", "First Course in the Finite Element Method, 4th Edition # <NAME> # Problem", "'N3', 'N4', E, G, Iy, Iz, J, A) frame.add_member('M4', 'N4', 'N5', E, G,", "Method, 4th Edition # <NAME> # Problem 5.30 # Units for this model", "2) def test_Kassimali_3_35(self): \"\"\" Tests against Kassimali example 3.35. 
This example was selected", "Iy = 250 Iz = 200 E = 30000 G = 250 A", "output temporarily sys.stdout = StringIO() def tearDown(self): # Reset the print function to", "a W8x24 E = 29000*12**2 # ksf G = 1111200*12**2 # ksf Iy", "'Fy', -0.024, -0.024) # W8x24 self-weight # Analyze the frame frame.analyze() calculated_RZ =", "function to normal sys.stdout = sys.__stdout__ def test_XY_gravity_load(self): # A First Course in", "rxn in correct_reactions: with self.subTest(node=node_name): calculated_reaction = SimpleBeam.Nodes[node_name].RxnFY['Combo 1'] # Two decimal place", "name, values in correct_values: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place", "E, G, Iy, Iz, J, A) frame.add_member('BD', 'B', 'D', E, G, Iy, Iz,", "= 200 E = 30000 G = 250 A = 12 frame.add_member('M1', 'N1',", "'N5', 'N6', E, G, Iy, Iz, J, A) # Add nodal loads frame.add_node_load('N3',", "168) # Add a beam with the following properties: A = 20 E", "expected value to an appropriate precision expected_RZ = 0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0, 2) def", "node_name, rxn in correct_reactions: with self.subTest(node=node_name): calculated_reaction = SimpleBeam.Nodes[node_name].RxnFY['Combo 1'] # Two decimal", "section properties for a W8x24 E = 29000*12**2 # ksf G = 1111200*12**2", "place requires +/-5% self.assertAlmostEqual(calculated_reaction/rxn, 1.0, 2) def test_Kassimali_3_35(self): \"\"\" Tests against Kassimali example", "Iy, Iz, J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iy, Iz, J, A)", "[('N1', {'RxnFZ': 11.6877, 'RxnFY': 30, 'RxnMX': 1810.0745}), ('N6', {'RxnFZ': -11.6877, 'RxnFY': 30, 'RxnMX':", "selected because it allows us to check the following features: 1. 
Member loads", "'D', E, G, Iy, Iz, J, A) frame.add_member('CE', 'C', 'E', E, G, Iy,", "'D', E, G, Iy, Iz, J, A) frame.def_support('A', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('B', support_DX=True,", "First Course in the Finite Element Method, 4th Edition # Daryl <NAME> #", "us to check the following features: 1. Member loads aligned in global directions.", "-0.024, -0.024) # W8x24 self-weight # Analyze the frame frame.analyze() calculated_RZ = frame.Nodes['N1'].RZ['Combo", "ft frame.add_node('N4', 7.75, 0, 0) # ft # Add supports frame.def_support('N1', True, True,", "loads to the frame frame.add_member_pt_load('M2', 'Fy', -5, 7.75/2) # 5 kips @ midspan", "0, 30*12, 50*12) frame.add_node('N6', 0, 0, 50*12) # Define the supports frame.def_support('N1', True,", "self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0, 2) def test_YZ_gravity_load(self): # A First Course in the Finite Element", "printed output temporarily sys.stdout = StringIO() def tearDown(self): # Reset the print function", "0, 0, 168) # Add a beam with the following properties: A =", "A) frame.add_member('M3', 'N4', 'N3', E, G, Iy, Iz, J, A) # Add loads", "with self.subTest(node=node_name): calculated_reaction = SimpleBeam.Nodes[node_name].RxnFY['Combo 1'] # Two decimal place accuracy requires +/-0.5%", "0) frame.add_node('B', 0, 0, 24) frame.add_node('C', 12, 0, 0) frame.add_node('D', 12, 0, 24)", "= -11.37 BX = 35.45 # The reactions were compared manually to Kassimali's", "frame.add_node('N6', 50*12, 0, 0) # Define the supports frame.def_support('N1', True, True, True, True,", "'FY', -30) frame.add_node_load('N4', 'FY', -30) # Analyze the model frame.analyze() # subTest context", "0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0, 2) def test_YZ_gravity_load(self): # A First Course in the Finite", "self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) 
self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'], 1.0, 2) #", "-*- coding: utf-8 -*- \"\"\" MIT License Copyright (c) 2020 <NAME>, SE; tamalone1", "Problem 5.30 # Units for this model are kips and inches frame =", "def test_YZ_gravity_load(self): # A First Course in the Finite Element Method, 4th Edition", "# one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0,", "Iy, Iz, J, A) # Add nodal loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4', 'FY',", "2) # Check displacements at N3 and N4 correct_displacements = [('N3', {'DY': -6.666757,", "= 18.3/12**4 # ft^4 Iz = 82.7/12**4 # ft^4 J = 0.346/12**4 #", "1.0, 2) def test_YZ_gravity_load(self): # A First Course in the Finite Element Method,", "A) frame.def_support('A', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('B', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('E', support_DY=True) frame.def_releases('CE', Rzj=True)", "1') frame.analyze() AZ = -8.63 AX = 15.46 BZ = -11.37 BX =", "example) J = 250 Iy = 250 Iz = 200 E = 30000", "'FY', -30) # Analyze the model frame.analyze() # subTest context manager prints which", "if any correct_values = [('N1', {'RxnFX': 11.6877, 'RxnFY': 30, 'RxnMZ': -1810.0745}), ('N6', {'RxnFX':", "11.6877, 'RxnFY': 30, 'RxnMX': 1810.0745}), ('N6', {'RxnFZ': -11.6877, 'RxnFY': 30, 'RxnMX': -1810.0745})] for", "False, True) SimpleBeam.def_support(\"N2\", True, True, True, False, False, False) # Add a point", "40*12, 0) frame.add_node('N5', 50*12, 30*12, 0) frame.add_node('N6', 50*12, 0, 0) # Define the", "in correct_displacements: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place accuracy requires", "Iy, Iz, J, A) frame.add_member('M3', 'N3', 'N4', E, G, Iy, Iz, J, A)", "utf-8 -*- \"\"\" MIT License Copyright (c) 2020 <NAME>, SE; tamalone1 \"\"\" import", "subTest context manager 
prints which portion fails, if any # Check reactions at", "\"Fy\", 5, 7 * 12) # Analyze the beam SimpleBeam.analyze(False) # Print reactions", "Edition # <NAME> # Problem 5.30 # Units for this model are kips", "ft^4 Iz = 82.7/12**4 # ft^4 J = 0.346/12**4 # ft^4 A =", "frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4', 'FY', -30) # Analyze the model frame.analyze() # subTest", "E = 29000*12**2 G = 11200*12**2 Iy = 17.3/12**4 Iz = 204/12**4 J", "for name, values in correct_values: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal", "'N2', E, G, Iy, Iz, J, A) frame.add_member('M2', 'N2', 'N3', E, G, Iy,", "for name, values in correct_reactions: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal", "inches, and kips SimpleBeam = FEModel3D() # Add nodes (14 ft = 168", "7% respectively. That seems like it's a little big to be a rounding", "'N3', E, G, Iy, Iz, J, A) frame.add_member('M3', 'N4', 'N3', E, G, Iy,", "frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 15*12, 40*12, 0) frame.add_node('N4',", "value to an appropriate precision expected_RZ = 0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0, 2) def test_YZ_gravity_load(self):", "7 * 12) # Analyze the beam SimpleBeam.analyze(False) # Print reactions at each", "0, 40*12, 15*12) frame.add_node('N4', 0, 40*12, 35*12) frame.add_node('N5', 0, 30*12, 50*12) frame.add_node('N6', 0,", "name, values in correct_reactions: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place", "model frame.analyze() # subTest context manager prints which portion fails, if any correct_values", "11200*12**2 Iy = 17.3/12**4 Iz = 204/12**4 J = 0.3/12**4 A = 7.65/12**2", "self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'], 1.0, 2) def test_XY_member_ptload(self): frame = FEModel3D()", "A) frame.add_member('CE', 'C', 'E', E, G, Iy, Iz, J, 
A) frame.add_member('ED', 'E', 'D',", "11400 Iy = 100 Iz = 150 J = 250 SimpleBeam.add_member(\"M1\", \"N1\", \"N2\",", "G, Iy, Iz, J, A) frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz, J,", "math import sys from io import StringIO class Test_2D_Frame(unittest.TestCase): ''' Tests of analyzing", "Member loads aligned in global directions. 2. A member internal hinge. 3. A", "1.0, 2) def test_XZ_ptload(self): # A simply supported beam with a point load.", "50*12, 30*12, 0) frame.add_node('N6', 50*12, 0, 0) # Define the supports frame.def_support('N1', True,", "element method is a little more accurate than the simplified method # Kassimali", "29000*12**2 # ksf G = 1111200*12**2 # ksf Iy = 18.3/12**4 # ft^4", "G = 250 A = 12 frame.add_member('M1', 'N1', 'N2', E, G, Iz, Iy,", "J = 0.346/12**4 # ft^4 A = 5.26/12**2 # in^2 # Define members", "G, Iy, Iz, J, A) frame.add_member('M3', 'N4', 'N3', E, G, Iy, Iz, J,", "and 7% respectively. That seems like it's a little big to be a", "-2.5), ('N2', -2.5)] for node_name, rxn in correct_reactions: with self.subTest(node=node_name): calculated_reaction = SimpleBeam.Nodes[node_name].RxnFY['Combo", "unittest from PyNite import FEModel3D import math import sys from io import StringIO", "35*12, 40*12, 0) frame.add_node('N5', 50*12, 30*12, 0) frame.add_node('N6', 50*12, 0, 0) # Define", "manager prints which portion fails, if any # Check reactions at N1 and", "self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'], 1.0, 2) # Check displacements at N3 and N4 correct_displacements =", "0, 0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 15*12, 40*12, 0) frame.add_node('N4', 35*12,", "True, True, True, True, False) frame.def_support('N4', True, True, True, True, True, False) #", "0, 40*12, 35*12) frame.add_node('N5', 0, 30*12, 50*12) frame.add_node('N6', 0, 0, 50*12) # Define", "Iy, Iz, J, A) # Provide simple supports SimpleBeam.def_support(\"N1\", True, True, True, False,", "Iz, J, A) frame.add_member('M2', 'N2', 'N3', 
E, G, Iy, Iz, J, A) frame.add_member('M3',", "True, True, True, False, False, True) SimpleBeam.def_support(\"N2\", True, True, True, False, False, False)", "frame.add_node('N5', 0, 30*12, 50*12) frame.add_node('N6', 0, 0, 50*12) # Define the supports frame.def_support('N1',", "point load of 5 kips at the midspan of the beam SimpleBeam.add_member_pt_load(\"M1\", \"Fy\",", "0, 30*12, 0) frame.add_node('N3', 15*12, 40*12, 0) frame.add_node('N4', 35*12, 40*12, 0) frame.add_node('N5', 50*12,", "frame.add_member('CE', 'C', 'E', E, G, Iy, Iz, J, A) frame.add_member('ED', 'E', 'D', E,", "the same properties in this example) J = 250 Iy = 250 Iz", "model are kips and inches frame = FEModel3D() # Define the nodes frame.add_node('N1',", "portion fails, if any correct_values = [('N1', {'RxnFX': 11.6877, 'RxnFY': 30, 'RxnMZ': -1810.0745}),", "= frame.Nodes[name] # Two decimal place accuracy requires +/-0.5% accuracy # one decimal", "True, True, True, False) frame.def_support('N4', True, True, True, True, True, False) # Define", "point load. # Units used in this example are inches, and kips SimpleBeam", "because it allows us to check the following features: 1. 
Member loads aligned", "0, 0, 24) frame.add_node('C', 12, 0, 0) frame.add_node('D', 12, 0, 24) frame.add_node('E', 24,", "frame.add_node('A', 0, 0, 0) frame.add_node('B', 0, 0, 24) frame.add_node('C', 12, 0, 0) frame.add_node('D',", "'FX', -1.5, -1.5) frame.add_member_dist_load('ED', 'FX', -1.5, -1.5) # from PyNite.Visualization import render_model #", "the beam SimpleBeam.add_member_pt_load(\"M1\", \"Fy\", 5, 7 * 12) # Analyze the beam SimpleBeam.analyze(False)", "# ft frame.add_node('N4', 7.75, 0, 0) # ft # Add supports frame.def_support('N1', True,", "12) E = 29000*12**2 G = 11200*12**2 Iy = 17.3/12**4 Iz = 204/12**4", "at N3 and N4 correct_displacements = [('N3', {'DY': -6.666757, 'RX': 0.032}), ('N4', {'DY':", "a beam with the following properties: A = 20 E = 29000 G", "decimal place accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFX['Combo", "'B', 'D', E, G, Iy, Iz, J, A) frame.add_member('CE', 'C', 'E', E, G,", "Iz, J, A) frame.add_member('M3', 'N3', 'N4', E, G, Iy, Iz, J, A) frame.add_member('M4',", "# subTest context manager prints which portion fails, if any # Check reactions", "frame.add_node_load('N4', 'FY', -30) # Analyze the model frame.analyze() # subTest context manager prints", "tearDown(self): # Reset the print function to normal sys.stdout = sys.__stdout__ def test_XY_gravity_load(self):", "prints which portion fails, if any # Check reactions at N1 and N6", "print function to normal sys.stdout = sys.__stdout__ def test_XY_gravity_load(self): # A First Course", "in apart) SimpleBeam.add_node(\"N1\", 0, 0, 0) SimpleBeam.add_node(\"N2\", 0, 0, 168) # Add a", "# Add a point load of 5 kips at the midspan of the", "# one decimal place requires +/-5% self.assertAlmostEqual(calculated_reaction/rxn, 1.0, 2) def test_Kassimali_3_35(self): \"\"\" Tests", "= FEModel3D() frame.add_node('A', 0, 0, 0) frame.add_node('B', 0, 0, 24) frame.add_node('C', 12, 0,", "frame.add_node('N2', 0, 30*12, 0) 
frame.add_node('N3', 0, 40*12, 15*12) frame.add_node('N4', 0, 40*12, 35*12) frame.add_node('N5',", "more accurate than the simplified method # Kassimali uses. self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ - 1),", "200 E = 30000 G = 250 A = 12 frame.add_member('M1', 'N1', 'N2',", "Define the supports frame.def_support('N1', True, True, True, True, True, True) frame.def_support('N6', True, True,", "decimal place requires +/-5% self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMZ['Combo", "True) SimpleBeam.def_support(\"N2\", True, True, True, False, False, False) # Add a point load", "requires +/-5% self.assertAlmostEqual(calculated_reaction/rxn, 1.0, 2) def test_Kassimali_3_35(self): \"\"\" Tests against Kassimali example 3.35.", "+/-5% self.assertAlmostEqual(node.DY['Combo 1']/values['DY'], 1.0, 2) self.assertAlmostEqual(node.RX['Combo 1']/values['RX'], 1.0, 2) def test_XZ_ptload(self): # A", "''' def setUp(self): # Suppress printed output temporarily sys.stdout = StringIO() def tearDown(self):", "Add a beam with the following properties: A = 20 E = 29000", "plane to change things up a bit. 
\"\"\" frame = FEModel3D() frame.add_node('A', 0,", "-6.666757, 'RX': 0.032}), ('N4', {'DY': -6.666757, 'RX': -0.032})] for name, values in correct_displacements:", "= FEModel3D() # Add nodes (14 ft = 168 in apart) SimpleBeam.add_node(\"N1\", 0,", "fails, if any # Check reactions at N1 and N6 correct_reactions = [('N1',", "# Likely the finite element method is a little more accurate than the", "frame.add_node('N1', 0, 0, 0) # ft frame.add_node('N2', 0, 7.667, 0) # ft frame.add_node('N3',", "support_DX=True, support_DY=True, support_DZ=True) frame.def_support('E', support_DY=True) frame.def_releases('CE', Rzj=True) frame.add_member_pt_load('AC', 'FZ', 20, 12) frame.add_member_dist_load('CE', 'FX',", "'RxnMZ': -1810.0745}), ('N6', {'RxnFX': -11.6877, 'RxnFY': 30, 'RxnMZ': 1810.0745})] for name, values in", "allows us to check the following features: 1. Member loads aligned in global", "E, G, Iy, Iz, J, A) frame.add_member('M3', 'N4', 'N3', E, G, Iy, Iz,", "test_XY_gravity_load(self): # A First Course in the Finite Element Method, 4th Edition #", "in correct_values: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place accuracy requires", "@ midspan frame.add_member_dist_load('M2', 'Fy', -0.024, -0.024) # W8x24 self-weight # Analyze the frame", "J, A) frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz, J, A) frame.add_member('M3', 'N3',", "Iz, Iy, J, A) frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz, J, A)", "support_DX=True, support_DY=True, support_DZ=True) frame.def_support('B', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('E', support_DY=True) frame.def_releases('CE', Rzj=True) frame.add_member_pt_load('AC', 'FZ',", "# Reset the print function to normal sys.stdout = sys.__stdout__ def test_XY_gravity_load(self): #", "N4 correct_displacements = [('N3', {'DY': -6.666757, 'RX': 0.032}), ('N4', {'DY': -6.666757, 'RX': -0.032})]", "frame.add_node('D', 12, 0, 24) frame.add_node('E', 24, 0, 12) E = 29000*12**2 G =", "G = 
1111200*12**2 # ksf Iy = 18.3/12**4 # ft^4 Iz = 82.7/12**4", "the following properties: A = 20 E = 29000 G = 11400 Iy", "# Add a beam with the following properties: A = 20 E =", "ft^4 A = 5.26/12**2 # in^2 # Define members frame.add_member('M1', 'N1', 'N2', E,", "reactions at each end of the beam correct_reactions = [('N1', -2.5), ('N2', -2.5)]", "Iy, J, A) frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz, J, A) frame.add_member('M3',", "= -8.63 AX = 15.46 BZ = -11.37 BX = 35.45 # The", "beam with a point load. # Units used in this example are inches,", "example are inches, and kips SimpleBeam = FEModel3D() # Add nodes (14 ft", "'N3', E, G, Iy, Iz, J, A) frame.add_member('M3', 'N3', 'N4', E, G, Iy,", "= 17.3/12**4 Iz = 204/12**4 J = 0.3/12**4 A = 7.65/12**2 frame.add_member('AC', 'A',", "10% and 7% respectively. That seems like it's a little big to be", "50*12) # Define the supports frame.def_support('N1', True, True, True, True, True, True) frame.def_support('N6',", "accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(calculated_reaction/rxn, 1.0, 2)", "at the end of a member. The example will be run in the", "aligned in global directions. 2. A member internal hinge. 3. A point load", "frame.add_member_dist_load('M2', 'Fy', -0.024, -0.024) # W8x24 self-weight # Analyze the frame frame.analyze() calculated_RZ", "= 12 frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz, J, A) frame.add_member('M2', 'N2',", "'RxnMX': 1810.0745}), ('N6', {'RxnFZ': -11.6877, 'RxnFY': 30, 'RxnMX': -1810.0745})] for name, values in", "context manager prints which portion fails, if any correct_values = [('N1', {'RxnFX': 11.6877,", "check the following features: 1. Member loads aligned in global directions. 2. 
A", "'FX', -1.5, -1.5) # from PyNite.Visualization import render_model # render_model(frame, text_height=0.5, case='Case 1')", "FEModel3D() # Add nodes frame.add_node('N1', 0, 0, 0) # ft frame.add_node('N2', 0, 7.667,", "is a little more accurate than the simplified method # Kassimali uses. self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo", "in correct_reactions: with self.subTest(node=node_name): calculated_reaction = SimpleBeam.Nodes[node_name].RxnFY['Combo 1'] # Two decimal place accuracy", "frame.add_node('N4', 7.75, 0, 0) # ft # Add supports frame.def_support('N1', True, True, True,", "accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'], 1.0,", "0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 0, 40*12, 15*12) frame.add_node('N4', 0, 40*12,", "accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'],", "self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'], 1.0, 2) def", "= 11200*12**2 Iy = 17.3/12**4 Iz = 204/12**4 J = 0.3/12**4 A =", "A) frame.add_member('BD', 'B', 'D', E, G, Iy, Iz, J, A) frame.add_member('CE', 'C', 'E',", "True, True) # Create members (all members will have the same properties in", "render_model # render_model(frame, text_height=0.5, case='Case 1') frame.analyze() AZ = -8.63 AX = 15.46", "[('N3', {'DY': -6.666757, 'RX': 0.032}), ('N4', {'DY': -6.666757, 'RX': -0.032})] for name, values", "alone. 
# Likely the finite element method is a little more accurate than", "-11.6877, 'RxnFY': 30, 'RxnMZ': 1810.0745})] for name, values in correct_values: with self.subTest(node=name): node", "50*12, 0, 0) # Define the supports frame.def_support('N1', True, True, True, True, True,", "frame.def_support('B', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('E', support_DY=True) frame.def_releases('CE', Rzj=True) frame.add_member_pt_load('AC', 'FZ', 20, 12) frame.add_member_dist_load('CE',", "0) SimpleBeam.add_node(\"N2\", 0, 0, 168) # Add a beam with the following properties:", "class Test_2D_Frame(unittest.TestCase): ''' Tests of analyzing 2D frames. ''' def setUp(self): # Suppress", "in the Finite Element Method, 4th Edition # Daryl <NAME> # Problem 5.30", "correct_reactions: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place accuracy requires +/-0.5%", "a rounding error alone. # Likely the finite element method is a little", "Course in the Finite Element Method, 4th Edition # <NAME> # Problem 5.30", "members will have the same properties in this example) J = 250 Iy", "7.667, 0) # ft frame.add_node('N3', 7.75, 7.667, 0) # ft frame.add_node('N4', 7.75, 0,", "the expected value to an appropriate precision expected_RZ = 0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0, 2)", "'RxnFY': 30, 'RxnMZ': -1810.0745}), ('N6', {'RxnFX': -11.6877, 'RxnFY': 30, 'RxnMZ': 1810.0745})] for name,", "= [('N1', {'RxnFZ': 11.6877, 'RxnFY': 30, 'RxnMX': 1810.0745}), ('N6', {'RxnFZ': -11.6877, 'RxnFY': 30,", "decimal place accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFZ['Combo", "load at the end of a member. 
The example will be run in", "# W8x24 self-weight # Analyze the frame frame.analyze() calculated_RZ = frame.Nodes['N1'].RZ['Combo 1'] #", "at the midspan of the beam SimpleBeam.add_member_pt_load(\"M1\", \"Fy\", 5, 7 * 12) #", "# -*- coding: utf-8 -*- \"\"\" MIT License Copyright (c) 2020 <NAME>, SE;", "50*12) frame.add_node('N6', 0, 0, 50*12) # Define the supports frame.def_support('N1', True, True, True,", "'E', 'D', E, G, Iy, Iz, J, A) frame.def_support('A', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('B',", "-1.5) frame.add_member_dist_load('ED', 'FX', -1.5, -1.5) # from PyNite.Visualization import render_model # render_model(frame, text_height=0.5,", "20 E = 29000 G = 11400 Iy = 100 Iz = 150", "2020 <NAME>, SE; tamalone1 \"\"\" import unittest from PyNite import FEModel3D import math", "method # Kassimali uses. self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ - 1), 0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX - 1),", "correct_displacements = [('N3', {'DY': -6.666757, 'RX': 0.032}), ('N4', {'DY': -6.666757, 'RX': -0.032})] for", "'Fy', -5, 7.75/2) # 5 kips @ midspan frame.add_member_dist_load('M2', 'Fy', -0.024, -0.024) #", "frame.add_node('E', 24, 0, 12) E = 29000*12**2 G = 11200*12**2 Iy = 17.3/12**4", "J, A) frame.add_member('ED', 'E', 'D', E, G, Iy, Iz, J, A) frame.def_support('A', support_DX=True,", "29000*12**2 G = 11200*12**2 Iy = 17.3/12**4 Iz = 204/12**4 J = 0.3/12**4", "G = 11400 Iy = 100 Iz = 150 J = 250 SimpleBeam.add_member(\"M1\",", "little big to be a rounding error alone. # Likely the finite element", "5.30 # Units for this model are kips and inches frame = FEModel3D()", "A) frame.add_member('M5', 'N5', 'N6', E, G, Iz, Iy, J, A) # Add nodal", "0, 0, 0) frame.add_node('B', 0, 0, 24) frame.add_node('C', 12, 0, 0) frame.add_node('D', 12,", "to check the following features: 1. Member loads aligned in global directions. 
2.", "properties in this example) J = 250 Iy = 250 Iz = 200", "E, G, Iy, Iz, J, A) frame.add_member('M3', 'N3', 'N4', E, G, Iy, Iz,", "which portion fails, if any correct_values = [('N1', {'RxnFX': 11.6877, 'RxnFY': 30, 'RxnMZ':", "1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'], 1.0, 2) # Check displacements at N3 and", "frame.analyze() # subTest context manager prints which portion fails, if any correct_values =", "15*12) frame.add_node('N4', 0, 40*12, 35*12) frame.add_node('N5', 0, 30*12, 50*12) frame.add_node('N6', 0, 0, 50*12)", "# ft^4 J = 0.346/12**4 # ft^4 A = 5.26/12**2 # in^2 #", "1111200*12**2 # ksf Iy = 18.3/12**4 # ft^4 Iz = 82.7/12**4 # ft^4", "values in correct_values: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place accuracy", "Edition # Daryl <NAME> # Problem 5.30 # Units for this model are", "0) # ft # Add supports frame.def_support('N1', True, True, True, True, True, False)", "= 204/12**4 J = 0.3/12**4 A = 7.65/12**2 frame.add_member('AC', 'A', 'C', E, G,", "nodes frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 0, 40*12, 15*12)", "= 29000*12**2 # ksf G = 1111200*12**2 # ksf Iy = 18.3/12**4 #", "frame = FEModel3D() # Define the nodes frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0,", "G, Iy, Iz, J, A) frame.add_member('CE', 'C', 'E', E, G, Iy, Iz, J,", "like it's a little big to be a rounding error alone. 
# Likely", "True, False) frame.def_support('N4', True, True, True, True, True, False) # Define material and", "-1.5, -1.5) frame.add_member_dist_load('ED', 'FX', -1.5, -1.5) # from PyNite.Visualization import render_model # render_model(frame,", "204/12**4 J = 0.3/12**4 A = 7.65/12**2 frame.add_member('AC', 'A', 'C', E, G, Iy,", "0) frame.add_node('N6', 50*12, 0, 0) # Define the supports frame.def_support('N1', True, True, True,", "-2.5)] for node_name, rxn in correct_reactions: with self.subTest(node=node_name): calculated_reaction = SimpleBeam.Nodes[node_name].RxnFY['Combo 1'] #", "250 Iz = 200 E = 30000 G = 250 A = 12", "Kassimali's solution and the shears were within # 10% and 7% respectively. That", "12, 0, 0) frame.add_node('D', 12, 0, 24) frame.add_node('E', 24, 0, 12) E =", "30*12, 0) frame.add_node('N6', 50*12, 0, 0) # Define the supports frame.def_support('N1', True, True,", "0) frame.add_node('N4', 35*12, 40*12, 0) frame.add_node('N5', 50*12, 30*12, 0) frame.add_node('N6', 50*12, 0, 0)", "SimpleBeam.add_node(\"N2\", 0, 0, 168) # Add a beam with the following properties: A", "Define material and section properties for a W8x24 E = 29000*12**2 # ksf", "precision expected_RZ = 0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0, 2) def test_YZ_gravity_load(self): # A First Course", "G, Iy, Iz, J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iz, Iy, J,", "midspan of the beam SimpleBeam.add_member_pt_load(\"M1\", \"Fy\", 5, 7 * 12) # Analyze the", "decimal place requires +/-5% self.assertAlmostEqual(calculated_reaction/rxn, 1.0, 2) def test_Kassimali_3_35(self): \"\"\" Tests against Kassimali", "2) def test_YZ_gravity_load(self): # A First Course in the Finite Element Method, 4th", "12 frame.add_member('M1', 'N1', 'N2', E, G, Iz, Iy, J, A) frame.add_member('M2', 'N2', 'N3',", "= 0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0, 2) def test_YZ_gravity_load(self): # A First Course in the", "0, 24) 
frame.add_node('C', 12, 0, 0) frame.add_node('D', 12, 0, 24) frame.add_node('E', 24, 0,", "40*12, 0) frame.add_node('N4', 35*12, 40*12, 0) frame.add_node('N5', 50*12, 30*12, 0) frame.add_node('N6', 50*12, 0,", "nodes frame.add_node('N1', 0, 0, 0) # ft frame.add_node('N2', 0, 7.667, 0) # ft", "import unittest from PyNite import FEModel3D import math import sys from io import", "{'DY': -6.666757, 'RX': -0.032})] for name, values in correct_displacements: with self.subTest(node=name): node =", "J = 0.3/12**4 A = 7.65/12**2 frame.add_member('AC', 'A', 'C', E, G, Iy, Iz,", "portion fails, if any # Check reactions at N1 and N6 correct_reactions =", "J, A) frame.add_member('M3', 'N3', 'N4', E, G, Iy, Iz, J, A) frame.add_member('M4', 'N4',", "1810.0745}), ('N6', {'RxnFZ': -11.6877, 'RxnFY': 30, 'RxnMX': -1810.0745})] for name, values in correct_reactions:", "<filename>Testing/test_2D_frames.py # -*- coding: utf-8 -*- \"\"\" MIT License Copyright (c) 2020 <NAME>,", "30, 'RxnMZ': 1810.0745})] for name, values in correct_values: with self.subTest(node=name): node = frame.Nodes[name]", "-1810.0745})] for name, values in correct_reactions: with self.subTest(node=name): node = frame.Nodes[name] # Two", "# Units for this model are kips and inches frame = FEModel3D() #", "member internal hinge. 3. A point load at the end of a member.", "= frame.Nodes['N1'].RZ['Combo 1'] # Update the expected value to an appropriate precision expected_RZ", "A = 7.65/12**2 frame.add_member('AC', 'A', 'C', E, G, Iy, Iz, J, A) frame.add_member('BD',", "correct_values: with self.subTest(node=name): node = frame.Nodes[name] # Two decimal place accuracy requires +/-0.5%", "load. # Units used in this example are inches, and kips SimpleBeam =", "True, True, True, True, True, True) # Create members (all members will have", "# Kassimali uses. 
self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ - 1), 0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX - 1), 0.05)", "E, G, Iy, Iz, J, A) # Provide simple supports SimpleBeam.def_support(\"N1\", True, True,", "if any # Check reactions at N1 and N6 correct_reactions = [('N1', {'RxnFZ':", "Two decimal place accuracy requires +/-0.5% accuracy # one decimal place requires +/-5%", "1'] # Two decimal place accuracy requires +/-0.5% accuracy # one decimal place", "0, 0) SimpleBeam.add_node(\"N2\", 0, 0, 168) # Add a beam with the following", "be run in the XZ plane to change things up a bit. \"\"\"", "frame.add_member('AC', 'A', 'C', E, G, Iy, Iz, J, A) frame.add_member('BD', 'B', 'D', E,", "G, Iy, Iz, J, A) frame.add_member('ED', 'E', 'D', E, G, Iy, Iz, J,", "J = 250 Iy = 250 Iz = 200 E = 30000 G", "0, 30*12, 0) frame.add_node('N3', 0, 40*12, 15*12) frame.add_node('N4', 0, 40*12, 35*12) frame.add_node('N5', 0,", "-11.6877, 'RxnFY': 30, 'RxnMX': -1810.0745})] for name, values in correct_reactions: with self.subTest(node=name): node", "place accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'],", "within # 10% and 7% respectively. 
That seems like it's a little big", "the frame frame.add_member_pt_load('M2', 'Fy', -5, 7.75/2) # 5 kips @ midspan frame.add_member_dist_load('M2', 'Fy',", "1']/values['RxnFZ'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'], 1.0, 2) # Check", "to Kassimali's solution and the shears were within # 10% and 7% respectively.", "This example was selected because it allows us to check the following features:", "0.346/12**4 # ft^4 A = 5.26/12**2 # in^2 # Define members frame.add_member('M1', 'N1',", "A) frame.add_member('ED', 'E', 'D', E, G, Iy, Iz, J, A) frame.def_support('A', support_DX=True, support_DY=True,", "N3 and N4 correct_displacements = [('N3', {'DY': -6.666757, 'RX': 0.032}), ('N4', {'DY': -6.666757,", "0, 0, 50*12) # Define the supports frame.def_support('N1', True, True, True, True, True,", "'RxnMZ': 1810.0745})] for name, values in correct_values: with self.subTest(node=name): node = frame.Nodes[name] #", "text_height=0.5, case='Case 1') frame.analyze() AZ = -8.63 AX = 15.46 BZ = -11.37", "frame.def_support('N6', True, True, True, True, True, True) # Create members (all members will", "SimpleBeam.analyze(False) # Print reactions at each end of the beam correct_reactions = [('N1',", "Units for this model are kips and inches frame = FEModel3D() # Define", "1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'], 1.0, 2) def test_XY_member_ptload(self): frame", "0.3/12**4 A = 7.65/12**2 frame.add_member('AC', 'A', 'C', E, G, Iy, Iz, J, A)", "7.65/12**2 frame.add_member('AC', 'A', 'C', E, G, Iy, Iz, J, A) frame.add_member('BD', 'B', 'D',", "Iz = 204/12**4 J = 0.3/12**4 A = 7.65/12**2 frame.add_member('AC', 'A', 'C', E,", "and the shears were within # 10% and 7% respectively. 
That seems like", "E, G, Iy, Iz, J, A) frame.def_support('A', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('B', support_DX=True, support_DY=True,", "'N4', 'N5', E, G, Iy, Iz, J, A) frame.add_member('M5', 'N5', 'N6', E, G,", "# Define material and section properties for a W8x24 E = 29000*12**2 #", "0, 0) # ft # Add supports frame.def_support('N1', True, True, True, True, True,", "tamalone1 \"\"\" import unittest from PyNite import FEModel3D import math import sys from", "simplified method # Kassimali uses. self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ - 1), 0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX -", "SimpleBeam.def_support(\"N1\", True, True, True, False, False, True) SimpleBeam.def_support(\"N2\", True, True, True, False, False,", "PyNite.Visualization import render_model # render_model(frame, text_height=0.5, case='Case 1') frame.analyze() AZ = -8.63 AX", "Suppress printed output temporarily sys.stdout = StringIO() def tearDown(self): # Reset the print", "temporarily sys.stdout = StringIO() def tearDown(self): # Reset the print function to normal", "the print function to normal sys.stdout = sys.__stdout__ def test_XY_gravity_load(self): # A First", "solution and the shears were within # 10% and 7% respectively. That seems", "* 12) # Analyze the beam SimpleBeam.analyze(False) # Print reactions at each end", "J, A) # Add loads to the frame frame.add_member_pt_load('M2', 'Fy', -5, 7.75/2) #", "the following features: 1. Member loads aligned in global directions. 2. 
A member", "'N1', 'N2', E, G, Iz, Iy, J, A) frame.add_member('M2', 'N2', 'N3', E, G,", "'N5', E, G, Iy, Iz, J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iz,", "150 J = 250 SimpleBeam.add_member(\"M1\", \"N1\", \"N2\", E, G, Iy, Iz, J, A)", "Define the nodes frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 15*12,", "'RxnFY': 30, 'RxnMZ': 1810.0745})] for name, values in correct_values: with self.subTest(node=name): node =", "coding: utf-8 -*- \"\"\" MIT License Copyright (c) 2020 <NAME>, SE; tamalone1 \"\"\"", "Iz, J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iz, Iy, J, A) #", "test_YZ_gravity_load(self): # A First Course in the Finite Element Method, 4th Edition #", "Add nodes (14 ft = 168 in apart) SimpleBeam.add_node(\"N1\", 0, 0, 0) SimpleBeam.add_node(\"N2\",", "'N4', 'N3', E, G, Iy, Iz, J, A) # Add loads to the", "'N6', E, G, Iy, Iz, J, A) # Add nodal loads frame.add_node_load('N3', 'FY',", "True, True, True) # Create members (all members will have the same properties", "frame frame.add_member_pt_load('M2', 'Fy', -5, 7.75/2) # 5 kips @ midspan frame.add_member_dist_load('M2', 'Fy', -0.024,", "at each end of the beam correct_reactions = [('N1', -2.5), ('N2', -2.5)] for", "# Problem 5.30 # Units for this model are kips and inches frame", "example was selected because it allows us to check the following features: 1.", "('N6', {'RxnFZ': -11.6877, 'RxnFY': 30, 'RxnMX': -1810.0745})] for name, values in correct_reactions: with", "# Define the supports frame.def_support('N1', True, True, True, True, True, True) frame.def_support('N6', True,", "to the frame frame.add_member_pt_load('M2', 'Fy', -5, 7.75/2) # 5 kips @ midspan frame.add_member_dist_load('M2',", "1), 0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX - 1), 0.05) self.assertLess(abs(frame.Nodes['B'].RxnFZ['Combo 1']/BZ - 1), 0.7) self.assertLess(abs(frame.Nodes['B'].RxnFX['Combo", "same properties in this example) J = 250 Iy = 250 Iz =", "E, G, Iz, Iy, J, 
A) frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz,", "end of the beam correct_reactions = [('N1', -2.5), ('N2', -2.5)] for node_name, rxn", "expected_RZ = 0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0, 2) def test_YZ_gravity_load(self): # A First Course in", "G, Iy, Iz, J, A) frame.add_member('M3', 'N3', 'N4', E, G, Iy, Iz, J,", "def test_Kassimali_3_35(self): \"\"\" Tests against Kassimali example 3.35. This example was selected because", "1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'], 1.0, 2) def test_XY_member_ptload(self): frame = FEModel3D() #", "+/-5% self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'], 1.0, 2)", "+/-5% self.assertAlmostEqual(calculated_reaction/rxn, 1.0, 2) def test_Kassimali_3_35(self): \"\"\" Tests against Kassimali example 3.35. This", "True, True, True, True, True) # Create members (all members will have the", "'N4', E, G, Iy, Iz, J, A) frame.add_member('M4', 'N4', 'N5', E, G, Iy,", "2) self.assertAlmostEqual(node.RxnFY['Combo 1']/values['RxnFY'], 1.0, 2) self.assertAlmostEqual(node.RxnMX['Combo 1']/values['RxnMX'], 1.0, 2) # Check displacements at", "\"\"\" Tests against Kassimali example 3.35. 
This example was selected because it allows", "# Add nodes (14 ft = 168 in apart) SimpleBeam.add_node(\"N1\", 0, 0, 0)", "That seems like it's a little big to be a rounding error alone.", "= 15.46 BZ = -11.37 BX = 35.45 # The reactions were compared", "# ksf G = 1111200*12**2 # ksf Iy = 18.3/12**4 # ft^4 Iz", "Analyze the beam SimpleBeam.analyze(False) # Print reactions at each end of the beam", "A) frame.add_member('M4', 'N4', 'N5', E, G, Iy, Iz, J, A) frame.add_member('M5', 'N5', 'N6',", "5, 7 * 12) # Analyze the beam SimpleBeam.analyze(False) # Print reactions at", "False) # Add a point load of 5 kips at the midspan of", "('N6', {'RxnFX': -11.6877, 'RxnFY': 30, 'RxnMZ': 1810.0745})] for name, values in correct_values: with", "3.35. This example was selected because it allows us to check the following", "'C', 'E', E, G, Iy, Iz, J, A) frame.add_member('ED', 'E', 'D', E, G,", "self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'], 1.0, 2) def test_XY_member_ptload(self): frame = FEModel3D() # Add nodes frame.add_node('N1',", "30, 'RxnMZ': -1810.0745}), ('N6', {'RxnFX': -11.6877, 'RxnFY': 30, 'RxnMZ': 1810.0745})] for name, values", "[('N1', {'RxnFX': 11.6877, 'RxnFY': 30, 'RxnMZ': -1810.0745}), ('N6', {'RxnFX': -11.6877, 'RxnFY': 30, 'RxnMZ':", "(c) 2020 <NAME>, SE; tamalone1 \"\"\" import unittest from PyNite import FEModel3D import", "= 0.346/12**4 # ft^4 A = 5.26/12**2 # in^2 # Define members frame.add_member('M1',", "'N3', E, G, Iy, Iz, J, A) # Add loads to the frame", "+/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.DY['Combo 1']/values['DY'], 1.0, 2) self.assertAlmostEqual(node.RX['Combo", "beam with the following properties: A = 20 E = 29000 G =", "-30) frame.add_node_load('N4', 'FY', -30) # Analyze the model frame.analyze() # subTest context manager", "Iy, Iz, J, A) frame.def_support('A', support_DX=True, support_DY=True, support_DZ=True) frame.def_support('B', support_DX=True, support_DY=True, support_DZ=True) 
frame.def_support('E',", "at N1 and N6 correct_reactions = [('N1', {'RxnFZ': 11.6877, 'RxnFY': 30, 'RxnMX': 1810.0745}),", "little more accurate than the simplified method # Kassimali uses. self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ -", "# in^2 # Define members frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz, J,", "Iz, J, A) # Provide simple supports SimpleBeam.def_support(\"N1\", True, True, True, False, False,", "from PyNite.Visualization import render_model # render_model(frame, text_height=0.5, case='Case 1') frame.analyze() AZ = -8.63", "True, True, True, True, True, True) frame.def_support('N6', True, True, True, True, True, True)", "# render_model(frame, text_height=0.5, case='Case 1') frame.analyze() AZ = -8.63 AX = 15.46 BZ", "Test_2D_Frame(unittest.TestCase): ''' Tests of analyzing 2D frames. ''' def setUp(self): # Suppress printed", "Iz, J, A) # Add nodal loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4', 'FY', -30)", "30*12, 50*12) frame.add_node('N6', 0, 0, 50*12) # Define the supports frame.def_support('N1', True, True,", "a point load of 5 kips at the midspan of the beam SimpleBeam.add_member_pt_load(\"M1\",", "following properties: A = 20 E = 29000 G = 11400 Iy =", "members (all members will have the same properties in this example) J =", "Iy, Iz, J, A) frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz, J, A)", "0, 0, 0) SimpleBeam.add_node(\"N2\", 0, 0, 168) # Add a beam with the", "case='Case 1') frame.analyze() AZ = -8.63 AX = 15.46 BZ = -11.37 BX", "against Kassimali example 3.35. 
This example was selected because it allows us to", "Create members (all members will have the same properties in this example) J", "4th Edition # <NAME> # Problem 5.30 # Units for this model are", "Iz, J, A) frame.add_member('CE', 'C', 'E', E, G, Iy, Iz, J, A) frame.add_member('ED',", "prints which portion fails, if any correct_values = [('N1', {'RxnFX': 11.6877, 'RxnFY': 30,", "reactions at N1 and N6 correct_reactions = [('N1', {'RxnFZ': 11.6877, 'RxnFY': 30, 'RxnMX':", "beam SimpleBeam.add_member_pt_load(\"M1\", \"Fy\", 5, 7 * 12) # Analyze the beam SimpleBeam.analyze(False) #", "Add nodes frame.add_node('N1', 0, 0, 0) # ft frame.add_node('N2', 0, 7.667, 0) #", "82.7/12**4 # ft^4 J = 0.346/12**4 # ft^4 A = 5.26/12**2 # in^2", "0) frame.add_node('N3', 15*12, 40*12, 0) frame.add_node('N4', 35*12, 40*12, 0) frame.add_node('N5', 50*12, 30*12, 0)", "respectively. That seems like it's a little big to be a rounding error", "import render_model # render_model(frame, text_height=0.5, case='Case 1') frame.analyze() AZ = -8.63 AX =", "Kassimali example 3.35. This example was selected because it allows us to check", "and N4 correct_displacements = [('N3', {'DY': -6.666757, 'RX': 0.032}), ('N4', {'DY': -6.666757, 'RX':", "frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 15*12, 40*12, 0) frame.add_node('N4', 35*12, 40*12, 0) frame.add_node('N5',", "the simplified method # Kassimali uses. 
self.assertLess(abs(frame.Nodes['A'].RxnFZ['Combo 1']/AZ - 1), 0.1) self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX", "# Update the expected value to an appropriate precision expected_RZ = 0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ,", "= 250 Iz = 200 E = 30000 G = 250 A =", "= 150 J = 250 SimpleBeam.add_member(\"M1\", \"N1\", \"N2\", E, G, Iy, Iz, J,", "'RX': 0.032}), ('N4', {'DY': -6.666757, 'RX': -0.032})] for name, values in correct_displacements: with", "-*- \"\"\" MIT License Copyright (c) 2020 <NAME>, SE; tamalone1 \"\"\" import unittest", "True, True, False, False, False) # Add a point load of 5 kips", "A = 12 frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz, J, A) frame.add_member('M2',", "manually to Kassimali's solution and the shears were within # 10% and 7%", "'N2', 'N3', E, G, Iy, Iz, J, A) frame.add_member('M3', 'N3', 'N4', E, G,", "J, A) # Provide simple supports SimpleBeam.def_support(\"N1\", True, True, True, False, False, True)", "frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz, J, A) frame.add_member('M3', 'N4', 'N3', E,", "-11.37 BX = 35.45 # The reactions were compared manually to Kassimali's solution", "sys.stdout = StringIO() def tearDown(self): # Reset the print function to normal sys.stdout", "properties: A = 20 E = 29000 G = 11400 Iy = 100", "of a member. 
The example will be run in the XZ plane to", "# Analyze the beam SimpleBeam.analyze(False) # Print reactions at each end of the", "= FEModel3D() # Add nodes frame.add_node('N1', 0, 0, 0) # ft frame.add_node('N2', 0,", "'RX': -0.032})] for name, values in correct_displacements: with self.subTest(node=name): node = frame.Nodes[name] #", "G, Iy, Iz, J, A) # Provide simple supports SimpleBeam.def_support(\"N1\", True, True, True,", "11.6877, 'RxnFY': 30, 'RxnMZ': -1810.0745}), ('N6', {'RxnFX': -11.6877, 'RxnFY': 30, 'RxnMZ': 1810.0745})] for", "ft frame.add_node('N3', 7.75, 7.667, 0) # ft frame.add_node('N4', 7.75, 0, 0) # ft", "# ft^4 Iz = 82.7/12**4 # ft^4 J = 0.346/12**4 # ft^4 A", "2) self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'], 1.0, 2) def test_XY_member_ptload(self): frame = FEModel3D() # Add nodes", "5 kips @ midspan frame.add_member_dist_load('M2', 'Fy', -0.024, -0.024) # W8x24 self-weight # Analyze", "point load at the end of a member. The example will be run", "('N4', {'DY': -6.666757, 'RX': -0.032})] for name, values in correct_displacements: with self.subTest(node=name): node", "of the beam SimpleBeam.add_member_pt_load(\"M1\", \"Fy\", 5, 7 * 12) # Analyze the beam", "250 A = 12 frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz, J, A)", "N1 and N6 correct_reactions = [('N1', {'RxnFZ': 11.6877, 'RxnFY': 30, 'RxnMX': 1810.0745}), ('N6',", "{'RxnFZ': 11.6877, 'RxnFY': 30, 'RxnMX': 1810.0745}), ('N6', {'RxnFZ': -11.6877, 'RxnFY': 30, 'RxnMX': -1810.0745})]", "# Add nodes frame.add_node('N1', 0, 0, 0) # ft frame.add_node('N2', 0, 7.667, 0)", "Finite Element Method, 4th Edition # Daryl <NAME> # Problem 5.30 # Units", "1810.0745})] for name, values in correct_values: with self.subTest(node=name): node = frame.Nodes[name] # Two", "True, True, True, True, True, False) # Define material and section properties for", "E, G, Iy, Iz, J, A) # Add loads to the frame frame.add_member_pt_load('M2',", "frame.def_support('N1', True, True, True, True, True, True) 
frame.def_support('N6', True, True, True, True, True,", "Units used in this example are inches, and kips SimpleBeam = FEModel3D() #", "Iy = 100 Iz = 150 J = 250 SimpleBeam.add_member(\"M1\", \"N1\", \"N2\", E,", "= 82.7/12**4 # ft^4 J = 0.346/12**4 # ft^4 A = 5.26/12**2 #", "4th Edition # Daryl <NAME> # Problem 5.30 # Units for this model", "with the following properties: A = 20 E = 29000 G = 11400", "12, 0, 24) frame.add_node('E', 24, 0, 12) E = 29000*12**2 G = 11200*12**2", "were within # 10% and 7% respectively. That seems like it's a little", "# 10% and 7% respectively. That seems like it's a little big to", "A) # Add nodal loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4', 'FY', -30) # Analyze", "# ksf Iy = 18.3/12**4 # ft^4 Iz = 82.7/12**4 # ft^4 J", "A point load at the end of a member. The example will be", "2. A member internal hinge. 3. A point load at the end of", "self.assertLess(abs(frame.Nodes['A'].RxnFX['Combo 1']/AX - 1), 0.05) self.assertLess(abs(frame.Nodes['B'].RxnFZ['Combo 1']/BZ - 1), 0.7) self.assertLess(abs(frame.Nodes['B'].RxnFX['Combo 1']/BX -", "+/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'], 1.0, 2) self.assertAlmostEqual(node.RxnFY['Combo", "correct_reactions = [('N1', {'RxnFZ': 11.6877, 'RxnFY': 30, 'RxnMX': 1810.0745}), ('N6', {'RxnFZ': -11.6877, 'RxnFY':", "Iz, J, A) frame.add_member('BD', 'B', 'D', E, G, Iy, Iz, J, A) frame.add_member('CE',", "# from PyNite.Visualization import render_model # render_model(frame, text_height=0.5, case='Case 1') frame.analyze() AZ =", "Iy, Iz, J, A) frame.add_member('M5', 'N5', 'N6', E, G, Iz, Iy, J, A)", "A = 5.26/12**2 # in^2 # Define members frame.add_member('M1', 'N1', 'N2', E, G,", "the frame frame.analyze() calculated_RZ = frame.Nodes['N1'].RZ['Combo 1'] # Update the expected value to", "E, G, Iy, Iz, J, A) frame.add_member('ED', 'E', 'D', E, G, Iy, Iz,", "frame.add_node('N3', 0, 40*12, 15*12) frame.add_node('N4', 
0, 40*12, 35*12) frame.add_node('N5', 0, 30*12, 50*12) frame.add_node('N6',", "J = 250 SimpleBeam.add_member(\"M1\", \"N1\", \"N2\", E, G, Iy, Iz, J, A) #", "place accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFX['Combo 1']/values['RxnFX'],", "be a rounding error alone. # Likely the finite element method is a", "def tearDown(self): # Reset the print function to normal sys.stdout = sys.__stdout__ def", "method is a little more accurate than the simplified method # Kassimali uses.", "# Suppress printed output temporarily sys.stdout = StringIO() def tearDown(self): # Reset the", "in^2 # Define members frame.add_member('M1', 'N1', 'N2', E, G, Iy, Iz, J, A)", "Iz, Iy, J, A) # Add nodal loads frame.add_node_load('N3', 'FY', -30) frame.add_node_load('N4', 'FY',", "E, G, Iy, Iz, J, A) frame.add_member('M4', 'N4', 'N5', E, G, Iy, Iz,", "A) frame.add_member('M3', 'N3', 'N4', E, G, Iy, Iz, J, A) frame.add_member('M4', 'N4', 'N5',", "1.0, 2) self.assertAlmostEqual(node.RxnMZ['Combo 1']/values['RxnMZ'], 1.0, 2) def test_XY_member_ptload(self): frame = FEModel3D() # Add", "to an appropriate precision expected_RZ = 0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0, 2) def test_YZ_gravity_load(self): #", "supports frame.def_support('N1', True, True, True, True, True, True) frame.def_support('N6', True, True, True, True,", "40*12, 35*12) frame.add_node('N5', 0, 30*12, 50*12) frame.add_node('N6', 0, 0, 50*12) # Define the", "fails, if any correct_values = [('N1', {'RxnFX': 11.6877, 'RxnFY': 30, 'RxnMZ': -1810.0745}), ('N6',", "G, Iz, Iy, J, A) frame.add_member('M2', 'N2', 'N3', E, G, Iy, Iz, J,", "shears were within # 10% and 7% respectively. 
That seems like it's a", "G, Iy, Iz, J, A) frame.add_member('M4', 'N4', 'N5', E, G, Iy, Iz, J,", "the supports frame.def_support('N1', True, True, True, True, True, True) frame.def_support('N6', True, True, True,", "an appropriate precision expected_RZ = 0.00022794540510395617 self.assertAlmostEqual(calculated_RZ/expected_RZ, 1.0, 2) def test_YZ_gravity_load(self): # A", "supports frame.def_support('N1', True, True, True, True, True, False) frame.def_support('N4', True, True, True, True,", "SimpleBeam = FEModel3D() # Add nodes (14 ft = 168 in apart) SimpleBeam.add_node(\"N1\",", "frame = FEModel3D() frame.add_node('A', 0, 0, 0) frame.add_node('B', 0, 0, 24) frame.add_node('C', 12,", "load of 5 kips at the midspan of the beam SimpleBeam.add_member_pt_load(\"M1\", \"Fy\", 5,", "False, False, True) SimpleBeam.def_support(\"N2\", True, True, True, False, False, False) # Add a", "0, 0) # Define the supports frame.def_support('N1', True, True, True, True, True, True)", "15*12, 40*12, 0) frame.add_node('N4', 35*12, 40*12, 0) frame.add_node('N5', 50*12, 30*12, 0) frame.add_node('N6', 50*12,", "a bit. \"\"\" frame = FEModel3D() frame.add_node('A', 0, 0, 0) frame.add_node('B', 0, 0,", "kips and inches frame = FEModel3D() # Define the nodes frame.add_node('N1', 0, 0,", "250 Iy = 250 Iz = 200 E = 30000 G = 250", "E, G, Iy, Iz, J, A) # Add nodal loads frame.add_node_load('N3', 'FY', -30)", "example 3.35. This example was selected because it allows us to check the", "XZ plane to change things up a bit. 
\"\"\" frame = FEModel3D() frame.add_node('A',", "accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(node.RxnFZ['Combo 1']/values['RxnFZ'], 1.0,", "decimal place accuracy requires +/-0.5% accuracy # one decimal place requires +/-5% self.assertAlmostEqual(calculated_reaction/rxn,", "the nodes frame.add_node('N1', 0, 0, 0) frame.add_node('N2', 0, 30*12, 0) frame.add_node('N3', 15*12, 40*12,", "-1.5) # from PyNite.Visualization import render_model # render_model(frame, text_height=0.5, case='Case 1') frame.analyze() AZ", "Finite Element Method, 4th Edition # <NAME> # Problem 5.30 # Units for", "G, Iy, Iz, J, A) # Add loads to the frame frame.add_member_pt_load('M2', 'Fy',", "True, True, False) frame.def_support('N4', True, True, True, True, True, False) # Define material" ]
[]
[ "images = images + sorted(glob(join(imagesubject, '*'))) segmasks = segmasks + sorted(glob(join(imagesubject.replace('images', 'segmasks'), '*')))", "if (self.split == 'train') else 1.0 self.collect_ids() def get_subject_data(self, subjectpath): imagepath = join(subjectpath,", "glob from os.path import join, exists import json from cycada.data.data_loader import register_data_params, register_dataset_obj", "else 1.0 self.collect_ids() def get_subject_data(self, subjectpath): imagepath = join(subjectpath, 'images') imagesubjects = glob(join(imagepath,", "import default_timer as timer from datetime import timedelta # Parallelize the for loop", "results: self.images = self.images + result[0] self.segmasks = self.segmasks + result[1] def img_path(self,", "= segmasks + sorted(glob(join(imagesubject.replace('images', 'segmasks'), '*'))) return [images, segmasks] def collect_ids(self): from timeit", "= timer() print(timedelta(seconds=end-start)) for result in results: self.images = self.images + result[0] self.segmasks", "Index of image to return Output: Image in the format NCHW - normalized", "= config[\"target_transform\"] self.black = config[\"black\"] @register_dataset_obj('surreal') class SurrealLoader(Dataset): # root must be /scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu", "= images + sorted(glob(join(imagesubject, '*'))) segmasks = segmasks + sorted(glob(join(imagesubject.replace('images', 'segmasks'), '*'))) return", "1s only! 
target = torch.Tensor(target.transpose(2, 0, 1)).mean(dim=0) return img, target def __len__(self): return", "np.expand_dims(img, axis = 2) img = np.concatenate((img_temp, img_temp, img_temp), axis=2) else: img =", "to return Output: Image in the format NCHW - normalized Segmask in the", "''' Input: Index of image to return Output: Image in the format NCHW", "self.segmasks + result[1] def img_path(self, index): return self.images[index] def label_path(self, index): return self.segmasks[index]", "root must be /scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def __init__(self, name, root, params, num_cls=2, split='train', remap_labels=True, transform=None,", "from cycada.data.util import convert_image_by_pixformat_normalize import multiprocessing as mp from joblib import Parallel, delayed", "WARNING: Original code did mean normalization, we did min max normalization. Change if", "config[\"num_channels\"] self.image_size = config[\"image_size\"] self.mean = config[\"mean\"] self.num_cls = config[\"num_cls\"] self.fraction = config[\"fraction\"]", "= name self.runs = ['run0'] self.transform = transform self.images = [] self.segmasks =", "self.transform = transform self.images = [] self.segmasks = [] self.target_transform = target_transform self.data_path", "split='train', remap_labels=True, transform=None, target_transform=None): self.root = root self.split = split self.remap_labels = remap_labels", "return self.segmasks[index] def __iter__(self): return self ''' Input: Index of image to return", "with open(join(\"dataset_configs\", name+\".json\"), 'r') as f: config = json.load(f) self.num_channels = config[\"num_channels\"] self.image_size", "self.runs: runpath = join(self.data_path, run) subjects = sorted(glob(join(runpath, '*'))) start = timer() results", "self.segmasks = [] self.target_transform = target_transform self.data_path = join(self.root, self.split) self.num_cls = num_cls", "def __iter__(self): return self ''' Input: Index of image to 
return Output: Image", "glob import glob from os.path import join, exists import json from cycada.data.data_loader import", "def __init__(self, name): config = None print(\"PARAM: {}\".format(os.getcwd())) with open(join(\"dataset_configs\", name+\".json\"), 'r') as", "exists import json from cycada.data.data_loader import register_data_params, register_dataset_obj from cycada.data.data_loader import DatasetParams import", "of image to return Output: Image in the format NCHW - normalized Segmask", "because they are class labels ''' def __getitem__(self, index): img_path = self.img_path(index) label_path", "1 # WARNING: Original code did mean normalization, we did min max normalization.", "join(self.data_path, run) subjects = sorted(glob(join(runpath, '*'))) start = timer() results = Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for", "'segmasks'), '*'))) return [images, segmasks] def collect_ids(self): from timeit import default_timer as timer", "NCHW format and normalize to -1 to 1 # WARNING: Original code did", "DatasetParams import cv2 from cycada.data.util import convert_image_by_pixformat_normalize import multiprocessing as mp from joblib", "root, params, num_cls=2, split='train', remap_labels=True, transform=None, target_transform=None): self.root = root self.split = split", "False def __init__(self, name): config = None print(\"PARAM: {}\".format(os.getcwd())) with open(join(\"dataset_configs\", name+\".json\"), 'r')", "label_path = self.label_path(index) img = None if self.bw_flag: img = cv2.imread(img_path, 0) img_temp", "= config[\"num_cls\"] self.fraction = config[\"fraction\"] self.target_transform = config[\"target_transform\"] self.black = config[\"black\"] @register_dataset_obj('surreal') class", "black = False def __init__(self, name): config = None print(\"PARAM: {}\".format(os.getcwd())) with open(join(\"dataset_configs\",", "= self.segmasks + result[1] def img_path(self, index): return self.images[index] def 
label_path(self, index): return", "return Output: Image in the format NCHW - normalized Segmask in the format", "= Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for subject in subjects) end = timer() print(timedelta(seconds=end-start)) for result in", "Image in the format NCHW - normalized Segmask in the format NHW (channels", "import json from cycada.data.data_loader import register_data_params, register_dataset_obj from cycada.data.data_loader import DatasetParams import cv2", "result[0] self.segmasks = self.segmasks + result[1] def img_path(self, index): return self.images[index] def label_path(self,", "import glob from os.path import join, exists import json from cycada.data.data_loader import register_data_params,", "= target_transform self.data_path = join(self.root, self.split) self.num_cls = num_cls self.size = (params.image_size, params.image_size)", "timer from datetime import timedelta # Parallelize the for loop for run in", "of 0s and 1s only! target = torch.Tensor(target.transpose(2, 0, 1)).mean(dim=0) return img, target", "= params.black self.seed = 255 self.fraction = params.fraction if (self.split == 'train') else", "config = json.load(f) self.num_channels = config[\"num_channels\"] self.image_size = config[\"image_size\"] self.mean = config[\"mean\"] self.num_cls", "2) img = np.concatenate((img_temp, img_temp, img_temp), axis=2) else: img = cv2.imread(img_path) target =", "old one. 
img = torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING: target must be made up of 0s", "timeit import default_timer as timer from datetime import timedelta # Parallelize the for", "to 1 # WARNING: Original code did mean normalization, we did min max", "__getitem__(self, index): img_path = self.img_path(index) label_path = self.label_path(index) img = None if self.bw_flag:", "format NCHW - normalized Segmask in the format NHW (channels = 1 is", "img = np.concatenate((img_temp, img_temp, img_temp), axis=2) else: img = cv2.imread(img_path) target = cv2.imread(label_path)", "if necessary to old one. img = torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING: target must be made", "img = cv2.imread(img_path) target = cv2.imread(label_path) img = cv2.resize(img, self.size) target = cv2.resize(target,", "= remap_labels self.name = name self.runs = ['run0'] self.transform = transform self.images =", "as np import scipy.io import torch import os from torch.utils.data import Dataset from", "import os from torch.utils.data import Dataset from glob import glob from os.path import", "import join, exists import json from cycada.data.data_loader import register_data_params, register_dataset_obj from cycada.data.data_loader import", "config[\"num_cls\"] self.fraction = config[\"fraction\"] self.target_transform = config[\"target_transform\"] self.black = config[\"black\"] @register_dataset_obj('surreal') class SurrealLoader(Dataset):", "[] self.segmasks = [] self.target_transform = target_transform self.data_path = join(self.root, self.split) self.num_cls =", "understood) - not normalized because they are class labels ''' def __getitem__(self, index):", "= root self.split = split self.remap_labels = remap_labels self.name = name self.runs =", "index): return self.images[index] def label_path(self, index): return self.segmasks[index] def __iter__(self): return self '''", "= [] self.target_transform = target_transform self.data_path = join(self.root, 
self.split) self.num_cls = num_cls self.size", "from cycada.data.data_loader import DatasetParams import cv2 from cycada.data.util import convert_image_by_pixformat_normalize import multiprocessing as", "'*'))) segmasks = segmasks + sorted(glob(join(imagesubject.replace('images', 'segmasks'), '*'))) return [images, segmasks] def collect_ids(self):", "for result in results: self.images = self.images + result[0] self.segmasks = self.segmasks +", "NCHW - normalized Segmask in the format NHW (channels = 1 is understood)", "subjects) end = timer() print(timedelta(seconds=end-start)) for result in results: self.images = self.images +", "params.fraction if (self.split == 'train') else 1.0 self.collect_ids() def get_subject_data(self, subjectpath): imagepath =", "= 3 image_size = 256 mean = 0.5 num_cls = 2 fraction =", "the for loop for run in self.runs: runpath = join(self.data_path, run) subjects =", "class labels ''' def __getitem__(self, index): img_path = self.img_path(index) label_path = self.label_path(index) img", "as mp from joblib import Parallel, delayed @register_data_params('surreal') class SurrealParams(DatasetParams): num_channels = 3", "def img_path(self, index): return self.images[index] def label_path(self, index): return self.segmasks[index] def __iter__(self): return", "3 image_size = 256 mean = 0.5 num_cls = 2 fraction = 1.0", "(self.split == 'train') else 1.0 self.collect_ids() def get_subject_data(self, subjectpath): imagepath = join(subjectpath, 'images')", "= config[\"mean\"] self.num_cls = config[\"num_cls\"] self.fraction = config[\"fraction\"] self.target_transform = config[\"target_transform\"] self.black =", "end = timer() print(timedelta(seconds=end-start)) for result in results: self.images = self.images + result[0]", "self.size = (params.image_size, params.image_size) self.bw_flag = params.black self.seed = 255 self.fraction = params.fraction", "Parallelize the for loop for run in self.runs: runpath = join(self.data_path, run) subjects", 
"class SurrealParams(DatasetParams): num_channels = 3 image_size = 256 mean = 0.5 num_cls =", "as timer from datetime import timedelta # Parallelize the for loop for run", "config[\"target_transform\"] self.black = config[\"black\"] @register_dataset_obj('surreal') class SurrealLoader(Dataset): # root must be /scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def", "to NCHW format and normalize to -1 to 1 # WARNING: Original code", "= 1 is understood) - not normalized because they are class labels '''", "self.target_transform = target_transform self.data_path = join(self.root, self.split) self.num_cls = num_cls self.size = (params.image_size,", "import cv2 from cycada.data.util import convert_image_by_pixformat_normalize import multiprocessing as mp from joblib import", "subjects = sorted(glob(join(runpath, '*'))) start = timer() results = Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for subject in", "['run0'] self.transform = transform self.images = [] self.segmasks = [] self.target_transform = target_transform", "label_path(self, index): return self.segmasks[index] def __iter__(self): return self ''' Input: Index of image", "start = timer() results = Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for subject in subjects) end = timer()", "they are class labels ''' def __getitem__(self, index): img_path = self.img_path(index) label_path =", "self.bw_flag = params.black self.seed = 255 self.fraction = params.fraction if (self.split == 'train')", "= None if self.bw_flag: img = cv2.imread(img_path, 0) img_temp = np.expand_dims(img, axis =", "self.remap_labels = remap_labels self.name = name self.runs = ['run0'] self.transform = transform self.images", "- normalized Segmask in the format NHW (channels = 1 is understood) -", "return self.images[index] def label_path(self, index): return self.segmasks[index] def __iter__(self): return self ''' Input:", "SurrealLoader(Dataset): # root must be 
/scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def __init__(self, name, root, params, num_cls=2, split='train',", "cv2.imread(img_path) target = cv2.imread(label_path) img = cv2.resize(img, self.size) target = cv2.resize(target, self.size) #", "num_cls self.size = (params.image_size, params.image_size) self.bw_flag = params.black self.seed = 255 self.fraction =", "self.image_size = config[\"image_size\"] self.mean = config[\"mean\"] self.num_cls = config[\"num_cls\"] self.fraction = config[\"fraction\"] self.target_transform", "cv2.imread(label_path) img = cv2.resize(img, self.size) target = cv2.resize(target, self.size) # Convert to NCHW", "= cv2.imread(img_path) target = cv2.imread(label_path) img = cv2.resize(img, self.size) target = cv2.resize(target, self.size)", "remap_labels=True, transform=None, target_transform=None): self.root = root self.split = split self.remap_labels = remap_labels self.name", "cycada.data.data_loader import register_data_params, register_dataset_obj from cycada.data.data_loader import DatasetParams import cv2 from cycada.data.util import", "= cv2.resize(target, self.size) # Convert to NCHW format and normalize to -1 to", "= sorted(glob(join(runpath, '*'))) start = timer() results = Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for subject in subjects)", "register_dataset_obj from cycada.data.data_loader import DatasetParams import cv2 from cycada.data.util import convert_image_by_pixformat_normalize import multiprocessing", "are class labels ''' def __getitem__(self, index): img_path = self.img_path(index) label_path = self.label_path(index)", "= [] self.segmasks = [] self.target_transform = target_transform self.data_path = join(self.root, self.split) self.num_cls", "'train') else 1.0 self.collect_ids() def get_subject_data(self, subjectpath): imagepath = join(subjectpath, 'images') imagesubjects =", "print(timedelta(seconds=end-start)) for result in results: self.images = self.images 
+ result[0] self.segmasks = self.segmasks", "target_transform=None): self.root = root self.split = split self.remap_labels = remap_labels self.name = name", "and normalize to -1 to 1 # WARNING: Original code did mean normalization,", "open(join(\"dataset_configs\", name+\".json\"), 'r') as f: config = json.load(f) self.num_channels = config[\"num_channels\"] self.image_size =", "if self.bw_flag: img = cv2.imread(img_path, 0) img_temp = np.expand_dims(img, axis = 2) img", "target = cv2.imread(label_path) img = cv2.resize(img, self.size) target = cv2.resize(target, self.size) # Convert", "Dataset from glob import glob from os.path import join, exists import json from", "join(subjectpath, 'images') imagesubjects = glob(join(imagepath, '*')) images = [] segmasks = [] for", "= glob(join(imagepath, '*')) images = [] segmasks = [] for imagesubject in imagesubjects:", "num_cls = 2 fraction = 1.0 target_transform = None black = False def", "self.split) self.num_cls = num_cls self.size = (params.image_size, params.image_size) self.bw_flag = params.black self.seed =", "= np.expand_dims(img, axis = 2) img = np.concatenate((img_temp, img_temp, img_temp), axis=2) else: img", "class SurrealLoader(Dataset): # root must be /scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def __init__(self, name, root, params, num_cls=2,", "def __getitem__(self, index): img_path = self.img_path(index) label_path = self.label_path(index) img = None if", "Output: Image in the format NCHW - normalized Segmask in the format NHW", "self.num_channels = config[\"num_channels\"] self.image_size = config[\"image_size\"] self.mean = config[\"mean\"] self.num_cls = config[\"num_cls\"] self.fraction", "join(self.root, self.split) self.num_cls = num_cls self.size = (params.image_size, params.image_size) self.bw_flag = params.black self.seed", "in results: self.images = self.images + result[0] self.segmasks = self.segmasks + result[1] def", "format and normalize to -1 to 1 # WARNING: Original 
code did mean", "be made up of 0s and 1s only! target = torch.Tensor(target.transpose(2, 0, 1)).mean(dim=0)", "root self.split = split self.remap_labels = remap_labels self.name = name self.runs = ['run0']", "for run in self.runs: runpath = join(self.data_path, run) subjects = sorted(glob(join(runpath, '*'))) start", "runpath = join(self.data_path, run) subjects = sorted(glob(join(runpath, '*'))) start = timer() results =", "cv2.resize(img, self.size) target = cv2.resize(target, self.size) # Convert to NCHW format and normalize", "256 mean = 0.5 num_cls = 2 fraction = 1.0 target_transform = None", "normalization. Change if necessary to old one. img = torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING: target must", "__iter__(self): return self ''' Input: Index of image to return Output: Image in", "return self ''' Input: Index of image to return Output: Image in the", "fraction = 1.0 target_transform = None black = False def __init__(self, name): config", "Parallel, delayed @register_data_params('surreal') class SurrealParams(DatasetParams): num_channels = 3 image_size = 256 mean =", "scipy.io import torch import os from torch.utils.data import Dataset from glob import glob", "we did min max normalization. Change if necessary to old one. 
img =", "= np.concatenate((img_temp, img_temp, img_temp), axis=2) else: img = cv2.imread(img_path) target = cv2.imread(label_path) img", "img = torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING: target must be made up of 0s and 1s", "''' def __getitem__(self, index): img_path = self.img_path(index) label_path = self.label_path(index) img = None", "= 2) img = np.concatenate((img_temp, img_temp, img_temp), axis=2) else: img = cv2.imread(img_path) target", "as f: config = json.load(f) self.num_channels = config[\"num_channels\"] self.image_size = config[\"image_size\"] self.mean =", "= num_cls self.size = (params.image_size, params.image_size) self.bw_flag = params.black self.seed = 255 self.fraction", "self.images = self.images + result[0] self.segmasks = self.segmasks + result[1] def img_path(self, index):", "self.collect_ids() def get_subject_data(self, subjectpath): imagepath = join(subjectpath, 'images') imagesubjects = glob(join(imagepath, '*')) images", "up of 0s and 1s only! target = torch.Tensor(target.transpose(2, 0, 1)).mean(dim=0) return img,", "name): config = None print(\"PARAM: {}\".format(os.getcwd())) with open(join(\"dataset_configs\", name+\".json\"), 'r') as f: config", "cycada.data.util import convert_image_by_pixformat_normalize import multiprocessing as mp from joblib import Parallel, delayed @register_data_params('surreal')", "img = cv2.resize(img, self.size) target = cv2.resize(target, self.size) # Convert to NCHW format", "self.size) target = cv2.resize(target, self.size) # Convert to NCHW format and normalize to", "one. img = torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING: target must be made up of 0s and", "cv2 from cycada.data.util import convert_image_by_pixformat_normalize import multiprocessing as mp from joblib import Parallel,", "for imagesubject in imagesubjects: images = images + sorted(glob(join(imagesubject, '*'))) segmasks = segmasks", "and 1s only! 
target = torch.Tensor(target.transpose(2, 0, 1)).mean(dim=0) return img, target def __len__(self):", "images + sorted(glob(join(imagesubject, '*'))) segmasks = segmasks + sorted(glob(join(imagesubject.replace('images', 'segmasks'), '*'))) return [images,", "import Dataset from glob import glob from os.path import join, exists import json", "mean = 0.5 num_cls = 2 fraction = 1.0 target_transform = None black", "Convert to NCHW format and normalize to -1 to 1 # WARNING: Original", "self.segmasks[index] def __iter__(self): return self ''' Input: Index of image to return Output:", "axis=2) else: img = cv2.imread(img_path) target = cv2.imread(label_path) img = cv2.resize(img, self.size) target", "import multiprocessing as mp from joblib import Parallel, delayed @register_data_params('surreal') class SurrealParams(DatasetParams): num_channels", "= transform self.images = [] self.segmasks = [] self.target_transform = target_transform self.data_path =", "from joblib import Parallel, delayed @register_data_params('surreal') class SurrealParams(DatasetParams): num_channels = 3 image_size =", "cv2.resize(target, self.size) # Convert to NCHW format and normalize to -1 to 1", "self ''' Input: Index of image to return Output: Image in the format", "from torch.utils.data import Dataset from glob import glob from os.path import join, exists", "= torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING: target must be made up of 0s and 1s only!", "result[1] def img_path(self, index): return self.images[index] def label_path(self, index): return self.segmasks[index] def __iter__(self):", "import Parallel, delayed @register_data_params('surreal') class SurrealParams(DatasetParams): num_channels = 3 image_size = 256 mean", "= config[\"num_channels\"] self.image_size = config[\"image_size\"] self.mean = config[\"mean\"] self.num_cls = config[\"num_cls\"] self.fraction =", "transform self.images = [] self.segmasks = [] self.target_transform = target_transform self.data_path = 
join(self.root,", "from glob import glob from os.path import join, exists import json from cycada.data.data_loader", "# Convert to NCHW format and normalize to -1 to 1 # WARNING:", "= None print(\"PARAM: {}\".format(os.getcwd())) with open(join(\"dataset_configs\", name+\".json\"), 'r') as f: config = json.load(f)", "self.runs = ['run0'] self.transform = transform self.images = [] self.segmasks = [] self.target_transform", "img_path(self, index): return self.images[index] def label_path(self, index): return self.segmasks[index] def __iter__(self): return self", "must be /scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def __init__(self, name, root, params, num_cls=2, split='train', remap_labels=True, transform=None, target_transform=None):", "= timer() results = Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for subject in subjects) end = timer() print(timedelta(seconds=end-start))", "timer() results = Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for subject in subjects) end = timer() print(timedelta(seconds=end-start)) for", "= config[\"black\"] @register_dataset_obj('surreal') class SurrealLoader(Dataset): # root must be /scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def __init__(self, name,", "in imagesubjects: images = images + sorted(glob(join(imagesubject, '*'))) segmasks = segmasks + sorted(glob(join(imagesubject.replace('images',", "= [] segmasks = [] for imagesubject in imagesubjects: images = images +", "(channels = 1 is understood) - not normalized because they are class labels", "be /scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def __init__(self, name, root, params, num_cls=2, split='train', remap_labels=True, transform=None, target_transform=None): self.root", "images = [] segmasks = [] for imagesubject in imagesubjects: images = images", "Change if necessary to old one. 
img = torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING: target must be", "'*'))) return [images, segmasks] def collect_ids(self): from timeit import default_timer as timer from", "def collect_ids(self): from timeit import default_timer as timer from datetime import timedelta #", "max normalization. Change if necessary to old one. img = torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING: target", "= None black = False def __init__(self, name): config = None print(\"PARAM: {}\".format(os.getcwd()))", "normalized because they are class labels ''' def __getitem__(self, index): img_path = self.img_path(index)", "self.fraction = config[\"fraction\"] self.target_transform = config[\"target_transform\"] self.black = config[\"black\"] @register_dataset_obj('surreal') class SurrealLoader(Dataset): #", "labels ''' def __getitem__(self, index): img_path = self.img_path(index) label_path = self.label_path(index) img =", "sorted(glob(join(imagesubject.replace('images', 'segmasks'), '*'))) return [images, segmasks] def collect_ids(self): from timeit import default_timer as", "made up of 0s and 1s only! 
target = torch.Tensor(target.transpose(2, 0, 1)).mean(dim=0) return", "self.fraction = params.fraction if (self.split == 'train') else 1.0 self.collect_ids() def get_subject_data(self, subjectpath):", "= cv2.imread(img_path, 0) img_temp = np.expand_dims(img, axis = 2) img = np.concatenate((img_temp, img_temp,", "+ sorted(glob(join(imagesubject, '*'))) segmasks = segmasks + sorted(glob(join(imagesubject.replace('images', 'segmasks'), '*'))) return [images, segmasks]", "name, root, params, num_cls=2, split='train', remap_labels=True, transform=None, target_transform=None): self.root = root self.split =", "os from torch.utils.data import Dataset from glob import glob from os.path import join,", "in subjects) end = timer() print(timedelta(seconds=end-start)) for result in results: self.images = self.images", "target_transform = None black = False def __init__(self, name): config = None print(\"PARAM:", "'*'))) start = timer() results = Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for subject in subjects) end =", "num_cls=2, split='train', remap_labels=True, transform=None, target_transform=None): self.root = root self.split = split self.remap_labels =", "normalization, we did min max normalization. Change if necessary to old one. 
img", "config[\"black\"] @register_dataset_obj('surreal') class SurrealLoader(Dataset): # root must be /scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def __init__(self, name, root,", "from datetime import timedelta # Parallelize the for loop for run in self.runs:", "= 0.5 num_cls = 2 fraction = 1.0 target_transform = None black =", "'images') imagesubjects = glob(join(imagepath, '*')) images = [] segmasks = [] for imagesubject", "config[\"mean\"] self.num_cls = config[\"num_cls\"] self.fraction = config[\"fraction\"] self.target_transform = config[\"target_transform\"] self.black = config[\"black\"]", "return [images, segmasks] def collect_ids(self): from timeit import default_timer as timer from datetime", "+ result[0] self.segmasks = self.segmasks + result[1] def img_path(self, index): return self.images[index] def", "self.data_path = join(self.root, self.split) self.num_cls = num_cls self.size = (params.image_size, params.image_size) self.bw_flag =", "os.path import join, exists import json from cycada.data.data_loader import register_data_params, register_dataset_obj from cycada.data.data_loader", "loop for run in self.runs: runpath = join(self.data_path, run) subjects = sorted(glob(join(runpath, '*')))", "imagepath = join(subjectpath, 'images') imagesubjects = glob(join(imagepath, '*')) images = [] segmasks =", "self.name = name self.runs = ['run0'] self.transform = transform self.images = [] self.segmasks", "index): img_path = self.img_path(index) label_path = self.label_path(index) img = None if self.bw_flag: img", "import convert_image_by_pixformat_normalize import multiprocessing as mp from joblib import Parallel, delayed @register_data_params('surreal') class", "__init__(self, name): config = None print(\"PARAM: {}\".format(os.getcwd())) with open(join(\"dataset_configs\", name+\".json\"), 'r') as f:", "def get_subject_data(self, subjectpath): imagepath = join(subjectpath, 'images') imagesubjects = glob(join(imagepath, '*')) images 
=", "@register_dataset_obj('surreal') class SurrealLoader(Dataset): # root must be /scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def __init__(self, name, root, params,", "from cycada.data.data_loader import register_data_params, register_dataset_obj from cycada.data.data_loader import DatasetParams import cv2 from cycada.data.util", "normalize to -1 to 1 # WARNING: Original code did mean normalization, we", "= config[\"image_size\"] self.mean = config[\"mean\"] self.num_cls = config[\"num_cls\"] self.fraction = config[\"fraction\"] self.target_transform =", "Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for subject in subjects) end = timer() print(timedelta(seconds=end-start)) for result in results:", "target_transform self.data_path = join(self.root, self.split) self.num_cls = num_cls self.size = (params.image_size, params.image_size) self.bw_flag", "datetime import timedelta # Parallelize the for loop for run in self.runs: runpath", "= self.img_path(index) label_path = self.label_path(index) img = None if self.bw_flag: img = cv2.imread(img_path,", "numpy as np import scipy.io import torch import os from torch.utils.data import Dataset", "f: config = json.load(f) self.num_channels = config[\"num_channels\"] self.image_size = config[\"image_size\"] self.mean = config[\"mean\"]", "= (params.image_size, params.image_size) self.bw_flag = params.black self.seed = 255 self.fraction = params.fraction if", "= 256 mean = 0.5 num_cls = 2 fraction = 1.0 target_transform =", "from timeit import default_timer as timer from datetime import timedelta # Parallelize the", "/scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def __init__(self, name, root, params, num_cls=2, split='train', remap_labels=True, transform=None, target_transform=None): self.root =", "must be made up of 0s and 1s only! 
target = torch.Tensor(target.transpose(2, 0,", "import DatasetParams import cv2 from cycada.data.util import convert_image_by_pixformat_normalize import multiprocessing as mp from", "config[\"image_size\"] self.mean = config[\"mean\"] self.num_cls = config[\"num_cls\"] self.fraction = config[\"fraction\"] self.target_transform = config[\"target_transform\"]", "None print(\"PARAM: {}\".format(os.getcwd())) with open(join(\"dataset_configs\", name+\".json\"), 'r') as f: config = json.load(f) self.num_channels", "self.black = config[\"black\"] @register_dataset_obj('surreal') class SurrealLoader(Dataset): # root must be /scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def __init__(self,", "self.bw_flag: img = cv2.imread(img_path, 0) img_temp = np.expand_dims(img, axis = 2) img =", "np.concatenate((img_temp, img_temp, img_temp), axis=2) else: img = cv2.imread(img_path) target = cv2.imread(label_path) img =", "torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING: target must be made up of 0s and 1s only! 
target", "get_subject_data(self, subjectpath): imagepath = join(subjectpath, 'images') imagesubjects = glob(join(imagepath, '*')) images = []", "config[\"fraction\"] self.target_transform = config[\"target_transform\"] self.black = config[\"black\"] @register_dataset_obj('surreal') class SurrealLoader(Dataset): # root must", "split self.remap_labels = remap_labels self.name = name self.runs = ['run0'] self.transform = transform", "segmasks + sorted(glob(join(imagesubject.replace('images', 'segmasks'), '*'))) return [images, segmasks] def collect_ids(self): from timeit import", "register_data_params, register_dataset_obj from cycada.data.data_loader import DatasetParams import cv2 from cycada.data.util import convert_image_by_pixformat_normalize import", "= config[\"fraction\"] self.target_transform = config[\"target_transform\"] self.black = config[\"black\"] @register_dataset_obj('surreal') class SurrealLoader(Dataset): # root", "Original code did mean normalization, we did min max normalization. 
Change if necessary", "- not normalized because they are class labels ''' def __getitem__(self, index): img_path", "Segmask in the format NHW (channels = 1 is understood) - not normalized", "self.split = split self.remap_labels = remap_labels self.name = name self.runs = ['run0'] self.transform", "[images, segmasks] def collect_ids(self): from timeit import default_timer as timer from datetime import", "print(\"PARAM: {}\".format(os.getcwd())) with open(join(\"dataset_configs\", name+\".json\"), 'r') as f: config = json.load(f) self.num_channels =", "imagesubject in imagesubjects: images = images + sorted(glob(join(imagesubject, '*'))) segmasks = segmasks +", "SurrealParams(DatasetParams): num_channels = 3 image_size = 256 mean = 0.5 num_cls = 2", "default_timer as timer from datetime import timedelta # Parallelize the for loop for", "axis = 2) img = np.concatenate((img_temp, img_temp, img_temp), axis=2) else: img = cv2.imread(img_path)", "result in results: self.images = self.images + result[0] self.segmasks = self.segmasks + result[1]", "self.seed = 255 self.fraction = params.fraction if (self.split == 'train') else 1.0 self.collect_ids()", "subjectpath): imagepath = join(subjectpath, 'images') imagesubjects = glob(join(imagepath, '*')) images = [] segmasks", "img_temp), axis=2) else: img = cv2.imread(img_path) target = cv2.imread(label_path) img = cv2.resize(img, self.size)", "'*')) images = [] segmasks = [] for imagesubject in imagesubjects: images =", "0s and 1s only! target = torch.Tensor(target.transpose(2, 0, 1)).mean(dim=0) return img, target def", "segmasks] def collect_ids(self): from timeit import default_timer as timer from datetime import timedelta", "= self.label_path(index) img = None if self.bw_flag: img = cv2.imread(img_path, 0) img_temp =", "# WARNING: Original code did mean normalization, we did min max normalization. 
Change", "self.size) # Convert to NCHW format and normalize to -1 to 1 #", "# root must be /scratch/users/aditya/adult/SURREAL/surreal/download/SURREAL/data/cmu def __init__(self, name, root, params, num_cls=2, split='train', remap_labels=True,", "glob(join(imagepath, '*')) images = [] segmasks = [] for imagesubject in imagesubjects: images", "-1 to 1 # WARNING: Original code did mean normalization, we did min", "= join(self.root, self.split) self.num_cls = num_cls self.size = (params.image_size, params.image_size) self.bw_flag = params.black", "num_channels = 3 image_size = 256 mean = 0.5 num_cls = 2 fraction", "join, exists import json from cycada.data.data_loader import register_data_params, register_dataset_obj from cycada.data.data_loader import DatasetParams", "to -1 to 1 # WARNING: Original code did mean normalization, we did", "2 fraction = 1.0 target_transform = None black = False def __init__(self, name):", "for subject in subjects) end = timer() print(timedelta(seconds=end-start)) for result in results: self.images", "= split self.remap_labels = remap_labels self.name = name self.runs = ['run0'] self.transform =", "is understood) - not normalized because they are class labels ''' def __getitem__(self,", "= 255 self.fraction = params.fraction if (self.split == 'train') else 1.0 self.collect_ids() def", "params.black self.seed = 255 self.fraction = params.fraction if (self.split == 'train') else 1.0", "segmasks = [] for imagesubject in imagesubjects: images = images + sorted(glob(join(imagesubject, '*')))", "to old one. img = torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING: target must be made up of", "name+\".json\"), 'r') as f: config = json.load(f) self.num_channels = config[\"num_channels\"] self.image_size = config[\"image_size\"]", "in the format NHW (channels = 1 is understood) - not normalized because", "code did mean normalization, we did min max normalization. 
Change if necessary to", "image to return Output: Image in the format NCHW - normalized Segmask in", "collect_ids(self): from timeit import default_timer as timer from datetime import timedelta # Parallelize", "= 1.0 target_transform = None black = False def __init__(self, name): config =", "did min max normalization. Change if necessary to old one. img = torch.Tensor(convert_image_by_pixformat_normalize(img))", "[] self.target_transform = target_transform self.data_path = join(self.root, self.split) self.num_cls = num_cls self.size =", "torch import os from torch.utils.data import Dataset from glob import glob from os.path", "run) subjects = sorted(glob(join(runpath, '*'))) start = timer() results = Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for subject", "self.images = [] self.segmasks = [] self.target_transform = target_transform self.data_path = join(self.root, self.split)", "self.images[index] def label_path(self, index): return self.segmasks[index] def __iter__(self): return self ''' Input: Index", "target must be made up of 0s and 1s only! 
target = torch.Tensor(target.transpose(2,", "= cv2.imread(label_path) img = cv2.resize(img, self.size) target = cv2.resize(target, self.size) # Convert to", "= params.fraction if (self.split == 'train') else 1.0 self.collect_ids() def get_subject_data(self, subjectpath): imagepath", "sorted(glob(join(runpath, '*'))) start = timer() results = Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for subject in subjects) end", "= self.images + result[0] self.segmasks = self.segmasks + result[1] def img_path(self, index): return", "<reponame>AdityaAS/cycada<filename>cycada/data/surreal.py import numpy as np import scipy.io import torch import os from torch.utils.data", "torch.utils.data import Dataset from glob import glob from os.path import join, exists import", "subject in subjects) end = timer() print(timedelta(seconds=end-start)) for result in results: self.images =", "= cv2.resize(img, self.size) target = cv2.resize(target, self.size) # Convert to NCHW format and", "1 is understood) - not normalized because they are class labels ''' def", "json.load(f) self.num_channels = config[\"num_channels\"] self.image_size = config[\"image_size\"] self.mean = config[\"mean\"] self.num_cls = config[\"num_cls\"]", "img_temp = np.expand_dims(img, axis = 2) img = np.concatenate((img_temp, img_temp, img_temp), axis=2) else:", "timedelta # Parallelize the for loop for run in self.runs: runpath = join(self.data_path,", "mp from joblib import Parallel, delayed @register_data_params('surreal') class SurrealParams(DatasetParams): num_channels = 3 image_size", "img = None if self.bw_flag: img = cv2.imread(img_path, 0) img_temp = np.expand_dims(img, axis", "min max normalization. Change if necessary to old one. 
img = torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING:", "self.num_cls = num_cls self.size = (params.image_size, params.image_size) self.bw_flag = params.black self.seed = 255", "self.img_path(index) label_path = self.label_path(index) img = None if self.bw_flag: img = cv2.imread(img_path, 0)", "self.root = root self.split = split self.remap_labels = remap_labels self.name = name self.runs", "+ result[1] def img_path(self, index): return self.images[index] def label_path(self, index): return self.segmasks[index] def", "in self.runs: runpath = join(self.data_path, run) subjects = sorted(glob(join(runpath, '*'))) start = timer()", "remap_labels self.name = name self.runs = ['run0'] self.transform = transform self.images = []", "self.label_path(index) img = None if self.bw_flag: img = cv2.imread(img_path, 0) img_temp = np.expand_dims(img,", "config = None print(\"PARAM: {}\".format(os.getcwd())) with open(join(\"dataset_configs\", name+\".json\"), 'r') as f: config =", "the format NCHW - normalized Segmask in the format NHW (channels = 1", "mean normalization, we did min max normalization. 
Change if necessary to old one.", "multiprocessing as mp from joblib import Parallel, delayed @register_data_params('surreal') class SurrealParams(DatasetParams): num_channels =", "None if self.bw_flag: img = cv2.imread(img_path, 0) img_temp = np.expand_dims(img, axis = 2)", "json from cycada.data.data_loader import register_data_params, register_dataset_obj from cycada.data.data_loader import DatasetParams import cv2 from", "1.0 target_transform = None black = False def __init__(self, name): config = None", "format NHW (channels = 1 is understood) - not normalized because they are", "segmasks = segmasks + sorted(glob(join(imagesubject.replace('images', 'segmasks'), '*'))) return [images, segmasks] def collect_ids(self): from", "None black = False def __init__(self, name): config = None print(\"PARAM: {}\".format(os.getcwd())) with", "[] for imagesubject in imagesubjects: images = images + sorted(glob(join(imagesubject, '*'))) segmasks =", "self.images + result[0] self.segmasks = self.segmasks + result[1] def img_path(self, index): return self.images[index]", "in the format NCHW - normalized Segmask in the format NHW (channels =", "Input: Index of image to return Output: Image in the format NCHW -", "cycada.data.data_loader import DatasetParams import cv2 from cycada.data.util import convert_image_by_pixformat_normalize import multiprocessing as mp", "else: img = cv2.imread(img_path) target = cv2.imread(label_path) img = cv2.resize(img, self.size) target =", "delayed @register_data_params('surreal') class SurrealParams(DatasetParams): num_channels = 3 image_size = 256 mean = 0.5", "1.0 self.collect_ids() def get_subject_data(self, subjectpath): imagepath = join(subjectpath, 'images') imagesubjects = glob(join(imagepath, '*'))", "cv2.imread(img_path, 0) img_temp = np.expand_dims(img, axis = 2) img = np.concatenate((img_temp, img_temp, img_temp),", "np import scipy.io import torch import os from torch.utils.data import Dataset from glob", "= 2 fraction = 1.0 
target_transform = None black = False def __init__(self,", "{}\".format(os.getcwd())) with open(join(\"dataset_configs\", name+\".json\"), 'r') as f: config = json.load(f) self.num_channels = config[\"num_channels\"]", "the format NHW (channels = 1 is understood) - not normalized because they", "necessary to old one. img = torch.Tensor(convert_image_by_pixformat_normalize(img)) #WARNING: target must be made up", "for loop for run in self.runs: runpath = join(self.data_path, run) subjects = sorted(glob(join(runpath,", "__init__(self, name, root, params, num_cls=2, split='train', remap_labels=True, transform=None, target_transform=None): self.root = root self.split", "= join(subjectpath, 'images') imagesubjects = glob(join(imagepath, '*')) images = [] segmasks = []", "self.mean = config[\"mean\"] self.num_cls = config[\"num_cls\"] self.fraction = config[\"fraction\"] self.target_transform = config[\"target_transform\"] self.black", "from os.path import join, exists import json from cycada.data.data_loader import register_data_params, register_dataset_obj from", "+ sorted(glob(join(imagesubject.replace('images', 'segmasks'), '*'))) return [images, segmasks] def collect_ids(self): from timeit import default_timer", "== 'train') else 1.0 self.collect_ids() def get_subject_data(self, subjectpath): imagepath = join(subjectpath, 'images') imagesubjects", "only! 
target = torch.Tensor(target.transpose(2, 0, 1)).mean(dim=0) return img, target def __len__(self): return len(self.images)", "imagesubjects: images = images + sorted(glob(join(imagesubject, '*'))) segmasks = segmasks + sorted(glob(join(imagesubject.replace('images', 'segmasks'),", "import numpy as np import scipy.io import torch import os from torch.utils.data import", "= [] for imagesubject in imagesubjects: images = images + sorted(glob(join(imagesubject, '*'))) segmasks", "results = Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject) for subject in subjects) end = timer() print(timedelta(seconds=end-start)) for result", "import scipy.io import torch import os from torch.utils.data import Dataset from glob import", "imagesubjects = glob(join(imagepath, '*')) images = [] segmasks = [] for imagesubject in", "index): return self.segmasks[index] def __iter__(self): return self ''' Input: Index of image to", "img_temp, img_temp), axis=2) else: img = cv2.imread(img_path) target = cv2.imread(label_path) img = cv2.resize(img,", "self.target_transform = config[\"target_transform\"] self.black = config[\"black\"] @register_dataset_obj('surreal') class SurrealLoader(Dataset): # root must be", "(params.image_size, params.image_size) self.bw_flag = params.black self.seed = 255 self.fraction = params.fraction if (self.split", "0.5 num_cls = 2 fraction = 1.0 target_transform = None black = False", "params, num_cls=2, split='train', remap_labels=True, transform=None, target_transform=None): self.root = root self.split = split self.remap_labels", "@register_data_params('surreal') class SurrealParams(DatasetParams): num_channels = 3 image_size = 256 mean = 0.5 num_cls", "import register_data_params, register_dataset_obj from cycada.data.data_loader import DatasetParams import cv2 from cycada.data.util import convert_image_by_pixformat_normalize", "sorted(glob(join(imagesubject, '*'))) segmasks = segmasks + sorted(glob(join(imagesubject.replace('images', 
'segmasks'), '*'))) return [images, segmasks] def", "# Parallelize the for loop for run in self.runs: runpath = join(self.data_path, run)", "did mean normalization, we did min max normalization. Change if necessary to old", "name self.runs = ['run0'] self.transform = transform self.images = [] self.segmasks = []", "import timedelta # Parallelize the for loop for run in self.runs: runpath =", "= join(self.data_path, run) subjects = sorted(glob(join(runpath, '*'))) start = timer() results = Parallel(n_jobs=mp.cpu_count())(delayed(self.get_subject_data)(subject)", "255 self.fraction = params.fraction if (self.split == 'train') else 1.0 self.collect_ids() def get_subject_data(self,", "run in self.runs: runpath = join(self.data_path, run) subjects = sorted(glob(join(runpath, '*'))) start =", "= ['run0'] self.transform = transform self.images = [] self.segmasks = [] self.target_transform =", "= json.load(f) self.num_channels = config[\"num_channels\"] self.image_size = config[\"image_size\"] self.mean = config[\"mean\"] self.num_cls =", "= False def __init__(self, name): config = None print(\"PARAM: {}\".format(os.getcwd())) with open(join(\"dataset_configs\", name+\".json\"),", "import torch import os from torch.utils.data import Dataset from glob import glob from", "'r') as f: config = json.load(f) self.num_channels = config[\"num_channels\"] self.image_size = config[\"image_size\"] self.mean", "[] segmasks = [] for imagesubject in imagesubjects: images = images + sorted(glob(join(imagesubject,", "normalized Segmask in the format NHW (channels = 1 is understood) - not", "def label_path(self, index): return self.segmasks[index] def __iter__(self): return self ''' Input: Index of", "#WARNING: target must be made up of 0s and 1s only! 
target =", "def __init__(self, name, root, params, num_cls=2, split='train', remap_labels=True, transform=None, target_transform=None): self.root = root", "joblib import Parallel, delayed @register_data_params('surreal') class SurrealParams(DatasetParams): num_channels = 3 image_size = 256", "not normalized because they are class labels ''' def __getitem__(self, index): img_path =", "self.segmasks = self.segmasks + result[1] def img_path(self, index): return self.images[index] def label_path(self, index):", "transform=None, target_transform=None): self.root = root self.split = split self.remap_labels = remap_labels self.name =", "params.image_size) self.bw_flag = params.black self.seed = 255 self.fraction = params.fraction if (self.split ==", "img = cv2.imread(img_path, 0) img_temp = np.expand_dims(img, axis = 2) img = np.concatenate((img_temp,", "NHW (channels = 1 is understood) - not normalized because they are class", "target = cv2.resize(target, self.size) # Convert to NCHW format and normalize to -1", "self.num_cls = config[\"num_cls\"] self.fraction = config[\"fraction\"] self.target_transform = config[\"target_transform\"] self.black = config[\"black\"] @register_dataset_obj('surreal')", "img_path = self.img_path(index) label_path = self.label_path(index) img = None if self.bw_flag: img =", "timer() print(timedelta(seconds=end-start)) for result in results: self.images = self.images + result[0] self.segmasks =", "convert_image_by_pixformat_normalize import multiprocessing as mp from joblib import Parallel, delayed @register_data_params('surreal') class SurrealParams(DatasetParams):", "image_size = 256 mean = 0.5 num_cls = 2 fraction = 1.0 target_transform", "0) img_temp = np.expand_dims(img, axis = 2) img = np.concatenate((img_temp, img_temp, img_temp), axis=2)" ]
[ "call( 'busarrivalservice', { 'stationId': station_id, 'routeId': route_id } ) if response is None:", "fetch(station_id: str, route_id: str): response = call( 'busarrivalservice', { 'stationId': station_id, 'routeId': route_id", "'busarrivalservice', { 'stationId': station_id, 'routeId': route_id } ) if response is None: return", "} ) if response is None: return None return ''.join( map( lambda list_element:", "# bus_arrival_item = BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item) def fetch(station_id: str, route_id: str): response =", "import BusArrivalItem from api import call # bus_arrival_item = BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item) def", "def fetch(station_id: str, route_id: str): response = call( 'busarrivalservice', { 'stationId': station_id, 'routeId':", "'routeId': route_id } ) if response is None: return None return ''.join( map(", "api import call # bus_arrival_item = BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item) def fetch(station_id: str, route_id:", "route_id } ) if response is None: return None return ''.join( map( lambda", "None return ''.join( map( lambda list_element: str(BusArrivalItem(list_element)), response ) ) if __name__ ==", "''.join( map( lambda list_element: str(BusArrivalItem(list_element)), response ) ) if __name__ == '__main__': print(fetch('218000952',", "'stationId': station_id, 'routeId': route_id } ) if response is None: return None return", "{ 'stationId': station_id, 'routeId': route_id } ) if response is None: return None", ") if response is None: return None return ''.join( map( lambda list_element: str(BusArrivalItem(list_element)),", "return ''.join( map( lambda list_element: str(BusArrivalItem(list_element)), response ) ) if __name__ == '__main__':", "if response is None: return None return ''.join( map( lambda list_element: str(BusArrivalItem(list_element)), response", "str): response = 
call( 'busarrivalservice', { 'stationId': station_id, 'routeId': route_id } ) if", "route_id: str): response = call( 'busarrivalservice', { 'stationId': station_id, 'routeId': route_id } )", "# print(bus_arrival_item) def fetch(station_id: str, route_id: str): response = call( 'busarrivalservice', { 'stationId':", "response is None: return None return ''.join( map( lambda list_element: str(BusArrivalItem(list_element)), response )", "from api import call # bus_arrival_item = BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item) def fetch(station_id: str,", "BusArrivalItem import BusArrivalItem from api import call # bus_arrival_item = BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item)", "import call # bus_arrival_item = BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item) def fetch(station_id: str, route_id: str):", "BusArrivalItem from api import call # bus_arrival_item = BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item) def fetch(station_id:", "None: return None return ''.join( map( lambda list_element: str(BusArrivalItem(list_element)), response ) ) if", "is None: return None return ''.join( map( lambda list_element: str(BusArrivalItem(list_element)), response ) )", "map( lambda list_element: str(BusArrivalItem(list_element)), response ) ) if __name__ == '__main__': print(fetch('218000952', '241449005'))", "BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item) def fetch(station_id: str, route_id: str): response = call( 'busarrivalservice', {", "from BusArrivalItem import BusArrivalItem from api import call # bus_arrival_item = BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) #", "= BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item) def fetch(station_id: str, route_id: str): response = call( 'busarrivalservice',", 
"print(bus_arrival_item) def fetch(station_id: str, route_id: str): response = call( 'busarrivalservice', { 'stationId': station_id,", "return None return ''.join( map( lambda list_element: str(BusArrivalItem(list_element)), response ) ) if __name__", "response = call( 'busarrivalservice', { 'stationId': station_id, 'routeId': route_id } ) if response", "str, route_id: str): response = call( 'busarrivalservice', { 'stationId': station_id, 'routeId': route_id }", "bus_arrival_item = BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item) def fetch(station_id: str, route_id: str): response = call(", "= call( 'busarrivalservice', { 'stationId': station_id, 'routeId': route_id } ) if response is", "station_id, 'routeId': route_id } ) if response is None: return None return ''.join(", "call # bus_arrival_item = BusArrivalItem(xml_root.find('msgBody').find('busArrivalItem')) # print(bus_arrival_item) def fetch(station_id: str, route_id: str): response" ]
[ "with all the border index populated \"\"\" borders = np.array(np.where(value == border_val)).T new_label", "0, som.get_weights().shape[1]]) if(save): save_dir = 'SOMs_results/' + save_name + '_plot.png' plt.savefig(save_dir) print('Plot saved", "SOMs. The size returned is sqrt(5 * sqrt(num_sample)), with the exception that the", "or list data matrix dim : int dimension of the SOMs distance matrix", "{k: random.sample(v, 1)[0] for k, v in param_grid.items()} dims[i], iters[i], lrs[i], sigmas[i] =", "axes.ravel() ws_labels = np.zeros((num_bins * ncols, image.shape[0], image.shape[1])) for i in range(num_bins): val", "re_all=False): \"\"\"Evaluate and return the best watershed prediction result Parameters ---------- in_data :", "5 Returns ------- np.array new label with all the border index populated \"\"\"", "the watershed level or not, by default False conn : int, optional connectivity", "lr=%.6f, sigma=%.6f\\ result to very small / large number of clusters (n_clusters =", "500, 200)), 'learning_rate': list(np.logspace(np.log10(0.25), np.log10(0.75), base=10, num=100)), 'sigma': list(np.linspace(g_sigma-1, g_sigma+1, num=30)), } return", "3].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 3])))) ax[i*ncols + 4].imshow(ws_labels[i*ncols + 4], origin='lower') ax[i*ncols", "int(dims[i]), lr=lrs[i], sigma=sigmas[i]) som.train_random(in_data, int(iters[i]), verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels", "save report to file fdir = save_name + '_report.csv' print('Report saved at', fdir)", "ws_labels.shape[0] cluster_labels = np.zeros((len_watershed, len(in_data))) avg_sils = np.full(len_watershed, np.nan) ch_scs = np.full(len_watershed, np.nan)", "idx': i} if(len(np.unique(ws_labels[i])) > 1): cluster_labels[i] = gen_e_model(n_map, ws_labels[i]) avg_sils[i] = mh.int_eval_silhouette(in_data, cluster_labels[i],", "tuple list of initial guess of the parameters, in order 
of dimension, number", "+ 4], origin='lower') ax[i*ncols + 4].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 4])))) ax[i*ncols +", "\"\"\" som = som_assemble(in_data, seed, dim, lr, sigma) som.train_random(in_data, iter_cnt, verbose=False) u_matrix =", "the best watershed prediction result Parameters ---------- in_data : np.array or list data", "10 else: return dim def som_assemble(in_data, seed, dim, lr=0.5, sigma=2.5): \"\"\"Initialize the SOMs", "5].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 5])))) return ws_labels def eval_ws(in_data, ws_labels, n_map, label=None,", "watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) _c, _as, _ch = eval_ws(in_data, ws_labels, n_map) cluster_labels[i],", "def watershed_level(image, bins, border_width=0.1, plot=False, conn=None): num_bins = len(bins) \"\"\"Computes and classify the", "- 10 if g_dim - 5 > 10 else 10 max_dim = g_dim", "bins[i]) markers = measure.label(thres_mask, connectivity=conn) ws_labels[i*ncols + 3] = closest_n(markers) - 1 ws_labels[i*ncols", "name which will be used to save the plot as png file, by", "# find index of the closest k neighbors dist = distance.cdist([b], vals) c_idx", "by default 5 Returns ------- np.array new label with all the border index", "from neuron map\"\"\" som_class = [] for i in range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return np.array(som_class)", "distance map / u-matrix of the SOMs Parameters ---------- som : MiniSom trained", "indexes by using the K-nearest neighbor method Parameters ---------- value : np.array numpy", "k neighbors dist = distance.cdist([b], vals) c_idx = np.argpartition(dist, k) c_idx = c_idx[0,", "best watershed labels, may contain more than one set \"\"\" len_watershed = ws_labels.shape[0]", "seed, dim, lr=0.5, sigma=2.5): \"\"\"Initialize the SOMs model for training Parameters ---------- in_data", "of Minisom class, see 
minisom.py for further details \"\"\" # Initialization som and", "of the closest value c_idx = distance.cdist([b], vals).argmin() new_label[b[0], b[1]] = value[vals[c_idx, 0]][vals[c_idx,", "len(in_data))) avg_sils = np.full(len_watershed, np.nan) ch_scs = np.full(len_watershed, np.nan) if(label is not None):", "cluster_labels[i], avg_sils[i], ch_scs[i] = _c[0], _as[0], _ch[0] n_clusters = len(np.unique(cluster_labels[i])) if(n_clusters < 5", "3])))) ax[i*ncols + 4].imshow(ws_labels[i*ncols + 4], origin='lower') ax[i*ncols + 4].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols", "bins \"\"\" ttl_dtp = len(in_data) return np.interp(np.linspace(0, ttl_dtp, nbin + 1), np.arange(ttl_dtp), np.sort(in_data))", "bins, border_width=0.1, plot=False, conn=None): num_bins = len(bins) \"\"\"Computes and classify the SOM's u-matrix", "som.neuron_map(in_data) _c, _as, _ch = eval_ws(in_data, ws_labels, n_map) cluster_labels[i], avg_sils[i], ch_scs[i] = _c[0],", "Parameters ---------- in_data : np.array or list data matrix ws_labels : np.array predicted", "20 param_grid = { 'dim': list(range(min_dim, max_dim+1)), 'iter_cnt': list(range(g_it - 500, g_it +", "seed, dim, lr, sigma) som.train_random(in_data, iter_cnt, verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten())", "i in range(num_bins): val = filters.threshold_local(image, block_size=3 + 2*i) block_mask = (image <", ": float, optional spread of the neighborhood function, by default 2.5 Returns -------", "0], vals[c_idx, 1]))) class_counter = Counter() for idx in mins_idx: class_counter[value[idx[0], idx[1]]] +=", "the minimum dimension size = 10 Parameters ---------- num_sample : int Total number", "logging import sys logging.basicConfig(format='%(asctime)s | %(levelname)s : %(message)s', level=logging.INFO, stream=sys.stdout) def compute_dim(num_sample): \"\"\"", "som.get_weights().shape[1]]) if(save): save_dir = 'SOMs_results/' + save_name + 
'_plot.png' plt.savefig(save_dir) print('Plot saved at:',", "from each watershed level \"\"\" ncols = 6 if(plot): fig, axes = plt.subplots(ncols=ncols,", "n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 1])))) ax[i*ncols + 2].imshow(ws_labels[i*ncols + 2], origin='lower') ax[i*ncols + 2].title.set_text('b_rw:", "distance matrix lr : float, optional learning rate, by default 0.5 sigma :", "reproducibility, by default 10 Returns ------- All cluster label and its counterpart parameters.", "dim < 10: return 10 else: return dim def som_assemble(in_data, seed, dim, lr=0.5,", "level \"\"\" ncols = 6 if(plot): fig, axes = plt.subplots(ncols=ncols, nrows=num_bins, figsize=(12, num_bins*3),", "of the SOMs. This function returns the dimension size of the SOMs. The", "\\ '\\nelapsed time,' + str(et) + '\\n\\n' # save report to file fdir", "np.array(vals).T for b in borders: # find index of the closest k neighbors", "/ u-matrix of the SOMs Parameters ---------- som : MiniSom trained Minisom object", "dimension, number of iterations, learning rate, and sigma max_eval : int, optional number", "saved at:', fdir) def histedges_equalN(in_data, nbin=10): \"\"\"generates a histogram where each bin will", "< 5): logging.info(\"Not enough labeled neighbor to perform KNN.\\n\\ Will return the original", "ax2) = plt.subplots(1, 2, figsize=(12, 5)) fig.show() ax1.pcolor(som_u_mat, cmap='bone_r') hist = plt.hist(f_image, histedges_equalN(f_image,", "np.where(value != 0) vals = np.array(vals).T for b in borders: # find index", "the SOMs bins : np.array numpy array of all the histogram bins plot", "open(fdir, mode) f1.write(param_vals) if(report): f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n') f1.close() print('Report saved at:', fdir) def histedges_equalN(in_data,", "label=None, re_all=False): \"\"\"Evaluate and return the best watershed prediction result Parameters ---------- in_data", "class_counter[value[idx[0], idx[1]]] += 1 cl = 
class_counter.most_common(1)[0][0] new_label[b[0], b[1]] = cl return new_label", "num_bins = len(bins) \"\"\"Computes and classify the SOM's u-matrix or total gradient using", "= random_walker(image, markers) if(plot): ax[i*ncols + 3].imshow(ws_labels[i*ncols + 3], origin='lower') ax[i*ncols + 3].title.set_text('b_cn:", "for the sample xx plt.text(w[0]+.5, w[1]+.5, str(t), color=plt.cm.rainbow(t/10.)) plt.axis([0, som.get_weights().shape[0], 0, som.get_weights().shape[1]]) if(save):", "= np.full(len_watershed, np.nan) if(label is not None): avg_ents = np.full(len_watershed, np.nan) avg_purs =", "num=30)), } return param_grid def random_search_som(in_data, init_guess, max_eval=20, label=None, seed=10, re_all=False): \"\"\"perform random", "map as background plt.colorbar() for t, xx in zip(label, in_data): w = som.winner(xx)", "init_guess min_dim = g_dim - 10 if g_dim - 5 > 10 else", "value Parameters ---------- value : np.array numpy array of the cluster number, noted", "5] = random_walker(image, markers) if(plot): ax[i*ncols + 3].imshow(ws_labels[i*ncols + 3], origin='lower') ax[i*ncols +", "g_dim, g_it, g_lr, g_sigma = init_guess min_dim = g_dim - 10 if g_dim", "optional random seed for reproducibility, by default 10 Returns ------- All cluster label", "as png file, by default 'temp' \"\"\" plt.figure(figsize=(9, 7)) # Plotting the response", "return new_label def KNN(value, k=5, border_val=0): \"\"\"Assign cluster number to the mask's border", "num_sample : int Total number of data points that will populate the SOMs", "False save_name : str, optional the name which will be used to save", "< 10: return 10 else: return dim def som_assemble(in_data, seed, dim, lr=0.5, sigma=2.5):", "_as, _ch = eval_ws(in_data, ws_labels, n_map) cluster_labels[i], avg_sils[i], ch_scs[i] = _c[0], _as[0], _ch[0]", "\"\"\" random.seed(seed) param_grid = gen_param_grid(init_guess) dims = np.zeros(max_eval) iters = np.zeros(max_eval) lrs =", "lrs[i], sigmas[i] = 
list(random_params.values()) som = som_assemble(in_data, seed, int(dims[i]), lr=lrs[i], sigma=sigmas[i]) som.train_random(in_data, int(iters[i]),", "np.array numpy array of all the histogram bins plot : bool, optional flag", "cluster number, noted that the borders are marked with 0 Returns ------- np.array", "matrix dim : int dimension of the SOMs distance matrix iter_cnt : integer", "'\\nelapsed time,' + str(et) + '\\n\\n' # save report to file fdir =", "return hist[1] def gen_e_model(n_map, som_label): \"\"\"generates the Earth model from neuron map\"\"\" som_class", "som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) _c, _as,", "an object of Minisom class, see minisom.py for further details \"\"\" # Initialization", "w = som.winner(xx) # getting the winner # palce a marker on the", "All cluster label and its counterpart parameters. \"\"\" random.seed(seed) param_grid = gen_param_grid(init_guess) dims", "be used to save the plot as png file, by default 'temp' \"\"\"", "avg_sils[best_idx], ch_scs[best_idx]) def run_SOMs(in_data, dim, iter_cnt, lr, sigma, seed=10): \"\"\"Method to fully run", "def random_search_som(in_data, init_guess, max_eval=20, label=None, seed=10, re_all=False): \"\"\"perform random search for SOMs best", "data points Parameters ---------- in_data : np.array or list data array nbin :", "optional the true label of each data point Returns ------- np.array list of", "Parameters ---------- in_data : np.array or list data array nbin : int number", "seed for reproducibility, by default 10 Returns ------- All cluster label and its", "plt import itertools from skimage import measure from skimage.segmentation import random_walker from skimage", "list data matrix label : np.array or list the true label of each", "str(t), color=plt.cm.rainbow(t/10.)) plt.axis([0, som.get_weights().shape[0], 0, som.get_weights().shape[1]]) if(save): save_dir = 
'SOMs_results/' + save_name +", "lr : float, optional learning rate, by default 0.5 sigma : float, optional", "1], origin='lower') ax[i*ncols + 1].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 1])))) ax[i*ncols + 2].imshow(ws_labels[i*ncols", "watershed classification method Parameters ---------- image : np.array u-matrix or total gradient of", "measure.label(thres_mask, connectivity=conn) ws_labels[i*ncols + 3] = closest_n(markers) - 1 ws_labels[i*ncols + 4] =", "for further details \"\"\" # Initialization som and weights num_features = np.shape(in_data)[1] som", "is sqrt(5 * sqrt(num_sample)), with the exception that the minimum dimension size =", "true label of each data point, by default None seed : integer, optional", "bins \"\"\" f_image = som_u_mat.flatten() fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))", "histedges_equalN(in_data, nbin=10): \"\"\"generates a histogram where each bin will contain the same number", "u-matrix or total gradient using watershed classification method Parameters ---------- image : np.array", "5)) fig.show() ax1.pcolor(som_u_mat, cmap='bone_r') hist = plt.hist(f_image, histedges_equalN(f_image, 10), density=True) return hist[1] def", "from acse_9_irp_wafflescore import MiscHelpers as mh import logging import sys logging.basicConfig(format='%(asctime)s | %(levelname)s", "list(range(g_it - 500, g_it + 500, 200)), 'learning_rate': list(np.logspace(np.log10(0.25), np.log10(0.75), base=10, num=100)), 'sigma':", "the SOM's u-matrix or total gradient using watershed classification method Parameters ---------- image", "to perform lr : float learning rate sigma : float spread of the", "flag, by default False save_name : str, optional the name which will be", "sigma, seed=10): \"\"\"Method to fully run SOMs Parameters ---------- in_data : np.array or", "optional number of max iterartion to perform the search, by default 20 label", "Counter() for idx in mins_idx: class_counter[value[idx[0], 
idx[1]]] += 1 cl = class_counter.most_common(1)[0][0] new_label[b[0],", "sqrt(5 * sqrt(num_sample)), with the exception that the minimum dimension size = 10", "print('Report saved at', fdir) mode = 'w' f1 = open(fdir, mode) f1.write(param_vals) if(report):", "= watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) cluster_labels, _, _ = eval_ws(in_data, ws_labels, n_map)", "= np.argpartition(dist, k) c_idx = c_idx[0, :k] mins_idx = np.array(list(zip(vals[c_idx, 0], vals[c_idx, 1])))", "origin='lower') ax[i*ncols + 2].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 2])))) thres_mask = (image <=", "best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs)) # higher = better if(label is not", "3].imshow(ws_labels[i*ncols + 3], origin='lower') ax[i*ncols + 3].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 3])))) ax[i*ncols", "the border index populated \"\"\" borders = np.array(np.where(value == 0)).T new_label = np.array(value)", "som, cluster_labels def gen_param_grid(init_guess): g_dim, g_it, g_lr, g_sigma = init_guess min_dim = g_dim", "np.sort(in_data)) def plot_u_matrix(som_u_mat): \"\"\"Plots the distance map / u-matrix of the SOMs Parameters", "list of best watershed labels, may contain more than one set \"\"\" len_watershed", "+ 1].imshow(ws_labels[i*ncols + 1], origin='lower') ax[i*ncols + 1].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 1]))))", "+ 4] = KNN(markers) - 1 ws_labels[i*ncols + 5] = random_walker(image, markers) if(plot):", "the dimension size of the SOMs. 
The size returned is sqrt(5 * sqrt(num_sample)),", "learning rate, by default 0.5 sigma : float, optional spread of the neighborhood", "that the borders are marked with 0 k : int, optional number of", "using the K-nearest neighbor method Parameters ---------- value : np.array numpy array of", "plot : bool, optional flag whether to plot the watershed level or not,", "int seed : integer, optional random seed for reproducibility, by default 10 Returns", "avg_ents = np.full(len_watershed, np.nan) avg_purs = np.full(len_watershed, np.nan) for i in range(len_watershed): param", "to 1 best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, best_idx) else: return", "true label of each data point save : bool, optional flag, by default", "else: return dim def som_assemble(in_data, seed, dim, lr=0.5, sigma=2.5): \"\"\"Initialize the SOMs model", "0 best_idx.append(np.nanargmax(np.array(avg_purs))) # closest to 1 best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils,", "10: return 10 else: return dim def som_assemble(in_data, seed, dim, lr=0.5, sigma=2.5): \"\"\"Initialize", "the cluster number, noted that the borders are marked with 0 k :", "the true label of each data point Returns ------- np.array list of best", "function, by default 2.5 Returns ------- MiniSom an object of Minisom class, see", "ax[i*ncols + 4].imshow(ws_labels[i*ncols + 4], origin='lower') ax[i*ncols + 4].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols +", "if(len(np.unique(ws_labels[i])) > 1): cluster_labels[i] = gen_e_model(n_map, ws_labels[i]) avg_sils[i] = mh.int_eval_silhouette(in_data, cluster_labels[i], method='som_watershed', param=param)", "2*i) block_mask = (image < val) markers = measure.label(block_mask, connectivity=conn) ws_labels[i*ncols] = closest_n(markers)", "as mh import logging import sys logging.basicConfig(format='%(asctime)s | %(levelname)s : %(message)s', level=logging.INFO, stream=sys.stdout)", 
"som_class = [] for i in range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return np.array(som_class) def closest_n(value): \"\"\"Assign", "'w' f1 = open(fdir, mode) f1.write(param_vals) if(report): f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n') f1.close() print('Report saved at:',", "are marked with 0 Returns ------- np.array new label with all the border", "border indexes by using the K-nearest neighbor method Parameters ---------- value : np.array", "f1.write('\\n\\n--------------------\\n\\n') f1.close() print('Report saved at:', fdir) def histedges_equalN(in_data, nbin=10): \"\"\"generates a histogram where", "figsize=(12, num_bins*3), sharex=True, sharey=True) ax = axes.ravel() ws_labels = np.zeros((num_bins * ncols, image.shape[0],", "som_assemble(in_data, seed, dim, lr=0.5, sigma=2.5): \"\"\"Initialize the SOMs model for training Parameters ----------", "in order of dimension, number of iterations, learning rate, and sigma max_eval :", ": integer, optional random seed for reproducibility, by default 10 Returns ------- All", "flag whether to plot the watershed level or not, by default False conn", "import distance from collections import Counter from timeit import default_timer as timer import", "and weights num_features = np.shape(in_data)[1] som = MiniSom(dim, dim, num_features, sigma=sigma, learning_rate=lr, neighborhood_function='gaussian',", "\"\"\"perform random search for SOMs best parameters. 
Parameters ---------- in_data : np.array or", "10 if g_dim + 10 > 10 else 20 param_grid = { 'dim':", "random seed for reproducibility, by default 10 Returns ------- minisom minisom object np.array", "= eval_ws(in_data, ws_labels, n_map) return som, cluster_labels def gen_param_grid(init_guess): g_dim, g_it, g_lr, g_sigma", "None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i]) best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to", "def plot_som(som, in_data, label, save=False, save_name='temp'): \"\"\"plots the distance map / u-matrix of", "som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) cluster_labels, _,", "% (dims[i], iters[i], lrs[i], sigmas[i], avg_sils[i], ch_scs[i])) if(label is not None): avg_ents[i], avg_purs[i]", "value.\") return value vals = np.array(vals).T for b in borders: # find index", "SOMs distance matrix lr : float, optional learning rate, by default 0.5 sigma", "training Parameters ---------- in_data : np.array or list data matrix seed : integer", "---------- in_data : np.array or list data matrix dim : int dimension of", "one set \"\"\" len_watershed = ws_labels.shape[0] cluster_labels = np.zeros((len_watershed, len(in_data))) avg_sils = np.full(len_watershed,", "np.array or list data array nbin : int number of bins to populate,", "matrix iter_cnt : integer number of iterations for SOMs to perform lr :", "\"\"\" ttl_dtp = len(in_data) return np.interp(np.linspace(0, ttl_dtp, nbin + 1), np.arange(ttl_dtp), np.sort(in_data)) def", "init_clus=-1) logging.info(\"ent=%.6f, pur=%.6f\" % (avg_ents[i], avg_purs[i])) i += 1 best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils)))", "the exception that the minimum dimension size = 10 Parameters ---------- num_sample :", "for reproducibility, by default 10 Returns ------- All cluster label and its counterpart", "(ax1, ax2) = 
plt.subplots(1, 2, figsize=(12, 5)) fig.show() ax1.pcolor(som_u_mat, cmap='bone_r') hist = plt.hist(f_image,", "som_assemble(in_data, seed, dim, lr, sigma) som.train_random(in_data, iter_cnt, verbose=False) u_matrix = som.distance_map().T watershed_bins =", "eval_ws(in_data, ws_labels, n_map) return som, cluster_labels def gen_param_grid(init_guess): g_dim, g_it, g_lr, g_sigma =", "Parameters ---------- image : np.array u-matrix or total gradient of the SOMs bins", "7)) # Plotting the response for each litho-class plt.pcolor(som.distance_map().T, cmap='bone_r') # plotting the", "the mask's border indexes by using the closest neighbor's value Parameters ---------- value", "fig, axes = plt.subplots(ncols=ncols, nrows=num_bins, figsize=(12, num_bins*3), sharex=True, sharey=True) ax = axes.ravel() ws_labels", "+ 2], origin='lower') ax[i*ncols + 2].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 2])))) thres_mask =", "watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) cluster_labels, _, _ = eval_ws(in_data, ws_labels, n_map) return", "watershed_bins) n_map = som.neuron_map(in_data) cluster_labels, _, _ = eval_ws(in_data, ws_labels, n_map) return som,", "= measure.label(thres_mask, connectivity=conn) ws_labels[i*ncols + 3] = closest_n(markers) - 1 ws_labels[i*ncols + 4]", "sil=%.6f, ch=%.6f\" % (dims[i], iters[i], lrs[i], sigmas[i], avg_sils[i], ch_scs[i])) if(label is not None):", "markers) if(plot): ax[i*ncols + 3].imshow(ws_labels[i*ncols + 3], origin='lower') ax[i*ncols + 3].title.set_text('b_cn: it={} n_class={}'.format(i,", "all the histogram bins \"\"\" ttl_dtp = len(in_data) return np.interp(np.linspace(0, ttl_dtp, nbin +", "for SOMs to perform lr : float learning rate sigma : float spread", "K-nearest neighbor method Parameters ---------- value : np.array numpy array of the cluster", "np.array(list(zip(vals[c_idx, 0], vals[c_idx, 1]))) class_counter = Counter() for idx in mins_idx: 
class_counter[value[idx[0], idx[1]]]", "0], origin='lower') ax[i*ncols].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 0])))) ax[i*ncols + 1].imshow(ws_labels[i*ncols + 1],", "plt.subplots(ncols=ncols, nrows=num_bins, figsize=(12, num_bins*3), sharex=True, sharey=True) ax = axes.ravel() ws_labels = np.zeros((num_bins *", "= np.int(np.sqrt(dim)) if dim < 10: return 10 else: return dim def som_assemble(in_data,", "default 20 label : np.array or list, optional the true label of each", "border index populated \"\"\" borders = np.array(np.where(value == 0)).T new_label = np.array(value) vals", "watershed labels, may contain more than one set \"\"\" len_watershed = ws_labels.shape[0] cluster_labels", "f1 = open(fdir, mode) f1.write(param_vals) if(report): f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n') f1.close() print('Report saved at:', fdir)", "ax[i*ncols + 4].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 4])))) ax[i*ncols + 5].imshow(ws_labels[i*ncols + 5],", "closest neighbor's value Parameters ---------- value : np.array numpy array of the cluster", "neighborhood_function='gaussian', random_seed=seed) som.pca_weights_init(in_data) return som def plot_som(som, in_data, label, save=False, save_name='temp'): \"\"\"plots the", "1])))) ax[i*ncols + 2].imshow(ws_labels[i*ncols + 2], origin='lower') ax[i*ncols + 2].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols", "in_data : np.array or list data matrix dim : int dimension of the", "5 * np.sqrt(num_sample) dim = np.int(np.sqrt(dim)) if dim < 10: return 10 else:", "= [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs)) # higher = better if(label", "np.zeros(max_eval) sigmas = np.zeros(max_eval) avg_sils = np.full(max_eval, np.nan) ch_scs = np.full(max_eval, np.nan) cluster_labels", "labels from each watershed level \"\"\" ncols = 6 if(plot): 
fig, axes =", "axes = plt.subplots(ncols=ncols, nrows=num_bins, figsize=(12, num_bins*3), sharex=True, sharey=True) ax = axes.ravel() ws_labels =", "= g_dim + 10 if g_dim + 10 > 10 else 20 param_grid", "= np.full(len_watershed, np.nan) ch_scs = np.full(len_watershed, np.nan) if(label is not None): avg_ents =", "optional random seed for reproducibility, by default 10 Returns ------- minisom minisom object", "0 Returns ------- np.array new label with all the border index populated \"\"\"", "\\ '\\niterations,' + str(it) + \\ '\\nelapsed time,' + str(et) + '\\n\\n' #", "lr, sigma, seed=10): \"\"\"Method to fully run SOMs Parameters ---------- in_data : np.array", "the winner # palce a marker on the winning position for the sample", "base=10, num=100)), 'sigma': list(np.linspace(g_sigma-1, g_sigma+1, num=30)), } return param_grid def random_search_som(in_data, init_guess, max_eval=20,", "avg_purs[i])) i += 1 best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs))", "'SOMs_results/' + save_name + '_plot.png' plt.savefig(save_dir) print('Plot saved at:', save_dir) plt.show() def save_som_report(som,", "new_label[b[0], b[1]] = cl return new_label def watershed_level(image, bins, border_width=0.1, plot=False, conn=None): num_bins", "Returns ------- np.array numpy array of all the histogram bins \"\"\" ttl_dtp =", "label of each data point Returns ------- np.array list of best watershed labels,", "np.full(max_eval, np.nan) ch_scs = np.full(max_eval, np.nan) cluster_labels = np.zeros((max_eval, len(in_data))) if(label is not", "default dimension of the SOMs. 
This function returns the dimension size of the", "default 10 Returns ------- minisom minisom object np.array cluster label \"\"\" som =", "marker on the winning position for the sample xx plt.text(w[0]+.5, w[1]+.5, str(t), color=plt.cm.rainbow(t/10.))", "seed for reproducibility dim : int dimension of the SOMs distance matrix lr", "classification method Parameters ---------- image : np.array u-matrix or total gradient of the", "np.array numpy array of all the histogram bins \"\"\" f_image = som_u_mat.flatten() fig,", "= len(in_data) return np.interp(np.linspace(0, ttl_dtp, nbin + 1), np.arange(ttl_dtp), np.sort(in_data)) def plot_u_matrix(som_u_mat): \"\"\"Plots", ": bool, optional flag, by default False save_name : str, optional the name", "som : MiniSom trained Minisom object Returns ------- np.array numpy array of all", "---------- som : MiniSom trained Minisom object in_data : np.array or list data", "in borders: # find index of the closest k neighbors dist = distance.cdist([b],", "= distance.cdist([b], vals).argmin() new_label[b[0], b[1]] = value[vals[c_idx, 0]][vals[c_idx, 1]] return new_label def KNN(value,", "by default 'temp' \"\"\" plt.figure(figsize=(9, 7)) # Plotting the response for each litho-class", "to populate, by default 10 Returns ------- np.array numpy array of all the", "save_name : str, optional the name which will be used to save the", "else 10 max_dim = g_dim + 10 if g_dim + 10 > 10", "np.array numpy array of all the histogram bins \"\"\" ttl_dtp = len(in_data) return", "# Initialization som and weights num_features = np.shape(in_data)[1] som = MiniSom(dim, dim, num_features,", "/ u-matrix of the SOMs along with the label Parameters ---------- som :", "or list data matrix seed : integer random seed for reproducibility dim :", "Author: <NAME> GitHub: wafflescore \"\"\" from minisom import MiniSom, asymptotic_decay import numpy as", "is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i], init_clus=-1) 
logging.info(\"ent=%.6f, pur=%.6f\" % (avg_ents[i],", "+ 4].imshow(ws_labels[i*ncols + 4], origin='lower') ax[i*ncols + 4].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 4]))))", "label : np.array or list the true label of each data point save", "gen_e_model(n_map, ws_labels[i]) avg_sils[i] = mh.int_eval_silhouette(in_data, cluster_labels[i], method='som_watershed', param=param) try: ch_scs[i] = mh.cal_har_sc(in_data, cluster_labels[i])", "som_assemble(in_data, seed, int(dims[i]), lr=lrs[i], sigma=sigmas[i]) som.train_random(in_data, int(iters[i]), verbose=False) u_matrix = som.distance_map().T watershed_bins =", "each data point Returns ------- np.array list of best watershed labels, may contain", "MiscHelpers as mh import logging import sys logging.basicConfig(format='%(asctime)s | %(levelname)s : %(message)s', level=logging.INFO,", "if(label is not None): avg_ents = np.full(len_watershed, np.nan) avg_purs = np.full(len_watershed, np.nan) for", "array of the winner neuron label : np.array or list, optional the true", "5], origin='lower') ax[i*ncols + 5].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 5])))) return ws_labels def", "optional the name which will be used to save the plot as png", "numpy array of all the histogram bins \"\"\" f_image = som_u_mat.flatten() fig, (ax1,", "= g_dim - 10 if g_dim - 5 > 10 else 10 max_dim", "= som_u_mat.flatten() fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5)) fig.show() ax1.pcolor(som_u_mat, cmap='bone_r')", "border_width=0.1, plot=False, conn=None): num_bins = len(bins) \"\"\"Computes and classify the SOM's u-matrix or", "search using dim=%d, iter=%d, lr=%.6f, sigma=%.6f\\ result to very small / large number", "of data points that will populate the SOMs Returns ------- int Ideal dimension.", "# closest to 1 best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, dims,", "1 cl = class_counter.most_common(1)[0][0] 
new_label[b[0], b[1]] = cl return new_label def watershed_level(image, bins,", "= som.neuron_map(in_data) _c, _as, _ch = eval_ws(in_data, ws_labels, n_map) cluster_labels[i], avg_sils[i], ch_scs[i] =", "------- np.array new label with all the border index populated \"\"\" borders =", "in_data, label, save=False, save_name='temp'): \"\"\"plots the distance map / u-matrix of the SOMs", "measure.label, by default None Returns ------- np.array numpy array of predicted cluster labels", "matrix lr : float, optional learning rate, by default 0.5 sigma : float,", "return som def plot_som(som, in_data, label, save=False, save_name='temp'): \"\"\"plots the distance map /", ": float learning rate sigma : float spread of the neighborhood function, by", "in range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return np.array(som_class) def closest_n(value): \"\"\"Assign cluster number to the mask's", "= ws_labels.shape[0] cluster_labels = np.zeros((len_watershed, len(in_data))) avg_sils = np.full(len_watershed, np.nan) ch_scs = np.full(len_watershed,", "to plot the watershed level or not, by default False conn : int,", "= len(np.unique(cluster_labels[i])) if(n_clusters < 5 or n_clusters > 30): logging.info(\"Random search using dim=%d,", "color=plt.cm.rainbow(t/10.)) plt.axis([0, som.get_weights().shape[0], 0, som.get_weights().shape[1]]) if(save): save_dir = 'SOMs_results/' + save_name + '_plot.png'", "save_name + '_plot.png' plt.savefig(save_dir) print('Plot saved at:', save_dir) plt.show() def save_som_report(som, save_name, it,", "Parameters ---------- som : MiniSom trained Minisom object Returns ------- np.array numpy array", "best watershed prediction result Parameters ---------- in_data : np.array or list data matrix", "np.sqrt(num_sample) dim = np.int(np.sqrt(dim)) if dim < 10: return 10 else: return dim", "if(n_clusters < 5 or n_clusters > 30): logging.info(\"Random search using dim=%d, iter=%d, lr=%.6f,", "contain more than one set \"\"\" 
len_watershed = ws_labels.shape[0] cluster_labels = np.zeros((len_watershed, len(in_data)))", "in mins_idx: class_counter[value[idx[0], idx[1]]] += 1 cl = class_counter.most_common(1)[0][0] new_label[b[0], b[1]] = cl", "---------- num_sample : int Total number of data points that will populate the", "4].imshow(ws_labels[i*ncols + 4], origin='lower') ax[i*ncols + 4].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 4])))) ax[i*ncols", "each data point save : bool, optional flag, by default False save_name :", "if(plot): ax[i*ncols + 3].imshow(ws_labels[i*ncols + 3], origin='lower') ax[i*ncols + 3].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols", "size returned is sqrt(5 * sqrt(num_sample)), with the exception that the minimum dimension", "np.array cluster label \"\"\" som = som_assemble(in_data, seed, dim, lr, sigma) som.train_random(in_data, iter_cnt,", ": int number of bins to populate, by default 10 Returns ------- np.array", "for i in range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return np.array(som_class) def closest_n(value): \"\"\"Assign cluster number to", "is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i]) best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) #", "mode = 'w' f1 = open(fdir, mode) f1.write(param_vals) if(report): f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n') f1.close() print('Report", "random seed for reproducibility, by default 10 Returns ------- All cluster label and", "------- MiniSom an object of Minisom class, see minisom.py for further details \"\"\"", "of the SOMs distance matrix lr : float, optional learning rate, by default", "return ws_labels def eval_ws(in_data, ws_labels, n_map, label=None, re_all=False): \"\"\"Evaluate and return the best", "value[vals[c_idx, 0]][vals[c_idx, 1]] return new_label def KNN(value, k=5, border_val=0): \"\"\"Assign cluster number to", "= 0 
while i < max_eval: random_params = {k: random.sample(v, 1)[0] for k,", "neighbors dist = distance.cdist([b], vals) c_idx = np.argpartition(dist, k) c_idx = c_idx[0, :k]", "[] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs)) # higher = better if(label is", "int dimension of the SOMs distance matrix iter_cnt : integer number of iterations", "to perform KNN.\\n\\ Will return the original inputted value.\") return value vals =", "and its counterpart parameters. \"\"\" random.seed(seed) param_grid = gen_param_grid(init_guess) dims = np.zeros(max_eval) iters", "origin='lower') ax[i*ncols + 3].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 3])))) ax[i*ncols + 4].imshow(ws_labels[i*ncols +", "watershed segmentation n_map : np.array array of the winner neuron label : np.array", "np.array(vals).T for b in borders: # find index of the closest value c_idx", "num_features = np.shape(in_data)[1] som = MiniSom(dim, dim, num_features, sigma=sigma, learning_rate=lr, neighborhood_function='gaussian', random_seed=seed) som.pca_weights_init(in_data)", "filters.threshold_local(image, block_size=3 + 2*i) block_mask = (image < val) markers = measure.label(block_mask, connectivity=conn)", "it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 0])))) ax[i*ncols + 1].imshow(ws_labels[i*ncols + 1], origin='lower') ax[i*ncols +", "MiniSom, asymptotic_decay import numpy as np import matplotlib.pyplot as plt import itertools from", "0) vals = np.array(vals).T for b in borders: # find index of the", "gradient of the SOMs bins : np.array numpy array of all the histogram", "param_grid = gen_param_grid(init_guess) dims = np.zeros(max_eval) iters = np.zeros(max_eval) lrs = np.zeros(max_eval) sigmas", "import sys logging.basicConfig(format='%(asctime)s | %(levelname)s : %(message)s', level=logging.INFO, stream=sys.stdout) def compute_dim(num_sample): \"\"\" Compute", "of best watershed labels, may 
contain more than one set \"\"\" len_watershed =", "number of clusters (n_clusters = %d)\\ \" % (dims[i], iters[i], lrs[i], sigmas[i], n_clusters))", "numpy as np import matplotlib.pyplot as plt import itertools from skimage import measure", "np.interp(np.linspace(0, ttl_dtp, nbin + 1), np.arange(ttl_dtp), np.sort(in_data)) def plot_u_matrix(som_u_mat): \"\"\"Plots the distance map", "if(label is not None): avg_ents = np.full(max_eval, np.nan) avg_purs = np.full(max_eval, np.nan) i", "1 best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs)) # higher =", "\"\"\" plt.figure(figsize=(9, 7)) # Plotting the response for each litho-class plt.pcolor(som.distance_map().T, cmap='bone_r') #", "np.shape(in_data)[1] som = MiniSom(dim, dim, num_features, sigma=sigma, learning_rate=lr, neighborhood_function='gaussian', random_seed=seed) som.pca_weights_init(in_data) return som", "+ 2])))) thres_mask = (image <= bins[i]) markers = measure.label(thres_mask, connectivity=conn) ws_labels[i*ncols +", "cluster_labels[i] = gen_e_model(n_map, ws_labels[i]) avg_sils[i] = mh.int_eval_silhouette(in_data, cluster_labels[i], method='som_watershed', param=param) try: ch_scs[i] =", "perform the search, by default 20 label : np.array or list, optional the", "on the winning position for the sample xx plt.text(w[0]+.5, w[1]+.5, str(t), color=plt.cm.rainbow(t/10.)) plt.axis([0,", "\"\"\" dim = 5 * np.sqrt(num_sample) dim = np.int(np.sqrt(dim)) if dim < 10:", "in range(len_watershed): param = {'watershed idx': i} if(len(np.unique(ws_labels[i])) > 1): cluster_labels[i] = gen_e_model(n_map,", "for training Parameters ---------- in_data : np.array or list data matrix seed :", "iter_cnt, verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map", "mins_idx: class_counter[value[idx[0], idx[1]]] += 1 cl = class_counter.most_common(1)[0][0] 
new_label[b[0], b[1]] = cl return", "\"\"\"Assign cluster number to the mask's border indexes by using the K-nearest neighbor", "it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 2])))) thres_mask = (image <= bins[i]) markers = measure.label(thres_mask,", ": float, optional learning rate, by default 0.5 sigma : float, optional spread", ": np.array predicted cluster labels from watershed segmentation n_map : np.array array of", "= np.array(vals).T for b in borders: # find index of the closest k", "ncols, image.shape[0], image.shape[1])) for i in range(num_bins): val = filters.threshold_local(image, block_size=3 + 2*i)", "np.array numpy array of predicted cluster labels from each watershed level \"\"\" ncols", "of max iterartion to perform the search, by default 20 label : np.array", "distance.cdist([b], vals).argmin() new_label[b[0], b[1]] = value[vals[c_idx, 0]][vals[c_idx, 1]] return new_label def KNN(value, k=5,", "str, optional the name which will be used to save the plot as", "import measure from skimage.segmentation import random_walker from skimage import filters from scipy.spatial import", "2])))) thres_mask = (image <= bins[i]) markers = measure.label(thres_mask, connectivity=conn) ws_labels[i*ncols + 3]", "the K-nearest neighbor method Parameters ---------- value : np.array numpy array of the", "best_idx.append(np.nanargmax(ch_scs)) # higher = better if(label is not None): best_idx.append(np.nanargmin(np.array(avg_ents))) # closest to", "of the cluster number, noted that the borders are marked with 0 k", "dimension size = 10 Parameters ---------- num_sample : int Total number of data", "+ \\ '\\niterations,' + str(it) + \\ '\\nelapsed time,' + str(et) + '\\n\\n'", "---------- som : MiniSom trained Minisom object Returns ------- np.array numpy array of", "g_sigma+1, num=30)), } return param_grid def random_search_som(in_data, init_guess, max_eval=20, label=None, seed=10, re_all=False): \"\"\"perform", "by default False save_name : str, optional 
the name which will be used", "save the plot as png file, by default 'temp' \"\"\" plt.figure(figsize=(9, 7)) #", "fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5)) fig.show() ax1.pcolor(som_u_mat, cmap='bone_r') hist =", "save_name + '_report.csv' print('Report saved at', fdir) mode = 'w' f1 = open(fdir,", "SOM's u-matrix or total gradient using watershed classification method Parameters ---------- image :", "integer, optional random seed for reproducibility, by default 10 Returns ------- All cluster", "/ large number of clusters (n_clusters = %d)\\ \" % (dims[i], iters[i], lrs[i],", "max_eval : int, optional number of max iterartion to perform the search, by", "+= 1 cl = class_counter.most_common(1)[0][0] new_label[b[0], b[1]] = cl return new_label def watershed_level(image,", "random from acse_9_irp_wafflescore import MiscHelpers as mh import logging import sys logging.basicConfig(format='%(asctime)s |", "+ \\ '\\nelapsed time,' + str(et) + '\\n\\n' # save report to file", "ax1.pcolor(som_u_mat, cmap='bone_r') hist = plt.hist(f_image, histedges_equalN(f_image, 10), density=True) return hist[1] def gen_e_model(n_map, som_label):", "= np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, dims, iters, lrs, sigmas, best_idx) else:", "of all the histogram bins \"\"\" ttl_dtp = len(in_data) return np.interp(np.linspace(0, ttl_dtp, nbin", "number to the mask's border indexes by using the K-nearest neighbor method Parameters", "sigma=%.6f, sil=%.6f, ch=%.6f\" % (dims[i], iters[i], lrs[i], sigmas[i], avg_sils[i], ch_scs[i])) if(label is not", "markers = measure.label(block_mask, connectivity=conn) ws_labels[i*ncols] = closest_n(markers) - 1 ws_labels[i*ncols + 1] =", "= MiniSom(dim, dim, num_features, sigma=sigma, learning_rate=lr, neighborhood_function='gaussian', random_seed=seed) som.pca_weights_init(in_data) return som def plot_som(som,", "saved at', fdir) mode = 'w' f1 = open(fdir, mode) f1.write(param_vals) if(report): 
f1.write(str(report))", "origin='lower') ax[i*ncols].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 0])))) ax[i*ncols + 1].imshow(ws_labels[i*ncols + 1], origin='lower')", "try: ch_scs[i] = mh.cal_har_sc(in_data, cluster_labels[i]) except: ch_scs[i] = -1 if(label is not None):", "som.winner(xx) # getting the winner # palce a marker on the winning position", "np.full(len_watershed, np.nan) if(label is not None): avg_ents = np.full(len_watershed, np.nan) avg_purs = np.full(len_watershed,", "by default 2.5 Returns ------- MiniSom an object of Minisom class, see minisom.py", "plt.hist(f_image, histedges_equalN(f_image, 10), density=True) return hist[1] def gen_e_model(n_map, som_label): \"\"\"generates the Earth model", "n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 5])))) return ws_labels def eval_ws(in_data, ws_labels, n_map, label=None, re_all=False): \"\"\"Evaluate", "= np.array(vals).T for b in borders: # find index of the closest value", "of the closest k neighbors dist = distance.cdist([b], vals) c_idx = np.argpartition(dist, k)", "numpy array of all the histogram bins plot : bool, optional flag whether", "thres_mask = (image <= bins[i]) markers = measure.label(thres_mask, connectivity=conn) ws_labels[i*ncols + 3] =", "np.nan) for i in range(len_watershed): param = {'watershed idx': i} if(len(np.unique(ws_labels[i])) > 1):", "False conn : int, optional connectivity flag for measure.label, by default None Returns", "bins : np.array numpy array of all the histogram bins plot : bool,", "seed : integer, optional random seed for reproducibility, by default 10 Returns -------", "plot=False, conn=None): num_bins = len(bins) \"\"\"Computes and classify the SOM's u-matrix or total", "avg_purs = np.full(len_watershed, np.nan) for i in range(len_watershed): param = {'watershed idx': i}", "not None): best_idx.append(np.nanargmin(np.array(avg_ents))) # closest to 0 best_idx.append(np.nanargmax(np.array(avg_purs))) # closest to 1 
best_idx", ": int Total number of data points that will populate the SOMs Returns", "len_watershed = ws_labels.shape[0] cluster_labels = np.zeros((len_watershed, len(in_data))) avg_sils = np.full(len_watershed, np.nan) ch_scs =", "neuron map\"\"\" som_class = [] for i in range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return np.array(som_class) def", "dimension. \"\"\" dim = 5 * np.sqrt(num_sample) dim = np.int(np.sqrt(dim)) if dim <", "= random_walker(image, markers) if(plot): ax[i*ncols].imshow(ws_labels[i*ncols + 0], origin='lower') ax[i*ncols].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols +", "class_counter.most_common(1)[0][0] new_label[b[0], b[1]] = cl return new_label def watershed_level(image, bins, border_width=0.1, plot=False, conn=None):", "som.train_random(in_data, iter_cnt, verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins)", "= np.shape(in_data)[1] som = MiniSom(dim, dim, num_features, sigma=sigma, learning_rate=lr, neighborhood_function='gaussian', random_seed=seed) som.pca_weights_init(in_data) return", "def som_assemble(in_data, seed, dim, lr=0.5, sigma=2.5): \"\"\"Initialize the SOMs model for training Parameters", "is not None): best_idx.append(np.nanargmin(np.array(avg_ents))) # closest to 0 best_idx.append(np.nanargmax(np.array(avg_purs))) # closest to 1", "histogram bins plot : bool, optional flag whether to plot the watershed level", "0) if(len(vals[0]) < 5): logging.info(\"Not enough labeled neighbor to perform KNN.\\n\\ Will return", "2.5 Returns ------- MiniSom an object of Minisom class, see minisom.py for further", "= %d)\\ \" % (dims[i], iters[i], lrs[i], sigmas[i], n_clusters)) continue logging.info(\"dim=%d, iter=%d, lr=%.6f,", "markers = measure.label(thres_mask, connectivity=conn) ws_labels[i*ncols + 3] = closest_n(markers) - 1 ws_labels[i*ncols +", 
"mh.ext_eval_entropy(label, cluster_labels[i], init_clus=-1) logging.info(\"ent=%.6f, pur=%.6f\" % (avg_ents[i], avg_purs[i])) i += 1 best_idx =", "+ 0])))) ax[i*ncols + 1].imshow(ws_labels[i*ncols + 1], origin='lower') ax[i*ncols + 1].title.set_text('b_knn: it={} n_class={}'.format(i,", "method Parameters ---------- image : np.array u-matrix or total gradient of the SOMs", "using the closest neighbor's value Parameters ---------- value : np.array numpy array of", "cmap='bone_r') # plotting the distance map as background plt.colorbar() for t, xx in", "mh.cal_har_sc(in_data, cluster_labels[i]) except: ch_scs[i] = -1 if(label is not None): avg_ents[i], avg_purs[i] =", "point save : bool, optional flag, by default False save_name : str, optional", "random.sample(v, 1)[0] for k, v in param_grid.items()} dims[i], iters[i], lrs[i], sigmas[i] = list(random_params.values())", "the cluster number, noted that the borders are marked with 0 Returns -------", "np.array or list, optional the true label of each data point Returns -------", "np.array(np.where(value == 0)).T new_label = np.array(value) vals = np.where(value != 0) vals =", "np.array or list, optional the true label of each data point, by default", "list data matrix seed : integer random seed for reproducibility dim : int", ": np.array or list, optional the true label of each data point Returns", "= 10 Parameters ---------- num_sample : int Total number of data points that", "dim, lr, sigma) som.train_random(in_data, iter_cnt, verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels", "model from neuron map\"\"\" som_class = [] for i in range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return", "i in range(len_watershed): param = {'watershed idx': i} if(len(np.unique(ws_labels[i])) > 1): cluster_labels[i] =", "matplotlib.pyplot as plt import itertools from skimage import measure from skimage.segmentation import random_walker", "\"\"\"Assign 
cluster number to the mask's border indexes by using the closest neighbor's", "the plot as png file, by default 'temp' \"\"\" plt.figure(figsize=(9, 7)) # Plotting", "value : np.array numpy array of the cluster number, noted that the borders", "if(report): f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n') f1.close() print('Report saved at:', fdir) def histedges_equalN(in_data, nbin=10): \"\"\"generates a", "optional connectivity flag for measure.label, by default None Returns ------- np.array numpy array", "(dims[i], iters[i], lrs[i], sigmas[i], n_clusters)) continue logging.info(\"dim=%d, iter=%d, lr=%.6f, sigma=%.6f, sil=%.6f, ch=%.6f\" %", "the SOMs. This function returns the dimension size of the SOMs. The size", "by default 10 Returns ------- All cluster label and its counterpart parameters. \"\"\"", "if(save): save_dir = 'SOMs_results/' + save_name + '_plot.png' plt.savefig(save_dir) print('Plot saved at:', save_dir)", "int Total number of data points that will populate the SOMs Returns -------", "compute_dim(num_sample): \"\"\" Compute a default dimension of the SOMs. 
This function returns the", "< max_eval: random_params = {k: random.sample(v, 1)[0] for k, v in param_grid.items()} dims[i],", "acse_9_irp_wafflescore import MiscHelpers as mh import logging import sys logging.basicConfig(format='%(asctime)s | %(levelname)s :", "print('Plot saved at:', save_dir) plt.show() def save_som_report(som, save_name, it, et, report=None): param_vals =", "matrix ws_labels : np.array predicted cluster labels from watershed segmentation n_map : np.array", "cluster number to the mask's border indexes by using the K-nearest neighbor method", "method='som_watershed', param=param) try: ch_scs[i] = mh.cal_har_sc(in_data, cluster_labels[i]) except: ch_scs[i] = -1 if(label is", "not None): avg_ents = np.full(max_eval, np.nan) avg_purs = np.full(max_eval, np.nan) i = 0", "connectivity=conn) ws_labels[i*ncols + 3] = closest_n(markers) - 1 ws_labels[i*ncols + 4] = KNN(markers)", "more than one set \"\"\" len_watershed = ws_labels.shape[0] cluster_labels = np.zeros((len_watershed, len(in_data))) avg_sils", "1)[0] for k, v in param_grid.items()} dims[i], iters[i], lrs[i], sigmas[i] = list(random_params.values()) som", "distance matrix iter_cnt : integer number of iterations for SOMs to perform lr", "at', fdir) mode = 'w' f1 = open(fdir, mode) f1.write(param_vals) if(report): f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n')", "ax[i*ncols + 5].imshow(ws_labels[i*ncols + 5], origin='lower') ax[i*ncols + 5].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols +", "rate sigma : float spread of the neighborhood function, by default 2.5dim :", "populate, by default 10 Returns ------- np.array numpy array of all the histogram", "save=False, save_name='temp'): \"\"\"plots the distance map / u-matrix of the SOMs along with", "i < max_eval: random_params = {k: random.sample(v, 1)[0] for k, v in param_grid.items()}", "ch_scs, dims, iters, lrs, sigmas, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], 
ch_scs[best_idx], dims[best_idx], iters[best_idx],", "the SOMs distance matrix lr : float, optional learning rate, by default 0.5", ": np.array numpy array of all the histogram bins plot : bool, optional", "+ str(it) + \\ '\\nelapsed time,' + str(et) + '\\n\\n' # save report", ": np.array or list data matrix dim : int dimension of the SOMs", "plt.pcolor(som.distance_map().T, cmap='bone_r') # plotting the distance map as background plt.colorbar() for t, xx", "np.nan) avg_purs = np.full(len_watershed, np.nan) for i in range(len_watershed): param = {'watershed idx':", "+ 1], origin='lower') ax[i*ncols + 1].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 1])))) ax[i*ncols +", "label : np.array or list, optional the true label of each data point", "plt.subplots(1, 2, figsize=(12, 5)) fig.show() ax1.pcolor(som_u_mat, cmap='bone_r') hist = plt.hist(f_image, histedges_equalN(f_image, 10), density=True)", "max_dim+1)), 'iter_cnt': list(range(g_it - 500, g_it + 500, 200)), 'learning_rate': list(np.logspace(np.log10(0.25), np.log10(0.75), base=10,", "= len(bins) \"\"\"Computes and classify the SOM's u-matrix or total gradient using watershed", "set \"\"\" len_watershed = ws_labels.shape[0] cluster_labels = np.zeros((len_watershed, len(in_data))) avg_sils = np.full(len_watershed, np.nan)", "som = MiniSom(dim, dim, num_features, sigma=sigma, learning_rate=lr, neighborhood_function='gaussian', random_seed=seed) som.pca_weights_init(in_data) return som def", "+ 1])))) ax[i*ncols + 2].imshow(ws_labels[i*ncols + 2], origin='lower') ax[i*ncols + 2].title.set_text('b_rw: it={} n_class={}'.format(i,", "10 else 10 max_dim = g_dim + 10 if g_dim + 10 >", ": int, optional number of max iterartion to perform the search, by default", "cl = class_counter.most_common(1)[0][0] new_label[b[0], b[1]] = cl return new_label def watershed_level(image, bins, border_width=0.1,", "default 0.5 sigma : float, optional spread of the neighborhood function, by default", 
"gradient using watershed classification method Parameters ---------- image : np.array u-matrix or total", "_ = eval_ws(in_data, ws_labels, n_map) return som, cluster_labels def gen_param_grid(init_guess): g_dim, g_it, g_lr,", "= cl return new_label def watershed_level(image, bins, border_width=0.1, plot=False, conn=None): num_bins = len(bins)", "2] = random_walker(image, markers) if(plot): ax[i*ncols].imshow(ws_labels[i*ncols + 0], origin='lower') ax[i*ncols].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols", "= (image < val) markers = measure.label(block_mask, connectivity=conn) ws_labels[i*ncols] = closest_n(markers) - 1", "---------- in_data : np.array or list data matrix init_guess : tuple list of", "1]))) class_counter = Counter() for idx in mins_idx: class_counter[value[idx[0], idx[1]]] += 1 cl", "image.shape[1])) for i in range(num_bins): val = filters.threshold_local(image, block_size=3 + 2*i) block_mask =", "small / large number of clusters (n_clusters = %d)\\ \" % (dims[i], iters[i],", "Returns ------- All cluster label and its counterpart parameters. 
\"\"\" random.seed(seed) param_grid =", "index populated \"\"\" borders = np.array(np.where(value == border_val)).T new_label = np.array(value) vals =", "used to save the plot as png file, by default 'temp' \"\"\" plt.figure(figsize=(9,", "som.get_weights().shape[0], 0, som.get_weights().shape[1]]) if(save): save_dir = 'SOMs_results/' + save_name + '_plot.png' plt.savefig(save_dir) print('Plot", "which will be used to save the plot as png file, by default", "of the SOMs distance matrix iter_cnt : integer number of iterations for SOMs", "if g_dim - 5 > 10 else 10 max_dim = g_dim + 10", "returned is sqrt(5 * sqrt(num_sample)), with the exception that the minimum dimension size", "return the original inputted value.\") return value vals = np.array(vals).T for b in", "border indexes by using the closest neighbor's value Parameters ---------- value : np.array", "random_walker(image, markers) if(plot): ax[i*ncols].imshow(ws_labels[i*ncols + 0], origin='lower') ax[i*ncols].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 0]))))", "= 5 * np.sqrt(num_sample) dim = np.int(np.sqrt(dim)) if dim < 10: return 10", "lr, sigma) som.train_random(in_data, iter_cnt, verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels =", "g_dim + 10 > 10 else 20 param_grid = { 'dim': list(range(min_dim, max_dim+1)),", "may contain more than one set \"\"\" len_watershed = ws_labels.shape[0] cluster_labels = np.zeros((len_watershed,", "g_it, g_lr, g_sigma = init_guess min_dim = g_dim - 10 if g_dim -", ": int, optional number of neighbor to consider, by default 5 Returns -------", "np.nan) avg_purs = np.full(max_eval, np.nan) i = 0 while i < max_eval: random_params", "np.array or list data matrix init_guess : tuple list of initial guess of", "of iterations for SOMs to perform lr : float learning rate sigma :", "= 6 if(plot): fig, axes = plt.subplots(ncols=ncols, nrows=num_bins, figsize=(12, num_bins*3), 
sharex=True, sharey=True) ax", "from minisom import MiniSom, asymptotic_decay import numpy as np import matplotlib.pyplot as plt", "the SOMs Parameters ---------- som : MiniSom trained Minisom object Returns ------- np.array", "ax[i*ncols].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 0])))) ax[i*ncols + 1].imshow(ws_labels[i*ncols + 1], origin='lower') ax[i*ncols", "ax[i*ncols + 2].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 2])))) thres_mask = (image <= bins[i])", "None): avg_ents = np.full(max_eval, np.nan) avg_purs = np.full(max_eval, np.nan) i = 0 while", "num=100)), 'sigma': list(np.linspace(g_sigma-1, g_sigma+1, num=30)), } return param_grid def random_search_som(in_data, init_guess, max_eval=20, label=None,", "the label Parameters ---------- som : MiniSom trained Minisom object in_data : np.array", "same number of data points Parameters ---------- in_data : np.array or list data", "optional flag whether to plot the watershed level or not, by default False", "default_timer as timer import random from acse_9_irp_wafflescore import MiscHelpers as mh import logging", "filters from scipy.spatial import distance from collections import Counter from timeit import default_timer", "= list(random_params.values()) som = som_assemble(in_data, seed, int(dims[i]), lr=lrs[i], sigma=sigmas[i]) som.train_random(in_data, int(iters[i]), verbose=False) u_matrix", "= mh.int_eval_silhouette(in_data, cluster_labels[i], method='som_watershed', param=param) try: ch_scs[i] = mh.cal_har_sc(in_data, cluster_labels[i]) except: ch_scs[i] =", "= np.where(value != 0) vals = np.array(vals).T for b in borders: # find", "bins to populate, by default 10 Returns ------- np.array numpy array of all", "the histogram bins plot : bool, optional flag whether to plot the watershed", "np.zeros(max_eval) avg_sils = np.full(max_eval, np.nan) ch_scs = np.full(max_eval, np.nan) cluster_labels = np.zeros((max_eval, 
len(in_data)))", "\"\"\" ncols = 6 if(plot): fig, axes = plt.subplots(ncols=ncols, nrows=num_bins, figsize=(12, num_bins*3), sharex=True,", "conn : int, optional connectivity flag for measure.label, by default None Returns -------", "cluster_labels, _, _ = eval_ws(in_data, ws_labels, n_map) return som, cluster_labels def gen_param_grid(init_guess): g_dim,", "= Counter() for idx in mins_idx: class_counter[value[idx[0], idx[1]]] += 1 cl = class_counter.most_common(1)[0][0]", "np.array u-matrix or total gradient of the SOMs bins : np.array numpy array", "dims = np.zeros(max_eval) iters = np.zeros(max_eval) lrs = np.zeros(max_eval) sigmas = np.zeros(max_eval) avg_sils", "# higher = better if(label is not None): best_idx.append(np.nanargmin(np.array(avg_ents))) # closest to 0", "save_dir) plt.show() def save_som_report(som, save_name, it, et, report=None): param_vals = str(save_name) + '\\n---'", "fdir = save_name + '_report.csv' print('Report saved at', fdir) mode = 'w' f1", "min_dim = g_dim - 10 if g_dim - 5 > 10 else 10", "in range(num_bins): val = filters.threshold_local(image, block_size=3 + 2*i) block_mask = (image < val)", "= np.full(max_eval, np.nan) cluster_labels = np.zeros((max_eval, len(in_data))) if(label is not None): avg_ents =", "% (dims[i], iters[i], lrs[i], sigmas[i], n_clusters)) continue logging.info(\"dim=%d, iter=%d, lr=%.6f, sigma=%.6f, sil=%.6f, ch=%.6f\"", "that the borders are marked with 0 Returns ------- np.array new label with", "= 'w' f1 = open(fdir, mode) f1.write(param_vals) if(report): f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n') f1.close() print('Report saved", "gen_e_model(n_map, som_label): \"\"\"generates the Earth model from neuron map\"\"\" som_class = [] for", "marked with 0 Returns ------- np.array new label with all the border index", "i in range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return np.array(som_class) def closest_n(value): \"\"\"Assign cluster number to the", "cluster 
label \"\"\" som = som_assemble(in_data, seed, dim, lr, sigma) som.train_random(in_data, iter_cnt, verbose=False)", "+= 1 best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs)) # higher", "predicted cluster labels from watershed segmentation n_map : np.array array of the winner", "cluster labels from watershed segmentation n_map : np.array array of the winner neuron", "or list, optional the true label of each data point Returns ------- np.array", "avg_sils = np.full(len_watershed, np.nan) ch_scs = np.full(len_watershed, np.nan) if(label is not None): avg_ents", "init_guess, max_eval=20, label=None, seed=10, re_all=False): \"\"\"perform random search for SOMs best parameters. Parameters", "best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs)) # higher = better", "iters[i], lrs[i], sigmas[i] = list(random_params.values()) som = som_assemble(in_data, seed, int(dims[i]), lr=lrs[i], sigma=sigmas[i]) som.train_random(in_data,", "origin='lower') ax[i*ncols + 5].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 5])))) return ws_labels def eval_ws(in_data,", "np.array new label with all the border index populated \"\"\" borders = np.array(np.where(value", "default 10 Returns ------- np.array numpy array of all the histogram bins \"\"\"", "optional learning rate, by default 0.5 sigma : float, optional spread of the", "closest_n(markers) - 1 ws_labels[i*ncols + 1] = KNN(markers) - 1 ws_labels[i*ncols + 2]", "find index of the closest value c_idx = distance.cdist([b], vals).argmin() new_label[b[0], b[1]] =", "b[1]] = value[vals[c_idx, 0]][vals[c_idx, 1]] return new_label def KNN(value, k=5, border_val=0): \"\"\"Assign cluster", "np.full(len_watershed, np.nan) avg_purs = np.full(len_watershed, np.nan) for i in range(len_watershed): param = {'watershed", "nbin : int number of bins to populate, by default 10 Returns 
-------", "list(np.logspace(np.log10(0.25), np.log10(0.75), base=10, num=100)), 'sigma': list(np.linspace(g_sigma-1, g_sigma+1, num=30)), } return param_grid def random_search_som(in_data,", "iters, lrs, sigmas, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx], dims[best_idx], iters[best_idx], lrs[best_idx], sigmas[best_idx])", "label and its counterpart parameters. \"\"\" random.seed(seed) param_grid = gen_param_grid(init_guess) dims = np.zeros(max_eval)", "som = som_assemble(in_data, seed, dim, lr, sigma) som.train_random(in_data, iter_cnt, verbose=False) u_matrix = som.distance_map().T", "data points that will populate the SOMs Returns ------- int Ideal dimension. \"\"\"", "ch_scs[i] = -1 if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i]) best_idx", "# closest to 0 best_idx.append(np.nanargmax(np.array(avg_purs))) # closest to 1 best_idx = np.unique(best_idx) if(re_all):", "the SOMs along with the label Parameters ---------- som : MiniSom trained Minisom", "5): logging.info(\"Not enough labeled neighbor to perform KNN.\\n\\ Will return the original inputted", "ch_scs = np.full(len_watershed, np.nan) if(label is not None): avg_ents = np.full(len_watershed, np.nan) avg_purs", "borders = np.array(np.where(value == 0)).T new_label = np.array(value) vals = np.where(value != 0)", "som = som_assemble(in_data, seed, int(dims[i]), lr=lrs[i], sigma=sigmas[i]) som.train_random(in_data, int(iters[i]), verbose=False) u_matrix = som.distance_map().T", "k : int, optional number of neighbor to consider, by default 5 Returns", "+ 5])))) return ws_labels def eval_ws(in_data, ws_labels, n_map, label=None, re_all=False): \"\"\"Evaluate and return", "| %(levelname)s : %(message)s', level=logging.INFO, stream=sys.stdout) def compute_dim(num_sample): \"\"\" Compute a default dimension", "Plotting the response for each litho-class plt.pcolor(som.distance_map().T, cmap='bone_r') # plotting the distance 
map", "by default None Returns ------- np.array numpy array of predicted cluster labels from", "or list data matrix label : np.array or list the true label of", "distance.cdist([b], vals) c_idx = np.argpartition(dist, k) c_idx = c_idx[0, :k] mins_idx = np.array(list(zip(vals[c_idx,", "param_grid def random_search_som(in_data, init_guess, max_eval=20, label=None, seed=10, re_all=False): \"\"\"perform random search for SOMs", "ws_labels[i]) avg_sils[i] = mh.int_eval_silhouette(in_data, cluster_labels[i], method='som_watershed', param=param) try: ch_scs[i] = mh.cal_har_sc(in_data, cluster_labels[i]) except:", "return np.interp(np.linspace(0, ttl_dtp, nbin + 1), np.arange(ttl_dtp), np.sort(in_data)) def plot_u_matrix(som_u_mat): \"\"\"Plots the distance", "in_data : np.array or list data matrix seed : integer random seed for", "np.array numpy array of the cluster number, noted that the borders are marked", "SOMs best parameters. Parameters ---------- in_data : np.array or list data matrix init_guess", "= { 'dim': list(range(min_dim, max_dim+1)), 'iter_cnt': list(range(g_it - 500, g_it + 500, 200)),", "k=5, border_val=0): \"\"\"Assign cluster number to the mask's border indexes by using the", "of neighbor to consider, by default 5 Returns ------- np.array new label with", "stream=sys.stdout) def compute_dim(num_sample): \"\"\" Compute a default dimension of the SOMs. 
This function", "the border index populated \"\"\" borders = np.array(np.where(value == border_val)).T new_label = np.array(value)", "np.zeros(max_eval) lrs = np.zeros(max_eval) sigmas = np.zeros(max_eval) avg_sils = np.full(max_eval, np.nan) ch_scs =", "= distance.cdist([b], vals) c_idx = np.argpartition(dist, k) c_idx = c_idx[0, :k] mins_idx =", "+ 2].imshow(ws_labels[i*ncols + 2], origin='lower') ax[i*ncols + 2].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 2]))))", "the closest k neighbors dist = distance.cdist([b], vals) c_idx = np.argpartition(dist, k) c_idx", "+ '\\n---' + \\ '\\niterations,' + str(it) + \\ '\\nelapsed time,' + str(et)", "+ 1] = KNN(markers) - 1 ws_labels[i*ncols + 2] = random_walker(image, markers) if(plot):", "init_guess : tuple list of initial guess of the parameters, in order of", "np.zeros((max_eval, len(in_data))) if(label is not None): avg_ents = np.full(max_eval, np.nan) avg_purs = np.full(max_eval,", "avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i], init_clus=-1) logging.info(\"ent=%.6f, pur=%.6f\" % (avg_ents[i], avg_purs[i])) i += 1", "of the SOMs Parameters ---------- som : MiniSom trained Minisom object Returns -------", "= np.zeros(max_eval) iters = np.zeros(max_eval) lrs = np.zeros(max_eval) sigmas = np.zeros(max_eval) avg_sils =", "skimage import filters from scipy.spatial import distance from collections import Counter from timeit", "the borders are marked with 0 k : int, optional number of neighbor", "i = 0 while i < max_eval: random_params = {k: random.sample(v, 1)[0] for", "500, g_it + 500, 200)), 'learning_rate': list(np.logspace(np.log10(0.25), np.log10(0.75), base=10, num=100)), 'sigma': list(np.linspace(g_sigma-1, g_sigma+1,", "= np.zeros(max_eval) lrs = np.zeros(max_eval) sigmas = np.zeros(max_eval) avg_sils = np.full(max_eval, np.nan) ch_scs", "sigma=%.6f\\ result to very small / large number of clusters (n_clusters = %d)\\", "label : np.array or list, optional the 
true label of each data point,", "neighbor's value Parameters ---------- value : np.array numpy array of the cluster number,", "or total gradient of the SOMs bins : np.array numpy array of all", "len(np.unique(ws_labels[i*ncols + 3])))) ax[i*ncols + 4].imshow(ws_labels[i*ncols + 4], origin='lower') ax[i*ncols + 4].title.set_text('b_knn: it={}", "avg_sils[i], ch_scs[i] = _c[0], _as[0], _ch[0] n_clusters = len(np.unique(cluster_labels[i])) if(n_clusters < 5 or", "np.array array of the winner neuron label : np.array or list, optional the", "\"\"\"generates the Earth model from neuron map\"\"\" som_class = [] for i in", "Ideal dimension. \"\"\" dim = 5 * np.sqrt(num_sample) dim = np.int(np.sqrt(dim)) if dim", "= -1 if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i]) best_idx =", "ws_labels[i*ncols] = closest_n(markers) - 1 ws_labels[i*ncols + 1] = KNN(markers) - 1 ws_labels[i*ncols", "indexes by using the closest neighbor's value Parameters ---------- value : np.array numpy", "best_idx.append(np.nanargmax(np.array(avg_purs))) # closest to 1 best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs,", "1 best_idx.append(np.nanargmax(ch_scs)) # higher = better if(label is not None): best_idx.append(np.nanargmin(np.array(avg_ents))) # closest", "KNN.\\n\\ Will return the original inputted value.\") return value vals = np.array(vals).T for", "if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i], init_clus=-1) logging.info(\"ent=%.6f, pur=%.6f\" %", "+ 5].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 5])))) return ws_labels def eval_ws(in_data, ws_labels, n_map,", "for t, xx in zip(label, in_data): w = som.winner(xx) # getting the winner", "re_all=False): \"\"\"perform random search for SOMs best parameters. 
Parameters ---------- in_data : np.array", "ws_labels[i*ncols + 4] = KNN(markers) - 1 ws_labels[i*ncols + 5] = random_walker(image, markers)", "default 2.5dim : int seed : integer, optional random seed for reproducibility, by", "vals) c_idx = np.argpartition(dist, k) c_idx = c_idx[0, :k] mins_idx = np.array(list(zip(vals[c_idx, 0],", "search, by default 20 label : np.array or list, optional the true label", "the winning position for the sample xx plt.text(w[0]+.5, w[1]+.5, str(t), color=plt.cm.rainbow(t/10.)) plt.axis([0, som.get_weights().shape[0],", "origin='lower') ax[i*ncols + 1].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 1])))) ax[i*ncols + 2].imshow(ws_labels[i*ncols +", "+ 2*i) block_mask = (image < val) markers = measure.label(block_mask, connectivity=conn) ws_labels[i*ncols] =", "= [] for i in range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return np.array(som_class) def closest_n(value): \"\"\"Assign cluster", "that will populate the SOMs Returns ------- int Ideal dimension. \"\"\" dim =", "of each data point save : bool, optional flag, by default False save_name", "prediction result Parameters ---------- in_data : np.array or list data matrix ws_labels :", "np.arange(ttl_dtp), np.sort(in_data)) def plot_u_matrix(som_u_mat): \"\"\"Plots the distance map / u-matrix of the SOMs", "def save_som_report(som, save_name, it, et, report=None): param_vals = str(save_name) + '\\n---' + \\", "hist[1] def gen_e_model(n_map, som_label): \"\"\"generates the Earth model from neuron map\"\"\" som_class =", "2.5dim : int seed : integer, optional random seed for reproducibility, by default", ": float spread of the neighborhood function, by default 2.5dim : int seed", "label=None, seed=10, re_all=False): \"\"\"perform random search for SOMs best parameters. 
Parameters ---------- in_data", "%(levelname)s : %(message)s', level=logging.INFO, stream=sys.stdout) def compute_dim(num_sample): \"\"\" Compute a default dimension of", "ch_scs[best_idx]) def run_SOMs(in_data, dim, iter_cnt, lr, sigma, seed=10): \"\"\"Method to fully run SOMs", "avg_sils, ch_scs, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx]) def run_SOMs(in_data, dim, iter_cnt, lr,", "asymptotic_decay import numpy as np import matplotlib.pyplot as plt import itertools from skimage", "+ 3], origin='lower') ax[i*ncols + 3].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 3])))) ax[i*ncols +", "= measure.label(block_mask, connectivity=conn) ws_labels[i*ncols] = closest_n(markers) - 1 ws_labels[i*ncols + 1] = KNN(markers)", ": np.array or list the true label of each data point save :", "watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) _c, _as, _ch", "n_clusters > 30): logging.info(\"Random search using dim=%d, iter=%d, lr=%.6f, sigma=%.6f\\ result to very", "1 ws_labels[i*ncols + 1] = KNN(markers) - 1 ws_labels[i*ncols + 2] = random_walker(image,", "from skimage import measure from skimage.segmentation import random_walker from skimage import filters from", "plt.colorbar() for t, xx in zip(label, in_data): w = som.winner(xx) # getting the", "np.array or list data matrix ws_labels : np.array predicted cluster labels from watershed", "continue logging.info(\"dim=%d, iter=%d, lr=%.6f, sigma=%.6f, sil=%.6f, ch=%.6f\" % (dims[i], iters[i], lrs[i], sigmas[i], avg_sils[i],", "in_data): w = som.winner(xx) # getting the winner # palce a marker on", "will be used to save the plot as png file, by default 'temp'", "len(np.unique(ws_labels[i*ncols + 2])))) thres_mask = (image <= bins[i]) markers = measure.label(thres_mask, connectivity=conn) ws_labels[i*ncols", "------- int Ideal dimension. 
\"\"\" dim = 5 * np.sqrt(num_sample) dim = np.int(np.sqrt(dim))", "avg_ents = np.full(max_eval, np.nan) avg_purs = np.full(max_eval, np.nan) i = 0 while i", "save : bool, optional flag, by default False save_name : str, optional the", "of the SOMs bins : np.array numpy array of all the histogram bins", "new label with all the border index populated \"\"\" borders = np.array(np.where(value ==", "winning position for the sample xx plt.text(w[0]+.5, w[1]+.5, str(t), color=plt.cm.rainbow(t/10.)) plt.axis([0, som.get_weights().shape[0], 0,", "sharex=True, sharey=True) ax = axes.ravel() ws_labels = np.zeros((num_bins * ncols, image.shape[0], image.shape[1])) for", "iters[i], lrs[i], sigmas[i], avg_sils[i], ch_scs[i])) if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label,", "neighbor to consider, by default 5 Returns ------- np.array new label with all", "= c_idx[0, :k] mins_idx = np.array(list(zip(vals[c_idx, 0], vals[c_idx, 1]))) class_counter = Counter() for", "of the winner neuron label : np.array or list, optional the true label", "background plt.colorbar() for t, xx in zip(label, in_data): w = som.winner(xx) # getting", "_c[0], _as[0], _ch[0] n_clusters = len(np.unique(cluster_labels[i])) if(n_clusters < 5 or n_clusters > 30):", "= np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx])", "or list data array nbin : int number of bins to populate, by", "histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) _c, _as, _ch = eval_ws(in_data,", "n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 3])))) ax[i*ncols + 4].imshow(ws_labels[i*ncols + 4], origin='lower') ax[i*ncols + 4].title.set_text('b_knn:", "array of all the histogram bins plot : bool, optional flag whether to", "new_label def watershed_level(image, bins, border_width=0.1, plot=False, conn=None): num_bins = 
len(bins) \"\"\"Computes and classify", "default 'temp' \"\"\" plt.figure(figsize=(9, 7)) # Plotting the response for each litho-class plt.pcolor(som.distance_map().T,", "'dim': list(range(min_dim, max_dim+1)), 'iter_cnt': list(range(g_it - 500, g_it + 500, 200)), 'learning_rate': list(np.logspace(np.log10(0.25),", "SOMs Parameters ---------- som : MiniSom trained Minisom object Returns ------- np.array numpy", "of the cluster number, noted that the borders are marked with 0 Returns", "data point, by default None seed : integer, optional random seed for reproducibility,", "------- np.array numpy array of all the histogram bins \"\"\" ttl_dtp = len(in_data)", "minisom.py for further details \"\"\" # Initialization som and weights num_features = np.shape(in_data)[1]", "n_clusters = len(np.unique(cluster_labels[i])) if(n_clusters < 5 or n_clusters > 30): logging.info(\"Random search using", "1 ws_labels[i*ncols + 5] = random_walker(image, markers) if(plot): ax[i*ncols + 3].imshow(ws_labels[i*ncols + 3],", "label Parameters ---------- som : MiniSom trained Minisom object in_data : np.array or", "= som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) cluster_labels,", "= np.full(max_eval, np.nan) avg_purs = np.full(max_eval, np.nan) i = 0 while i <", "= closest_n(markers) - 1 ws_labels[i*ncols + 4] = KNN(markers) - 1 ws_labels[i*ncols +", "---------- in_data : np.array or list data array nbin : int number of", "n_map = som.neuron_map(in_data) _c, _as, _ch = eval_ws(in_data, ws_labels, n_map) cluster_labels[i], avg_sils[i], ch_scs[i]", "> 10 else 20 param_grid = { 'dim': list(range(min_dim, max_dim+1)), 'iter_cnt': list(range(g_it -", "= _c[0], _as[0], _ch[0] n_clusters = len(np.unique(cluster_labels[i])) if(n_clusters < 5 or n_clusters >", ": integer, optional random seed for reproducibility, by default 10 Returns ------- minisom", "the true label of each data 
point, by default None seed : integer,", "= mh.ext_eval_entropy(label, cluster_labels[i]) best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs)) #", "litho-class plt.pcolor(som.distance_map().T, cmap='bone_r') # plotting the distance map as background plt.colorbar() for t,", "def gen_e_model(n_map, som_label): \"\"\"generates the Earth model from neuron map\"\"\" som_class = []", "= value[vals[c_idx, 0]][vals[c_idx, 1]] return new_label def KNN(value, k=5, border_val=0): \"\"\"Assign cluster number", "0])))) ax[i*ncols + 1].imshow(ws_labels[i*ncols + 1], origin='lower') ax[i*ncols + 1].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols", "def histedges_equalN(in_data, nbin=10): \"\"\"generates a histogram where each bin will contain the same", "the histogram bins \"\"\" ttl_dtp = len(in_data) return np.interp(np.linspace(0, ttl_dtp, nbin + 1),", ": np.array numpy array of the cluster number, noted that the borders are", "for measure.label, by default None Returns ------- np.array numpy array of predicted cluster", "u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data)", "np.array(value) vals = np.where(value != 0) vals = np.array(vals).T for b in borders:", "# getting the winner # palce a marker on the winning position for", "import matplotlib.pyplot as plt import itertools from skimage import measure from skimage.segmentation import", "int Ideal dimension. 
\"\"\" dim = 5 * np.sqrt(num_sample) dim = np.int(np.sqrt(dim)) if", "the name which will be used to save the plot as png file,", "while i < max_eval: random_params = {k: random.sample(v, 1)[0] for k, v in", "plotting the distance map as background plt.colorbar() for t, xx in zip(label, in_data):", "number of bins to populate, by default 10 Returns ------- np.array numpy array", "object Returns ------- np.array numpy array of all the histogram bins \"\"\" f_image", "total gradient using watershed classification method Parameters ---------- image : np.array u-matrix or", "range(num_bins): val = filters.threshold_local(image, block_size=3 + 2*i) block_mask = (image < val) markers", "or n_clusters > 30): logging.info(\"Random search using dim=%d, iter=%d, lr=%.6f, sigma=%.6f\\ result to", "using dim=%d, iter=%d, lr=%.6f, sigma=%.6f\\ result to very small / large number of", "the search, by default 20 label : np.array or list, optional the true", "of the neighborhood function, by default 2.5dim : int seed : integer, optional", "SOMs model for training Parameters ---------- in_data : np.array or list data matrix", "that the minimum dimension size = 10 Parameters ---------- num_sample : int Total", "closest_n(markers) - 1 ws_labels[i*ncols + 4] = KNN(markers) - 1 ws_labels[i*ncols + 5]", "* sqrt(num_sample)), with the exception that the minimum dimension size = 10 Parameters", "watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) cluster_labels, _, _", "-1 if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i]) best_idx = []", "avg_purs = np.full(max_eval, np.nan) i = 0 while i < max_eval: random_params =", "2, figsize=(12, 5)) fig.show() ax1.pcolor(som_u_mat, cmap='bone_r') hist = plt.hist(f_image, histedges_equalN(f_image, 10), density=True) return", ": np.array u-matrix or total gradient of the SOMs bins : np.array numpy", "block_size=3 + 2*i) 
block_mask = (image < val) markers = measure.label(block_mask, connectivity=conn) ws_labels[i*ncols]", "closest k neighbors dist = distance.cdist([b], vals) c_idx = np.argpartition(dist, k) c_idx =", "labels, may contain more than one set \"\"\" len_watershed = ws_labels.shape[0] cluster_labels =", "+ 4])))) ax[i*ncols + 5].imshow(ws_labels[i*ncols + 5], origin='lower') ax[i*ncols + 5].title.set_text('b_rw: it={} n_class={}'.format(i,", "<NAME> GitHub: wafflescore \"\"\" from minisom import MiniSom, asymptotic_decay import numpy as np", "is not None): avg_ents = np.full(max_eval, np.nan) avg_purs = np.full(max_eval, np.nan) i =", "ax[i*ncols + 5].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 5])))) return ws_labels def eval_ws(in_data, ws_labels,", "ws_labels = np.zeros((num_bins * ncols, image.shape[0], image.shape[1])) for i in range(num_bins): val =", "MiniSom trained Minisom object Returns ------- np.array numpy array of all the histogram", "number to the mask's border indexes by using the closest neighbor's value Parameters", "parameters. Parameters ---------- in_data : np.array or list data matrix init_guess : tuple", "This function returns the dimension size of the SOMs. The size returned is", "position for the sample xx plt.text(w[0]+.5, w[1]+.5, str(t), color=plt.cm.rainbow(t/10.)) plt.axis([0, som.get_weights().shape[0], 0, som.get_weights().shape[1]])", "import filters from scipy.spatial import distance from collections import Counter from timeit import", "+ '\\n\\n' # save report to file fdir = save_name + '_report.csv' print('Report", "the SOMs Returns ------- int Ideal dimension. 
\"\"\" dim = 5 * np.sqrt(num_sample)", "connectivity flag for measure.label, by default None Returns ------- np.array numpy array of", "ax[i*ncols + 1].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 1])))) ax[i*ncols + 2].imshow(ws_labels[i*ncols + 2],", "= gen_e_model(n_map, ws_labels[i]) avg_sils[i] = mh.int_eval_silhouette(in_data, cluster_labels[i], method='som_watershed', param=param) try: ch_scs[i] = mh.cal_har_sc(in_data,", "range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return np.array(som_class) def closest_n(value): \"\"\"Assign cluster number to the mask's border", "%d)\\ \" % (dims[i], iters[i], lrs[i], sigmas[i], n_clusters)) continue logging.info(\"dim=%d, iter=%d, lr=%.6f, sigma=%.6f,", "= np.zeros((len_watershed, len(in_data))) avg_sils = np.full(len_watershed, np.nan) ch_scs = np.full(len_watershed, np.nan) if(label is", "in_data : np.array or list data matrix init_guess : tuple list of initial", "'\\n---' + \\ '\\niterations,' + str(it) + \\ '\\nelapsed time,' + str(et) +", "noted that the borders are marked with 0 k : int, optional number", "---------- in_data : np.array or list data matrix ws_labels : np.array predicted cluster", "bin will contain the same number of data points Parameters ---------- in_data :", "it, et, report=None): param_vals = str(save_name) + '\\n---' + \\ '\\niterations,' + str(it)", "f1.write(param_vals) if(report): f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n') f1.close() print('Report saved at:', fdir) def histedges_equalN(in_data, nbin=10): \"\"\"generates", "+ save_name + '_plot.png' plt.savefig(save_dir) print('Plot saved at:', save_dir) plt.show() def save_som_report(som, save_name,", "i += 1 best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs)) #", "each watershed level \"\"\" ncols = 6 if(plot): fig, axes = plt.subplots(ncols=ncols, nrows=num_bins,", "4], origin='lower') 
ax[i*ncols + 4].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 4])))) ax[i*ncols + 5].imshow(ws_labels[i*ncols", "np.full(len_watershed, np.nan) for i in range(len_watershed): param = {'watershed idx': i} if(len(np.unique(ws_labels[i])) >", "c_idx = c_idx[0, :k] mins_idx = np.array(list(zip(vals[c_idx, 0], vals[c_idx, 1]))) class_counter = Counter()", "markers) if(plot): ax[i*ncols].imshow(ws_labels[i*ncols + 0], origin='lower') ax[i*ncols].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 0])))) ax[i*ncols", "of each data point Returns ------- np.array list of best watershed labels, may", "for each litho-class plt.pcolor(som.distance_map().T, cmap='bone_r') # plotting the distance map as background plt.colorbar()", "return (cluster_labels, avg_sils, ch_scs, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx]) def run_SOMs(in_data, dim,", "np.log10(0.75), base=10, num=100)), 'sigma': list(np.linspace(g_sigma-1, g_sigma+1, num=30)), } return param_grid def random_search_som(in_data, init_guess,", "SOMs. This function returns the dimension size of the SOMs. 
The size returned", "iter=%d, lr=%.6f, sigma=%.6f, sil=%.6f, ch=%.6f\" % (dims[i], iters[i], lrs[i], sigmas[i], avg_sils[i], ch_scs[i])) if(label", "closest to 1 best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, best_idx) else:", "list(random_params.values()) som = som_assemble(in_data, seed, int(dims[i]), lr=lrs[i], sigma=sigmas[i]) som.train_random(in_data, int(iters[i]), verbose=False) u_matrix =", "palce a marker on the winning position for the sample xx plt.text(w[0]+.5, w[1]+.5,", "= np.full(max_eval, np.nan) ch_scs = np.full(max_eval, np.nan) cluster_labels = np.zeros((max_eval, len(in_data))) if(label is", "to the mask's border indexes by using the closest neighbor's value Parameters ----------", "weights num_features = np.shape(in_data)[1] som = MiniSom(dim, dim, num_features, sigma=sigma, learning_rate=lr, neighborhood_function='gaussian', random_seed=seed)", "sigma=sigmas[i]) som.train_random(in_data, int(iters[i]), verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix,", "\"\"\"Evaluate and return the best watershed prediction result Parameters ---------- in_data : np.array", "return dim def som_assemble(in_data, seed, dim, lr=0.5, sigma=2.5): \"\"\"Initialize the SOMs model for", "(cluster_labels, avg_sils, ch_scs, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx]) def run_SOMs(in_data, dim, iter_cnt,", "SOMs Returns ------- int Ideal dimension. 
\"\"\" dim = 5 * np.sqrt(num_sample) dim", "n_clusters)) continue logging.info(\"dim=%d, iter=%d, lr=%.6f, sigma=%.6f, sil=%.6f, ch=%.6f\" % (dims[i], iters[i], lrs[i], sigmas[i],", "the distance map / u-matrix of the SOMs along with the label Parameters", "Will return the original inputted value.\") return value vals = np.array(vals).T for b", "label with all the border index populated \"\"\" borders = np.array(np.where(value == 0)).T", "cmap='bone_r') hist = plt.hist(f_image, histedges_equalN(f_image, 10), density=True) return hist[1] def gen_e_model(n_map, som_label): \"\"\"generates", "\"\"\" Author: <NAME> GitHub: wafflescore \"\"\" from minisom import MiniSom, asymptotic_decay import numpy", "lr : float learning rate sigma : float spread of the neighborhood function,", "by using the closest neighbor's value Parameters ---------- value : np.array numpy array", "the winner neuron label : np.array or list, optional the true label of", "borders = np.array(np.where(value == border_val)).T new_label = np.array(value) vals = np.where(value != 0)", "list, optional the true label of each data point Returns ------- np.array list", "neighborhood function, by default 2.5 Returns ------- MiniSom an object of Minisom class,", "Parameters ---------- in_data : np.array or list data matrix seed : integer random", "vals = np.where(value != 0) if(len(vals[0]) < 5): logging.info(\"Not enough labeled neighbor to", "default 5 Returns ------- np.array new label with all the border index populated", "np.zeros((len_watershed, len(in_data))) avg_sils = np.full(len_watershed, np.nan) ch_scs = np.full(len_watershed, np.nan) if(label is not", "len(np.unique(ws_labels[i*ncols + 1])))) ax[i*ncols + 2].imshow(ws_labels[i*ncols + 2], origin='lower') ax[i*ncols + 2].title.set_text('b_rw: it={}", "learning_rate=lr, neighborhood_function='gaussian', random_seed=seed) som.pca_weights_init(in_data) return som def plot_som(som, in_data, label, save=False, save_name='temp'): \"\"\"plots", "list 
data array nbin : int number of bins to populate, by default", "distance map / u-matrix of the SOMs along with the label Parameters ----------", "numpy array of predicted cluster labels from each watershed level \"\"\" ncols =", "borders are marked with 0 Returns ------- np.array new label with all the", "(cluster_labels, avg_sils, ch_scs, dims, iters, lrs, sigmas, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx],", "------- np.array numpy array of predicted cluster labels from each watershed level \"\"\"", "minisom minisom object np.array cluster label \"\"\" som = som_assemble(in_data, seed, dim, lr,", ": integer random seed for reproducibility dim : int dimension of the SOMs", "mins_idx = np.array(list(zip(vals[c_idx, 0], vals[c_idx, 1]))) class_counter = Counter() for idx in mins_idx:", "of initial guess of the parameters, in order of dimension, number of iterations,", "np.zeros((num_bins * ncols, image.shape[0], image.shape[1])) for i in range(num_bins): val = filters.threshold_local(image, block_size=3", "from scipy.spatial import distance from collections import Counter from timeit import default_timer as", "= KNN(markers) - 1 ws_labels[i*ncols + 5] = random_walker(image, markers) if(plot): ax[i*ncols +", "0 while i < max_eval: random_params = {k: random.sample(v, 1)[0] for k, v", "exception that the minimum dimension size = 10 Parameters ---------- num_sample : int", "sigmas[i], n_clusters)) continue logging.info(\"dim=%d, iter=%d, lr=%.6f, sigma=%.6f, sil=%.6f, ch=%.6f\" % (dims[i], iters[i], lrs[i],", "return new_label def watershed_level(image, bins, border_width=0.1, plot=False, conn=None): num_bins = len(bins) \"\"\"Computes and", "vals = np.array(vals).T for b in borders: # find index of the closest", "label, save=False, save_name='temp'): \"\"\"plots the distance map / u-matrix of the SOMs along", "np.array predicted cluster labels from watershed segmentation n_map : np.array array of the", "to the mask's border 
indexes by using the K-nearest neighbor method Parameters ----------", "som and weights num_features = np.shape(in_data)[1] som = MiniSom(dim, dim, num_features, sigma=sigma, learning_rate=lr,", "avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i]) best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs))", "2].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 2])))) thres_mask = (image <= bins[i]) markers =", "all the border index populated \"\"\" borders = np.array(np.where(value == 0)).T new_label =", "Returns ------- np.array numpy array of predicted cluster labels from each watershed level", "new_label def KNN(value, k=5, border_val=0): \"\"\"Assign cluster number to the mask's border indexes", "return the best watershed prediction result Parameters ---------- in_data : np.array or list", "labeled neighbor to perform KNN.\\n\\ Will return the original inputted value.\") return value", "b in borders: # find index of the closest k neighbors dist =", ":k] mins_idx = np.array(list(zip(vals[c_idx, 0], vals[c_idx, 1]))) class_counter = Counter() for idx in", "timer import random from acse_9_irp_wafflescore import MiscHelpers as mh import logging import sys", "\"\"\" borders = np.array(np.where(value == 0)).T new_label = np.array(value) vals = np.where(value !=", "mh.ext_eval_entropy(label, cluster_labels[i]) best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs)) # higher", "higher = better if(label is not None): best_idx.append(np.nanargmin(np.array(avg_ents))) # closest to 0 best_idx.append(np.nanargmax(np.array(avg_purs)))", "nbin + 1), np.arange(ttl_dtp), np.sort(in_data)) def plot_u_matrix(som_u_mat): \"\"\"Plots the distance map / u-matrix", "dim def som_assemble(in_data, seed, dim, lr=0.5, sigma=2.5): \"\"\"Initialize the SOMs model for training", "array of all the histogram bins \"\"\" f_image = 
som_u_mat.flatten() fig, (ax1, ax2)", "seed, int(dims[i]), lr=lrs[i], sigma=sigmas[i]) som.train_random(in_data, int(iters[i]), verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten())", "return param_grid def random_search_som(in_data, init_guess, max_eval=20, label=None, seed=10, re_all=False): \"\"\"perform random search for", "'_report.csv' print('Report saved at', fdir) mode = 'w' f1 = open(fdir, mode) f1.write(param_vals)", "seed : integer random seed for reproducibility dim : int dimension of the", "of each data point, by default None seed : integer, optional random seed", "save_dir = 'SOMs_results/' + save_name + '_plot.png' plt.savefig(save_dir) print('Plot saved at:', save_dir) plt.show()", "each bin will contain the same number of data points Parameters ---------- in_data", "seed=10): \"\"\"Method to fully run SOMs Parameters ---------- in_data : np.array or list", "saved at:', save_dir) plt.show() def save_som_report(som, save_name, it, et, report=None): param_vals = str(save_name)", "histedges_equalN(f_image, 10), density=True) return hist[1] def gen_e_model(n_map, som_label): \"\"\"generates the Earth model from", "+ 2] = random_walker(image, markers) if(plot): ax[i*ncols].imshow(ws_labels[i*ncols + 0], origin='lower') ax[i*ncols].title.set_text('b_cn: it={} n_class={}'.format(i,", "Total number of data points that will populate the SOMs Returns ------- int", "spread of the neighborhood function, by default 2.5dim : int seed : integer,", "def KNN(value, k=5, border_val=0): \"\"\"Assign cluster number to the mask's border indexes by", "Parameters ---------- in_data : np.array or list data matrix dim : int dimension", "np.full(len_watershed, np.nan) ch_scs = np.full(len_watershed, np.nan) if(label is not None): avg_ents = np.full(len_watershed,", "not, by default False conn : int, optional connectivity flag for measure.label, by", "response for each litho-class plt.pcolor(som.distance_map().T, cmap='bone_r') # 
plotting the distance map as background", "= som.neuron_map(in_data) cluster_labels, _, _ = eval_ws(in_data, ws_labels, n_map) return som, cluster_labels def", "f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n') f1.close() print('Report saved at:', fdir) def histedges_equalN(in_data, nbin=10): \"\"\"generates a histogram", "map / u-matrix of the SOMs Parameters ---------- som : MiniSom trained Minisom", "ws_labels[i*ncols + 1] = KNN(markers) - 1 ws_labels[i*ncols + 2] = random_walker(image, markers)", "to save the plot as png file, by default 'temp' \"\"\" plt.figure(figsize=(9, 7))", "or not, by default False conn : int, optional connectivity flag for measure.label,", "float spread of the neighborhood function, by default 2.5dim : int seed :", "a histogram where each bin will contain the same number of data points", "import default_timer as timer import random from acse_9_irp_wafflescore import MiscHelpers as mh import", "in borders: # find index of the closest value c_idx = distance.cdist([b], vals).argmin()", "skimage.segmentation import random_walker from skimage import filters from scipy.spatial import distance from collections", "measure from skimage.segmentation import random_walker from skimage import filters from scipy.spatial import distance", "n_map) cluster_labels[i], avg_sils[i], ch_scs[i] = _c[0], _as[0], _ch[0] n_clusters = len(np.unique(cluster_labels[i])) if(n_clusters <", "the same number of data points Parameters ---------- in_data : np.array or list", "by default False conn : int, optional connectivity flag for measure.label, by default", "n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 2])))) thres_mask = (image <= bins[i]) markers = measure.label(thres_mask, connectivity=conn)", "label of each data point save : bool, optional flag, by default False", "very small / large number of clusters (n_clusters = %d)\\ \" % (dims[i],", "object of Minisom class, see minisom.py for further details \"\"\" # Initialization som", 
"label of each data point, by default None seed : integer, optional random", "def compute_dim(num_sample): \"\"\" Compute a default dimension of the SOMs. This function returns", "ch=%.6f\" % (dims[i], iters[i], lrs[i], sigmas[i], avg_sils[i], ch_scs[i])) if(label is not None): avg_ents[i],", "histogram bins \"\"\" ttl_dtp = len(in_data) return np.interp(np.linspace(0, ttl_dtp, nbin + 1), np.arange(ttl_dtp),", "= init_guess min_dim = g_dim - 10 if g_dim - 5 > 10", "the mask's border indexes by using the K-nearest neighbor method Parameters ---------- value", "= plt.subplots(1, 2, figsize=(12, 5)) fig.show() ax1.pcolor(som_u_mat, cmap='bone_r') hist = plt.hist(f_image, histedges_equalN(f_image, 10),", "u-matrix or total gradient of the SOMs bins : np.array numpy array of", "number of iterations, learning rate, and sigma max_eval : int, optional number of", "# plotting the distance map as background plt.colorbar() for t, xx in zip(label,", "10 max_dim = g_dim + 10 if g_dim + 10 > 10 else", "------- np.array list of best watershed labels, may contain more than one set", "param=param) try: ch_scs[i] = mh.cal_har_sc(in_data, cluster_labels[i]) except: ch_scs[i] = -1 if(label is not", "lrs = np.zeros(max_eval) sigmas = np.zeros(max_eval) avg_sils = np.full(max_eval, np.nan) ch_scs = np.full(max_eval,", "if dim < 10: return 10 else: return dim def som_assemble(in_data, seed, dim,", "initial guess of the parameters, in order of dimension, number of iterations, learning", "numpy array of all the histogram bins \"\"\" ttl_dtp = len(in_data) return np.interp(np.linspace(0,", "list of initial guess of the parameters, in order of dimension, number of", "import MiniSom, asymptotic_decay import numpy as np import matplotlib.pyplot as plt import itertools", "+ str(et) + '\\n\\n' # save report to file fdir = save_name +", "0 k : int, optional number of neighbor to consider, by default 5", "in_data : np.array or list data matrix ws_labels : np.array predicted cluster labels", 
"perform KNN.\\n\\ Will return the original inputted value.\") return value vals = np.array(vals).T", "point, by default None seed : integer, optional random seed for reproducibility, by", "np.argpartition(dist, k) c_idx = c_idx[0, :k] mins_idx = np.array(list(zip(vals[c_idx, 0], vals[c_idx, 1]))) class_counter", "histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) cluster_labels, _, _ = eval_ws(in_data,", "populated \"\"\" borders = np.array(np.where(value == 0)).T new_label = np.array(value) vals = np.where(value", "fdir) mode = 'w' f1 = open(fdir, mode) f1.write(param_vals) if(report): f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n') f1.close()", "float, optional spread of the neighborhood function, by default 2.5 Returns ------- MiniSom", "np.zeros(max_eval) iters = np.zeros(max_eval) lrs = np.zeros(max_eval) sigmas = np.zeros(max_eval) avg_sils = np.full(max_eval,", "param_grid.items()} dims[i], iters[i], lrs[i], sigmas[i] = list(random_params.values()) som = som_assemble(in_data, seed, int(dims[i]), lr=lrs[i],", "if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i]) best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils)))", "\"\"\" f_image = som_u_mat.flatten() fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5)) fig.show()", "= filters.threshold_local(image, block_size=3 + 2*i) block_mask = (image < val) markers = measure.label(block_mask,", "avg_sils = np.full(max_eval, np.nan) ch_scs = np.full(max_eval, np.nan) cluster_labels = np.zeros((max_eval, len(in_data))) if(label", "neighborhood function, by default 2.5dim : int seed : integer, optional random seed", "total gradient of the SOMs bins : np.array numpy array of all the", "SOMs distance matrix iter_cnt : integer number of iterations for SOMs to perform", "------- All cluster label and its counterpart parameters. 
\"\"\" random.seed(seed) param_grid = gen_param_grid(init_guess)", "closest to 1 best_idx.append(np.nanargmax(ch_scs)) # higher = better if(label is not None): best_idx.append(np.nanargmin(np.array(avg_ents)))", "int, optional number of neighbor to consider, by default 5 Returns ------- np.array", ": np.array or list data array nbin : int number of bins to", "fig.show() ax1.pcolor(som_u_mat, cmap='bone_r') hist = plt.hist(f_image, histedges_equalN(f_image, 10), density=True) return hist[1] def gen_e_model(n_map,", "1]] return new_label def KNN(value, k=5, border_val=0): \"\"\"Assign cluster number to the mask's", "best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx],", "method Parameters ---------- value : np.array numpy array of the cluster number, noted", "as np import matplotlib.pyplot as plt import itertools from skimage import measure from", "flag for measure.label, by default None Returns ------- np.array numpy array of predicted", "import logging import sys logging.basicConfig(format='%(asctime)s | %(levelname)s : %(message)s', level=logging.INFO, stream=sys.stdout) def compute_dim(num_sample):", "* ncols, image.shape[0], image.shape[1])) for i in range(num_bins): val = filters.threshold_local(image, block_size=3 +", "MiniSom(dim, dim, num_features, sigma=sigma, learning_rate=lr, neighborhood_function='gaussian', random_seed=seed) som.pca_weights_init(in_data) return som def plot_som(som, in_data,", "def run_SOMs(in_data, dim, iter_cnt, lr, sigma, seed=10): \"\"\"Method to fully run SOMs Parameters", "cluster_labels def gen_param_grid(init_guess): g_dim, g_it, g_lr, g_sigma = init_guess min_dim = g_dim -", "sigma) som.train_random(in_data, iter_cnt, verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix,", "Compute a default dimension of the SOMs. 
This function returns the dimension size", "true label of each data point Returns ------- np.array list of best watershed", "int dimension of the SOMs distance matrix lr : float, optional learning rate,", "with all the border index populated \"\"\" borders = np.array(np.where(value == 0)).T new_label", "by default 10 Returns ------- minisom minisom object np.array cluster label \"\"\" som", "= np.zeros(max_eval) avg_sils = np.full(max_eval, np.nan) ch_scs = np.full(max_eval, np.nan) cluster_labels = np.zeros((max_eval,", "default False conn : int, optional connectivity flag for measure.label, by default None", "file, by default 'temp' \"\"\" plt.figure(figsize=(9, 7)) # Plotting the response for each", "populated \"\"\" borders = np.array(np.where(value == border_val)).T new_label = np.array(value) vals = np.where(value", "= plt.hist(f_image, histedges_equalN(f_image, 10), density=True) return hist[1] def gen_e_model(n_map, som_label): \"\"\"generates the Earth", "data matrix dim : int dimension of the SOMs distance matrix iter_cnt :", "np.array(som_class) def closest_n(value): \"\"\"Assign cluster number to the mask's border indexes by using", "+ 4].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 4])))) ax[i*ncols + 5].imshow(ws_labels[i*ncols + 5], origin='lower')", "list data matrix ws_labels : np.array predicted cluster labels from watershed segmentation n_map", "int number of bins to populate, by default 10 Returns ------- np.array numpy", "= som.winner(xx) # getting the winner # palce a marker on the winning", "dim : int dimension of the SOMs distance matrix iter_cnt : integer number", "= np.array(list(zip(vals[c_idx, 0], vals[c_idx, 1]))) class_counter = Counter() for idx in mins_idx: class_counter[value[idx[0],", "border_val)).T new_label = np.array(value) vals = np.where(value != 0) if(len(vals[0]) < 5): logging.info(\"Not", "pur=%.6f\" % (avg_ents[i], avg_purs[i])) i += 1 best_idx = [] 
best_idx.append(np.nanargmax(np.array(avg_sils))) # closest", "!= 0) if(len(vals[0]) < 5): logging.info(\"Not enough labeled neighbor to perform KNN.\\n\\ Will", "array of all the histogram bins \"\"\" ttl_dtp = len(in_data) return np.interp(np.linspace(0, ttl_dtp,", "= som_assemble(in_data, seed, dim, lr, sigma) som.train_random(in_data, iter_cnt, verbose=False) u_matrix = som.distance_map().T watershed_bins", "rate, by default 0.5 sigma : float, optional spread of the neighborhood function,", "GitHub: wafflescore \"\"\" from minisom import MiniSom, asymptotic_decay import numpy as np import", "the neighborhood function, by default 2.5 Returns ------- MiniSom an object of Minisom", "return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx]) def run_SOMs(in_data, dim, iter_cnt, lr, sigma, seed=10): \"\"\"Method to", "\"\"\"Computes and classify the SOM's u-matrix or total gradient using watershed classification method", "cluster_labels[i]) except: ch_scs[i] = -1 if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label,", "= open(fdir, mode) f1.write(param_vals) if(report): f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n') f1.close() print('Report saved at:', fdir) def", "+ 5].imshow(ws_labels[i*ncols + 5], origin='lower') ax[i*ncols + 5].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 5]))))", "def plot_u_matrix(som_u_mat): \"\"\"Plots the distance map / u-matrix of the SOMs Parameters ----------", "with the exception that the minimum dimension size = 10 Parameters ---------- num_sample", "closest_n(value): \"\"\"Assign cluster number to the mask's border indexes by using the closest", "plot the watershed level or not, by default False conn : int, optional", ": np.array or list data matrix init_guess : tuple list of initial guess", "fully run SOMs Parameters ---------- in_data : np.array or list data matrix dim", "print('Report saved at:', fdir) def histedges_equalN(in_data, 
nbin=10): \"\"\"generates a histogram where each bin", "\" % (dims[i], iters[i], lrs[i], sigmas[i], n_clusters)) continue logging.info(\"dim=%d, iter=%d, lr=%.6f, sigma=%.6f, sil=%.6f,", "except: ch_scs[i] = -1 if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i])", "None): avg_ents = np.full(len_watershed, np.nan) avg_purs = np.full(len_watershed, np.nan) for i in range(len_watershed):", "time,' + str(et) + '\\n\\n' # save report to file fdir = save_name", "} return param_grid def random_search_som(in_data, init_guess, max_eval=20, label=None, seed=10, re_all=False): \"\"\"perform random search", "= histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) cluster_labels, _, _ =", "winner # palce a marker on the winning position for the sample xx", "cluster number to the mask's border indexes by using the closest neighbor's value", "number of data points that will populate the SOMs Returns ------- int Ideal", "for k, v in param_grid.items()} dims[i], iters[i], lrs[i], sigmas[i] = list(random_params.values()) som =", "by default 2.5dim : int seed : integer, optional random seed for reproducibility,", "sigma max_eval : int, optional number of max iterartion to perform the search,", "len(np.unique(ws_labels[i*ncols + 0])))) ax[i*ncols + 1].imshow(ws_labels[i*ncols + 1], origin='lower') ax[i*ncols + 1].title.set_text('b_knn: it={}", "the sample xx plt.text(w[0]+.5, w[1]+.5, str(t), color=plt.cm.rainbow(t/10.)) plt.axis([0, som.get_weights().shape[0], 0, som.get_weights().shape[1]]) if(save): save_dir", "save_name, it, et, report=None): param_vals = str(save_name) + '\\n---' + \\ '\\niterations,' +", "if(len(vals[0]) < 5): logging.info(\"Not enough labeled neighbor to perform KNN.\\n\\ Will return the", "connectivity=conn) ws_labels[i*ncols] = closest_n(markers) - 1 ws_labels[i*ncols + 1] = KNN(markers) - 1", "= KNN(markers) - 1 ws_labels[i*ncols + 2] = 
random_walker(image, markers) if(plot): ax[i*ncols].imshow(ws_labels[i*ncols +", "param = {'watershed idx': i} if(len(np.unique(ws_labels[i])) > 1): cluster_labels[i] = gen_e_model(n_map, ws_labels[i]) avg_sils[i]", "reproducibility dim : int dimension of the SOMs distance matrix lr : float,", "{ 'dim': list(range(min_dim, max_dim+1)), 'iter_cnt': list(range(g_it - 500, g_it + 500, 200)), 'learning_rate':", "som.neuron_map(in_data) cluster_labels, _, _ = eval_ws(in_data, ws_labels, n_map) return som, cluster_labels def gen_param_grid(init_guess):", "c_idx[0, :k] mins_idx = np.array(list(zip(vals[c_idx, 0], vals[c_idx, 1]))) class_counter = Counter() for idx", ": MiniSom trained Minisom object Returns ------- np.array numpy array of all the", "dims, iters, lrs, sigmas, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx], dims[best_idx], iters[best_idx], lrs[best_idx],", "MiniSom an object of Minisom class, see minisom.py for further details \"\"\" #", "som_label): \"\"\"generates the Earth model from neuron map\"\"\" som_class = [] for i", "= np.array(value) vals = np.where(value != 0) if(len(vals[0]) < 5): logging.info(\"Not enough labeled", "+ 3] = closest_n(markers) - 1 ws_labels[i*ncols + 4] = KNN(markers) - 1", "by using the K-nearest neighbor method Parameters ---------- value : np.array numpy array", "minisom object np.array cluster label \"\"\" som = som_assemble(in_data, seed, dim, lr, sigma)", "find index of the closest k neighbors dist = distance.cdist([b], vals) c_idx =", "it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 1])))) ax[i*ncols + 2].imshow(ws_labels[i*ncols + 2], origin='lower') ax[i*ncols +", "fdir) def histedges_equalN(in_data, nbin=10): \"\"\"generates a histogram where each bin will contain the", "+ 0], origin='lower') ax[i*ncols].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 0])))) ax[i*ncols + 1].imshow(ws_labels[i*ncols +", "logging.info(\"dim=%d, 
iter=%d, lr=%.6f, sigma=%.6f, sil=%.6f, ch=%.6f\" % (dims[i], iters[i], lrs[i], sigmas[i], avg_sils[i], ch_scs[i]))", "for i in range(num_bins): val = filters.threshold_local(image, block_size=3 + 2*i) block_mask = (image", "import random_walker from skimage import filters from scipy.spatial import distance from collections import", "the SOMs distance matrix iter_cnt : integer number of iterations for SOMs to", "level=logging.INFO, stream=sys.stdout) def compute_dim(num_sample): \"\"\" Compute a default dimension of the SOMs. This", "avg_sils, ch_scs, dims, iters, lrs, sigmas, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx], dims[best_idx],", "ax[i*ncols + 3].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 3])))) ax[i*ncols + 4].imshow(ws_labels[i*ncols + 4],", "the neighborhood function, by default 2.5dim : int seed : integer, optional random", "dim, num_features, sigma=sigma, learning_rate=lr, neighborhood_function='gaussian', random_seed=seed) som.pca_weights_init(in_data) return som def plot_som(som, in_data, label,", "in param_grid.items()} dims[i], iters[i], lrs[i], sigmas[i] = list(random_params.values()) som = som_assemble(in_data, seed, int(dims[i]),", "np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, dims, iters, lrs, sigmas, best_idx) else: return", "as timer import random from acse_9_irp_wafflescore import MiscHelpers as mh import logging import", "if(re_all): return (cluster_labels, avg_sils, ch_scs, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx]) def run_SOMs(in_data,", "as background plt.colorbar() for t, xx in zip(label, in_data): w = som.winner(xx) #", "param_grid = { 'dim': list(range(min_dim, max_dim+1)), 'iter_cnt': list(range(g_it - 500, g_it + 500,", "= axes.ravel() ws_labels = np.zeros((num_bins * ncols, image.shape[0], image.shape[1])) for i in range(num_bins):", "5])))) return ws_labels def 
eval_ws(in_data, ws_labels, n_map, label=None, re_all=False): \"\"\"Evaluate and return the", "2].imshow(ws_labels[i*ncols + 2], origin='lower') ax[i*ncols + 2].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 2])))) thres_mask", "return value vals = np.array(vals).T for b in borders: # find index of", "closest to 1 best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, dims, iters,", "> 10 else 10 max_dim = g_dim + 10 if g_dim + 10", "10 Returns ------- minisom minisom object np.array cluster label \"\"\" som = som_assemble(in_data,", "Parameters ---------- som : MiniSom trained Minisom object in_data : np.array or list", "or list the true label of each data point save : bool, optional", "'temp' \"\"\" plt.figure(figsize=(9, 7)) # Plotting the response for each litho-class plt.pcolor(som.distance_map().T, cmap='bone_r')", "enough labeled neighbor to perform KNN.\\n\\ Will return the original inputted value.\") return", "result to very small / large number of clusters (n_clusters = %d)\\ \"", "- 5 > 10 else 10 max_dim = g_dim + 10 if g_dim", "plt.figure(figsize=(9, 7)) # Plotting the response for each litho-class plt.pcolor(som.distance_map().T, cmap='bone_r') # plotting", "numpy array of the cluster number, noted that the borders are marked with", "np.where(value != 0) if(len(vals[0]) < 5): logging.info(\"Not enough labeled neighbor to perform KNN.\\n\\", "not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i], init_clus=-1) logging.info(\"ent=%.6f, pur=%.6f\" % (avg_ents[i], avg_purs[i]))", "borders: # find index of the closest value c_idx = distance.cdist([b], vals).argmin() new_label[b[0],", "cluster number, noted that the borders are marked with 0 k : int,", "f1.close() print('Report saved at:', fdir) def histedges_equalN(in_data, nbin=10): \"\"\"generates a histogram where each", "by default 20 label : np.array or list, optional the true label of", "itertools from 
skimage import measure from skimage.segmentation import random_walker from skimage import filters", "= mh.cal_har_sc(in_data, cluster_labels[i]) except: ch_scs[i] = -1 if(label is not None): avg_ents[i], avg_purs[i]", "random_search_som(in_data, init_guess, max_eval=20, label=None, seed=10, re_all=False): \"\"\"perform random search for SOMs best parameters.", "cluster_labels[i], init_clus=-1) logging.info(\"ent=%.6f, pur=%.6f\" % (avg_ents[i], avg_purs[i])) i += 1 best_idx = []", "populate the SOMs Returns ------- int Ideal dimension. \"\"\" dim = 5 *", "block_mask = (image < val) markers = measure.label(block_mask, connectivity=conn) ws_labels[i*ncols] = closest_n(markers) -", "ws_labels : np.array predicted cluster labels from watershed segmentation n_map : np.array array", "avg_sils[i], ch_scs[i])) if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i], init_clus=-1) logging.info(\"ent=%.6f,", ": np.array or list, optional the true label of each data point, by", "data matrix ws_labels : np.array predicted cluster labels from watershed segmentation n_map :", "(cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx]) def run_SOMs(in_data, dim, iter_cnt, lr, sigma, seed=10): \"\"\"Method to fully", "it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 3])))) ax[i*ncols + 4].imshow(ws_labels[i*ncols + 4], origin='lower') ax[i*ncols +", "original inputted value.\") return value vals = np.array(vals).T for b in borders: #", ": int dimension of the SOMs distance matrix lr : float, optional learning", "number, noted that the borders are marked with 0 k : int, optional", "== 0)).T new_label = np.array(value) vals = np.where(value != 0) vals = np.array(vals).T", "class, see minisom.py for further details \"\"\" # Initialization som and weights num_features", "(image < val) markers = measure.label(block_mask, connectivity=conn) ws_labels[i*ncols] = closest_n(markers) - 1 ws_labels[i*ncols", "seed=10, re_all=False): 
\"\"\"perform random search for SOMs best parameters. Parameters ---------- in_data :", "None seed : integer, optional random seed for reproducibility, by default 10 Returns", "size = 10 Parameters ---------- num_sample : int Total number of data points", "w[1]+.5, str(t), color=plt.cm.rainbow(t/10.)) plt.axis([0, som.get_weights().shape[0], 0, som.get_weights().shape[1]]) if(save): save_dir = 'SOMs_results/' + save_name", "of data points Parameters ---------- in_data : np.array or list data array nbin", "best parameters. Parameters ---------- in_data : np.array or list data matrix init_guess :", "all the histogram bins plot : bool, optional flag whether to plot the", "10 if g_dim - 5 > 10 else 10 max_dim = g_dim +", "param_vals = str(save_name) + '\\n---' + \\ '\\niterations,' + str(it) + \\ '\\nelapsed", "plt.text(w[0]+.5, w[1]+.5, str(t), color=plt.cm.rainbow(t/10.)) plt.axis([0, som.get_weights().shape[0], 0, som.get_weights().shape[1]]) if(save): save_dir = 'SOMs_results/' +", "large number of clusters (n_clusters = %d)\\ \" % (dims[i], iters[i], lrs[i], sigmas[i],", "= watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) _c, _as, _ch = eval_ws(in_data, ws_labels, n_map)", "reproducibility, by default 10 Returns ------- minisom minisom object np.array cluster label \"\"\"", "+ 10 if g_dim + 10 > 10 else 20 param_grid = {", "cluster_labels = np.zeros((len_watershed, len(in_data))) avg_sils = np.full(len_watershed, np.nan) ch_scs = np.full(len_watershed, np.nan) if(label", "None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i], init_clus=-1) logging.info(\"ent=%.6f, pur=%.6f\" % (avg_ents[i], avg_purs[i])) i", "number of data points Parameters ---------- in_data : np.array or list data array", "Returns ------- minisom minisom object np.array cluster label \"\"\" som = som_assemble(in_data, seed,", "3], origin='lower') ax[i*ncols + 3].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 
3])))) ax[i*ncols + 4].imshow(ws_labels[i*ncols", "by default 10 Returns ------- np.array numpy array of all the histogram bins", "of clusters (n_clusters = %d)\\ \" % (dims[i], iters[i], lrs[i], sigmas[i], n_clusters)) continue", "ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) cluster_labels, _, _ = eval_ws(in_data, ws_labels,", "value c_idx = distance.cdist([b], vals).argmin() new_label[b[0], b[1]] = value[vals[c_idx, 0]][vals[c_idx, 1]] return new_label", "{'watershed idx': i} if(len(np.unique(ws_labels[i])) > 1): cluster_labels[i] = gen_e_model(n_map, ws_labels[i]) avg_sils[i] = mh.int_eval_silhouette(in_data,", "dimension of the SOMs. This function returns the dimension size of the SOMs.", ": integer number of iterations for SOMs to perform lr : float learning", "whether to plot the watershed level or not, by default False conn :", "by default 0.5 sigma : float, optional spread of the neighborhood function, by", "lr=0.5, sigma=2.5): \"\"\"Initialize the SOMs model for training Parameters ---------- in_data : np.array", "np.array or list data matrix seed : integer random seed for reproducibility dim", "of bins to populate, by default 10 Returns ------- np.array numpy array of", "result Parameters ---------- in_data : np.array or list data matrix ws_labels : np.array", "the borders are marked with 0 Returns ------- np.array new label with all", "np.array or list the true label of each data point save : bool,", "ax[i*ncols + 2].imshow(ws_labels[i*ncols + 2], origin='lower') ax[i*ncols + 2].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols +", "search for SOMs best parameters. 
Parameters ---------- in_data : np.array or list data", "import random from acse_9_irp_wafflescore import MiscHelpers as mh import logging import sys logging.basicConfig(format='%(asctime)s", "mask's border indexes by using the closest neighbor's value Parameters ---------- value :", "array nbin : int number of bins to populate, by default 10 Returns", "- 500, g_it + 500, 200)), 'learning_rate': list(np.logspace(np.log10(0.25), np.log10(0.75), base=10, num=100)), 'sigma': list(np.linspace(g_sigma-1,", "the Earth model from neuron map\"\"\" som_class = [] for i in range(len(n_map)):", "lrs[i], sigmas[i], n_clusters)) continue logging.info(\"dim=%d, iter=%d, lr=%.6f, sigma=%.6f, sil=%.6f, ch=%.6f\" % (dims[i], iters[i],", "model for training Parameters ---------- in_data : np.array or list data matrix seed", "+ 5] = random_walker(image, markers) if(plot): ax[i*ncols + 3].imshow(ws_labels[i*ncols + 3], origin='lower') ax[i*ncols", "avg_sils[i] = mh.int_eval_silhouette(in_data, cluster_labels[i], method='som_watershed', param=param) try: ch_scs[i] = mh.cal_har_sc(in_data, cluster_labels[i]) except: ch_scs[i]", "object in_data : np.array or list data matrix label : np.array or list", "# closest to 1 best_idx.append(np.nanargmax(ch_scs)) # higher = better if(label is not None):", "1].imshow(ws_labels[i*ncols + 1], origin='lower') ax[i*ncols + 1].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 1])))) ax[i*ncols", "(image <= bins[i]) markers = measure.label(thres_mask, connectivity=conn) ws_labels[i*ncols + 3] = closest_n(markers) -", "Counter from timeit import default_timer as timer import random from acse_9_irp_wafflescore import MiscHelpers", "ch_scs[i] = _c[0], _as[0], _ch[0] n_clusters = len(np.unique(cluster_labels[i])) if(n_clusters < 5 or n_clusters", "from skimage.segmentation import random_walker from skimage import filters from scipy.spatial import distance from", "skimage import measure from skimage.segmentation import 
random_walker from skimage import filters from scipy.spatial", "the response for each litho-class plt.pcolor(som.distance_map().T, cmap='bone_r') # plotting the distance map as", "distance from collections import Counter from timeit import default_timer as timer import random", "\"\"\"plots the distance map / u-matrix of the SOMs along with the label", ": np.array or list data matrix label : np.array or list the true", "are marked with 0 k : int, optional number of neighbor to consider,", "plt.savefig(save_dir) print('Plot saved at:', save_dir) plt.show() def save_som_report(som, save_name, it, et, report=None): param_vals", "len(bins) \"\"\"Computes and classify the SOM's u-matrix or total gradient using watershed classification", "neighbor to perform KNN.\\n\\ Will return the original inputted value.\") return value vals", "ttl_dtp, nbin + 1), np.arange(ttl_dtp), np.sort(in_data)) def plot_u_matrix(som_u_mat): \"\"\"Plots the distance map /", "Minisom object in_data : np.array or list data matrix label : np.array or", "is not None): avg_ents = np.full(len_watershed, np.nan) avg_purs = np.full(len_watershed, np.nan) for i", "1 ws_labels[i*ncols + 4] = KNN(markers) - 1 ws_labels[i*ncols + 5] = random_walker(image,", "sigma=sigma, learning_rate=lr, neighborhood_function='gaussian', random_seed=seed) som.pca_weights_init(in_data) return som def plot_som(som, in_data, label, save=False, save_name='temp'):", "of the SOMs. 
The size returned is sqrt(5 * sqrt(num_sample)), with the exception", "float learning rate sigma : float spread of the neighborhood function, by default", "g_it + 500, 200)), 'learning_rate': list(np.logspace(np.log10(0.25), np.log10(0.75), base=10, num=100)), 'sigma': list(np.linspace(g_sigma-1, g_sigma+1, num=30)),", "for i in range(len_watershed): param = {'watershed idx': i} if(len(np.unique(ws_labels[i])) > 1): cluster_labels[i]", "np.array(value) vals = np.where(value != 0) if(len(vals[0]) < 5): logging.info(\"Not enough labeled neighbor", "ttl_dtp = len(in_data) return np.interp(np.linspace(0, ttl_dtp, nbin + 1), np.arange(ttl_dtp), np.sort(in_data)) def plot_u_matrix(som_u_mat):", "= np.zeros((max_eval, len(in_data))) if(label is not None): avg_ents = np.full(max_eval, np.nan) avg_purs =", "cluster_labels[i], method='som_watershed', param=param) try: ch_scs[i] = mh.cal_har_sc(in_data, cluster_labels[i]) except: ch_scs[i] = -1 if(label", "---------- value : np.array numpy array of the cluster number, noted that the", "% (avg_ents[i], avg_purs[i])) i += 1 best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to", "else 20 param_grid = { 'dim': list(range(min_dim, max_dim+1)), 'iter_cnt': list(range(g_it - 500, g_it", "dim = 5 * np.sqrt(num_sample) dim = np.int(np.sqrt(dim)) if dim < 10: return", "som def plot_som(som, in_data, label, save=False, save_name='temp'): \"\"\"plots the distance map / u-matrix", "of the neighborhood function, by default 2.5 Returns ------- MiniSom an object of", "logging.info(\"Not enough labeled neighbor to perform KNN.\\n\\ Will return the original inputted value.\")", "---------- in_data : np.array or list data matrix seed : integer random seed", "integer random seed for reproducibility dim : int dimension of the SOMs distance", "noted that the borders are marked with 0 Returns ------- np.array new label", "borders: # find index of the closest k neighbors dist = distance.cdist([b], vals)", "+ 
3].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 3])))) ax[i*ncols + 4].imshow(ws_labels[i*ncols + 4], origin='lower')", "return 10 else: return dim def som_assemble(in_data, seed, dim, lr=0.5, sigma=2.5): \"\"\"Initialize the", "> 30): logging.info(\"Random search using dim=%d, iter=%d, lr=%.6f, sigma=%.6f\\ result to very small", "------- np.array numpy array of all the histogram bins \"\"\" f_image = som_u_mat.flatten()", "predicted cluster labels from each watershed level \"\"\" ncols = 6 if(plot): fig,", "10 Returns ------- np.array numpy array of all the histogram bins \"\"\" ttl_dtp", "winner neuron label : np.array or list, optional the true label of each", "int, optional number of max iterartion to perform the search, by default 20", "number of iterations for SOMs to perform lr : float learning rate sigma", "i} if(len(np.unique(ws_labels[i])) > 1): cluster_labels[i] = gen_e_model(n_map, ws_labels[i]) avg_sils[i] = mh.int_eval_silhouette(in_data, cluster_labels[i], method='som_watershed',", "integer, optional random seed for reproducibility, by default 10 Returns ------- minisom minisom", "ax[i*ncols].imshow(ws_labels[i*ncols + 0], origin='lower') ax[i*ncols].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 0])))) ax[i*ncols + 1].imshow(ws_labels[i*ncols", "# Plotting the response for each litho-class plt.pcolor(som.distance_map().T, cmap='bone_r') # plotting the distance", "_, _ = eval_ws(in_data, ws_labels, n_map) return som, cluster_labels def gen_param_grid(init_guess): g_dim, g_it,", "len(np.unique(ws_labels[i*ncols + 5])))) return ws_labels def eval_ws(in_data, ws_labels, n_map, label=None, re_all=False): \"\"\"Evaluate and", "= som_assemble(in_data, seed, int(dims[i]), lr=lrs[i], sigma=sigmas[i]) som.train_random(in_data, int(iters[i]), verbose=False) u_matrix = som.distance_map().T watershed_bins", "points Parameters ---------- in_data : np.array or list data array nbin : int", 
"as plt import itertools from skimage import measure from skimage.segmentation import random_walker from", "watershed level or not, by default False conn : int, optional connectivity flag", "dim, iter_cnt, lr, sigma, seed=10): \"\"\"Method to fully run SOMs Parameters ---------- in_data", "new_label[b[0], b[1]] = value[vals[c_idx, 0]][vals[c_idx, 1]] return new_label def KNN(value, k=5, border_val=0): \"\"\"Assign", "watershed_level(image, bins, border_width=0.1, plot=False, conn=None): num_bins = len(bins) \"\"\"Computes and classify the SOM's", "= np.where(value != 0) if(len(vals[0]) < 5): logging.info(\"Not enough labeled neighbor to perform", ": str, optional the name which will be used to save the plot", "optional flag, by default False save_name : str, optional the name which will", "ncols = 6 if(plot): fig, axes = plt.subplots(ncols=ncols, nrows=num_bins, figsize=(12, num_bins*3), sharex=True, sharey=True)", "map\"\"\" som_class = [] for i in range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return np.array(som_class) def closest_n(value):", "dimension size of the SOMs. 
The size returned is sqrt(5 * sqrt(num_sample)), with", "array of predicted cluster labels from each watershed level \"\"\" ncols = 6", "import itertools from skimage import measure from skimage.segmentation import random_walker from skimage import", "3] = closest_n(markers) - 1 ws_labels[i*ncols + 4] = KNN(markers) - 1 ws_labels[i*ncols", "len(np.unique(ws_labels[i*ncols + 4])))) ax[i*ncols + 5].imshow(ws_labels[i*ncols + 5], origin='lower') ax[i*ncols + 5].title.set_text('b_rw: it={}", "< val) markers = measure.label(block_mask, connectivity=conn) ws_labels[i*ncols] = closest_n(markers) - 1 ws_labels[i*ncols +", "u-matrix of the SOMs Parameters ---------- som : MiniSom trained Minisom object Returns", "sys logging.basicConfig(format='%(asctime)s | %(levelname)s : %(message)s', level=logging.INFO, stream=sys.stdout) def compute_dim(num_sample): \"\"\" Compute a", "by default None seed : integer, optional random seed for reproducibility, by default", "float, optional learning rate, by default 0.5 sigma : float, optional spread of", "200)), 'learning_rate': list(np.logspace(np.log10(0.25), np.log10(0.75), base=10, num=100)), 'sigma': list(np.linspace(g_sigma-1, g_sigma+1, num=30)), } return param_grid", "sigmas[i], avg_sils[i], ch_scs[i])) if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i], init_clus=-1)", "return (cluster_labels, avg_sils, ch_scs, dims, iters, lrs, sigmas, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx],", "with 0 k : int, optional number of neighbor to consider, by default", ": int, optional connectivity flag for measure.label, by default None Returns ------- np.array", "max_eval: random_params = {k: random.sample(v, 1)[0] for k, v in param_grid.items()} dims[i], iters[i],", "som.pca_weights_init(in_data) return som def plot_som(som, in_data, label, save=False, save_name='temp'): \"\"\"plots the distance map", "the true label of each data point save : bool, optional flag, by", 
"Minisom class, see minisom.py for further details \"\"\" # Initialization som and weights", "integer number of iterations for SOMs to perform lr : float learning rate", "'iter_cnt': list(range(g_it - 500, g_it + 500, 200)), 'learning_rate': list(np.logspace(np.log10(0.25), np.log10(0.75), base=10, num=100)),", ": int seed : integer, optional random seed for reproducibility, by default 10", "== border_val)).T new_label = np.array(value) vals = np.where(value != 0) if(len(vals[0]) < 5):", "plt.show() def save_som_report(som, save_name, it, et, report=None): param_vals = str(save_name) + '\\n---' +", "c_idx = distance.cdist([b], vals).argmin() new_label[b[0], b[1]] = value[vals[c_idx, 0]][vals[c_idx, 1]] return new_label def", "to consider, by default 5 Returns ------- np.array new label with all the", "iter_cnt, lr, sigma, seed=10): \"\"\"Method to fully run SOMs Parameters ---------- in_data :", "at:', fdir) def histedges_equalN(in_data, nbin=10): \"\"\"generates a histogram where each bin will contain", "consider, by default 5 Returns ------- np.array new label with all the border", "num_features, sigma=sigma, learning_rate=lr, neighborhood_function='gaussian', random_seed=seed) som.pca_weights_init(in_data) return som def plot_som(som, in_data, label, save=False,", "KNN(markers) - 1 ws_labels[i*ncols + 5] = random_walker(image, markers) if(plot): ax[i*ncols + 3].imshow(ws_labels[i*ncols", "mode) f1.write(param_vals) if(report): f1.write(str(report)) f1.write('\\n\\n--------------------\\n\\n') f1.close() print('Report saved at:', fdir) def histedges_equalN(in_data, nbin=10):", "som_u_mat.flatten() fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5)) fig.show() ax1.pcolor(som_u_mat, cmap='bone_r') hist", "eval_ws(in_data, ws_labels, n_map, label=None, re_all=False): \"\"\"Evaluate and return the best watershed prediction result", "list data matrix dim : int dimension of the SOMs distance matrix iter_cnt", "+ 2].title.set_text('b_rw: it={} n_class={}'.format(i, 
len(np.unique(ws_labels[i*ncols + 2])))) thres_mask = (image <= bins[i]) markers", "or total gradient using watershed classification method Parameters ---------- image : np.array u-matrix", "optional number of neighbor to consider, by default 5 Returns ------- np.array new", "5 > 10 else 10 max_dim = g_dim + 10 if g_dim +", "n_map, label=None, re_all=False): \"\"\"Evaluate and return the best watershed prediction result Parameters ----------", "et, report=None): param_vals = str(save_name) + '\\n---' + \\ '\\niterations,' + str(it) +", "distance map as background plt.colorbar() for t, xx in zip(label, in_data): w =", "the closest neighbor's value Parameters ---------- value : np.array numpy array of the", "for idx in mins_idx: class_counter[value[idx[0], idx[1]]] += 1 cl = class_counter.most_common(1)[0][0] new_label[b[0], b[1]]", "20 label : np.array or list, optional the true label of each data", "with 0 Returns ------- np.array new label with all the border index populated", "0]][vals[c_idx, 1]] return new_label def KNN(value, k=5, border_val=0): \"\"\"Assign cluster number to the", "np.nan) ch_scs = np.full(max_eval, np.nan) cluster_labels = np.zeros((max_eval, len(in_data))) if(label is not None):", "Parameters ---------- in_data : np.array or list data matrix init_guess : tuple list", "report to file fdir = save_name + '_report.csv' print('Report saved at', fdir) mode", "'learning_rate': list(np.logspace(np.log10(0.25), np.log10(0.75), base=10, num=100)), 'sigma': list(np.linspace(g_sigma-1, g_sigma+1, num=30)), } return param_grid def", "Parameters ---------- value : np.array numpy array of the cluster number, noted that", "f_image = som_u_mat.flatten() fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5)) fig.show() ax1.pcolor(som_u_mat,", "= np.full(max_eval, np.nan) i = 0 while i < max_eval: random_params = {k:", "# save report to file fdir = save_name + '_report.csv' print('Report saved at',", "logging.info(\"Random search using dim=%d, iter=%d, lr=%.6f, 
sigma=%.6f\\ result to very small / large", "if g_dim + 10 > 10 else 20 param_grid = { 'dim': list(range(min_dim,", "dist = distance.cdist([b], vals) c_idx = np.argpartition(dist, k) c_idx = c_idx[0, :k] mins_idx", "dim = np.int(np.sqrt(dim)) if dim < 10: return 10 else: return dim def", "dimension of the SOMs distance matrix lr : float, optional learning rate, by", "1].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 1])))) ax[i*ncols + 2].imshow(ws_labels[i*ncols + 2], origin='lower') ax[i*ncols", "to 1 best_idx.append(np.nanargmax(ch_scs)) # higher = better if(label is not None): best_idx.append(np.nanargmin(np.array(avg_ents))) #", "data point Returns ------- np.array list of best watershed labels, may contain more", "random.seed(seed) param_grid = gen_param_grid(init_guess) dims = np.zeros(max_eval) iters = np.zeros(max_eval) lrs = np.zeros(max_eval)", "from watershed segmentation n_map : np.array array of the winner neuron label :", "n_map) return som, cluster_labels def gen_param_grid(init_guess): g_dim, g_it, g_lr, g_sigma = init_guess min_dim", "index of the closest value c_idx = distance.cdist([b], vals).argmin() new_label[b[0], b[1]] = value[vals[c_idx,", "new_label = np.array(value) vals = np.where(value != 0) if(len(vals[0]) < 5): logging.info(\"Not enough", "details \"\"\" # Initialization som and weights num_features = np.shape(in_data)[1] som = MiniSom(dim,", "data point save : bool, optional flag, by default False save_name : str,", "10 > 10 else 20 param_grid = { 'dim': list(range(min_dim, max_dim+1)), 'iter_cnt': list(range(g_it", "np.array or list data matrix label : np.array or list the true label", "data matrix label : np.array or list the true label of each data", "value vals = np.array(vals).T for b in borders: # find index of the", "np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx]) def", 
"if(label is not None): best_idx.append(np.nanargmin(np.array(avg_ents))) # closest to 0 best_idx.append(np.nanargmax(np.array(avg_purs))) # closest to", "file fdir = save_name + '_report.csv' print('Report saved at', fdir) mode = 'w'", "= som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) _c,", "vals[c_idx, 1]))) class_counter = Counter() for idx in mins_idx: class_counter[value[idx[0], idx[1]]] += 1", "border_val=0): \"\"\"Assign cluster number to the mask's border indexes by using the K-nearest", "1 best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, dims, iters, lrs, sigmas,", "index of the closest k neighbors dist = distance.cdist([b], vals) c_idx = np.argpartition(dist,", "ax = axes.ravel() ws_labels = np.zeros((num_bins * ncols, image.shape[0], image.shape[1])) for i in", "data matrix init_guess : tuple list of initial guess of the parameters, in", "\"\"\" borders = np.array(np.where(value == border_val)).T new_label = np.array(value) vals = np.where(value !=", "< 5 or n_clusters > 30): logging.info(\"Random search using dim=%d, iter=%d, lr=%.6f, sigma=%.6f\\", "\"\"\" # Initialization som and weights num_features = np.shape(in_data)[1] som = MiniSom(dim, dim,", "Returns ------- np.array numpy array of all the histogram bins \"\"\" f_image =", "perform lr : float learning rate sigma : float spread of the neighborhood", "len(in_data))) if(label is not None): avg_ents = np.full(max_eval, np.nan) avg_purs = np.full(max_eval, np.nan)", "with the label Parameters ---------- som : MiniSom trained Minisom object in_data :", "default None seed : integer, optional random seed for reproducibility, by default 10", "The size returned is sqrt(5 * sqrt(num_sample)), with the exception that the minimum", "data array nbin : int number of bins to populate, by default 10", "np.nan) i = 0 while i < max_eval: random_params = {k: 
random.sample(v, 1)[0]", "Initialization som and weights num_features = np.shape(in_data)[1] som = MiniSom(dim, dim, num_features, sigma=sigma,", "cl return new_label def watershed_level(image, bins, border_width=0.1, plot=False, conn=None): num_bins = len(bins) \"\"\"Computes", "closest to 0 best_idx.append(np.nanargmax(np.array(avg_purs))) # closest to 1 best_idx = np.unique(best_idx) if(re_all): return", "(n_clusters = %d)\\ \" % (dims[i], iters[i], lrs[i], sigmas[i], n_clusters)) continue logging.info(\"dim=%d, iter=%d,", "lr=%.6f, sigma=%.6f, sil=%.6f, ch=%.6f\" % (dims[i], iters[i], lrs[i], sigmas[i], avg_sils[i], ch_scs[i])) if(label is", "= {k: random.sample(v, 1)[0] for k, v in param_grid.items()} dims[i], iters[i], lrs[i], sigmas[i]", "4] = KNN(markers) - 1 ws_labels[i*ncols + 5] = random_walker(image, markers) if(plot): ax[i*ncols", "default 10 Returns ------- All cluster label and its counterpart parameters. \"\"\" random.seed(seed)", "2], origin='lower') ax[i*ncols + 2].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 2])))) thres_mask = (image", "\"\"\"generates a histogram where each bin will contain the same number of data", "neuron label : np.array or list, optional the true label of each data", "def gen_param_grid(init_guess): g_dim, g_it, g_lr, g_sigma = init_guess min_dim = g_dim - 10", "watershed_bins) n_map = som.neuron_map(in_data) _c, _as, _ch = eval_ws(in_data, ws_labels, n_map) cluster_labels[i], avg_sils[i],", "num_bins*3), sharex=True, sharey=True) ax = axes.ravel() ws_labels = np.zeros((num_bins * ncols, image.shape[0], image.shape[1]))", "size of the SOMs. 
The size returned is sqrt(5 * sqrt(num_sample)), with the", ": np.array array of the winner neuron label : np.array or list, optional", "SOMs bins : np.array numpy array of all the histogram bins plot :", ": bool, optional flag whether to plot the watershed level or not, by", "neighbor method Parameters ---------- value : np.array numpy array of the cluster number,", "+ 3])))) ax[i*ncols + 4].imshow(ws_labels[i*ncols + 4], origin='lower') ax[i*ncols + 4].title.set_text('b_knn: it={} n_class={}'.format(i,", "sample xx plt.text(w[0]+.5, w[1]+.5, str(t), color=plt.cm.rainbow(t/10.)) plt.axis([0, som.get_weights().shape[0], 0, som.get_weights().shape[1]]) if(save): save_dir =", "points that will populate the SOMs Returns ------- int Ideal dimension. \"\"\" dim", "number, noted that the borders are marked with 0 Returns ------- np.array new", "ws_labels, n_map) cluster_labels[i], avg_sils[i], ch_scs[i] = _c[0], _as[0], _ch[0] n_clusters = len(np.unique(cluster_labels[i])) if(n_clusters", "for b in borders: # find index of the closest k neighbors dist", "sigmas = np.zeros(max_eval) avg_sils = np.full(max_eval, np.nan) ch_scs = np.full(max_eval, np.nan) cluster_labels =", "seed for reproducibility, by default 10 Returns ------- minisom minisom object np.array cluster", "1): cluster_labels[i] = gen_e_model(n_map, ws_labels[i]) avg_sils[i] = mh.int_eval_silhouette(in_data, cluster_labels[i], method='som_watershed', param=param) try: ch_scs[i]", "max_dim = g_dim + 10 if g_dim + 10 > 10 else 20", "than one set \"\"\" len_watershed = ws_labels.shape[0] cluster_labels = np.zeros((len_watershed, len(in_data))) avg_sils =", "def closest_n(value): \"\"\"Assign cluster number to the mask's border indexes by using the", "getting the winner # palce a marker on the winning position for the", "= gen_param_grid(init_guess) dims = np.zeros(max_eval) iters = np.zeros(max_eval) lrs = np.zeros(max_eval) sigmas =", "sigma : float, optional spread of the neighborhood function, by default 2.5 
Returns", "(avg_ents[i], avg_purs[i])) i += 1 best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1", "if(plot): fig, axes = plt.subplots(ncols=ncols, nrows=num_bins, figsize=(12, num_bins*3), sharex=True, sharey=True) ax = axes.ravel()", "- 1 ws_labels[i*ncols + 5] = random_walker(image, markers) if(plot): ax[i*ncols + 3].imshow(ws_labels[i*ncols +", "k) c_idx = c_idx[0, :k] mins_idx = np.array(list(zip(vals[c_idx, 0], vals[c_idx, 1]))) class_counter =", "\"\"\" Compute a default dimension of the SOMs. This function returns the dimension", "som : MiniSom trained Minisom object in_data : np.array or list data matrix", "of all the histogram bins plot : bool, optional flag whether to plot", "1 best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, best_idx) else: return (cluster_labels[best_idx],", "conn=None): num_bins = len(bins) \"\"\"Computes and classify the SOM's u-matrix or total gradient", "parameters. \"\"\" random.seed(seed) param_grid = gen_param_grid(init_guess) dims = np.zeros(max_eval) iters = np.zeros(max_eval) lrs", "iterations for SOMs to perform lr : float learning rate sigma : float", "= 'SOMs_results/' + save_name + '_plot.png' plt.savefig(save_dir) print('Plot saved at:', save_dir) plt.show() def", "spread of the neighborhood function, by default 2.5 Returns ------- MiniSom an object", "label \"\"\" som = som_assemble(in_data, seed, dim, lr, sigma) som.train_random(in_data, iter_cnt, verbose=False) u_matrix", "g_dim + 10 if g_dim + 10 > 10 else 20 param_grid =", "iter=%d, lr=%.6f, sigma=%.6f\\ result to very small / large number of clusters (n_clusters", "all the border index populated \"\"\" borders = np.array(np.where(value == border_val)).T new_label =", "function, by default 2.5dim : int seed : integer, optional random seed for", "index populated \"\"\" borders = np.array(np.where(value == 0)).T new_label = np.array(value) vals =", "None Returns ------- np.array numpy array of predicted 
cluster labels from each watershed", "'\\n\\n' # save report to file fdir = save_name + '_report.csv' print('Report saved", "30): logging.info(\"Random search using dim=%d, iter=%d, lr=%.6f, sigma=%.6f\\ result to very small /", "KNN(value, k=5, border_val=0): \"\"\"Assign cluster number to the mask's border indexes by using", "iters = np.zeros(max_eval) lrs = np.zeros(max_eval) sigmas = np.zeros(max_eval) avg_sils = np.full(max_eval, np.nan)", "lrs[i], sigmas[i], avg_sils[i], ch_scs[i])) if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i],", "1 ws_labels[i*ncols + 2] = random_walker(image, markers) if(plot): ax[i*ncols].imshow(ws_labels[i*ncols + 0], origin='lower') ax[i*ncols].title.set_text('b_cn:", "---------- image : np.array u-matrix or total gradient of the SOMs bins :", "point Returns ------- np.array list of best watershed labels, may contain more than", "the SOMs. The size returned is sqrt(5 * sqrt(num_sample)), with the exception that", "ch_scs[i])) if(label is not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i], init_clus=-1) logging.info(\"ent=%.6f, pur=%.6f\"", "= mh.ext_eval_entropy(label, cluster_labels[i], init_clus=-1) logging.info(\"ent=%.6f, pur=%.6f\" % (avg_ents[i], avg_purs[i])) i += 1 best_idx", "ax[i*ncols + 3].imshow(ws_labels[i*ncols + 3], origin='lower') ax[i*ncols + 3].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols +", "cluster label and its counterpart parameters. 
\"\"\" random.seed(seed) param_grid = gen_param_grid(init_guess) dims =", "5].imshow(ws_labels[i*ncols + 5], origin='lower') ax[i*ncols + 5].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 5])))) return", "histogram bins \"\"\" f_image = som_u_mat.flatten() fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12,", "None): best_idx.append(np.nanargmin(np.array(avg_ents))) # closest to 0 best_idx.append(np.nanargmax(np.array(avg_purs))) # closest to 1 best_idx =", "vals).argmin() new_label[b[0], b[1]] = value[vals[c_idx, 0]][vals[c_idx, 1]] return new_label def KNN(value, k=5, border_val=0):", "'_plot.png' plt.savefig(save_dir) print('Plot saved at:', save_dir) plt.show() def save_som_report(som, save_name, it, et, report=None):", "sigmas[i] = list(random_params.values()) som = som_assemble(in_data, seed, int(dims[i]), lr=lrs[i], sigma=sigmas[i]) som.train_random(in_data, int(iters[i]), verbose=False)", "np.nan) ch_scs = np.full(len_watershed, np.nan) if(label is not None): avg_ents = np.full(len_watershed, np.nan)", "origin='lower') ax[i*ncols + 4].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 4])))) ax[i*ncols + 5].imshow(ws_labels[i*ncols +", "of dimension, number of iterations, learning rate, and sigma max_eval : int, optional", "mh.int_eval_silhouette(in_data, cluster_labels[i], method='som_watershed', param=param) try: ch_scs[i] = mh.cal_har_sc(in_data, cluster_labels[i]) except: ch_scs[i] = -1", "1), np.arange(ttl_dtp), np.sort(in_data)) def plot_u_matrix(som_u_mat): \"\"\"Plots the distance map / u-matrix of the", "= np.zeros(max_eval) sigmas = np.zeros(max_eval) avg_sils = np.full(max_eval, np.nan) ch_scs = np.full(max_eval, np.nan)", "# palce a marker on the winning position for the sample xx plt.text(w[0]+.5,", "+ 1].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 1])))) ax[i*ncols + 2].imshow(ws_labels[i*ncols + 2], origin='lower')", 
"ws_labels[i*ncols + 3] = closest_n(markers) - 1 ws_labels[i*ncols + 4] = KNN(markers) -", "list(range(min_dim, max_dim+1)), 'iter_cnt': list(range(g_it - 500, g_it + 500, 200)), 'learning_rate': list(np.logspace(np.log10(0.25), np.log10(0.75),", "np.int(np.sqrt(dim)) if dim < 10: return 10 else: return dim def som_assemble(in_data, seed,", "trained Minisom object in_data : np.array or list data matrix label : np.array", "and sigma max_eval : int, optional number of max iterartion to perform the", "plot as png file, by default 'temp' \"\"\" plt.figure(figsize=(9, 7)) # Plotting the", "str(et) + '\\n\\n' # save report to file fdir = save_name + '_report.csv'", "n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 0])))) ax[i*ncols + 1].imshow(ws_labels[i*ncols + 1], origin='lower') ax[i*ncols + 1].title.set_text('b_knn:", "return som, cluster_labels def gen_param_grid(init_guess): g_dim, g_it, g_lr, g_sigma = init_guess min_dim =", "np.array(np.where(value == border_val)).T new_label = np.array(value) vals = np.where(value != 0) if(len(vals[0]) <", "closest value c_idx = distance.cdist([b], vals).argmin() new_label[b[0], b[1]] = value[vals[c_idx, 0]][vals[c_idx, 1]] return", "cluster labels from each watershed level \"\"\" ncols = 6 if(plot): fig, axes", "plot_som(som, in_data, label, save=False, save_name='temp'): \"\"\"plots the distance map / u-matrix of the", "parameters, in order of dimension, number of iterations, learning rate, and sigma max_eval", "sharey=True) ax = axes.ravel() ws_labels = np.zeros((num_bins * ncols, image.shape[0], image.shape[1])) for i", "matrix label : np.array or list the true label of each data point", "collections import Counter from timeit import default_timer as timer import random from acse_9_irp_wafflescore", "else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx]) def run_SOMs(in_data, dim, iter_cnt, lr, sigma, seed=10): \"\"\"Method", "som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return 
np.array(som_class) def closest_n(value): \"\"\"Assign cluster number to the mask's border indexes", "g_lr, g_sigma = init_guess min_dim = g_dim - 10 if g_dim - 5", "its counterpart parameters. \"\"\" random.seed(seed) param_grid = gen_param_grid(init_guess) dims = np.zeros(max_eval) iters =", "str(it) + \\ '\\nelapsed time,' + str(et) + '\\n\\n' # save report to", "map / u-matrix of the SOMs along with the label Parameters ---------- som", ": MiniSom trained Minisom object in_data : np.array or list data matrix label", "new_label = np.array(value) vals = np.where(value != 0) vals = np.array(vals).T for b", "lr=lrs[i], sigma=sigmas[i]) som.train_random(in_data, int(iters[i]), verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels =", "sigma : float spread of the neighborhood function, by default 2.5dim : int", "will populate the SOMs Returns ------- int Ideal dimension. \"\"\" dim = 5", "cluster_labels[i]) best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1 best_idx.append(np.nanargmax(ch_scs)) # higher =", "import MiscHelpers as mh import logging import sys logging.basicConfig(format='%(asctime)s | %(levelname)s : %(message)s',", "learning rate sigma : float spread of the neighborhood function, by default 2.5dim", "= {'watershed idx': i} if(len(np.unique(ws_labels[i])) > 1): cluster_labels[i] = gen_e_model(n_map, ws_labels[i]) avg_sils[i] =", "iter_cnt : integer number of iterations for SOMs to perform lr : float", "[] for i in range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]]) return np.array(som_class) def closest_n(value): \"\"\"Assign cluster number", "= np.zeros((num_bins * ncols, image.shape[0], image.shape[1])) for i in range(num_bins): val = filters.threshold_local(image,", "minimum dimension size = 10 Parameters ---------- num_sample : int Total number of", "sqrt(num_sample)), with the exception that the minimum dimension size = 10 Parameters 
----------", "function returns the dimension size of the SOMs. The size returned is sqrt(5", "ws_labels, n_map, label=None, re_all=False): \"\"\"Evaluate and return the best watershed prediction result Parameters", "iters[i], lrs[i], sigmas[i], n_clusters)) continue logging.info(\"dim=%d, iter=%d, lr=%.6f, sigma=%.6f, sil=%.6f, ch=%.6f\" % (dims[i],", "in_data : np.array or list data array nbin : int number of bins", "list data matrix init_guess : tuple list of initial guess of the parameters,", "best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx]) def run_SOMs(in_data, dim, iter_cnt, lr, sigma, seed=10):", "or list, optional the true label of each data point, by default None", "ws_labels[i*ncols + 2] = random_walker(image, markers) if(plot): ax[i*ncols].imshow(ws_labels[i*ncols + 0], origin='lower') ax[i*ncols].title.set_text('b_cn: it={}", "of the parameters, in order of dimension, number of iterations, learning rate, and", "xx plt.text(w[0]+.5, w[1]+.5, str(t), color=plt.cm.rainbow(t/10.)) plt.axis([0, som.get_weights().shape[0], 0, som.get_weights().shape[1]]) if(save): save_dir = 'SOMs_results/'", "for b in borders: # find index of the closest value c_idx =", "dimension of the SOMs distance matrix iter_cnt : integer number of iterations for", "or list data matrix ws_labels : np.array predicted cluster labels from watershed segmentation", "logging.info(\"ent=%.6f, pur=%.6f\" % (avg_ents[i], avg_purs[i])) i += 1 best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) #", "np.array or list data matrix dim : int dimension of the SOMs distance", "mh import logging import sys logging.basicConfig(format='%(asctime)s | %(levelname)s : %(message)s', level=logging.INFO, stream=sys.stdout) def", "g_dim - 5 > 10 else 10 max_dim = g_dim + 10 if", "to perform the search, by default 20 label : np.array or list, optional", "plot_u_matrix(som_u_mat): \"\"\"Plots the distance map / u-matrix of the SOMs Parameters ---------- som", "vals 
= np.where(value != 0) vals = np.array(vals).T for b in borders: #", "v in param_grid.items()} dims[i], iters[i], lrs[i], sigmas[i] = list(random_params.values()) som = som_assemble(in_data, seed,", "- 1 ws_labels[i*ncols + 4] = KNN(markers) - 1 ws_labels[i*ncols + 5] =", "gen_param_grid(init_guess): g_dim, g_it, g_lr, g_sigma = init_guess min_dim = g_dim - 10 if", "from collections import Counter from timeit import default_timer as timer import random from", "= plt.subplots(ncols=ncols, nrows=num_bins, figsize=(12, num_bins*3), sharex=True, sharey=True) ax = axes.ravel() ws_labels = np.zeros((num_bins", "random_params = {k: random.sample(v, 1)[0] for k, v in param_grid.items()} dims[i], iters[i], lrs[i],", "of all the histogram bins \"\"\" f_image = som_u_mat.flatten() fig, (ax1, ax2) =", "np.full(max_eval, np.nan) cluster_labels = np.zeros((max_eval, len(in_data))) if(label is not None): avg_ents = np.full(max_eval,", "list the true label of each data point save : bool, optional flag,", "each data point, by default None seed : integer, optional random seed for", "level or not, by default False conn : int, optional connectivity flag for", "guess of the parameters, in order of dimension, number of iterations, learning rate,", "watershed prediction result Parameters ---------- in_data : np.array or list data matrix ws_labels", "number of neighbor to consider, by default 5 Returns ------- np.array new label", "plt.axis([0, som.get_weights().shape[0], 0, som.get_weights().shape[1]]) if(save): save_dir = 'SOMs_results/' + save_name + '_plot.png' plt.savefig(save_dir)", "+ 1), np.arange(ttl_dtp), np.sort(in_data)) def plot_u_matrix(som_u_mat): \"\"\"Plots the distance map / u-matrix of", "= eval_ws(in_data, ws_labels, n_map) cluster_labels[i], avg_sils[i], ch_scs[i] = _c[0], _as[0], _ch[0] n_clusters =", "Returns ------- np.array list of best watershed labels, may contain more than one", "default 2.5 Returns ------- MiniSom an object of Minisom class, see minisom.py 
for", "a default dimension of the SOMs. This function returns the dimension size of", "int, optional connectivity flag for measure.label, by default None Returns ------- np.array numpy", "_c, _as, _ch = eval_ws(in_data, ws_labels, n_map) cluster_labels[i], avg_sils[i], ch_scs[i] = _c[0], _as[0],", "for reproducibility dim : int dimension of the SOMs distance matrix lr :", "int(iters[i]), verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map", "# find index of the closest value c_idx = distance.cdist([b], vals).argmin() new_label[b[0], b[1]]", "b[1]] = cl return new_label def watershed_level(image, bins, border_width=0.1, plot=False, conn=None): num_bins =", "val = filters.threshold_local(image, block_size=3 + 2*i) block_mask = (image < val) markers =", "= histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) _c, _as, _ch =", "from skimage import filters from scipy.spatial import distance from collections import Counter from", "dims[i], iters[i], lrs[i], sigmas[i] = list(random_params.values()) som = som_assemble(in_data, seed, int(dims[i]), lr=lrs[i], sigma=sigmas[i])", "val) markers = measure.label(block_mask, connectivity=conn) ws_labels[i*ncols] = closest_n(markers) - 1 ws_labels[i*ncols + 1]", "np.full(max_eval, np.nan) avg_purs = np.full(max_eval, np.nan) i = 0 while i < max_eval:", "return np.array(som_class) def closest_n(value): \"\"\"Assign cluster number to the mask's border indexes by", "better if(label is not None): best_idx.append(np.nanargmin(np.array(avg_ents))) # closest to 0 best_idx.append(np.nanargmax(np.array(avg_purs))) # closest", "number of max iterartion to perform the search, by default 20 label :", "segmentation n_map : np.array array of the winner neuron label : np.array or", "minisom import MiniSom, asymptotic_decay import numpy as np import matplotlib.pyplot as 
plt import", "= str(save_name) + '\\n---' + \\ '\\niterations,' + str(it) + \\ '\\nelapsed time,'", "random_walker from skimage import filters from scipy.spatial import distance from collections import Counter", "default False save_name : str, optional the name which will be used to", "see minisom.py for further details \"\"\" # Initialization som and weights num_features =", "ch_scs[i] = mh.cal_har_sc(in_data, cluster_labels[i]) except: ch_scs[i] = -1 if(label is not None): avg_ents[i],", "order of dimension, number of iterations, learning rate, and sigma max_eval : int,", "inputted value.\") return value vals = np.array(vals).T for b in borders: # find", "import numpy as np import matplotlib.pyplot as plt import itertools from skimage import", "the closest value c_idx = distance.cdist([b], vals).argmin() new_label[b[0], b[1]] = value[vals[c_idx, 0]][vals[c_idx, 1]]", "_as[0], _ch[0] n_clusters = len(np.unique(cluster_labels[i])) if(n_clusters < 5 or n_clusters > 30): logging.info(\"Random", "ws_labels[i*ncols + 5] = random_walker(image, markers) if(plot): ax[i*ncols + 3].imshow(ws_labels[i*ncols + 3], origin='lower')", "using watershed classification method Parameters ---------- image : np.array u-matrix or total gradient", "MiniSom trained Minisom object in_data : np.array or list data matrix label :", "SOMs Parameters ---------- in_data : np.array or list data matrix dim : int", "hist = plt.hist(f_image, histedges_equalN(f_image, 10), density=True) return hist[1] def gen_e_model(n_map, som_label): \"\"\"generates the", "to very small / large number of clusters (n_clusters = %d)\\ \" %", "marked with 0 k : int, optional number of neighbor to consider, by", "- 1 ws_labels[i*ncols + 1] = KNN(markers) - 1 ws_labels[i*ncols + 2] =", "list(np.linspace(g_sigma-1, g_sigma+1, num=30)), } return param_grid def random_search_som(in_data, init_guess, max_eval=20, label=None, seed=10, re_all=False):", "+ '_report.csv' print('Report saved at', fdir) mode = 'w' f1 = 
open(fdir, mode)", "bool, optional flag whether to plot the watershed level or not, by default", "dim : int dimension of the SOMs distance matrix lr : float, optional", "avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i], init_clus=-1) logging.info(\"ent=%.6f, pur=%.6f\" % (avg_ents[i], avg_purs[i])) i +=", "save_som_report(som, save_name, it, et, report=None): param_vals = str(save_name) + '\\n---' + \\ '\\niterations,'", "\"\"\"Plots the distance map / u-matrix of the SOMs Parameters ---------- som :", "matrix init_guess : tuple list of initial guess of the parameters, in order", "optional the true label of each data point, by default None seed :", "the SOMs model for training Parameters ---------- in_data : np.array or list data", "np.nan) if(label is not None): avg_ents = np.full(len_watershed, np.nan) avg_purs = np.full(len_watershed, np.nan)", "run SOMs Parameters ---------- in_data : np.array or list data matrix dim :", "of predicted cluster labels from each watershed level \"\"\" ncols = 6 if(plot):", "10 else 20 param_grid = { 'dim': list(range(min_dim, max_dim+1)), 'iter_cnt': list(range(g_it - 500,", "SOMs along with the label Parameters ---------- som : MiniSom trained Minisom object", "best_idx.append(np.nanargmin(np.array(avg_ents))) # closest to 0 best_idx.append(np.nanargmax(np.array(avg_purs))) # closest to 1 best_idx = np.unique(best_idx)", "max iterartion to perform the search, by default 20 label : np.array or", "6 if(plot): fig, axes = plt.subplots(ncols=ncols, nrows=num_bins, figsize=(12, num_bins*3), sharex=True, sharey=True) ax =", "cluster_labels = np.zeros((max_eval, len(in_data))) if(label is not None): avg_ents = np.full(max_eval, np.nan) avg_purs", "= np.array(value) vals = np.where(value != 0) vals = np.array(vals).T for b in", "returns the dimension size of the SOMs. 
The size returned is sqrt(5 *", "+ '_plot.png' plt.savefig(save_dir) print('Plot saved at:', save_dir) plt.show() def save_som_report(som, save_name, it, et,", "> 1): cluster_labels[i] = gen_e_model(n_map, ws_labels[i]) avg_sils[i] = mh.int_eval_silhouette(in_data, cluster_labels[i], method='som_watershed', param=param) try:", "histogram where each bin will contain the same number of data points Parameters", "Returns ------- int Ideal dimension. \"\"\" dim = 5 * np.sqrt(num_sample) dim =", "= np.array(np.where(value == 0)).T new_label = np.array(value) vals = np.where(value != 0) vals", "str(save_name) + '\\n---' + \\ '\\niterations,' + str(it) + \\ '\\nelapsed time,' +", "Minisom object Returns ------- np.array numpy array of all the histogram bins \"\"\"", "verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins) n_map =", "random_seed=seed) som.pca_weights_init(in_data) return som def plot_som(som, in_data, label, save=False, save_name='temp'): \"\"\"plots the distance", "wafflescore \"\"\" from minisom import MiniSom, asymptotic_decay import numpy as np import matplotlib.pyplot", "borders are marked with 0 k : int, optional number of neighbor to", "= (image <= bins[i]) markers = measure.label(thres_mask, connectivity=conn) ws_labels[i*ncols + 3] = closest_n(markers)", "g_sigma = init_guess min_dim = g_dim - 10 if g_dim - 5 >", "len(np.unique(cluster_labels[i])) if(n_clusters < 5 or n_clusters > 30): logging.info(\"Random search using dim=%d, iter=%d,", "def eval_ws(in_data, ws_labels, n_map, label=None, re_all=False): \"\"\"Evaluate and return the best watershed prediction", "for reproducibility, by default 10 Returns ------- minisom minisom object np.array cluster label", "nbin=10): \"\"\"generates a histogram where each bin will contain the same number of", "and classify the SOM's u-matrix or total gradient using watershed classification method Parameters", "* 
np.sqrt(num_sample) dim = np.int(np.sqrt(dim)) if dim < 10: return 10 else: return", "in zip(label, in_data): w = som.winner(xx) # getting the winner # palce a", "iterartion to perform the search, by default 20 label : np.array or list,", "if(plot): ax[i*ncols].imshow(ws_labels[i*ncols + 0], origin='lower') ax[i*ncols].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 0])))) ax[i*ncols +", "idx in mins_idx: class_counter[value[idx[0], idx[1]]] += 1 cl = class_counter.most_common(1)[0][0] new_label[b[0], b[1]] =", "'sigma': list(np.linspace(g_sigma-1, g_sigma+1, num=30)), } return param_grid def random_search_som(in_data, init_guess, max_eval=20, label=None, seed=10,", "bins plot : bool, optional flag whether to plot the watershed level or", "to fully run SOMs Parameters ---------- in_data : np.array or list data matrix", "+ 500, 200)), 'learning_rate': list(np.logspace(np.log10(0.25), np.log10(0.75), base=10, num=100)), 'sigma': list(np.linspace(g_sigma-1, g_sigma+1, num=30)), }", "+ 3].imshow(ws_labels[i*ncols + 3], origin='lower') ax[i*ncols + 3].title.set_text('b_cn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 3]))))", "\"\"\"Method to fully run SOMs Parameters ---------- in_data : np.array or list data", "image : np.array u-matrix or total gradient of the SOMs bins : np.array", "matrix seed : integer random seed for reproducibility dim : int dimension of", "dim, lr=0.5, sigma=2.5): \"\"\"Initialize the SOMs model for training Parameters ---------- in_data :", "SOMs to perform lr : float learning rate sigma : float spread of", "the distance map / u-matrix of the SOMs Parameters ---------- som : MiniSom", "Parameters ---------- num_sample : int Total number of data points that will populate", "gen_param_grid(init_guess) dims = np.zeros(max_eval) iters = np.zeros(max_eval) lrs = np.zeros(max_eval) sigmas = np.zeros(max_eval)", "%(message)s', level=logging.INFO, stream=sys.stdout) def compute_dim(num_sample): 
\"\"\" Compute a default dimension of the SOMs.", "np import matplotlib.pyplot as plt import itertools from skimage import measure from skimage.segmentation", "Returns ------- MiniSom an object of Minisom class, see minisom.py for further details", "KNN(markers) - 1 ws_labels[i*ncols + 2] = random_walker(image, markers) if(plot): ax[i*ncols].imshow(ws_labels[i*ncols + 0],", "5 or n_clusters > 30): logging.info(\"Random search using dim=%d, iter=%d, lr=%.6f, sigma=%.6f\\ result", "max_eval=20, label=None, seed=10, re_all=False): \"\"\"perform random search for SOMs best parameters. Parameters ----------", "0)).T new_label = np.array(value) vals = np.where(value != 0) vals = np.array(vals).T for", "t, xx in zip(label, in_data): w = som.winner(xx) # getting the winner #", "data matrix seed : integer random seed for reproducibility dim : int dimension", "watershed level \"\"\" ncols = 6 if(plot): fig, axes = plt.subplots(ncols=ncols, nrows=num_bins, figsize=(12,", ": np.array or list data matrix seed : integer random seed for reproducibility", "= np.full(len_watershed, np.nan) avg_purs = np.full(len_watershed, np.nan) for i in range(len_watershed): param =", ": %(message)s', level=logging.INFO, stream=sys.stdout) def compute_dim(num_sample): \"\"\" Compute a default dimension of the", "= np.array(np.where(value == border_val)).T new_label = np.array(value) vals = np.where(value != 0) if(len(vals[0])", "# closest to 1 best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, best_idx)", "labels from watershed segmentation n_map : np.array array of the winner neuron label", "object np.array cluster label \"\"\" som = som_assemble(in_data, seed, dim, lr, sigma) som.train_random(in_data,", "further details \"\"\" # Initialization som and weights num_features = np.shape(in_data)[1] som =", "sigma=2.5): \"\"\"Initialize the SOMs model for training Parameters ---------- in_data : np.array or", "(dims[i], iters[i], lrs[i], sigmas[i], avg_sils[i], 
ch_scs[i])) if(label is not None): avg_ents[i], avg_purs[i] =", ": int dimension of the SOMs distance matrix iter_cnt : integer number of", "trained Minisom object Returns ------- np.array numpy array of all the histogram bins", "1] = KNN(markers) - 1 ws_labels[i*ncols + 2] = random_walker(image, markers) if(plot): ax[i*ncols].imshow(ws_labels[i*ncols", "eval_ws(in_data, ws_labels, n_map) cluster_labels[i], avg_sils[i], ch_scs[i] = _c[0], _as[0], _ch[0] n_clusters = len(np.unique(cluster_labels[i]))", "run_SOMs(in_data, dim, iter_cnt, lr, sigma, seed=10): \"\"\"Method to fully run SOMs Parameters ----------", "n_map : np.array array of the winner neuron label : np.array or list,", "clusters (n_clusters = %d)\\ \" % (dims[i], iters[i], lrs[i], sigmas[i], n_clusters)) continue logging.info(\"dim=%d,", "will contain the same number of data points Parameters ---------- in_data : np.array", "classify the SOM's u-matrix or total gradient using watershed classification method Parameters ----------", "!= 0) vals = np.array(vals).T for b in borders: # find index of", "best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, dims, iters, lrs, sigmas, best_idx)", "in_data : np.array or list data matrix label : np.array or list the", "g_dim - 10 if g_dim - 5 > 10 else 10 max_dim =", "class_counter = Counter() for idx in mins_idx: class_counter[value[idx[0], idx[1]]] += 1 cl =", "Returns ------- np.array new label with all the border index populated \"\"\" borders", "iterations, learning rate, and sigma max_eval : int, optional number of max iterartion", "0.5 sigma : float, optional spread of the neighborhood function, by default 2.5", "dim=%d, iter=%d, lr=%.6f, sigma=%.6f\\ result to very small / large number of clusters", "zip(label, in_data): w = som.winner(xx) # getting the winner # palce a marker", "k, v in param_grid.items()} dims[i], iters[i], lrs[i], sigmas[i] = list(random_params.values()) som = som_assemble(in_data,", "= 
np.full(len_watershed, np.nan) for i in range(len_watershed): param = {'watershed idx': i} if(len(np.unique(ws_labels[i]))", "= class_counter.most_common(1)[0][0] new_label[b[0], b[1]] = cl return new_label def watershed_level(image, bins, border_width=0.1, plot=False,", "ax[i*ncols + 1].imshow(ws_labels[i*ncols + 1], origin='lower') ax[i*ncols + 1].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols +", "c_idx = np.argpartition(dist, k) c_idx = c_idx[0, :k] mins_idx = np.array(list(zip(vals[c_idx, 0], vals[c_idx,", "default None Returns ------- np.array numpy array of predicted cluster labels from each", "random seed for reproducibility dim : int dimension of the SOMs distance matrix", "a marker on the winning position for the sample xx plt.text(w[0]+.5, w[1]+.5, str(t),", "som.train_random(in_data, int(iters[i]), verbose=False) u_matrix = som.distance_map().T watershed_bins = histedges_equalN(u_matrix.flatten()) ws_labels = watershed_level(u_matrix, watershed_bins)", "density=True) return hist[1] def gen_e_model(n_map, som_label): \"\"\"generates the Earth model from neuron map\"\"\"", "border index populated \"\"\" borders = np.array(np.where(value == border_val)).T new_label = np.array(value) vals", ": tuple list of initial guess of the parameters, in order of dimension,", "where each bin will contain the same number of data points Parameters ----------", "np.array list of best watershed labels, may contain more than one set \"\"\"", "\"\"\"Initialize the SOMs model for training Parameters ---------- in_data : np.array or list", "xx in zip(label, in_data): w = som.winner(xx) # getting the winner # palce", "the parameters, in order of dimension, number of iterations, learning rate, and sigma", "10 Returns ------- All cluster label and its counterpart parameters. 
\"\"\" random.seed(seed) param_grid", "ch_scs = np.full(max_eval, np.nan) cluster_labels = np.zeros((max_eval, len(in_data))) if(label is not None): avg_ents", "of iterations, learning rate, and sigma max_eval : int, optional number of max", "10), density=True) return hist[1] def gen_e_model(n_map, som_label): \"\"\"generates the Earth model from neuron", "Earth model from neuron map\"\"\" som_class = [] for i in range(len(n_map)): som_class.append(som_label[n_map[i][0]][n_map[i][1]])", "measure.label(block_mask, connectivity=conn) ws_labels[i*ncols] = closest_n(markers) - 1 ws_labels[i*ncols + 1] = KNN(markers) -", "the distance map as background plt.colorbar() for t, xx in zip(label, in_data): w", "len(in_data) return np.interp(np.linspace(0, ttl_dtp, nbin + 1), np.arange(ttl_dtp), np.sort(in_data)) def plot_u_matrix(som_u_mat): \"\"\"Plots the", "+ 5], origin='lower') ax[i*ncols + 5].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 5])))) return ws_labels", "idx[1]]] += 1 cl = class_counter.most_common(1)[0][0] new_label[b[0], b[1]] = cl return new_label def", "b in borders: # find index of the closest value c_idx = distance.cdist([b],", "learning rate, and sigma max_eval : int, optional number of max iterartion to", "it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 4])))) ax[i*ncols + 5].imshow(ws_labels[i*ncols + 5], origin='lower') ax[i*ncols +", "ws_labels = watershed_level(u_matrix, watershed_bins) n_map = som.neuron_map(in_data) _c, _as, _ch = eval_ws(in_data, ws_labels,", "along with the label Parameters ---------- som : MiniSom trained Minisom object in_data", "'\\niterations,' + str(it) + \\ '\\nelapsed time,' + str(et) + '\\n\\n' # save", "nrows=num_bins, figsize=(12, num_bins*3), sharex=True, sharey=True) ax = axes.ravel() ws_labels = np.zeros((num_bins * ncols,", "rate, and sigma max_eval : int, optional number of max iterartion to perform", ": np.array or list data matrix ws_labels : np.array predicted 
cluster labels from", "array of the cluster number, noted that the borders are marked with 0", "label with all the border index populated \"\"\" borders = np.array(np.where(value == border_val)).T", "- 1 ws_labels[i*ncols + 2] = random_walker(image, markers) if(plot): ax[i*ncols].imshow(ws_labels[i*ncols + 0], origin='lower')", "= better if(label is not None): best_idx.append(np.nanargmin(np.array(avg_ents))) # closest to 0 best_idx.append(np.nanargmax(np.array(avg_purs))) #", "------- minisom minisom object np.array cluster label \"\"\" som = som_assemble(in_data, seed, dim,", "random_walker(image, markers) if(plot): ax[i*ncols + 3].imshow(ws_labels[i*ncols + 3], origin='lower') ax[i*ncols + 3].title.set_text('b_cn: it={}", "\"\"\" len_watershed = ws_labels.shape[0] cluster_labels = np.zeros((len_watershed, len(in_data))) avg_sils = np.full(len_watershed, np.nan) ch_scs", "of the SOMs along with the label Parameters ---------- som : MiniSom trained", "avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i]) best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest to 1", "save_name='temp'): \"\"\"plots the distance map / u-matrix of the SOMs along with the", "np.nan) cluster_labels = np.zeros((max_eval, len(in_data))) if(label is not None): avg_ents = np.full(max_eval, np.nan)", "= closest_n(markers) - 1 ws_labels[i*ncols + 1] = KNN(markers) - 1 ws_labels[i*ncols +", "u-matrix of the SOMs along with the label Parameters ---------- som : MiniSom", "the histogram bins \"\"\" f_image = som_u_mat.flatten() fig, (ax1, ax2) = plt.subplots(1, 2,", "_ch = eval_ws(in_data, ws_labels, n_map) cluster_labels[i], avg_sils[i], ch_scs[i] = _c[0], _as[0], _ch[0] n_clusters", "image.shape[0], image.shape[1])) for i in range(num_bins): val = filters.threshold_local(image, block_size=3 + 2*i) block_mask", "n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 4])))) ax[i*ncols + 5].imshow(ws_labels[i*ncols + 5], origin='lower') ax[i*ncols + 
5].title.set_text('b_rw:", "<= bins[i]) markers = measure.label(thres_mask, connectivity=conn) ws_labels[i*ncols + 3] = closest_n(markers) - 1", "from timeit import default_timer as timer import random from acse_9_irp_wafflescore import MiscHelpers as", "4].title.set_text('b_knn: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 4])))) ax[i*ncols + 5].imshow(ws_labels[i*ncols + 5], origin='lower') ax[i*ncols", "random search for SOMs best parameters. Parameters ---------- in_data : np.array or list", "to 0 best_idx.append(np.nanargmax(np.array(avg_purs))) # closest to 1 best_idx = np.unique(best_idx) if(re_all): return (cluster_labels,", "10 Parameters ---------- num_sample : int Total number of data points that will", "not None): avg_ents[i], avg_purs[i] = mh.ext_eval_entropy(label, cluster_labels[i]) best_idx = [] best_idx.append(np.nanargmax(np.array(avg_sils))) # closest", "or list data matrix init_guess : tuple list of initial guess of the", "timeit import default_timer as timer import random from acse_9_irp_wafflescore import MiscHelpers as mh", "counterpart parameters. 
\"\"\" random.seed(seed) param_grid = gen_param_grid(init_guess) dims = np.zeros(max_eval) iters = np.zeros(max_eval)", "contain the same number of data points Parameters ---------- in_data : np.array or", "= save_name + '_report.csv' print('Report saved at', fdir) mode = 'w' f1 =", "range(len_watershed): param = {'watershed idx': i} if(len(np.unique(ws_labels[i])) > 1): cluster_labels[i] = gen_e_model(n_map, ws_labels[i])", "mask's border indexes by using the K-nearest neighbor method Parameters ---------- value :", "4])))) ax[i*ncols + 5].imshow(ws_labels[i*ncols + 5], origin='lower') ax[i*ncols + 5].title.set_text('b_rw: it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols", "_ch[0] n_clusters = len(np.unique(cluster_labels[i])) if(n_clusters < 5 or n_clusters > 30): logging.info(\"Random search", "optional spread of the neighborhood function, by default 2.5 Returns ------- MiniSom an", "at:', save_dir) plt.show() def save_som_report(som, save_name, it, et, report=None): param_vals = str(save_name) +", "report=None): param_vals = str(save_name) + '\\n---' + \\ '\\niterations,' + str(it) + \\", "logging.basicConfig(format='%(asctime)s | %(levelname)s : %(message)s', level=logging.INFO, stream=sys.stdout) def compute_dim(num_sample): \"\"\" Compute a default", "list, optional the true label of each data point, by default None seed", "bool, optional flag, by default False save_name : str, optional the name which", "np.full(max_eval, np.nan) i = 0 while i < max_eval: random_params = {k: random.sample(v,", "scipy.spatial import distance from collections import Counter from timeit import default_timer as timer", "to file fdir = save_name + '_report.csv' print('Report saved at', fdir) mode =", "it={} n_class={}'.format(i, len(np.unique(ws_labels[i*ncols + 5])))) return ws_labels def eval_ws(in_data, ws_labels, n_map, label=None, re_all=False):", "\"\"\" from minisom import MiniSom, asymptotic_decay import numpy as np import matplotlib.pyplot as", "+ 10 > 10 else 
20 param_grid = { 'dim': list(range(min_dim, max_dim+1)), 'iter_cnt':", "and return the best watershed prediction result Parameters ---------- in_data : np.array or", "n_map = som.neuron_map(in_data) cluster_labels, _, _ = eval_ws(in_data, ws_labels, n_map) return som, cluster_labels", "figsize=(12, 5)) fig.show() ax1.pcolor(som_u_mat, cmap='bone_r') hist = plt.hist(f_image, histedges_equalN(f_image, 10), density=True) return hist[1]", "ch_scs, best_idx) else: return (cluster_labels[best_idx], avg_sils[best_idx], ch_scs[best_idx]) def run_SOMs(in_data, dim, iter_cnt, lr, sigma,", "each litho-class plt.pcolor(som.distance_map().T, cmap='bone_r') # plotting the distance map as background plt.colorbar() for", "to 1 best_idx = np.unique(best_idx) if(re_all): return (cluster_labels, avg_sils, ch_scs, dims, iters, lrs,", "ws_labels, n_map) return som, cluster_labels def gen_param_grid(init_guess): g_dim, g_it, g_lr, g_sigma = init_guess", "not None): avg_ents = np.full(len_watershed, np.nan) avg_purs = np.full(len_watershed, np.nan) for i in", "if(re_all): return (cluster_labels, avg_sils, ch_scs, dims, iters, lrs, sigmas, best_idx) else: return (cluster_labels[best_idx],", "import Counter from timeit import default_timer as timer import random from acse_9_irp_wafflescore import", "all the histogram bins \"\"\" f_image = som_u_mat.flatten() fig, (ax1, ax2) = plt.subplots(1,", "ws_labels def eval_ws(in_data, ws_labels, n_map, label=None, re_all=False): \"\"\"Evaluate and return the best watershed", "for SOMs best parameters. Parameters ---------- in_data : np.array or list data matrix", "the original inputted value.\") return value vals = np.array(vals).T for b in borders:", "png file, by default 'temp' \"\"\" plt.figure(figsize=(9, 7)) # Plotting the response for" ]
[ "VERSION_REQUIRED: if index['version'] > VERSION_REQUIRED: print(\"! Source '\" + repo_url + \"' requires", "not in index or not index['repo'] == 'wapkg': return None if not index['version']", "\"' requires newer version of wapkg, \" + 'consider upgrading your software in", "all it's dependencies can be successfully installed def trace_pkg_deps(pkgs_bundle, vs, name): pkg =", "'requirements' in pkg: for req in pkg['requirements']: if not trace_pkg_deps(pkgs_bundle, vs, req): return", "be successfully installed def trace_pkg_deps(pkgs_bundle, vs, name): pkg = None for pkgs in", "'\" + repo_url + \"' requires newer version of wapkg, \" + 'consider", "vs) if not pkg: return False if 'requirements' in pkg: for req in", "None if not index['version'] == VERSION_REQUIRED: if index['version'] > VERSION_REQUIRED: print(\"! Source '\"", "None return pkg # Returns True if package and all it's dependencies can", "urlopen from urllib.error import URLError from urllib.parse import urljoin VERSION_REQUIRED = 3 EXTERNAL_LIST", "pkg: return False if 'requirements' in pkg: for req in pkg['requirements']: if not", "order to use this repo.') return None return index def fetch_external_sources(): sources =", "\" + 'consider upgrading your software in order to use this repo.') return", "this repo.') return None return index def fetch_external_sources(): sources = [] try: with", "repo.') return None return index def fetch_external_sources(): sources = [] try: with urlopen(EXTERNAL_LIST)", "index dictionary object, or None in case of failure def fetch_index(repo_url): try: with", "Unwraps the 'switch' content def select_pkg(pkg, vs): if not pkg: return None if", "pkgs in pkgs_bundle: if name in pkgs: pkg = pkgs[name] break pkg =", "upgrading your software in order to use this repo.') return None return index", "if 'requirements' in pkg: for req in pkg['requirements']: if not trace_pkg_deps(pkgs_bundle, vs, req):", "from urllib.error import URLError from urllib.parse import urljoin 
VERSION_REQUIRED = 3 EXTERNAL_LIST =", "except URLError: pass return sources # Unwraps the 'switch' content def select_pkg(pkg, vs):", "VERSION_REQUIRED = 3 EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab' # Returns repo index dictionary object, or", "return pkg # Returns True if package and all it's dependencies can be", "== VERSION_REQUIRED: if index['version'] > VERSION_REQUIRED: print(\"! Source '\" + repo_url + \"'", "index['repo'] == 'wapkg': return None if not index['version'] == VERSION_REQUIRED: if index['version'] >", "newer version of wapkg, \" + 'consider upgrading your software in order to", "if index['version'] > VERSION_REQUIRED: print(\"! Source '\" + repo_url + \"' requires newer", "with urlopen(EXTERNAL_LIST) as lst_req: for src in lst_req.read().decode('utf-8').split('\\n'): src_ = src.strip() if len(src_)", "switch: return switch['*'] return None return pkg # Returns True if package and", "urljoin VERSION_REQUIRED = 3 EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab' # Returns repo index dictionary object,", "None switch = pkg['switch'] for v in switch: if vs in v.split(','): return", "wapkg, \" + 'consider upgrading your software in order to use this repo.')", "vs: return None switch = pkg['switch'] for v in switch: if vs in", "# Returns True if package and all it's dependencies can be successfully installed", "pkg = pkgs[name] break pkg = select_pkg(pkg, vs) if not pkg: return False", "True if package and all it's dependencies can be successfully installed def trace_pkg_deps(pkgs_bundle,", "if vs in v.split(','): return switch[v] if '*' in switch: return switch['*'] return", "successfully installed def trace_pkg_deps(pkgs_bundle, vs, name): pkg = None for pkgs in pkgs_bundle:", "pkgs_bundle: if name in pkgs: pkg = pkgs[name] break pkg = select_pkg(pkg, vs)", "as index_req: index = json.loads(index_req.read().decode('utf-8')) except URLError: return None if 'repo' not in", "import json from urllib.request import urlopen from urllib.error 
import URLError from urllib.parse import", "None return index def fetch_external_sources(): sources = [] try: with urlopen(EXTERNAL_LIST) as lst_req:", "repo index dictionary object, or None in case of failure def fetch_index(repo_url): try:", "software in order to use this repo.') return None return index def fetch_external_sources():", "pass return sources # Unwraps the 'switch' content def select_pkg(pkg, vs): if not", "to use this repo.') return None return index def fetch_external_sources(): sources = []", "in switch: if vs in v.split(','): return switch[v] if '*' in switch: return", "not index['repo'] == 'wapkg': return None if not index['version'] == VERSION_REQUIRED: if index['version']", "or not index['repo'] == 'wapkg': return None if not index['version'] == VERSION_REQUIRED: if", "+ \"' requires newer version of wapkg, \" + 'consider upgrading your software", "src in lst_req.read().decode('utf-8').split('\\n'): src_ = src.strip() if len(src_) and not src_.startswith('#'): sources.append(src_) except", "src.strip() if len(src_) and not src_.startswith('#'): sources.append(src_) except URLError: pass return sources #", "the 'switch' content def select_pkg(pkg, vs): if not pkg: return None if 'switch'", "installed def trace_pkg_deps(pkgs_bundle, vs, name): pkg = None for pkgs in pkgs_bundle: if", "lst_req.read().decode('utf-8').split('\\n'): src_ = src.strip() if len(src_) and not src_.startswith('#'): sources.append(src_) except URLError: pass", "select_pkg(pkg, vs): if not pkg: return None if 'switch' in pkg: if not", "= None for pkgs in pkgs_bundle: if name in pkgs: pkg = pkgs[name]", "+ repo_url + \"' requires newer version of wapkg, \" + 'consider upgrading", "= [] try: with urlopen(EXTERNAL_LIST) as lst_req: for src in lst_req.read().decode('utf-8').split('\\n'): src_ =", "# Returns repo index dictionary object, or None in case of failure def", "'*' in switch: return switch['*'] return None return pkg # Returns True if", "None in case of failure def 
fetch_index(repo_url): try: with urlopen(urljoin(repo_url, 'index.json')) as index_req:", "in case of failure def fetch_index(repo_url): try: with urlopen(urljoin(repo_url, 'index.json')) as index_req: index", "if name in pkgs: pkg = pkgs[name] break pkg = select_pkg(pkg, vs) if", "vs in v.split(','): return switch[v] if '*' in switch: return switch['*'] return None", "not index['version'] == VERSION_REQUIRED: if index['version'] > VERSION_REQUIRED: print(\"! Source '\" + repo_url", "= pkg['switch'] for v in switch: if vs in v.split(','): return switch[v] if", "index or not index['repo'] == 'wapkg': return None if not index['version'] == VERSION_REQUIRED:", "json from urllib.request import urlopen from urllib.error import URLError from urllib.parse import urljoin", "'switch' content def select_pkg(pkg, vs): if not pkg: return None if 'switch' in", "v in switch: if vs in v.split(','): return switch[v] if '*' in switch:", "in v.split(','): return switch[v] if '*' in switch: return switch['*'] return None return", "== 'wapkg': return None if not index['version'] == VERSION_REQUIRED: if index['version'] > VERSION_REQUIRED:", "dictionary object, or None in case of failure def fetch_index(repo_url): try: with urlopen(urljoin(repo_url,", "index['version'] > VERSION_REQUIRED: print(\"! 
Source '\" + repo_url + \"' requires newer version", "len(src_) and not src_.startswith('#'): sources.append(src_) except URLError: pass return sources # Unwraps the", "Returns repo index dictionary object, or None in case of failure def fetch_index(repo_url):", "3 EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab' # Returns repo index dictionary object, or None in", "requires newer version of wapkg, \" + 'consider upgrading your software in order", "vs): if not pkg: return None if 'switch' in pkg: if not vs:", "pkg: if not vs: return None switch = pkg['switch'] for v in switch:", "return switch[v] if '*' in switch: return switch['*'] return None return pkg #", "switch['*'] return None return pkg # Returns True if package and all it's", "return None switch = pkg['switch'] for v in switch: if vs in v.split(','):", "if 'repo' not in index or not index['repo'] == 'wapkg': return None if", "None for pkgs in pkgs_bundle: if name in pkgs: pkg = pkgs[name] break", "'wapkg': return None if not index['version'] == VERSION_REQUIRED: if index['version'] > VERSION_REQUIRED: print(\"!", "if not vs: return None switch = pkg['switch'] for v in switch: if", "'consider upgrading your software in order to use this repo.') return None return", "= json.loads(index_req.read().decode('utf-8')) except URLError: return None if 'repo' not in index or not", "def fetch_index(repo_url): try: with urlopen(urljoin(repo_url, 'index.json')) as index_req: index = json.loads(index_req.read().decode('utf-8')) except URLError:", "in pkgs_bundle: if name in pkgs: pkg = pkgs[name] break pkg = select_pkg(pkg,", "for pkgs in pkgs_bundle: if name in pkgs: pkg = pkgs[name] break pkg", "your software in order to use this repo.') return None return index def", "as lst_req: for src in lst_req.read().decode('utf-8').split('\\n'): src_ = src.strip() if len(src_) and not", "return sources # Unwraps the 'switch' content def select_pkg(pkg, vs): if not pkg:", "for src in 
lst_req.read().decode('utf-8').split('\\n'): src_ = src.strip() if len(src_) and not src_.startswith('#'): sources.append(src_)", "# Unwraps the 'switch' content def select_pkg(pkg, vs): if not pkg: return None", "pkgs: pkg = pkgs[name] break pkg = select_pkg(pkg, vs) if not pkg: return", "if package and all it's dependencies can be successfully installed def trace_pkg_deps(pkgs_bundle, vs,", "if not pkg: return None if 'switch' in pkg: if not vs: return", "in index or not index['repo'] == 'wapkg': return None if not index['version'] ==", "for v in switch: if vs in v.split(','): return switch[v] if '*' in", "if '*' in switch: return switch['*'] return None return pkg # Returns True", "name in pkgs: pkg = pkgs[name] break pkg = select_pkg(pkg, vs) if not", "src_ = src.strip() if len(src_) and not src_.startswith('#'): sources.append(src_) except URLError: pass return", "urlopen(EXTERNAL_LIST) as lst_req: for src in lst_req.read().decode('utf-8').split('\\n'): src_ = src.strip() if len(src_) and", "with urlopen(urljoin(repo_url, 'index.json')) as index_req: index = json.loads(index_req.read().decode('utf-8')) except URLError: return None if", "return False if 'requirements' in pkg: for req in pkg['requirements']: if not trace_pkg_deps(pkgs_bundle,", "fetch_external_sources(): sources = [] try: with urlopen(EXTERNAL_LIST) as lst_req: for src in lst_req.read().decode('utf-8').split('\\n'):", "return switch['*'] return None return pkg # Returns True if package and all", "dependencies can be successfully installed def trace_pkg_deps(pkgs_bundle, vs, name): pkg = None for", "not pkg: return None if 'switch' in pkg: if not vs: return None", "URLError: return None if 'repo' not in index or not index['repo'] == 'wapkg':", "switch[v] if '*' in switch: return switch['*'] return None return pkg # Returns", "pkg['switch'] for v in switch: if vs in v.split(','): return switch[v] if '*'", "None if 'switch' in pkg: if not vs: return None switch = pkg['switch']", "if not 
index['version'] == VERSION_REQUIRED: if index['version'] > VERSION_REQUIRED: print(\"! Source '\" +", "EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab' # Returns repo index dictionary object, or None in case", "in switch: return switch['*'] return None return pkg # Returns True if package", "VERSION_REQUIRED: print(\"! Source '\" + repo_url + \"' requires newer version of wapkg,", "and not src_.startswith('#'): sources.append(src_) except URLError: pass return sources # Unwraps the 'switch'", "can be successfully installed def trace_pkg_deps(pkgs_bundle, vs, name): pkg = None for pkgs", "= pkgs[name] break pkg = select_pkg(pkg, vs) if not pkg: return False if", "break pkg = select_pkg(pkg, vs) if not pkg: return False if 'requirements' in", "Source '\" + repo_url + \"' requires newer version of wapkg, \" +", "urllib.error import URLError from urllib.parse import urljoin VERSION_REQUIRED = 3 EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab'", "if not pkg: return False if 'requirements' in pkg: for req in pkg['requirements']:", "content def select_pkg(pkg, vs): if not pkg: return None if 'switch' in pkg:", "index def fetch_external_sources(): sources = [] try: with urlopen(EXTERNAL_LIST) as lst_req: for src", "+ 'consider upgrading your software in order to use this repo.') return None", "use this repo.') return None return index def fetch_external_sources(): sources = [] try:", "= src.strip() if len(src_) and not src_.startswith('#'): sources.append(src_) except URLError: pass return sources", "of failure def fetch_index(repo_url): try: with urlopen(urljoin(repo_url, 'index.json')) as index_req: index = json.loads(index_req.read().decode('utf-8'))", "except URLError: return None if 'repo' not in index or not index['repo'] ==", "src_.startswith('#'): sources.append(src_) except URLError: pass return sources # Unwraps the 'switch' content def", "try: with urlopen(urljoin(repo_url, 'index.json')) as index_req: index = json.loads(index_req.read().decode('utf-8')) 
except URLError: return None", "pkg = None for pkgs in pkgs_bundle: if name in pkgs: pkg =", "repo_url + \"' requires newer version of wapkg, \" + 'consider upgrading your", "'index.json')) as index_req: index = json.loads(index_req.read().decode('utf-8')) except URLError: return None if 'repo' not", "urlopen(urljoin(repo_url, 'index.json')) as index_req: index = json.loads(index_req.read().decode('utf-8')) except URLError: return None if 'repo'", "sources.append(src_) except URLError: pass return sources # Unwraps the 'switch' content def select_pkg(pkg,", "and all it's dependencies can be successfully installed def trace_pkg_deps(pkgs_bundle, vs, name): pkg", "= select_pkg(pkg, vs) if not pkg: return False if 'requirements' in pkg: for", "package and all it's dependencies can be successfully installed def trace_pkg_deps(pkgs_bundle, vs, name):", "not vs: return None switch = pkg['switch'] for v in switch: if vs", "print(\"! Source '\" + repo_url + \"' requires newer version of wapkg, \"", "sources = [] try: with urlopen(EXTERNAL_LIST) as lst_req: for src in lst_req.read().decode('utf-8').split('\\n'): src_", "return index def fetch_external_sources(): sources = [] try: with urlopen(EXTERNAL_LIST) as lst_req: for", "def trace_pkg_deps(pkgs_bundle, vs, name): pkg = None for pkgs in pkgs_bundle: if name", "trace_pkg_deps(pkgs_bundle, vs, name): pkg = None for pkgs in pkgs_bundle: if name in", "False if 'requirements' in pkg: for req in pkg['requirements']: if not trace_pkg_deps(pkgs_bundle, vs,", "URLError: pass return sources # Unwraps the 'switch' content def select_pkg(pkg, vs): if", "pkg: return None if 'switch' in pkg: if not vs: return None switch", "in lst_req.read().decode('utf-8').split('\\n'): src_ = src.strip() if len(src_) and not src_.startswith('#'): sources.append(src_) except URLError:", "pkg: for req in pkg['requirements']: if not trace_pkg_deps(pkgs_bundle, vs, req): return False return", "'repo' not in index or not index['repo'] == 'wapkg': return 
None if not", "def select_pkg(pkg, vs): if not pkg: return None if 'switch' in pkg: if", "it's dependencies can be successfully installed def trace_pkg_deps(pkgs_bundle, vs, name): pkg = None", "import urljoin VERSION_REQUIRED = 3 EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab' # Returns repo index dictionary", "= 'https://pastebin.com/raw/aKjmATab' # Returns repo index dictionary object, or None in case of", "failure def fetch_index(repo_url): try: with urlopen(urljoin(repo_url, 'index.json')) as index_req: index = json.loads(index_req.read().decode('utf-8')) except", "index = json.loads(index_req.read().decode('utf-8')) except URLError: return None if 'repo' not in index or", "[] try: with urlopen(EXTERNAL_LIST) as lst_req: for src in lst_req.read().decode('utf-8').split('\\n'): src_ = src.strip()", "if 'switch' in pkg: if not vs: return None switch = pkg['switch'] for", "return None if 'repo' not in index or not index['repo'] == 'wapkg': return", "> VERSION_REQUIRED: print(\"! Source '\" + repo_url + \"' requires newer version of", "name): pkg = None for pkgs in pkgs_bundle: if name in pkgs: pkg", "from urllib.parse import urljoin VERSION_REQUIRED = 3 EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab' # Returns repo", "if len(src_) and not src_.startswith('#'): sources.append(src_) except URLError: pass return sources # Unwraps", "import urlopen from urllib.error import URLError from urllib.parse import urljoin VERSION_REQUIRED = 3", "import URLError from urllib.parse import urljoin VERSION_REQUIRED = 3 EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab' #", "Returns True if package and all it's dependencies can be successfully installed def", "urllib.request import urlopen from urllib.error import URLError from urllib.parse import urljoin VERSION_REQUIRED =", "not pkg: return False if 'requirements' in pkg: for req in pkg['requirements']: if", "return None if 'switch' in pkg: if not vs: return None switch =", "URLError from urllib.parse import urljoin 
VERSION_REQUIRED = 3 EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab' # Returns", "switch: if vs in v.split(','): return switch[v] if '*' in switch: return switch['*']", "or None in case of failure def fetch_index(repo_url): try: with urlopen(urljoin(repo_url, 'index.json')) as", "return None return index def fetch_external_sources(): sources = [] try: with urlopen(EXTERNAL_LIST) as", "version of wapkg, \" + 'consider upgrading your software in order to use", "not src_.startswith('#'): sources.append(src_) except URLError: pass return sources # Unwraps the 'switch' content", "pkg # Returns True if package and all it's dependencies can be successfully", "in pkgs: pkg = pkgs[name] break pkg = select_pkg(pkg, vs) if not pkg:", "in pkg: for req in pkg['requirements']: if not trace_pkg_deps(pkgs_bundle, vs, req): return False", "object, or None in case of failure def fetch_index(repo_url): try: with urlopen(urljoin(repo_url, 'index.json'))", "for req in pkg['requirements']: if not trace_pkg_deps(pkgs_bundle, vs, req): return False return True", "def fetch_external_sources(): sources = [] try: with urlopen(EXTERNAL_LIST) as lst_req: for src in", "v.split(','): return switch[v] if '*' in switch: return switch['*'] return None return pkg", "try: with urlopen(EXTERNAL_LIST) as lst_req: for src in lst_req.read().decode('utf-8').split('\\n'): src_ = src.strip() if", "'https://pastebin.com/raw/aKjmATab' # Returns repo index dictionary object, or None in case of failure", "index['version'] == VERSION_REQUIRED: if index['version'] > VERSION_REQUIRED: print(\"! 
Source '\" + repo_url +", "vs, name): pkg = None for pkgs in pkgs_bundle: if name in pkgs:", "sources # Unwraps the 'switch' content def select_pkg(pkg, vs): if not pkg: return", "None if 'repo' not in index or not index['repo'] == 'wapkg': return None", "in order to use this repo.') return None return index def fetch_external_sources(): sources", "select_pkg(pkg, vs) if not pkg: return False if 'requirements' in pkg: for req", "from urllib.request import urlopen from urllib.error import URLError from urllib.parse import urljoin VERSION_REQUIRED", "= 3 EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab' # Returns repo index dictionary object, or None", "switch = pkg['switch'] for v in switch: if vs in v.split(','): return switch[v]", "index_req: index = json.loads(index_req.read().decode('utf-8')) except URLError: return None if 'repo' not in index", "return None if not index['version'] == VERSION_REQUIRED: if index['version'] > VERSION_REQUIRED: print(\"! Source", "in pkg: if not vs: return None switch = pkg['switch'] for v in", "json.loads(index_req.read().decode('utf-8')) except URLError: return None if 'repo' not in index or not index['repo']", "of wapkg, \" + 'consider upgrading your software in order to use this", "pkg = select_pkg(pkg, vs) if not pkg: return False if 'requirements' in pkg:", "urllib.parse import urljoin VERSION_REQUIRED = 3 EXTERNAL_LIST = 'https://pastebin.com/raw/aKjmATab' # Returns repo index", "return None return pkg # Returns True if package and all it's dependencies", "lst_req: for src in lst_req.read().decode('utf-8').split('\\n'): src_ = src.strip() if len(src_) and not src_.startswith('#'):", "pkgs[name] break pkg = select_pkg(pkg, vs) if not pkg: return False if 'requirements'", "'switch' in pkg: if not vs: return None switch = pkg['switch'] for v", "case of failure def fetch_index(repo_url): try: with urlopen(urljoin(repo_url, 'index.json')) as index_req: index =", "fetch_index(repo_url): try: with urlopen(urljoin(repo_url, 
'index.json')) as index_req: index = json.loads(index_req.read().decode('utf-8')) except URLError: return" ]
[ "does not contain s[start] any more char_need[str[start]] += 1 # when some count", "is found, move start to find a smaller window. from collections import defaultdict", "current window needs all char in t char_need[i] += 1 while end <", "import maxint def findSubString(str, pat): import pdb pdb.set_trace() MAX_INT = maxint start =", "min_start = 0 for i in pat: # current window needs all char", "start = end = 0 char_need = defaultdict(int) # the count of char", "if min_length > end - start: min_length = end - start min_start =", "but not current window if char_need[str[start]] > 0: count_need += 1 start +=", "1 start += 1 return \"\" if min_length == MAX_INT else str[min_start:min_start +", "current window if char_need[str[start]] > 0: count_need += 1 start += 1 return", "maxint start = end = 0 char_need = defaultdict(int) # the count of", "# when some count in char_need is positive, it means # there is", "two pointers: start and end to represent a window. #2. Move end to", "#2. Move end to find a valid window. #3. When a valid window", "= len(pat) # count of chars not in current window but in t", "pointers: start and end to represent a window. #2. 
Move end to find", "char_need = defaultdict(int) # the count of char needed by current window, negative", "def findSubString(str, pat): import pdb pdb.set_trace() MAX_INT = maxint start = end =", "0 for i in pat: # current window needs all char in t", "= end - start min_start = start # current window does not contain", "current window does not contain s[start] any more char_need[str[start]] += 1 # when", "+= 1 return \"\" if min_length == MAX_INT else str[min_start:min_start + min_length] print", "end - start: min_length = end - start min_start = start # current", "# current window needs all char in t char_need[i] += 1 while end", "char needed by current window, negative means current window has it but not", "needs it count_need = len(pat) # count of chars not in current window", "= end = 0 char_need = defaultdict(int) # the count of char needed", "negative means current window has it but not needs it count_need = len(pat)", "end < len(str): if char_need[str[end]] > 0: count_need -= 1 # current window", "<filename>min_win_substr.py # https://discuss.leetcode.com/topic/30941/here-is-a-10-line-template-that-can-solve-most-substring-problems/12 #1. Use two pointers: start and end to represent a", "collections import defaultdict from sys import maxint def findSubString(str, pat): import pdb pdb.set_trace()", "# there is char in t but not current window if char_need[str[start]] >", "t char_need[i] += 1 while end < len(str): if char_need[str[end]] > 0: count_need", "end += 1 while count_need == 0: if min_length > end - start:", "it count_need = len(pat) # count of chars not in current window but", "need it any more char_need[str[end]] -= 1 end += 1 while count_need ==", "not need it any more char_need[str[end]] -= 1 end += 1 while count_need", "more char_need[str[end]] -= 1 end += 1 while count_need == 0: if min_length", "len(pat) # count of chars not in current window but in t min_length", "start to find a smaller window. 
from collections import defaultdict from sys import", "in t but not current window if char_need[str[start]] > 0: count_need += 1", "current window, negative means current window has it but not needs it count_need", "= MAX_INT min_start = 0 for i in pat: # current window needs", "s[start] any more char_need[str[start]] += 1 # when some count in char_need is", "from collections import defaultdict from sys import maxint def findSubString(str, pat): import pdb", "window is found, move start to find a smaller window. from collections import", "by current window, negative means current window has it but not needs it", "1 while count_need == 0: if min_length > end - start: min_length =", "a window. #2. Move end to find a valid window. #3. When a", "# current window contains s[end] now, so does not need it any more", "window if char_need[str[start]] > 0: count_need += 1 start += 1 return \"\"", "to find a valid window. #3. When a valid window is found, move", "valid window is found, move start to find a smaller window. from collections", "any more char_need[str[end]] -= 1 end += 1 while count_need == 0: if", "means current window has it but not needs it count_need = len(pat) #", "in t min_length = MAX_INT min_start = 0 for i in pat: #", "current window has it but not needs it count_need = len(pat) # count", "window does not contain s[start] any more char_need[str[start]] += 1 # when some", "not in current window but in t min_length = MAX_INT min_start = 0", "not needs it count_need = len(pat) # count of chars not in current", "a valid window is found, move start to find a smaller window. 
from", "count of chars not in current window but in t min_length = MAX_INT", "current window but in t min_length = MAX_INT min_start = 0 for i", "but not needs it count_need = len(pat) # count of chars not in", "for i in pat: # current window needs all char in t char_need[i]", "MAX_INT min_start = 0 for i in pat: # current window needs all", "Use two pointers: start and end to represent a window. #2. Move end", "to represent a window. #2. Move end to find a valid window. #3.", "to find a smaller window. from collections import defaultdict from sys import maxint", "contains s[end] now, so does not need it any more char_need[str[end]] -= 1", "+= 1 # when some count in char_need is positive, it means #", "start: min_length = end - start min_start = start # current window does", "import defaultdict from sys import maxint def findSubString(str, pat): import pdb pdb.set_trace() MAX_INT", "and end to represent a window. #2. Move end to find a valid", "0: count_need += 1 start += 1 return \"\" if min_length == MAX_INT", "i in pat: # current window needs all char in t char_need[i] +=", "start += 1 return \"\" if min_length == MAX_INT else str[min_start:min_start + min_length]", "char_need is positive, it means # there is char in t but not", "end to represent a window. #2. Move end to find a valid window.", "len(str): if char_need[str[end]] > 0: count_need -= 1 # current window contains s[end]", "return \"\" if min_length == MAX_INT else str[min_start:min_start + min_length] print findSubString(\"ADOBECODEBANC\", \"ABC\")", "-= 1 # current window contains s[end] now, so does not need it", "a smaller window. from collections import defaultdict from sys import maxint def findSubString(str,", "Move end to find a valid window. #3. 
When a valid window is", "> 0: count_need += 1 start += 1 return \"\" if min_length ==", "while end < len(str): if char_need[str[end]] > 0: count_need -= 1 # current", "there is char in t but not current window if char_need[str[start]] > 0:", "of char needed by current window, negative means current window has it but", "positive, it means # there is char in t but not current window", "count_need = len(pat) # count of chars not in current window but in", "= 0 for i in pat: # current window needs all char in", "= defaultdict(int) # the count of char needed by current window, negative means", "all char in t char_need[i] += 1 while end < len(str): if char_need[str[end]]", "- start: min_length = end - start min_start = start # current window", "window. #3. When a valid window is found, move start to find a", "+= 1 while end < len(str): if char_need[str[end]] > 0: count_need -= 1", "if char_need[str[end]] > 0: count_need -= 1 # current window contains s[end] now,", "it means # there is char in t but not current window if", "# count of chars not in current window but in t min_length =", "pat: # current window needs all char in t char_need[i] += 1 while", "it but not needs it count_need = len(pat) # count of chars not", "1 return \"\" if min_length == MAX_INT else str[min_start:min_start + min_length] print findSubString(\"ADOBECODEBANC\",", "a valid window. #3. 
When a valid window is found, move start to", "more char_need[str[start]] += 1 # when some count in char_need is positive, it", "needed by current window, negative means current window has it but not needs", "- start min_start = start # current window does not contain s[start] any", "in current window but in t min_length = MAX_INT min_start = 0 for", "start min_start = start # current window does not contain s[start] any more", "count in char_need is positive, it means # there is char in t", "defaultdict from sys import maxint def findSubString(str, pat): import pdb pdb.set_trace() MAX_INT =", "has it but not needs it count_need = len(pat) # count of chars", "+= 1 while count_need == 0: if min_length > end - start: min_length", "min_length = MAX_INT min_start = 0 for i in pat: # current window", "window but in t min_length = MAX_INT min_start = 0 for i in", "does not need it any more char_need[str[end]] -= 1 end += 1 while", "window contains s[end] now, so does not need it any more char_need[str[end]] -=", "t but not current window if char_need[str[start]] > 0: count_need += 1 start", "char in t but not current window if char_need[str[start]] > 0: count_need +=", "char_need[str[end]] > 0: count_need -= 1 # current window contains s[end] now, so", "window needs all char in t char_need[i] += 1 while end < len(str):", "#3. When a valid window is found, move start to find a smaller", "0: if min_length > end - start: min_length = end - start min_start", "not current window if char_need[str[start]] > 0: count_need += 1 start += 1", "smaller window. from collections import defaultdict from sys import maxint def findSubString(str, pat):", "chars not in current window but in t min_length = MAX_INT min_start =", "find a valid window. #3. When a valid window is found, move start", "end to find a valid window. #3. 
When a valid window is found,", "the count of char needed by current window, negative means current window has", "min_length > end - start: min_length = end - start min_start = start", "is positive, it means # there is char in t but not current", "count_need += 1 start += 1 return \"\" if min_length == MAX_INT else", "== 0: if min_length > end - start: min_length = end - start", "window has it but not needs it count_need = len(pat) # count of", "find a smaller window. from collections import defaultdict from sys import maxint def", "count of char needed by current window, negative means current window has it", "1 # current window contains s[end] now, so does not need it any", "= maxint start = end = 0 char_need = defaultdict(int) # the count", "some count in char_need is positive, it means # there is char in", "min_length = end - start min_start = start # current window does not", "any more char_need[str[start]] += 1 # when some count in char_need is positive,", "char_need[str[start]] > 0: count_need += 1 start += 1 return \"\" if min_length", "s[end] now, so does not need it any more char_need[str[end]] -= 1 end", "move start to find a smaller window. from collections import defaultdict from sys", "window. #2. Move end to find a valid window. #3. When a valid", "+= 1 start += 1 return \"\" if min_length == MAX_INT else str[min_start:min_start", "import pdb pdb.set_trace() MAX_INT = maxint start = end = 0 char_need =", "start and end to represent a window. #2. Move end to find a", "MAX_INT = maxint start = end = 0 char_need = defaultdict(int) # the", "not contain s[start] any more char_need[str[start]] += 1 # when some count in", "window. 
from collections import defaultdict from sys import maxint def findSubString(str, pat): import", "start # current window does not contain s[start] any more char_need[str[start]] += 1", "0 char_need = defaultdict(int) # the count of char needed by current window,", "char_need[str[end]] -= 1 end += 1 while count_need == 0: if min_length >", "needs all char in t char_need[i] += 1 while end < len(str): if", "findSubString(str, pat): import pdb pdb.set_trace() MAX_INT = maxint start = end = 0", "end - start min_start = start # current window does not contain s[start]", "sys import maxint def findSubString(str, pat): import pdb pdb.set_trace() MAX_INT = maxint start", "char_need[str[start]] += 1 # when some count in char_need is positive, it means", "means # there is char in t but not current window if char_need[str[start]]", "so does not need it any more char_need[str[end]] -= 1 end += 1", "of chars not in current window but in t min_length = MAX_INT min_start", "0: count_need -= 1 # current window contains s[end] now, so does not", "valid window. #3. 
When a valid window is found, move start to find", "count_need == 0: if min_length > end - start: min_length = end -", "> end - start: min_length = end - start min_start = start #", "1 end += 1 while count_need == 0: if min_length > end -", "= 0 char_need = defaultdict(int) # the count of char needed by current", "pdb pdb.set_trace() MAX_INT = maxint start = end = 0 char_need = defaultdict(int)", "if char_need[str[start]] > 0: count_need += 1 start += 1 return \"\" if", "now, so does not need it any more char_need[str[end]] -= 1 end +=", "when some count in char_need is positive, it means # there is char", "it any more char_need[str[end]] -= 1 end += 1 while count_need == 0:", "1 while end < len(str): if char_need[str[end]] > 0: count_need -= 1 #", "pat): import pdb pdb.set_trace() MAX_INT = maxint start = end = 0 char_need", "maxint def findSubString(str, pat): import pdb pdb.set_trace() MAX_INT = maxint start = end", "> 0: count_need -= 1 # current window contains s[end] now, so does", "< len(str): if char_need[str[end]] > 0: count_need -= 1 # current window contains", "defaultdict(int) # the count of char needed by current window, negative means current", "char in t char_need[i] += 1 while end < len(str): if char_need[str[end]] >", "https://discuss.leetcode.com/topic/30941/here-is-a-10-line-template-that-can-solve-most-substring-problems/12 #1. Use two pointers: start and end to represent a window. #2.", "= start # current window does not contain s[start] any more char_need[str[start]] +=", "# the count of char needed by current window, negative means current window", "in char_need is positive, it means # there is char in t but", "t min_length = MAX_INT min_start = 0 for i in pat: # current", "found, move start to find a smaller window. 
from collections import defaultdict from", "in pat: # current window needs all char in t char_need[i] += 1", "in t char_need[i] += 1 while end < len(str): if char_need[str[end]] > 0:", "but in t min_length = MAX_INT min_start = 0 for i in pat:", "from sys import maxint def findSubString(str, pat): import pdb pdb.set_trace() MAX_INT = maxint", "current window contains s[end] now, so does not need it any more char_need[str[end]]", "window, negative means current window has it but not needs it count_need =", "1 # when some count in char_need is positive, it means # there", "while count_need == 0: if min_length > end - start: min_length = end", "# https://discuss.leetcode.com/topic/30941/here-is-a-10-line-template-that-can-solve-most-substring-problems/12 #1. Use two pointers: start and end to represent a window.", "When a valid window is found, move start to find a smaller window.", "-= 1 end += 1 while count_need == 0: if min_length > end", "pdb.set_trace() MAX_INT = maxint start = end = 0 char_need = defaultdict(int) #", "end = 0 char_need = defaultdict(int) # the count of char needed by", "contain s[start] any more char_need[str[start]] += 1 # when some count in char_need", "#1. Use two pointers: start and end to represent a window. #2. Move", "# current window does not contain s[start] any more char_need[str[start]] += 1 #", "represent a window. #2. Move end to find a valid window. #3. When", "char_need[i] += 1 while end < len(str): if char_need[str[end]] > 0: count_need -=", "count_need -= 1 # current window contains s[end] now, so does not need", "is char in t but not current window if char_need[str[start]] > 0: count_need", "min_start = start # current window does not contain s[start] any more char_need[str[start]]" ]
[ "olds = TranslationRequest.objects(infile__ne=None, finished__lte=(datetime.now()-timedelta(days=1))) logger.warning(\"Old files: {}\".format(olds)) for req in olds: req.clean_files() logger.warning(\"Cleaned\")", "logger.warning(\"TR id: {}\".format(tid)) tr = TranslationRequest.objects.get(id=tid) try: tr.start() if tr.infile: infile = tr.infile", "import * from .utils import translate_document from datetime import timedelta, datetime from StringIO", "= TranslationRequest.SUCCESS tr.finish() logger.warning(\"Processed\") return tr except Exception as ex: raise tr.status =", "tr except Exception as ex: raise tr.status = TranslationRequest.ERROR tr.message = str(\"{} --", "tr.template.text out = translate_document(infile=infile.get(), template=template, template_data=tr.to_mongo()) tr.outfile.delete() tr.outfile.new_file(encoding=\"utf-8\") for chunk in out: tr.outfile.write(chunk)", "= TranslationRequest.objects.get(id=tid) try: tr.start() if tr.infile: infile = tr.infile else: infile = StringIO(tr.input)", "for chunk in out: tr.outfile.write(chunk) tr.outfile.close() tr.save() tr.status = TranslationRequest.SUCCESS tr.finish() logger.warning(\"Processed\") return", "create_celery_app from .models import * from .utils import translate_document from datetime import timedelta,", "traceback.format_exc())) tr.finish() @celery.task() def clean_files(): logger.warning(\"Cleaning files\") olds = TranslationRequest.objects(infile__ne=None, finished__lte=(datetime.now()-timedelta(days=1))) logger.warning(\"Old files:", "= tr.infile else: infile = StringIO(tr.input) template = tr.template.text out = translate_document(infile=infile.get(), template=template,", "{}\".format(ex, traceback.format_exc())) tr.finish() @celery.task() def clean_files(): logger.warning(\"Cleaning files\") olds = TranslationRequest.objects(infile__ne=None, finished__lte=(datetime.now()-timedelta(days=1))) logger.warning(\"Old", "tr.infile: infile = tr.infile else: infile = StringIO(tr.input) 
template = tr.template.text out =", "def process_request(tid): logger.warning(\"TR id: {}\".format(tid)) tr = TranslationRequest.objects.get(id=tid) try: tr.start() if tr.infile: infile", "template_data=tr.to_mongo()) tr.outfile.delete() tr.outfile.new_file(encoding=\"utf-8\") for chunk in out: tr.outfile.write(chunk) tr.outfile.close() tr.save() tr.status = TranslationRequest.SUCCESS", "import StringIO celery = create_celery_app().celery logger = celery.logger @celery.task() def process_request(tid): logger.warning(\"TR id:", "template=template, template_data=tr.to_mongo()) tr.outfile.delete() tr.outfile.new_file(encoding=\"utf-8\") for chunk in out: tr.outfile.write(chunk) tr.outfile.close() tr.save() tr.status =", "files\") olds = TranslationRequest.objects(infile__ne=None, finished__lte=(datetime.now()-timedelta(days=1))) logger.warning(\"Old files: {}\".format(olds)) for req in olds: req.clean_files()", "factory import create_celery_app from .models import * from .utils import translate_document from datetime", "* from .utils import translate_document from datetime import timedelta, datetime from StringIO import", "ex: raise tr.status = TranslationRequest.ERROR tr.message = str(\"{} -- {}\".format(ex, traceback.format_exc())) tr.finish() @celery.task()", "import timedelta, datetime from StringIO import StringIO celery = create_celery_app().celery logger = celery.logger", "timedelta, datetime from StringIO import StringIO celery = create_celery_app().celery logger = celery.logger @celery.task()", "StringIO(tr.input) template = tr.template.text out = translate_document(infile=infile.get(), template=template, template_data=tr.to_mongo()) tr.outfile.delete() tr.outfile.new_file(encoding=\"utf-8\") for chunk", "else: infile = StringIO(tr.input) template = tr.template.text out = translate_document(infile=infile.get(), template=template, template_data=tr.to_mongo()) tr.outfile.delete()", "StringIO import StringIO celery = create_celery_app().celery logger = 
celery.logger @celery.task() def process_request(tid): logger.warning(\"TR", "coding: utf-8 -*- import time import traceback from factory import create_celery_app from .models", "= TranslationRequest.ERROR tr.message = str(\"{} -- {}\".format(ex, traceback.format_exc())) tr.finish() @celery.task() def clean_files(): logger.warning(\"Cleaning", "traceback from factory import create_celery_app from .models import * from .utils import translate_document", "datetime import timedelta, datetime from StringIO import StringIO celery = create_celery_app().celery logger =", "from StringIO import StringIO celery = create_celery_app().celery logger = celery.logger @celery.task() def process_request(tid):", "import create_celery_app from .models import * from .utils import translate_document from datetime import", "@celery.task() def clean_files(): logger.warning(\"Cleaning files\") olds = TranslationRequest.objects(infile__ne=None, finished__lte=(datetime.now()-timedelta(days=1))) logger.warning(\"Old files: {}\".format(olds)) for", "tr.message = str(\"{} -- {}\".format(ex, traceback.format_exc())) tr.finish() @celery.task() def clean_files(): logger.warning(\"Cleaning files\") olds", "tr.status = TranslationRequest.SUCCESS tr.finish() logger.warning(\"Processed\") return tr except Exception as ex: raise tr.status", "datetime from StringIO import StringIO celery = create_celery_app().celery logger = celery.logger @celery.task() def", "return tr except Exception as ex: raise tr.status = TranslationRequest.ERROR tr.message = str(\"{}", "process_request(tid): logger.warning(\"TR id: {}\".format(tid)) tr = TranslationRequest.objects.get(id=tid) try: tr.start() if tr.infile: infile =", "tr.outfile.new_file(encoding=\"utf-8\") for chunk in out: tr.outfile.write(chunk) tr.outfile.close() tr.save() tr.status = TranslationRequest.SUCCESS tr.finish() logger.warning(\"Processed\")", "TranslationRequest.ERROR tr.message = str(\"{} -- {}\".format(ex, traceback.format_exc())) tr.finish() 
@celery.task() def clean_files(): logger.warning(\"Cleaning files\")", "chunk in out: tr.outfile.write(chunk) tr.outfile.close() tr.save() tr.status = TranslationRequest.SUCCESS tr.finish() logger.warning(\"Processed\") return tr", "infile = StringIO(tr.input) template = tr.template.text out = translate_document(infile=infile.get(), template=template, template_data=tr.to_mongo()) tr.outfile.delete() tr.outfile.new_file(encoding=\"utf-8\")", "translate_document from datetime import timedelta, datetime from StringIO import StringIO celery = create_celery_app().celery", "StringIO celery = create_celery_app().celery logger = celery.logger @celery.task() def process_request(tid): logger.warning(\"TR id: {}\".format(tid))", "tr = TranslationRequest.objects.get(id=tid) try: tr.start() if tr.infile: infile = tr.infile else: infile =", "= StringIO(tr.input) template = tr.template.text out = translate_document(infile=infile.get(), template=template, template_data=tr.to_mongo()) tr.outfile.delete() tr.outfile.new_file(encoding=\"utf-8\") for", "{}\".format(tid)) tr = TranslationRequest.objects.get(id=tid) try: tr.start() if tr.infile: infile = tr.infile else: infile", ".models import * from .utils import translate_document from datetime import timedelta, datetime from", "str(\"{} -- {}\".format(ex, traceback.format_exc())) tr.finish() @celery.task() def clean_files(): logger.warning(\"Cleaning files\") olds = TranslationRequest.objects(infile__ne=None,", "@celery.task() def process_request(tid): logger.warning(\"TR id: {}\".format(tid)) tr = TranslationRequest.objects.get(id=tid) try: tr.start() if tr.infile:", "import time import traceback from factory import create_celery_app from .models import * from", "= create_celery_app().celery logger = celery.logger @celery.task() def process_request(tid): logger.warning(\"TR id: {}\".format(tid)) tr =", "out = translate_document(infile=infile.get(), template=template, template_data=tr.to_mongo()) tr.outfile.delete() 
tr.outfile.new_file(encoding=\"utf-8\") for chunk in out: tr.outfile.write(chunk) tr.outfile.close()", "tr.start() if tr.infile: infile = tr.infile else: infile = StringIO(tr.input) template = tr.template.text", "tr.outfile.write(chunk) tr.outfile.close() tr.save() tr.status = TranslationRequest.SUCCESS tr.finish() logger.warning(\"Processed\") return tr except Exception as", "import translate_document from datetime import timedelta, datetime from StringIO import StringIO celery =", "celery.logger @celery.task() def process_request(tid): logger.warning(\"TR id: {}\".format(tid)) tr = TranslationRequest.objects.get(id=tid) try: tr.start() if", "from factory import create_celery_app from .models import * from .utils import translate_document from", "template = tr.template.text out = translate_document(infile=infile.get(), template=template, template_data=tr.to_mongo()) tr.outfile.delete() tr.outfile.new_file(encoding=\"utf-8\") for chunk in", "tr.outfile.close() tr.save() tr.status = TranslationRequest.SUCCESS tr.finish() logger.warning(\"Processed\") return tr except Exception as ex:", "try: tr.start() if tr.infile: infile = tr.infile else: infile = StringIO(tr.input) template =", "if tr.infile: infile = tr.infile else: infile = StringIO(tr.input) template = tr.template.text out", "tr.save() tr.status = TranslationRequest.SUCCESS tr.finish() logger.warning(\"Processed\") return tr except Exception as ex: raise", "infile = tr.infile else: infile = StringIO(tr.input) template = tr.template.text out = translate_document(infile=infile.get(),", "id: {}\".format(tid)) tr = TranslationRequest.objects.get(id=tid) try: tr.start() if tr.infile: infile = tr.infile else:", "<gh_stars>1-10 # -*- coding: utf-8 -*- import time import traceback from factory import", "-- {}\".format(ex, traceback.format_exc())) tr.finish() @celery.task() def clean_files(): logger.warning(\"Cleaning files\") olds = TranslationRequest.objects(infile__ne=None, 
finished__lte=(datetime.now()-timedelta(days=1)))", "out: tr.outfile.write(chunk) tr.outfile.close() tr.save() tr.status = TranslationRequest.SUCCESS tr.finish() logger.warning(\"Processed\") return tr except Exception", "= tr.template.text out = translate_document(infile=infile.get(), template=template, template_data=tr.to_mongo()) tr.outfile.delete() tr.outfile.new_file(encoding=\"utf-8\") for chunk in out:", "-*- coding: utf-8 -*- import time import traceback from factory import create_celery_app from", "create_celery_app().celery logger = celery.logger @celery.task() def process_request(tid): logger.warning(\"TR id: {}\".format(tid)) tr = TranslationRequest.objects.get(id=tid)", "logger = celery.logger @celery.task() def process_request(tid): logger.warning(\"TR id: {}\".format(tid)) tr = TranslationRequest.objects.get(id=tid) try:", "tr.status = TranslationRequest.ERROR tr.message = str(\"{} -- {}\".format(ex, traceback.format_exc())) tr.finish() @celery.task() def clean_files():", "tr.outfile.delete() tr.outfile.new_file(encoding=\"utf-8\") for chunk in out: tr.outfile.write(chunk) tr.outfile.close() tr.save() tr.status = TranslationRequest.SUCCESS tr.finish()", "tr.finish() logger.warning(\"Processed\") return tr except Exception as ex: raise tr.status = TranslationRequest.ERROR tr.message", "= celery.logger @celery.task() def process_request(tid): logger.warning(\"TR id: {}\".format(tid)) tr = TranslationRequest.objects.get(id=tid) try: tr.start()", "= translate_document(infile=infile.get(), template=template, template_data=tr.to_mongo()) tr.outfile.delete() tr.outfile.new_file(encoding=\"utf-8\") for chunk in out: tr.outfile.write(chunk) tr.outfile.close() tr.save()", "from .utils import translate_document from datetime import timedelta, datetime from StringIO import StringIO", "logger.warning(\"Processed\") return tr except Exception as ex: raise tr.status = TranslationRequest.ERROR tr.message =", "clean_files(): logger.warning(\"Cleaning files\") olds = 
TranslationRequest.objects(infile__ne=None, finished__lte=(datetime.now()-timedelta(days=1))) logger.warning(\"Old files: {}\".format(olds)) for req in", "logger.warning(\"Cleaning files\") olds = TranslationRequest.objects(infile__ne=None, finished__lte=(datetime.now()-timedelta(days=1))) logger.warning(\"Old files: {}\".format(olds)) for req in olds:", "in out: tr.outfile.write(chunk) tr.outfile.close() tr.save() tr.status = TranslationRequest.SUCCESS tr.finish() logger.warning(\"Processed\") return tr except", "as ex: raise tr.status = TranslationRequest.ERROR tr.message = str(\"{} -- {}\".format(ex, traceback.format_exc())) tr.finish()", "tr.finish() @celery.task() def clean_files(): logger.warning(\"Cleaning files\") olds = TranslationRequest.objects(infile__ne=None, finished__lte=(datetime.now()-timedelta(days=1))) logger.warning(\"Old files: {}\".format(olds))", "TranslationRequest.objects.get(id=tid) try: tr.start() if tr.infile: infile = tr.infile else: infile = StringIO(tr.input) template", "celery = create_celery_app().celery logger = celery.logger @celery.task() def process_request(tid): logger.warning(\"TR id: {}\".format(tid)) tr", "raise tr.status = TranslationRequest.ERROR tr.message = str(\"{} -- {}\".format(ex, traceback.format_exc())) tr.finish() @celery.task() def", "from datetime import timedelta, datetime from StringIO import StringIO celery = create_celery_app().celery logger", "time import traceback from factory import create_celery_app from .models import * from .utils", "import traceback from factory import create_celery_app from .models import * from .utils import", "Exception as ex: raise tr.status = TranslationRequest.ERROR tr.message = str(\"{} -- {}\".format(ex, traceback.format_exc()))", "from .models import * from .utils import translate_document from datetime import timedelta, datetime", "translate_document(infile=infile.get(), template=template, template_data=tr.to_mongo()) tr.outfile.delete() 
tr.outfile.new_file(encoding=\"utf-8\") for chunk in out: tr.outfile.write(chunk) tr.outfile.close() tr.save() tr.status", "utf-8 -*- import time import traceback from factory import create_celery_app from .models import", ".utils import translate_document from datetime import timedelta, datetime from StringIO import StringIO celery", "# -*- coding: utf-8 -*- import time import traceback from factory import create_celery_app", "TranslationRequest.SUCCESS tr.finish() logger.warning(\"Processed\") return tr except Exception as ex: raise tr.status = TranslationRequest.ERROR", "except Exception as ex: raise tr.status = TranslationRequest.ERROR tr.message = str(\"{} -- {}\".format(ex,", "-*- import time import traceback from factory import create_celery_app from .models import *", "= str(\"{} -- {}\".format(ex, traceback.format_exc())) tr.finish() @celery.task() def clean_files(): logger.warning(\"Cleaning files\") olds =", "tr.infile else: infile = StringIO(tr.input) template = tr.template.text out = translate_document(infile=infile.get(), template=template, template_data=tr.to_mongo())", "def clean_files(): logger.warning(\"Cleaning files\") olds = TranslationRequest.objects(infile__ne=None, finished__lte=(datetime.now()-timedelta(days=1))) logger.warning(\"Old files: {}\".format(olds)) for req" ]
[ "@ipscan_blueprint.route('/getiplist') def get_ip_list(): result = [] tmp = {} domain_index = int(request.args.get(\"index\")) domain_offset", "pa_domain,pa_ip from .tasks import scan_ip_task from celery_app.utils.utils import get_current_time,insert_taskid_db ipscan_blueprint = Blueprint(\"ipscan\", __name__,", "{} domain_index = int(request.args.get(\"index\")) domain_offset = int(request.args.get(\"offset\")) cursor = pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset) for document", "Blueprint,request from app import pa_domain,pa_ip from .tasks import scan_ip_task from celery_app.utils.utils import get_current_time,insert_taskid_db", "from app import pa_domain,pa_ip from .tasks import scan_ip_task from celery_app.utils.utils import get_current_time,insert_taskid_db ipscan_blueprint", "= Blueprint(\"ipscan\", __name__, url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan') def scan_ip(): domain = request.args.get(\"domain\") #在数据库搜索该domain的索引 domain_index=pa_domain.find_one({\"domain\":domain})", "# 声明ip_list ip_list = [] #获取整个domain所对应的ip for item in domain_index['subdomain']: for ip_s in", "scan_ip_task from celery_app.utils.utils import get_current_time,insert_taskid_db ipscan_blueprint = Blueprint(\"ipscan\", __name__, url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan') def", "= pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset) for document in cursor: tmp['ip'] = document['ip'] tmp['add_time'] = document['add_time']", "for document in cursor: tmp['ip'] = document['ip'] tmp['add_time'] = document['add_time'] tmp['port'] = document['port']", "声明ip_list ip_list = [] #获取整个domain所对应的ip for item in domain_index['subdomain']: for ip_s in item['ip']:", "pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset) for document in cursor: tmp['ip'] = document['ip'] tmp['add_time'] = document['add_time'] tmp['port']", "#调用scan_ip 任务 传入主域名和对应的ip列表 
r=scan_ip_task.delay(domain,ip_list) # taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return {\"code\":200,\"msg\":\"添加扫描任务成功\"} return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数 @ipscan_blueprint.route('/getipnum')", "@ipscan_blueprint.route('/getipnum') def get_ip_num(): return {\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引 offset为数量 @ipscan_blueprint.route('/getiplist') def get_ip_list(): result = []", "taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return {\"code\":200,\"msg\":\"添加扫描任务成功\"} return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数 @ipscan_blueprint.route('/getipnum') def get_ip_num(): return {\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引", "{\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数 @ipscan_blueprint.route('/getipnum') def get_ip_num(): return {\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引 offset为数量 @ipscan_blueprint.route('/getiplist') def get_ip_list(): result", "= int(request.args.get(\"index\")) domain_offset = int(request.args.get(\"offset\")) cursor = pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset) for document in cursor:", "ipscan_blueprint = Blueprint(\"ipscan\", __name__, url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan') def scan_ip(): domain = request.args.get(\"domain\") #在数据库搜索该domain的索引", "{\"code\":200,\"msg\":\"添加扫描任务成功\"} return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数 @ipscan_blueprint.route('/getipnum') def get_ip_num(): return {\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引 offset为数量 @ipscan_blueprint.route('/getiplist') def", "app import pa_domain,pa_ip from .tasks import scan_ip_task from 
celery_app.utils.utils import get_current_time,insert_taskid_db ipscan_blueprint =", "= document['ip'] tmp['add_time'] = document['add_time'] tmp['port'] = document['port'] result.append(tmp) tmp = {} return", "__name__, url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan') def scan_ip(): domain = request.args.get(\"domain\") #在数据库搜索该domain的索引 domain_index=pa_domain.find_one({\"domain\":domain}) if domain_index:", "insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return {\"code\":200,\"msg\":\"添加扫描任务成功\"} return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数 @ipscan_blueprint.route('/getipnum') def get_ip_num(): return {\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引 offset为数量", "return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数 @ipscan_blueprint.route('/getipnum') def get_ip_num(): return {\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引 offset为数量 @ipscan_blueprint.route('/getiplist') def get_ip_list():", "import scan_ip_task from celery_app.utils.utils import get_current_time,insert_taskid_db ipscan_blueprint = Blueprint(\"ipscan\", __name__, url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan')", "get_ip_list(): result = [] tmp = {} domain_index = int(request.args.get(\"index\")) domain_offset = int(request.args.get(\"offset\"))", "request.args.get(\"domain\") #在数据库搜索该domain的索引 domain_index=pa_domain.find_one({\"domain\":domain}) if domain_index: # 声明ip_list ip_list = [] #获取整个domain所对应的ip for item", "#对ip_list去重 ip_list=list(set(ip_list)) #调用scan_ip 任务 传入主域名和对应的ip列表 r=scan_ip_task.delay(domain,ip_list) # taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return 
{\"code\":200,\"msg\":\"添加扫描任务成功\"} return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"}", "ip_s in item['ip']: ip_list.append(ip_s) #对ip_list去重 ip_list=list(set(ip_list)) #调用scan_ip 任务 传入主域名和对应的ip列表 r=scan_ip_task.delay(domain,ip_list) # taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))})", "in domain_index['subdomain']: for ip_s in item['ip']: ip_list.append(ip_s) #对ip_list去重 ip_list=list(set(ip_list)) #调用scan_ip 任务 传入主域名和对应的ip列表 r=scan_ip_task.delay(domain,ip_list)", "-1)]).skip(domain_index).limit(domain_offset) for document in cursor: tmp['ip'] = document['ip'] tmp['add_time'] = document['add_time'] tmp['port'] =", "domain_index: # 声明ip_list ip_list = [] #获取整个domain所对应的ip for item in domain_index['subdomain']: for ip_s", "= [] #获取整个domain所对应的ip for item in domain_index['subdomain']: for ip_s in item['ip']: ip_list.append(ip_s) #对ip_list去重", "= [] tmp = {} domain_index = int(request.args.get(\"index\")) domain_offset = int(request.args.get(\"offset\")) cursor =", "get_current_time,insert_taskid_db ipscan_blueprint = Blueprint(\"ipscan\", __name__, url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan') def scan_ip(): domain = request.args.get(\"domain\")", "import pa_domain,pa_ip from .tasks import scan_ip_task from celery_app.utils.utils import get_current_time,insert_taskid_db ipscan_blueprint = Blueprint(\"ipscan\",", "flask import Blueprint,request from app import pa_domain,pa_ip from .tasks import scan_ip_task from celery_app.utils.utils", ".tasks import scan_ip_task from celery_app.utils.utils import get_current_time,insert_taskid_db ipscan_blueprint = Blueprint(\"ipscan\", __name__, url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan", "{\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引 offset为数量 @ipscan_blueprint.route('/getiplist') def get_ip_list(): result = [] tmp = {} 
domain_index", "domain_index = int(request.args.get(\"index\")) domain_offset = int(request.args.get(\"offset\")) cursor = pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset) for document in", "#通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan') def scan_ip(): domain = request.args.get(\"domain\") #在数据库搜索该domain的索引 domain_index=pa_domain.find_one({\"domain\":domain}) if domain_index: # 声明ip_list", "in item['ip']: ip_list.append(ip_s) #对ip_list去重 ip_list=list(set(ip_list)) #调用scan_ip 任务 传入主域名和对应的ip列表 r=scan_ip_task.delay(domain,ip_list) # taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return", "= int(request.args.get(\"offset\")) cursor = pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset) for document in cursor: tmp['ip'] = document['ip']", "Blueprint(\"ipscan\", __name__, url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan') def scan_ip(): domain = request.args.get(\"domain\") #在数据库搜索该domain的索引 domain_index=pa_domain.find_one({\"domain\":domain}) if", "#在数据库搜索该domain的索引 domain_index=pa_domain.find_one({\"domain\":domain}) if domain_index: # 声明ip_list ip_list = [] #获取整个domain所对应的ip for item in", "tmp = {} domain_index = int(request.args.get(\"index\")) domain_offset = int(request.args.get(\"offset\")) cursor = pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset)", "document in cursor: tmp['ip'] = document['ip'] tmp['add_time'] = document['add_time'] tmp['port'] = document['port'] result.append(tmp)", "get_ip_num(): return {\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引 offset为数量 @ipscan_blueprint.route('/getiplist') def get_ip_list(): result = [] tmp =", "@ipscan_blueprint.route('/scan') def scan_ip(): domain = request.args.get(\"domain\") #在数据库搜索该domain的索引 
domain_index=pa_domain.find_one({\"domain\":domain}) if domain_index: # 声明ip_list ip_list", "item['ip']: ip_list.append(ip_s) #对ip_list去重 ip_list=list(set(ip_list)) #调用scan_ip 任务 传入主域名和对应的ip列表 r=scan_ip_task.delay(domain,ip_list) # taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return {\"code\":200,\"msg\":\"添加扫描任务成功\"}", "celery_app.utils.utils import get_current_time,insert_taskid_db ipscan_blueprint = Blueprint(\"ipscan\", __name__, url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan') def scan_ip(): domain", "#获取ip列表,index为起始索引 offset为数量 @ipscan_blueprint.route('/getiplist') def get_ip_list(): result = [] tmp = {} domain_index =", "cursor: tmp['ip'] = document['ip'] tmp['add_time'] = document['add_time'] tmp['port'] = document['port'] result.append(tmp) tmp =", "[] #获取整个domain所对应的ip for item in domain_index['subdomain']: for ip_s in item['ip']: ip_list.append(ip_s) #对ip_list去重 ip_list=list(set(ip_list))", "offset为数量 @ipscan_blueprint.route('/getiplist') def get_ip_list(): result = [] tmp = {} domain_index = int(request.args.get(\"index\"))", "domain_offset = int(request.args.get(\"offset\")) cursor = pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset) for document in cursor: tmp['ip'] =", "[] tmp = {} domain_index = int(request.args.get(\"index\")) domain_offset = int(request.args.get(\"offset\")) cursor = pa_ip.find().sort([('_id',", "= {} domain_index = int(request.args.get(\"index\")) domain_offset = int(request.args.get(\"offset\")) cursor = pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset) for", "cursor = pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset) for document in cursor: tmp['ip'] = document['ip'] tmp['add_time'] =", "ip_list = [] #获取整个domain所对应的ip for item in domain_index['subdomain']: for 
ip_s in item['ip']: ip_list.append(ip_s)", "for item in domain_index['subdomain']: for ip_s in item['ip']: ip_list.append(ip_s) #对ip_list去重 ip_list=list(set(ip_list)) #调用scan_ip 任务", "in cursor: tmp['ip'] = document['ip'] tmp['add_time'] = document['add_time'] tmp['port'] = document['port'] result.append(tmp) tmp", "import Blueprint,request from app import pa_domain,pa_ip from .tasks import scan_ip_task from celery_app.utils.utils import", "#获取整个domain所对应的ip for item in domain_index['subdomain']: for ip_s in item['ip']: ip_list.append(ip_s) #对ip_list去重 ip_list=list(set(ip_list)) #调用scan_ip", "int(request.args.get(\"offset\")) cursor = pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset) for document in cursor: tmp['ip'] = document['ip'] tmp['add_time']", "from celery_app.utils.utils import get_current_time,insert_taskid_db ipscan_blueprint = Blueprint(\"ipscan\", __name__, url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan') def scan_ip():", "= request.args.get(\"domain\") #在数据库搜索该domain的索引 domain_index=pa_domain.find_one({\"domain\":domain}) if domain_index: # 声明ip_list ip_list = [] #获取整个domain所对应的ip for", "ip_list.append(ip_s) #对ip_list去重 ip_list=list(set(ip_list)) #调用scan_ip 任务 传入主域名和对应的ip列表 r=scan_ip_task.delay(domain,ip_list) # taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return {\"code\":200,\"msg\":\"添加扫描任务成功\"} return", "result = [] tmp = {} domain_index = int(request.args.get(\"index\")) domain_offset = int(request.args.get(\"offset\")) cursor", "def get_ip_num(): return {\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引 offset为数量 @ipscan_blueprint.route('/getiplist') def get_ip_list(): result = [] tmp", "document['ip'] tmp['add_time'] = document['add_time'] tmp['port'] = document['port'] result.append(tmp) tmp = {} return {\"ip_list\":", 
"for ip_s in item['ip']: ip_list.append(ip_s) #对ip_list去重 ip_list=list(set(ip_list)) #调用scan_ip 任务 传入主域名和对应的ip列表 r=scan_ip_task.delay(domain,ip_list) # taskid入库", "def get_ip_list(): result = [] tmp = {} domain_index = int(request.args.get(\"index\")) domain_offset =", "import get_current_time,insert_taskid_db ipscan_blueprint = Blueprint(\"ipscan\", __name__, url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan') def scan_ip(): domain =", "return {\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引 offset为数量 @ipscan_blueprint.route('/getiplist') def get_ip_list(): result = [] tmp = {}", "ip_list=list(set(ip_list)) #调用scan_ip 任务 传入主域名和对应的ip列表 r=scan_ip_task.delay(domain,ip_list) # taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return {\"code\":200,\"msg\":\"添加扫描任务成功\"} return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数", "domain_index=pa_domain.find_one({\"domain\":domain}) if domain_index: # 声明ip_list ip_list = [] #获取整个domain所对应的ip for item in domain_index['subdomain']:", "item in domain_index['subdomain']: for ip_s in item['ip']: ip_list.append(ip_s) #对ip_list去重 ip_list=list(set(ip_list)) #调用scan_ip 任务 传入主域名和对应的ip列表", "# taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return {\"code\":200,\"msg\":\"添加扫描任务成功\"} return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数 @ipscan_blueprint.route('/getipnum') def get_ip_num(): return {\"ip_num\":pa_ip.find({}).count()}", "if domain_index: # 声明ip_list ip_list = [] #获取整个domain所对应的ip for item in domain_index['subdomain']: for", "scan_ip(): domain = request.args.get(\"domain\") #在数据库搜索该domain的索引 domain_index=pa_domain.find_one({\"domain\":domain}) if domain_index: # 
声明ip_list ip_list = []", "r=scan_ip_task.delay(domain,ip_list) # taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return {\"code\":200,\"msg\":\"添加扫描任务成功\"} return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数 @ipscan_blueprint.route('/getipnum') def get_ip_num(): return", "from .tasks import scan_ip_task from celery_app.utils.utils import get_current_time,insert_taskid_db ipscan_blueprint = Blueprint(\"ipscan\", __name__, url_prefix='/ipscan')", "传入主域名和对应的ip列表 r=scan_ip_task.delay(domain,ip_list) # taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return {\"code\":200,\"msg\":\"添加扫描任务成功\"} return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数 @ipscan_blueprint.route('/getipnum') def get_ip_num():", "def scan_ip(): domain = request.args.get(\"domain\") #在数据库搜索该domain的索引 domain_index=pa_domain.find_one({\"domain\":domain}) if domain_index: # 声明ip_list ip_list =", "int(request.args.get(\"index\")) domain_offset = int(request.args.get(\"offset\")) cursor = pa_ip.find().sort([('_id', -1)]).skip(domain_index).limit(domain_offset) for document in cursor: tmp['ip']", "任务 传入主域名和对应的ip列表 r=scan_ip_task.delay(domain,ip_list) # taskid入库 insert_taskid_db({\"task_id\":r.task_id,\"add_time\":get_current_time(),\"task_type\":\"ip_scan\",\"ip_list\":ip_list,\"task_info\":\"对{0}域名下的{1}等{2}个ip进行端口扫描\".format(domain,ip_list[0],len(ip_list))}) return {\"code\":200,\"msg\":\"添加扫描任务成功\"} return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数 @ipscan_blueprint.route('/getipnum') def", "tmp['ip'] = document['ip'] tmp['add_time'] = document['add_time'] tmp['port'] = document['port'] result.append(tmp) tmp = {}", "domain_index['subdomain']: for ip_s in item['ip']: 
ip_list.append(ip_s) #对ip_list去重 ip_list=list(set(ip_list)) #调用scan_ip 任务 传入主域名和对应的ip列表 r=scan_ip_task.delay(domain,ip_list) #", "tmp['add_time'] = document['add_time'] tmp['port'] = document['port'] result.append(tmp) tmp = {} return {\"ip_list\": result}", "from flask import Blueprint,request from app import pa_domain,pa_ip from .tasks import scan_ip_task from", "#获取ip总数 @ipscan_blueprint.route('/getipnum') def get_ip_num(): return {\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引 offset为数量 @ipscan_blueprint.route('/getiplist') def get_ip_list(): result =", "return {\"code\":200,\"msg\":\"添加扫描任务成功\"} return {\"code\":201,\"msg\":\"未找到该域名所对应ip\"} #获取ip总数 @ipscan_blueprint.route('/getipnum') def get_ip_num(): return {\"ip_num\":pa_ip.find({}).count()} #获取ip列表,index为起始索引 offset为数量 @ipscan_blueprint.route('/getiplist')", "url_prefix='/ipscan') #通过传入一个一级域名,对这个域名下的所有ip进行scan @ipscan_blueprint.route('/scan') def scan_ip(): domain = request.args.get(\"domain\") #在数据库搜索该domain的索引 domain_index=pa_domain.find_one({\"domain\":domain}) if domain_index: #", "domain = request.args.get(\"domain\") #在数据库搜索该domain的索引 domain_index=pa_domain.find_one({\"domain\":domain}) if domain_index: # 声明ip_list ip_list = [] #获取整个domain所对应的ip" ]
[ "check_output import subprocess repo = None htable = [] config = { \"GIT_USER\":", "os.environ.get('GIT_REPO_NAME') + str(cell) + filename.replace('ipynb', 'txt')) # subprocess.run(['cat', content, '>', filename]) # print(repo.git.add(filename))", "filename.replace('ipynb', 'txt')) # subprocess.run(['cat', content, '>', filename]) # print(repo.git.add(filename)) # print(repo.git.commit( a=False, m=\"\\nUpdated", "requests from git import Repo, GitCommandError from subprocess import check_output import subprocess repo", "import subprocess repo = None htable = [] config = { \"GIT_USER\": \"alpaca\",", "import url_path_join as ujoin from notebook.base.handlers import IPythonHandler import os, json, git, urllib,", "# return False # def register_cell(cell, content): # filename = str(config['GIT_PARENT_DIR'] + \"/\"", "None htable = [] config = { \"GIT_USER\": \"alpaca\", \"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\": \"main\",", "+ \"/\" + os.environ.get('GIT_REPO_NAME') + str(cell) + filename.replace('ipynb', 'txt')) # subprocess.run(['cat', content, '>',", "= str(config['GIT_PARENT_DIR'] + \"/\" + os.environ.get('GIT_REPO_NAME') + str(cell) + filename.replace('ipynb', 'txt')) # subprocess.run(['cat',", "str(config['GIT_PARENT_DIR'] + \"/\" + os.environ.get('GIT_REPO_NAME') + str(cell) + filename.replace('ipynb', 'txt')) # subprocess.run(['cat', content,", "os, json, git, urllib, requests from git import Repo, GitCommandError from subprocess import", "import Repo, GitCommandError from subprocess import check_output import subprocess repo = None htable", "content): # filename = str(config['GIT_PARENT_DIR'] + \"/\" + os.environ.get('GIT_REPO_NAME') + str(cell) + filename.replace('ipynb',", "repo = None htable = [] config = { \"GIT_USER\": \"alpaca\", \"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"),", "def delete_cell(): # if cell in htable: # del htable[cell] # return True", "filename = 
str(config['GIT_PARENT_DIR'] + \"/\" + os.environ.get('GIT_REPO_NAME') + str(cell) + filename.replace('ipynb', 'txt')) #", "\"alpaca\", # \"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\" } # def delete_cell(): # if cell in htable:", "git import Repo, GitCommandError from subprocess import check_output import subprocess repo = None", "IPythonHandler import os, json, git, urllib, requests from git import Repo, GitCommandError from", "# def register_cell(cell, content): # filename = str(config['GIT_PARENT_DIR'] + \"/\" + os.environ.get('GIT_REPO_NAME') +", "+ os.environ.get('GIT_REPO_NAME') + str(cell) + filename.replace('ipynb', 'txt')) # subprocess.run(['cat', content, '>', filename]) #", "def register_cell(cell, content): # filename = str(config['GIT_PARENT_DIR'] + \"/\" + os.environ.get('GIT_REPO_NAME') + str(cell)", "if cell in htable: # del htable[cell] # return True # return False", "from subprocess import check_output import subprocess repo = None htable = [] config", "# del htable[cell] # return True # return False # def register_cell(cell, content):", "\"GIT_USER\": \"alpaca\", \"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\": \"main\", # \"GIT_REMOTE_URL\" : \"alpaca\", # \"GIT_REMOTE_UPSTREAM\": \"alpaca\",", "import os, json, git, urllib, requests from git import Repo, GitCommandError from subprocess", "= { \"GIT_USER\": \"alpaca\", \"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\": \"main\", # \"GIT_REMOTE_URL\" : \"alpaca\", #", "GitCommandError from subprocess import check_output import subprocess repo = None htable = []", "# if cell in htable: # del htable[cell] # return True # return", "'txt')) # subprocess.run(['cat', content, '>', filename]) # print(repo.git.add(filename)) # print(repo.git.commit( a=False, m=\"\\nUpdated {}\".format(filename)", "cell in htable: # del htable[cell] # return True # return False #", "# subprocess.run(['cat', content, '>', filename]) # 
print(repo.git.add(filename)) # print(repo.git.commit( a=False, m=\"\\nUpdated {}\".format(filename) ))", "[] config = { \"GIT_USER\": \"alpaca\", \"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\": \"main\", # \"GIT_REMOTE_URL\" :", "Repo, GitCommandError from subprocess import check_output import subprocess repo = None htable =", "as ujoin from notebook.base.handlers import IPythonHandler import os, json, git, urllib, requests from", "+ str(cell) + filename.replace('ipynb', 'txt')) # subprocess.run(['cat', content, '>', filename]) # print(repo.git.add(filename)) #", "urllib, requests from git import Repo, GitCommandError from subprocess import check_output import subprocess", "notebook.base.handlers import IPythonHandler import os, json, git, urllib, requests from git import Repo,", "import check_output import subprocess repo = None htable = [] config = {", "return True # return False # def register_cell(cell, content): # filename = str(config['GIT_PARENT_DIR']", "\"alpaca\", # \"GIT_REMOTE_UPSTREAM\": \"alpaca\", # \"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\" } # def delete_cell(): # if", "\"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\": \"main\", # \"GIT_REMOTE_URL\" : \"alpaca\", # \"GIT_REMOTE_UPSTREAM\": \"alpaca\", # \"GITHUB_ACCESS_TOKEN\":", "return False # def register_cell(cell, content): # filename = str(config['GIT_PARENT_DIR'] + \"/\" +", "\"alpaca\", \"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\": \"main\", # \"GIT_REMOTE_URL\" : \"alpaca\", # \"GIT_REMOTE_UPSTREAM\": \"alpaca\", #", "\"<PASSWORD>\" } # def delete_cell(): # if cell in htable: # del htable[cell]", "# return True # return False # def register_cell(cell, content): # filename =", "\"/\" + os.environ.get('GIT_REPO_NAME') + str(cell) + filename.replace('ipynb', 'txt')) # subprocess.run(['cat', content, '>', filename])", "+ filename.replace('ipynb', 'txt')) # 
subprocess.run(['cat', content, '>', filename]) # print(repo.git.add(filename)) # print(repo.git.commit( a=False,", "config = { \"GIT_USER\": \"alpaca\", \"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\": \"main\", # \"GIT_REMOTE_URL\" : \"alpaca\",", "str(cell) + filename.replace('ipynb', 'txt')) # subprocess.run(['cat', content, '>', filename]) # print(repo.git.add(filename)) # print(repo.git.commit(", "delete_cell(): # if cell in htable: # del htable[cell] # return True #", "htable: # del htable[cell] # return True # return False # def register_cell(cell,", "del htable[cell] # return True # return False # def register_cell(cell, content): #", "False # def register_cell(cell, content): # filename = str(config['GIT_PARENT_DIR'] + \"/\" + os.environ.get('GIT_REPO_NAME')", "\"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\" } # def delete_cell(): # if cell in htable: # del", "ujoin from notebook.base.handlers import IPythonHandler import os, json, git, urllib, requests from git", "# filename = str(config['GIT_PARENT_DIR'] + \"/\" + os.environ.get('GIT_REPO_NAME') + str(cell) + filename.replace('ipynb', 'txt'))", "url_path_join as ujoin from notebook.base.handlers import IPythonHandler import os, json, git, urllib, requests", "from notebook.utils import url_path_join as ujoin from notebook.base.handlers import IPythonHandler import os, json,", "git, urllib, requests from git import Repo, GitCommandError from subprocess import check_output import", "{ \"GIT_USER\": \"alpaca\", \"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\": \"main\", # \"GIT_REMOTE_URL\" : \"alpaca\", # \"GIT_REMOTE_UPSTREAM\":", "\"GIT_BRANCH_NAME\": \"main\", # \"GIT_REMOTE_URL\" : \"alpaca\", # \"GIT_REMOTE_UPSTREAM\": \"alpaca\", # \"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\" }", "\"GIT_REMOTE_UPSTREAM\": \"alpaca\", # \"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\" } # def delete_cell(): # if cell in", "htable = [] config = { 
\"GIT_USER\": \"alpaca\", \"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\": \"main\", #", "\"main\", # \"GIT_REMOTE_URL\" : \"alpaca\", # \"GIT_REMOTE_UPSTREAM\": \"alpaca\", # \"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\" } #", "# \"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\" } # def delete_cell(): # if cell in htable: #", "register_cell(cell, content): # filename = str(config['GIT_PARENT_DIR'] + \"/\" + os.environ.get('GIT_REPO_NAME') + str(cell) +", ": \"alpaca\", # \"GIT_REMOTE_UPSTREAM\": \"alpaca\", # \"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\" } # def delete_cell(): #", "= [] config = { \"GIT_USER\": \"alpaca\", \"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\": \"main\", # \"GIT_REMOTE_URL\"", "\"GIT_REMOTE_URL\" : \"alpaca\", # \"GIT_REMOTE_UPSTREAM\": \"alpaca\", # \"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\" } # def delete_cell():", "import IPythonHandler import os, json, git, urllib, requests from git import Repo, GitCommandError", "from notebook.base.handlers import IPythonHandler import os, json, git, urllib, requests from git import", "subprocess import check_output import subprocess repo = None htable = [] config =", "# \"GIT_REMOTE_URL\" : \"alpaca\", # \"GIT_REMOTE_UPSTREAM\": \"alpaca\", # \"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\" } # def", "# \"GIT_REMOTE_UPSTREAM\": \"alpaca\", # \"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\" } # def delete_cell(): # if cell", "# def delete_cell(): # if cell in htable: # del htable[cell] # return", "os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\": \"main\", # \"GIT_REMOTE_URL\" : \"alpaca\", # \"GIT_REMOTE_UPSTREAM\": \"alpaca\", # \"GITHUB_ACCESS_TOKEN\": \"<PASSWORD>\"", "json, git, urllib, requests from git import Repo, GitCommandError from subprocess import check_output", "= None htable = [] config = { \"GIT_USER\": \"alpaca\", \"GIT_PARENT_DIR\": os.path.expanduser(\"~/Desktop/jupyter_versioning\"), \"GIT_BRANCH_NAME\":", 
"from git import Repo, GitCommandError from subprocess import check_output import subprocess repo =", "in htable: # del htable[cell] # return True # return False # def", "notebook.utils import url_path_join as ujoin from notebook.base.handlers import IPythonHandler import os, json, git,", "} # def delete_cell(): # if cell in htable: # del htable[cell] #", "True # return False # def register_cell(cell, content): # filename = str(config['GIT_PARENT_DIR'] +", "subprocess repo = None htable = [] config = { \"GIT_USER\": \"alpaca\", \"GIT_PARENT_DIR\":", "htable[cell] # return True # return False # def register_cell(cell, content): # filename" ]
[ "pymongo import MongoClient from settings import MONGO_URL client = MongoClient(MONGO_URL) db = client.rolz_database", "from pymongo import MongoClient from settings import MONGO_URL client = MongoClient(MONGO_URL) db =" ]
[ "or (n == 2500): continue if expr(n): cnt += 1 print(\"cnt = %d\"", "b return c < 0 cnt = 0 for n in range(1, sys.maxsize):", "n in range(1, sys.maxsize): # print(\"n = %d\" % n) if (n ==", "- b return c < 0 cnt = 0 for n in range(1,", "for n in range(1, sys.maxsize): # print(\"n = %d\" % n) if (n", "# print(\"n = %d\" % n) if (n == 2018) or (n ==", "a - b return c < 0 cnt = 0 for n in", "sys def expr(x): a = x / (x - 2018) b = (x", "= x / (x - 2018) b = (x - 500) / (x", "= a - b return c < 0 cnt = 0 for n", "/ (x - 2018) b = (x - 500) / (x - 2500)", "(x - 500) / (x - 2500) c = a - b return", "c < 0 cnt = 0 for n in range(1, sys.maxsize): # print(\"n", "cnt = 0 for n in range(1, sys.maxsize): # print(\"n = %d\" %", "= 0 for n in range(1, sys.maxsize): # print(\"n = %d\" % n)", "range(1, sys.maxsize): # print(\"n = %d\" % n) if (n == 2018) or", "a = x / (x - 2018) b = (x - 500) /", "500) / (x - 2500) c = a - b return c <", "- 2500) c = a - b return c < 0 cnt =", "return c < 0 cnt = 0 for n in range(1, sys.maxsize): #", "= (x - 500) / (x - 2500) c = a - b", "== 2500): continue if expr(n): cnt += 1 print(\"cnt = %d\" % cnt)", "(n == 2500): continue if expr(n): cnt += 1 print(\"cnt = %d\" %", "(x - 2500) c = a - b return c < 0 cnt", "import sys def expr(x): a = x / (x - 2018) b =", "= %d\" % n) if (n == 2018) or (n == 2500): continue", "(n == 2018) or (n == 2500): continue if expr(n): cnt += 1", "expr(x): a = x / (x - 2018) b = (x - 500)", "/ (x - 2500) c = a - b return c < 0", "0 cnt = 0 for n in range(1, sys.maxsize): # print(\"n = %d\"", "< 0 cnt = 0 for n in range(1, sys.maxsize): # print(\"n =", "n) if (n == 2018) or (n == 2500): continue if expr(n): cnt", "(x - 2018) b = (x - 500) / (x - 2500) c", "sys.maxsize): # print(\"n = %d\" % n) if (n == 2018) or (n", "2018) or (n == 2500): continue if expr(n): cnt += 1 print(\"cnt =", "%d\" % n) if (n == 2018) or (n == 2500): continue if", "<reponame>gusenov/code-stepik-org-entrance-exam<gh_stars>1-10 import sys def 
expr(x): a = x / (x - 2018) b", "== 2018) or (n == 2500): continue if expr(n): cnt += 1 print(\"cnt", "2500) c = a - b return c < 0 cnt = 0", "print(\"n = %d\" % n) if (n == 2018) or (n == 2500):", "- 500) / (x - 2500) c = a - b return c", "c = a - b return c < 0 cnt = 0 for", "def expr(x): a = x / (x - 2018) b = (x -", "2018) b = (x - 500) / (x - 2500) c = a", "b = (x - 500) / (x - 2500) c = a -", "0 for n in range(1, sys.maxsize): # print(\"n = %d\" % n) if", "- 2018) b = (x - 500) / (x - 2500) c =", "if (n == 2018) or (n == 2500): continue if expr(n): cnt +=", "x / (x - 2018) b = (x - 500) / (x -", "in range(1, sys.maxsize): # print(\"n = %d\" % n) if (n == 2018)", "% n) if (n == 2018) or (n == 2500): continue if expr(n):" ]
[ "@dataclass class HospitalCensusData: hospital_census: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return", "Dict[str, pd.Series]: return utilities.asdict(self) def to_df(self): return pd.concat([v.rename(k) for k, v in self.to_dict().items()],", "Dict, List, Iterator, Tuple, Union import pandas as pd from covid_model_seiir_pipeline.lib import (", "for regression data.\"\"\" from dataclasses import dataclass from typing import Dict, List, Iterator,", "class HospitalCensusData: hospital_census: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self)", "Tuple, Union import pandas as pd from covid_model_seiir_pipeline.lib import ( utilities, ) @dataclass", "pd.Series idr: pd.Series def to_dict(self) -> Dict[str, Union[int, pd.Series]]: return utilities.asdict(self) @dataclass class", "v in self.to_dict().items()], axis=1) @dataclass class HospitalMetrics: hospital_admissions: pd.Series hospital_census: pd.Series icu_admissions: pd.Series", "dataclass from typing import Dict, List, Iterator, Tuple, Union import pandas as pd", "v in self.to_dict().items()], axis=1) @dataclass class HospitalCorrectionFactors: hospital_census: pd.Series icu_census: pd.Series def to_dict(self)", "return pd.concat([v.rename(k) for k, v in self.to_dict().items()], axis=1) @dataclass class HospitalMetrics: hospital_admissions: pd.Series", "infection_to_admission: int infection_to_case: int ifr: pd.Series ifr_hr: pd.Series ifr_lr: pd.Series ihr: pd.Series idr:", "HospitalCorrectionFactors: hospital_census: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self) def", "Iterator, Tuple, Union import pandas as pd from covid_model_seiir_pipeline.lib import ( utilities, )", "Union import pandas as pd from covid_model_seiir_pipeline.lib import ( utilities, ) @dataclass class", "import Dict, List, Iterator, Tuple, Union import pandas as pd from 
covid_model_seiir_pipeline.lib import", "pd.Series def to_dict(self) -> Dict[str, Union[int, pd.Series]]: return utilities.asdict(self) @dataclass class HospitalCensusData: hospital_census:", "self.to_dict().items()], axis=1) @dataclass class HospitalMetrics: hospital_admissions: pd.Series hospital_census: pd.Series icu_admissions: pd.Series icu_census: pd.Series", "infection_to_case: int ifr: pd.Series ifr_hr: pd.Series ifr_lr: pd.Series ihr: pd.Series idr: pd.Series def", "axis=1) @dataclass class HospitalCorrectionFactors: hospital_census: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]:", "utilities.asdict(self) @dataclass class HospitalCensusData: hospital_census: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]:", "-> Dict[str, pd.Series]: return utilities.asdict(self) def to_df(self): return pd.concat([v.rename(k) for k, v in", "from dataclasses import dataclass from typing import Dict, List, Iterator, Tuple, Union import", "for k, v in self.to_dict().items()], axis=1) @dataclass class HospitalMetrics: hospital_admissions: pd.Series hospital_census: pd.Series", "class RatioData: infection_to_death: int infection_to_admission: int infection_to_case: int ifr: pd.Series ifr_hr: pd.Series ifr_lr:", "List, Iterator, Tuple, Union import pandas as pd from covid_model_seiir_pipeline.lib import ( utilities,", "data.\"\"\" from dataclasses import dataclass from typing import Dict, List, Iterator, Tuple, Union", "( utilities, ) @dataclass class RatioData: infection_to_death: int infection_to_admission: int infection_to_case: int ifr:", "import pandas as pd from covid_model_seiir_pipeline.lib import ( utilities, ) @dataclass class RatioData:", "RatioData: infection_to_death: int infection_to_admission: int infection_to_case: int ifr: pd.Series ifr_hr: pd.Series ifr_lr: pd.Series", "utilities, ) @dataclass class RatioData: infection_to_death: int infection_to_admission: int infection_to_case: int ifr: pd.Series", 
"pd.Series ihr: pd.Series idr: pd.Series def to_dict(self) -> Dict[str, Union[int, pd.Series]]: return utilities.asdict(self)", "icu_admissions: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self) def to_df(self):", "ihr: pd.Series idr: pd.Series def to_dict(self) -> Dict[str, Union[int, pd.Series]]: return utilities.asdict(self) @dataclass", "Union[int, pd.Series]]: return utilities.asdict(self) @dataclass class HospitalCensusData: hospital_census: pd.Series icu_census: pd.Series def to_dict(self)", "<gh_stars>0 \"\"\"Containers for regression data.\"\"\" from dataclasses import dataclass from typing import Dict,", "pd from covid_model_seiir_pipeline.lib import ( utilities, ) @dataclass class RatioData: infection_to_death: int infection_to_admission:", "from covid_model_seiir_pipeline.lib import ( utilities, ) @dataclass class RatioData: infection_to_death: int infection_to_admission: int", "pd.concat([v.rename(k) for k, v in self.to_dict().items()], axis=1) @dataclass class HospitalCorrectionFactors: hospital_census: pd.Series icu_census:", "return pd.concat([v.rename(k) for k, v in self.to_dict().items()], axis=1) @dataclass class HospitalCorrectionFactors: hospital_census: pd.Series", "HospitalMetrics: hospital_admissions: pd.Series hospital_census: pd.Series icu_admissions: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str,", "int ifr: pd.Series ifr_hr: pd.Series ifr_lr: pd.Series ihr: pd.Series idr: pd.Series def to_dict(self)", "ifr_hr: pd.Series ifr_lr: pd.Series ihr: pd.Series idr: pd.Series def to_dict(self) -> Dict[str, Union[int,", "def to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self) def to_df(self): return pd.concat([v.rename(k) for k,", "in self.to_dict().items()], axis=1) @dataclass class HospitalMetrics: hospital_admissions: pd.Series hospital_census: pd.Series icu_admissions: pd.Series icu_census:", "hospital_census: pd.Series icu_census: pd.Series def 
to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self) def to_df(self):", "to_df(self): return pd.concat([v.rename(k) for k, v in self.to_dict().items()], axis=1) @dataclass class HospitalCorrectionFactors: hospital_census:", "pandas as pd from covid_model_seiir_pipeline.lib import ( utilities, ) @dataclass class RatioData: infection_to_death:", "class HospitalMetrics: hospital_admissions: pd.Series hospital_census: pd.Series icu_admissions: pd.Series icu_census: pd.Series def to_dict(self) ->", "for k, v in self.to_dict().items()], axis=1) @dataclass class HospitalCorrectionFactors: hospital_census: pd.Series icu_census: pd.Series", "int infection_to_admission: int infection_to_case: int ifr: pd.Series ifr_hr: pd.Series ifr_lr: pd.Series ihr: pd.Series", "import ( utilities, ) @dataclass class RatioData: infection_to_death: int infection_to_admission: int infection_to_case: int", "pd.concat([v.rename(k) for k, v in self.to_dict().items()], axis=1) @dataclass class HospitalMetrics: hospital_admissions: pd.Series hospital_census:", "regression data.\"\"\" from dataclasses import dataclass from typing import Dict, List, Iterator, Tuple,", "covid_model_seiir_pipeline.lib import ( utilities, ) @dataclass class RatioData: infection_to_death: int infection_to_admission: int infection_to_case:", "self.to_dict().items()], axis=1) @dataclass class HospitalCorrectionFactors: hospital_census: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str,", "Dict[str, Union[int, pd.Series]]: return utilities.asdict(self) @dataclass class HospitalCensusData: hospital_census: pd.Series icu_census: pd.Series def", "from typing import Dict, List, Iterator, Tuple, Union import pandas as pd from", ") @dataclass class RatioData: infection_to_death: int infection_to_admission: int infection_to_case: int ifr: pd.Series ifr_hr:", "pd.Series icu_admissions: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self) def", 
"pd.Series hospital_census: pd.Series icu_admissions: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return", "HospitalCensusData: hospital_census: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self) def", "as pd from covid_model_seiir_pipeline.lib import ( utilities, ) @dataclass class RatioData: infection_to_death: int", "@dataclass class RatioData: infection_to_death: int infection_to_admission: int infection_to_case: int ifr: pd.Series ifr_hr: pd.Series", "pd.Series]]: return utilities.asdict(self) @dataclass class HospitalCensusData: hospital_census: pd.Series icu_census: pd.Series def to_dict(self) ->", "pd.Series ifr_hr: pd.Series ifr_lr: pd.Series ihr: pd.Series idr: pd.Series def to_dict(self) -> Dict[str,", "infection_to_death: int infection_to_admission: int infection_to_case: int ifr: pd.Series ifr_hr: pd.Series ifr_lr: pd.Series ihr:", "def to_df(self): return pd.concat([v.rename(k) for k, v in self.to_dict().items()], axis=1) @dataclass class HospitalCorrectionFactors:", "\"\"\"Containers for regression data.\"\"\" from dataclasses import dataclass from typing import Dict, List,", "@dataclass class HospitalCorrectionFactors: hospital_census: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return", "pd.Series ifr_lr: pd.Series ihr: pd.Series idr: pd.Series def to_dict(self) -> Dict[str, Union[int, pd.Series]]:", "utilities.asdict(self) def to_df(self): return pd.concat([v.rename(k) for k, v in self.to_dict().items()], axis=1) @dataclass class", "typing import Dict, List, Iterator, Tuple, Union import pandas as pd from covid_model_seiir_pipeline.lib", "pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self) def to_df(self): return", "ifr: pd.Series ifr_hr: pd.Series ifr_lr: pd.Series ihr: pd.Series idr: pd.Series def to_dict(self) ->", "class HospitalCorrectionFactors: hospital_census: pd.Series 
icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self)", "k, v in self.to_dict().items()], axis=1) @dataclass class HospitalMetrics: hospital_admissions: pd.Series hospital_census: pd.Series icu_admissions:", "k, v in self.to_dict().items()], axis=1) @dataclass class HospitalCorrectionFactors: hospital_census: pd.Series icu_census: pd.Series def", "pd.Series]: return utilities.asdict(self) def to_df(self): return pd.concat([v.rename(k) for k, v in self.to_dict().items()], axis=1)", "hospital_admissions: pd.Series hospital_census: pd.Series icu_admissions: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]:", "-> Dict[str, Union[int, pd.Series]]: return utilities.asdict(self) @dataclass class HospitalCensusData: hospital_census: pd.Series icu_census: pd.Series", "to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self) def to_df(self): return pd.concat([v.rename(k) for k, v", "to_dict(self) -> Dict[str, Union[int, pd.Series]]: return utilities.asdict(self) @dataclass class HospitalCensusData: hospital_census: pd.Series icu_census:", "pd.Series def to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self) def to_df(self): return pd.concat([v.rename(k) for", "to_df(self): return pd.concat([v.rename(k) for k, v in self.to_dict().items()], axis=1) @dataclass class HospitalMetrics: hospital_admissions:", "import dataclass from typing import Dict, List, Iterator, Tuple, Union import pandas as", "def to_dict(self) -> Dict[str, Union[int, pd.Series]]: return utilities.asdict(self) @dataclass class HospitalCensusData: hospital_census: pd.Series", "def to_df(self): return pd.concat([v.rename(k) for k, v in self.to_dict().items()], axis=1) @dataclass class HospitalMetrics:", "return utilities.asdict(self) @dataclass class HospitalCensusData: hospital_census: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str,", "axis=1) @dataclass class HospitalMetrics: hospital_admissions: 
pd.Series hospital_census: pd.Series icu_admissions: pd.Series icu_census: pd.Series def", "idr: pd.Series def to_dict(self) -> Dict[str, Union[int, pd.Series]]: return utilities.asdict(self) @dataclass class HospitalCensusData:", "hospital_census: pd.Series icu_admissions: pd.Series icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self)", "in self.to_dict().items()], axis=1) @dataclass class HospitalCorrectionFactors: hospital_census: pd.Series icu_census: pd.Series def to_dict(self) ->", "@dataclass class HospitalMetrics: hospital_admissions: pd.Series hospital_census: pd.Series icu_admissions: pd.Series icu_census: pd.Series def to_dict(self)", "ifr_lr: pd.Series ihr: pd.Series idr: pd.Series def to_dict(self) -> Dict[str, Union[int, pd.Series]]: return", "return utilities.asdict(self) def to_df(self): return pd.concat([v.rename(k) for k, v in self.to_dict().items()], axis=1) @dataclass", "int infection_to_case: int ifr: pd.Series ifr_hr: pd.Series ifr_lr: pd.Series ihr: pd.Series idr: pd.Series", "dataclasses import dataclass from typing import Dict, List, Iterator, Tuple, Union import pandas", "icu_census: pd.Series def to_dict(self) -> Dict[str, pd.Series]: return utilities.asdict(self) def to_df(self): return pd.concat([v.rename(k)" ]
[ "True g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data': [0, 2, 1,", "[0, 2, 1, 5, 4], 'title': 'Male'}) return g def sample_Pie(): g =", "\"8/11/04\", \"Psychology 101\", \"6/28/04\", \"8/9/04\", \"Acting 105\", \"7/7/04\", \"8/16/04\", ] g = schedule.Schedule(", "21], 'title': 'series 1', } ) return g def generate_samples(): yield 'Plot', sample_Plot()", "sample_Line() class SampleBar: fields = ['Internet', 'TV', 'Newspaper', 'Magazine', 'Radio'] @classmethod def vertical(cls):", "'min_x_value': 0, 'min_y_value': 0, 'area_fill': True, 'stagger_x_labels': True, 'stagger_y_labels': True, 'show_x_guidelines': True, }", "a .text attribute will do; # we like namedtuple(). from collections import namedtuple", "area_fill=True, width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data({'data': [-2, 3,", "horizontal(cls): g = bar.HorizontalBar(cls.fields) g.stack = 'side' g.scale_integers = True g.width, g.height =", "{ 'data': [ Datum(8.24, 80.85, 'ES'), Datum(0.17, 6.73, 'IE'), Datum(0, 0, 'IS'), ],", "namedtuple(\"Datum\", \"x y text\") g.add_data( { 'data': [ Datum(8.24, 80.85, 'ES'), Datum(0.17, 6.73,", "'Newspaper', 'Magazine', 'Radio'] @classmethod def vertical(cls): g = bar.VerticalBar(cls.fields) g.stack = 'side' g.scale_integers", "generate_samples(): res = sample.burn() with open(os.path.join(root, sample_name + '.py.svg'), 'w') as f: f.write(res)", "'title': 'Male'}) return g def sample_Schedule(): title = \"Billy's Schedule\" data1 = [", "sample_Plot() yield 'PlotTextLabels', sample_PlotTextLabels() yield 'TimeSeries', sample_TimeSeries() yield 'VerticalBar', SampleBar.vertical() yield 'HorizontalBar', SampleBar.horizontal()", "30], [3, 45]], 'title': 'series 1'}) g.add_data({'data': [[1, 30], [2, 31], [3, 40]],", "scale_integers=True, stack='side', width=640, height=480, graph_title='Question 8', show_graph_title=True, no_css=False, ) 
g.__dict__.update(options) g.add_data(dict(data=[2, 22, 98,", "sample in generate_samples(): res = sample.burn() with open(os.path.join(root, sample_name + '.py.svg'), 'w') as", "return g def sample_Schedule(): title = \"Billy's Schedule\" data1 = [ \"History 107\",", "= os.path.dirname(__file__) for sample_name, sample in generate_samples(): res = sample.burn() with open(os.path.join(root, sample_name", "0, 'area_fill': True, 'stagger_x_labels': True, 'stagger_y_labels': True, 'show_x_guidelines': True, } ) g.add_data({'data': [[1,", "2015 # Any object with a .text attribute will do; # we like", "82], title='intermediate')) g.add_data(dict(data=[2, 26, 106, 193, 105], title='old')) return g @classmethod def vertical_top(cls):", "g.add_data( { 'data': [ Datum(8.24, 80.85, 'ES'), Datum(0.17, 6.73, 'IE'), Datum(0, 0, 'IS'),", "= dict( scale_integers=True, area_fill=True, width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', show_graph_title=True, no_css=False, ) g.__dict__.update(options)", "width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1,", "\"6/2/04\", \"8/11/04\", \"Psychology 101\", \"6/28/04\", \"8/9/04\", \"Acting 105\", \"7/7/04\", \"8/16/04\", ] g =", "show_graph_title=True, key=False, scale_x_integers=True, scale_y_integers=True, show_data_labels=True, show_y_guidelines=False, show_x_guidelines=True, # show_x_title=True, # not yet implemented", "'4 hours' g.stagger_x_labels = True g.x_label_format = '%d-%b %H:%M' # g.max_y_value = 200", "def sample_Line(): g = line.Line() options = dict( scale_integers=True, area_fill=True, width=640, height=480, fields=SampleBar.fields,", "= \"Billy's Schedule\" data1 = [ \"History 107\", \"5/19/04\", \"6/30/04\", \"Algebra 011\", \"6/2/04\",", "3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data': [0, 2, 1, 5, 4], 'title':", "g = bar.VerticalBar(cls.fields) g.stack = 'side' 
g.scale_integers = True g.width, g.height = 640,", "of the various charts. Run this script to generate the reference samples. \"\"\"", "[3, 10.5]], 'title': 'series 3'}) return g def sample_PlotTextLabels(): g = Plot( {", "g.max_y_value = 200 g.add_data( { 'data': ['2005-12-21T00:00:00', 20, '2005-12-22T00:00:00', 21], 'title': 'series 1',", "SampleBar.horizontal() yield 'VerticalBarLarge', SampleBar.vertical_large() yield 'VerticalBarStackTop', SampleBar.vertical_top() yield 'Pie', sample_Pie() yield 'Schedule', sample_Schedule()", "script to generate the reference samples. \"\"\" import os from svg.charts.plot import Plot", "['2005-12-21T00:00:00', 20, '2005-12-22T00:00:00', 21], 'title': 'series 1', } ) return g def generate_samples():", "'title': 'Male'}) return g def sample_Pie(): g = pie.Pie({}) options = dict( width=640,", "20, '2005-12-22T00:00:00', 21], 'title': 'series 1', } ) return g def generate_samples(): yield", "45]], 'title': 'series 1'}) g.add_data({'data': [[1, 30], [2, 31], [3, 40]], 'title': 'series", "def sample_Pie(): g = pie.Pie({}) options = dict( width=640, height=480, fields=SampleBar.fields, graph_title='Question 7',", "= 200 g.add_data( { 'data': ['2005-12-21T00:00:00', 20, '2005-12-22T00:00:00', 21], 'title': 'series 1', }", "True g.width, g.height = 640, 480 g.graph_title = 'Question 7' g.show_graph_title = True", "return g @classmethod def vertical_large(cls): g = bar.VerticalBar(cls.fields) options = dict( scale_integers=True, stack='side',", "g def generate_samples(): yield 'Plot', sample_Plot() yield 'PlotTextLabels', sample_PlotTextLabels() yield 'TimeSeries', sample_TimeSeries() yield", "= bar.HorizontalBar(cls.fields) g.stack = 'side' g.scale_integers = True g.width, g.height = 640, 480", "with a .text attribute will do; # we like namedtuple(). 
from collections import", "bar.VerticalBar(cls.fields, dict(stack='top')) assert g.stack == 'top' g.scale_integers = True g.width, g.height = 640,", "g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data': [0, 2, 1,", "'data': ['2005-12-21T00:00:00', 20, '2005-12-22T00:00:00', 21], 'title': 'series 1', } ) return g def", "105\", \"7/7/04\", \"8/16/04\", ] g = schedule.Schedule( dict( width=640, height=480, graph_title=title, show_graph_title=True, key=False,", "'Question 7' g.show_graph_title = True g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'})", "hours' g.stagger_x_labels = True g.x_label_format = '%d-%b %H:%M' # g.max_y_value = 200 g.add_data(", "640, 480 g.graph_title = 'Question 7' g.show_graph_title = True g.add_data({'data': [-2, 3, 1,", "svg.charts.plot import Plot from svg.charts import bar from svg.charts import time_series from svg.charts", "'Pie', sample_Pie() yield 'Schedule', sample_Schedule() yield 'Line', sample_Line() class SampleBar: fields = ['Internet',", "31], [3, 40]], 'title': 'series 2'}) g.add_data({'data': [[0.5, 35], [1, 20], [3, 10.5]],", "'title': 'Male'}) return g @classmethod def vertical_large(cls): g = bar.VerticalBar(cls.fields) options = dict(", "@classmethod def vertical(cls): g = bar.VerticalBar(cls.fields) g.stack = 'side' g.scale_integers = True g.width,", "g.scale_integers = True g.width, g.height = 640, 480 g.graph_title = 'Question 7' g.show_graph_title", "samples. 
\"\"\" import os from svg.charts.plot import Plot from svg.charts import bar from", "scale_integers=True, area_fill=True, width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data({'data': [-2,", "data1 = [ \"History 107\", \"5/19/04\", \"6/30/04\", \"Algebra 011\", \"6/2/04\", \"8/11/04\", \"Psychology 101\",", "# Processed Apple production 2015 # Any object with a .text attribute will", "g = pie.Pie({}) options = dict( width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', expand_greatest=True, show_data_labels=True,", "'title': 'series 1', } ) return g def generate_samples(): yield 'Plot', sample_Plot() yield", "= True g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data': [0, 2,", "show_x_title=True, # not yet implemented x_title=\"Time\", show_y_title=False, rotate_x_labels=True, rotate_y_labels=False, x_label_format=\"%m/%d\", timescale_divisions=\"1 week\", popup_format=\"%m/%d/%y\",", "yield 'VerticalBarLarge', SampleBar.vertical_large() yield 'VerticalBarStackTop', SampleBar.vertical_top() yield 'Pie', sample_Pie() yield 'Schedule', sample_Schedule() yield", "yield 'Pie', sample_Pie() yield 'Schedule', sample_Schedule() yield 'Line', sample_Line() class SampleBar: fields =", "4], 'title': 'Male'}) return g def sample_Schedule(): title = \"Billy's Schedule\" data1 =", "height=480, graph_title=title, show_graph_title=True, key=False, scale_x_integers=True, scale_y_integers=True, show_data_labels=True, show_y_guidelines=False, show_x_guidelines=True, # show_x_title=True, # not", "yield 'VerticalBar', SampleBar.vertical() yield 'HorizontalBar', SampleBar.horizontal() yield 'VerticalBarLarge', SampleBar.vertical_large() yield 'VerticalBarStackTop', SampleBar.vertical_top() yield", "width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', expand_greatest=True, show_data_labels=True, ) g.__dict__.update(options) 
g.add_data({'data': [-2, 3, 1,", "80.85, 'ES'), Datum(0.17, 6.73, 'IE'), Datum(0, 0, 'IS'), ], 'title': 'Processed Apple', }", "week\", popup_format=\"%m/%d/%y\", area_fill=True, min_y_value=0, ) ) g.add_data(dict(data=data1, title=\"Data\")) return g def save_samples(): root", "g = bar.VerticalBar(cls.fields) options = dict( scale_integers=True, stack='side', width=640, height=480, graph_title='Question 8', show_graph_title=True,", "import namedtuple Datum = namedtuple(\"Datum\", \"x y text\") g.add_data( { 'data': [ Datum(8.24,", "def vertical_top(cls): g = bar.VerticalBar(cls.fields, dict(stack='top')) assert g.stack == 'top' g.scale_integers = True", "1'}) g.add_data({'data': [[1, 30], [2, 31], [3, 40]], 'title': 'series 2'}) g.add_data({'data': [[0.5,", "Plot( { 'min_x_value': 0, 'min_y_value': 0, 'area_fill': True, 'stagger_x_labels': True, 'stagger_y_labels': True, 'show_x_guidelines':", "[0, 2, 1, 5, 4], 'title': 'Male'}) return g def sample_Schedule(): title =", "def vertical(cls): g = bar.VerticalBar(cls.fields) g.stack = 'side' g.scale_integers = True g.width, g.height", "show_data_labels=True, show_y_guidelines=False, show_x_guidelines=True, # show_x_title=True, # not yet implemented x_title=\"Time\", show_y_title=False, rotate_x_labels=True, rotate_y_labels=False,", "22, 98, 143, 82], title='intermediate')) g.add_data(dict(data=[2, 26, 106, 193, 105], title='old')) return g", "35], [1, 20], [3, 10.5]], 'title': 'series 3'}) return g def sample_PlotTextLabels(): g", "'series 3'}) return g def sample_PlotTextLabels(): g = Plot( { 'draw_lines_between_points': False, 'min_x_value':", "98, 143, 82], title='intermediate')) g.add_data(dict(data=[2, 26, 106, 193, 105], title='old')) return g @classmethod", "width=640, height=480, graph_title=title, show_graph_title=True, key=False, scale_x_integers=True, scale_y_integers=True, show_data_labels=True, show_y_guidelines=False, show_x_guidelines=True, # show_x_title=True, #", "Datum(8.24, 80.85, 'ES'), Datum(0.17, 
6.73, 'IE'), Datum(0, 0, 'IS'), ], 'title': 'Processed Apple',", "'TimeSeries', sample_TimeSeries() yield 'VerticalBar', SampleBar.vertical() yield 'HorizontalBar', SampleBar.horizontal() yield 'VerticalBarLarge', SampleBar.vertical_large() yield 'VerticalBarStackTop',", "return g def save_samples(): root = os.path.dirname(__file__) for sample_name, sample in generate_samples(): res", "'title': 'Female'}) g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g @classmethod", "# g.max_y_value = 200 g.add_data( { 'data': ['2005-12-21T00:00:00', 20, '2005-12-22T00:00:00', 21], 'title': 'series", "'title': 'Male'}) return g @classmethod def horizontal(cls): g = bar.HorizontalBar(cls.fields) g.stack = 'side'", "480 g.graph_title = 'Question 7' g.show_graph_title = True g.add_data({'data': [-2, 3, 1, 3,", "various charts. Run this script to generate the reference samples. \"\"\" import os", "production 2015 # Any object with a .text attribute will do; # we", "SampleBar: fields = ['Internet', 'TV', 'Newspaper', 'Magazine', 'Radio'] @classmethod def vertical(cls): g =", "40]], 'title': 'series 2'}) g.add_data({'data': [[0.5, 35], [1, 20], [3, 10.5]], 'title': 'series", "from svg.charts import time_series from svg.charts import pie from svg.charts import schedule from", "1, 3, 1], 'title': 'Female'}) g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'})", "[0, 2, 1, 5, 4], 'title': 'Male'}) return g @classmethod def horizontal(cls): g", "bar.VerticalBar(cls.fields) options = dict( scale_integers=True, stack='side', width=640, height=480, graph_title='Question 8', show_graph_title=True, no_css=False, )", "} ) g.add_data({'data': [[1, 25], [2, 30], [3, 45]], 'title': 'series 1'}) g.add_data({'data':", "'draw_lines_between_points': False, 'min_x_value': 0, 'min_y_value': 0, 'show_x_guidelines': True, } ) # Processed Apple", "True g.x_label_format = '%d-%b %H:%M' # g.max_y_value = 200 g.add_data( { 'data': ['2005-12-21T00:00:00',", "g.graph_title = 'Question 7' 
g.show_graph_title = True g.add_data({'data': [-2, 3, 1, 3, 1],", "= '4 hours' g.stagger_x_labels = True g.x_label_format = '%d-%b %H:%M' # g.max_y_value =", "4], 'title': 'Male'}) return g @classmethod def vertical_large(cls): g = bar.VerticalBar(cls.fields) options =", "[2, 30], [3, 45]], 'title': 'series 1'}) g.add_data({'data': [[1, 30], [2, 31], [3,", "Apple', } ) return g def sample_TimeSeries(): g = time_series.Plot({}) g.timescale_divisions = '4", "'Male'}) return g @classmethod def horizontal(cls): g = bar.HorizontalBar(cls.fields) g.stack = 'side' g.scale_integers", "line def sample_Plot(): g = Plot( { 'min_x_value': 0, 'min_y_value': 0, 'area_fill': True,", "def sample_TimeSeries(): g = time_series.Plot({}) g.timescale_divisions = '4 hours' g.stagger_x_labels = True g.x_label_format", "options = dict( scale_integers=True, stack='side', width=640, height=480, graph_title='Question 8', show_graph_title=True, no_css=False, ) g.__dict__.update(options)", "def sample_PlotTextLabels(): g = Plot( { 'draw_lines_between_points': False, 'min_x_value': 0, 'min_y_value': 0, 'show_x_guidelines':", "g @classmethod def vertical_top(cls): g = bar.VerticalBar(cls.fields, dict(stack='top')) assert g.stack == 'top' g.scale_integers", ") return g def generate_samples(): yield 'Plot', sample_Plot() yield 'PlotTextLabels', sample_PlotTextLabels() yield 'TimeSeries',", "'show_x_guidelines': True, } ) # Processed Apple production 2015 # Any object with", "'2005-12-22T00:00:00', 21], 'title': 'series 1', } ) return g def generate_samples(): yield 'Plot',", "sample_name, sample in generate_samples(): res = sample.burn() with open(os.path.join(root, sample_name + '.py.svg'), 'w')", "'min_x_value': 0, 'min_y_value': 0, 'show_x_guidelines': True, } ) # Processed Apple production 2015", "sample_Schedule() yield 'Line', sample_Line() class SampleBar: fields = ['Internet', 'TV', 'Newspaper', 'Magazine', 'Radio']", "= sample.burn() with open(os.path.join(root, sample_name + '.py.svg'), 
'w') as f: f.write(res) if __name__", "g @classmethod def horizontal(cls): g = bar.HorizontalBar(cls.fields) g.stack = 'side' g.scale_integers = True", "g.add_data( { 'data': ['2005-12-21T00:00:00', 20, '2005-12-22T00:00:00', 21], 'title': 'series 1', } ) return", "4], 'title': 'Male'}) return g def sample_Line(): g = line.Line() options = dict(", "popup_format=\"%m/%d/%y\", area_fill=True, min_y_value=0, ) ) g.add_data(dict(data=data1, title=\"Data\")) return g def save_samples(): root =", "g def sample_Pie(): g = pie.Pie({}) options = dict( width=640, height=480, fields=SampleBar.fields, graph_title='Question", "'Male'}) return g def sample_Pie(): g = pie.Pie({}) options = dict( width=640, height=480,", ") return g def sample_TimeSeries(): g = time_series.Plot({}) g.timescale_divisions = '4 hours' g.stagger_x_labels", "\"History 107\", \"5/19/04\", \"6/30/04\", \"Algebra 011\", \"6/2/04\", \"8/11/04\", \"Psychology 101\", \"6/28/04\", \"8/9/04\", \"Acting", "'Male'}) return g def sample_Line(): g = line.Line() options = dict( scale_integers=True, area_fill=True,", "return g def sample_Line(): g = line.Line() options = dict( scale_integers=True, area_fill=True, width=640,", "= schedule.Schedule( dict( width=640, height=480, graph_title=title, show_graph_title=True, key=False, scale_x_integers=True, scale_y_integers=True, show_data_labels=True, show_y_guidelines=False, show_x_guidelines=True,", ") g.add_data({'data': [[1, 25], [2, 30], [3, 45]], 'title': 'series 1'}) g.add_data({'data': [[1,", "def sample_Schedule(): title = \"Billy's Schedule\" data1 = [ \"History 107\", \"5/19/04\", \"6/30/04\",", "], 'title': 'Processed Apple', } ) return g def sample_TimeSeries(): g = time_series.Plot({})", "vertical_top(cls): g = bar.VerticalBar(cls.fields, dict(stack='top')) assert g.stack == 'top' g.scale_integers = True g.width,", "5, 4], 'title': 'Male'}) return g def sample_Line(): g = line.Line() options =", "105], title='old')) return g @classmethod def 
vertical_top(cls): g = bar.VerticalBar(cls.fields, dict(stack='top')) assert g.stack", "5, 4], 'title': 'Male'}) return g @classmethod def horizontal(cls): g = bar.HorizontalBar(cls.fields) g.stack", "= 'side' g.scale_integers = True g.width, g.height = 640, 480 g.graph_title = 'Question", "svg.charts import pie from svg.charts import schedule from svg.charts import line def sample_Plot():", "'VerticalBarStackTop', SampleBar.vertical_top() yield 'Pie', sample_Pie() yield 'Schedule', sample_Schedule() yield 'Line', sample_Line() class SampleBar:", "charts. Run this script to generate the reference samples. \"\"\" import os from", "root = os.path.dirname(__file__) for sample_name, sample in generate_samples(): res = sample.burn() with open(os.path.join(root,", "g = bar.HorizontalBar(cls.fields) g.stack = 'side' g.scale_integers = True g.width, g.height = 640,", "2, 1, 5, 4], 'title': 'Male'}) return g @classmethod def vertical_large(cls): g =", "'title': 'series 2'}) g.add_data({'data': [[0.5, 35], [1, 20], [3, 10.5]], 'title': 'series 3'})", "1, 5, 4], 'title': 'Male'}) return g def sample_Schedule(): title = \"Billy's Schedule\"", "dict( width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', expand_greatest=True, show_data_labels=True, ) g.__dict__.update(options) g.add_data({'data': [-2, 3,", "0, 'min_y_value': 0, 'show_x_guidelines': True, } ) # Processed Apple production 2015 #", "# Any object with a .text attribute will do; # we like namedtuple().", "sample_TimeSeries(): g = time_series.Plot({}) g.timescale_divisions = '4 hours' g.stagger_x_labels = True g.x_label_format =", "'Processed Apple', } ) return g def sample_TimeSeries(): g = time_series.Plot({}) g.timescale_divisions =", "1, 5, 4], 'title': 'Male'}) return g @classmethod def horizontal(cls): g = bar.HorizontalBar(cls.fields)", "\"Acting 105\", \"7/7/04\", \"8/16/04\", ] g = schedule.Schedule( dict( width=640, height=480, graph_title=title, show_graph_title=True,", "in 
generate_samples(): res = sample.burn() with open(os.path.join(root, sample_name + '.py.svg'), 'w') as f:", "True, 'stagger_y_labels': True, 'show_x_guidelines': True, } ) g.add_data({'data': [[1, 25], [2, 30], [3,", "sample.burn() with open(os.path.join(root, sample_name + '.py.svg'), 'w') as f: f.write(res) if __name__ ==", "height=480, graph_title='Question 8', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data(dict(data=[2, 22, 98, 143, 82], title='intermediate'))", "[3, 45]], 'title': 'series 1'}) g.add_data({'data': [[1, 30], [2, 31], [3, 40]], 'title':", "3'}) return g def sample_PlotTextLabels(): g = Plot( { 'draw_lines_between_points': False, 'min_x_value': 0,", "20], [3, 10.5]], 'title': 'series 3'}) return g def sample_PlotTextLabels(): g = Plot(", "g def sample_TimeSeries(): g = time_series.Plot({}) g.timescale_divisions = '4 hours' g.stagger_x_labels = True", "1, 5, 4], 'title': 'Male'}) return g def sample_Pie(): g = pie.Pie({}) options", "yield 'TimeSeries', sample_TimeSeries() yield 'VerticalBar', SampleBar.vertical() yield 'HorizontalBar', SampleBar.horizontal() yield 'VerticalBarLarge', SampleBar.vertical_large() yield", "svg.charts import bar from svg.charts import time_series from svg.charts import pie from svg.charts", "sample_Plot(): g = Plot( { 'min_x_value': 0, 'min_y_value': 0, 'area_fill': True, 'stagger_x_labels': True,", "101\", \"6/28/04\", \"8/9/04\", \"Acting 105\", \"7/7/04\", \"8/16/04\", ] g = schedule.Schedule( dict( width=640,", "} ) return g def generate_samples(): yield 'Plot', sample_Plot() yield 'PlotTextLabels', sample_PlotTextLabels() yield", "reference samples. \"\"\" import os from svg.charts.plot import Plot from svg.charts import bar", "line.Line() options = dict( scale_integers=True, area_fill=True, width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', show_graph_title=True, no_css=False,", "Run this script to generate the reference samples. 
\"\"\" import os from svg.charts.plot", "x_label_format=\"%m/%d\", timescale_divisions=\"1 week\", popup_format=\"%m/%d/%y\", area_fill=True, min_y_value=0, ) ) g.add_data(dict(data=data1, title=\"Data\")) return g def", "True, 'stagger_x_labels': True, 'stagger_y_labels': True, 'show_x_guidelines': True, } ) g.add_data({'data': [[1, 25], [2,", "= bar.VerticalBar(cls.fields) g.stack = 'side' g.scale_integers = True g.width, g.height = 640, 480", "%H:%M' # g.max_y_value = 200 g.add_data( { 'data': ['2005-12-21T00:00:00', 20, '2005-12-22T00:00:00', 21], 'title':", "SampleBar.vertical() yield 'HorizontalBar', SampleBar.horizontal() yield 'VerticalBarLarge', SampleBar.vertical_large() yield 'VerticalBarStackTop', SampleBar.vertical_top() yield 'Pie', sample_Pie()", "['Internet', 'TV', 'Newspaper', 'Magazine', 'Radio'] @classmethod def vertical(cls): g = bar.VerticalBar(cls.fields) g.stack =", ".text attribute will do; # we like namedtuple(). from collections import namedtuple Datum", "min_y_value=0, ) ) g.add_data(dict(data=data1, title=\"Data\")) return g def save_samples(): root = os.path.dirname(__file__) for", "g = Plot( { 'min_x_value': 0, 'min_y_value': 0, 'area_fill': True, 'stagger_x_labels': True, 'stagger_y_labels':", "key=False, scale_x_integers=True, scale_y_integers=True, show_data_labels=True, show_y_guidelines=False, show_x_guidelines=True, # show_x_title=True, # not yet implemented x_title=\"Time\",", "'min_y_value': 0, 'area_fill': True, 'stagger_x_labels': True, 'stagger_y_labels': True, 'show_x_guidelines': True, } ) g.add_data({'data':", "bar from svg.charts import time_series from svg.charts import pie from svg.charts import schedule", "g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g def sample_Pie(): g", "= True g.width, g.height = 640, 480 g.graph_title = 'Question 7' g.show_graph_title =", "title = \"Billy's Schedule\" data1 = [ \"History 107\", \"5/19/04\", \"6/30/04\", \"Algebra 011\",", "[3, 40]], 'title': 'series 2'}) 
g.add_data({'data': [[0.5, 35], [1, 20], [3, 10.5]], 'title':", "show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data(dict(data=[2, 22, 98, 143, 82], title='intermediate')) g.add_data(dict(data=[2, 26, 106,", "\"8/16/04\", ] g = schedule.Schedule( dict( width=640, height=480, graph_title=title, show_graph_title=True, key=False, scale_x_integers=True, scale_y_integers=True,", "'side' g.scale_integers = True g.width, g.height = 640, 480 g.graph_title = 'Question 7'", "y text\") g.add_data( { 'data': [ Datum(8.24, 80.85, 'ES'), Datum(0.17, 6.73, 'IE'), Datum(0,", "\"6/28/04\", \"8/9/04\", \"Acting 105\", \"7/7/04\", \"8/16/04\", ] g = schedule.Schedule( dict( width=640, height=480,", "2'}) g.add_data({'data': [[0.5, 35], [1, 20], [3, 10.5]], 'title': 'series 3'}) return g", "g def save_samples(): root = os.path.dirname(__file__) for sample_name, sample in generate_samples(): res =", "'series 1'}) g.add_data({'data': [[1, 30], [2, 31], [3, 40]], 'title': 'series 2'}) g.add_data({'data':", "\"x y text\") g.add_data( { 'data': [ Datum(8.24, 80.85, 'ES'), Datum(0.17, 6.73, 'IE'),", "'VerticalBar', SampleBar.vertical() yield 'HorizontalBar', SampleBar.horizontal() yield 'VerticalBarLarge', SampleBar.vertical_large() yield 'VerticalBarStackTop', SampleBar.vertical_top() yield 'Pie',", "106, 193, 105], title='old')) return g @classmethod def vertical_top(cls): g = bar.VerticalBar(cls.fields, dict(stack='top'))", "g.stack = 'side' g.scale_integers = True g.width, g.height = 640, 480 g.graph_title =", "g = Plot( { 'draw_lines_between_points': False, 'min_x_value': 0, 'min_y_value': 0, 'show_x_guidelines': True, }", "'Female'}) g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g @classmethod def", "Datum(0.17, 6.73, 'IE'), Datum(0, 0, 'IS'), ], 'title': 'Processed Apple', } ) return", "\"6/30/04\", \"Algebra 011\", \"6/2/04\", \"8/11/04\", \"Psychology 101\", \"6/28/04\", \"8/9/04\", \"Acting 105\", \"7/7/04\", \"8/16/04\",", "{ 
'min_x_value': 0, 'min_y_value': 0, 'area_fill': True, 'stagger_x_labels': True, 'stagger_y_labels': True, 'show_x_guidelines': True,", "assert g.stack == 'top' g.scale_integers = True g.width, g.height = 640, 480 g.graph_title", "save_samples(): root = os.path.dirname(__file__) for sample_name, sample in generate_samples(): res = sample.burn() with", "show_data_labels=True, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data': [0,", "to generate the reference samples. \"\"\" import os from svg.charts.plot import Plot from", "generate_samples(): yield 'Plot', sample_Plot() yield 'PlotTextLabels', sample_PlotTextLabels() yield 'TimeSeries', sample_TimeSeries() yield 'VerticalBar', SampleBar.vertical()", "Any object with a .text attribute will do; # we like namedtuple(). from", "schedule.Schedule( dict( width=640, height=480, graph_title=title, show_graph_title=True, key=False, scale_x_integers=True, scale_y_integers=True, show_data_labels=True, show_y_guidelines=False, show_x_guidelines=True, #", "# we like namedtuple(). 
from collections import namedtuple Datum = namedtuple(\"Datum\", \"x y", "25], [2, 30], [3, 45]], 'title': 'series 1'}) g.add_data({'data': [[1, 30], [2, 31],", "'Radio'] @classmethod def vertical(cls): g = bar.VerticalBar(cls.fields) g.stack = 'side' g.scale_integers = True", "'title': 'series 1'}) g.add_data({'data': [[1, 30], [2, 31], [3, 40]], 'title': 'series 2'})", "[[0.5, 35], [1, 20], [3, 10.5]], 'title': 'series 3'}) return g def sample_PlotTextLabels():", "= [ \"History 107\", \"5/19/04\", \"6/30/04\", \"Algebra 011\", \"6/2/04\", \"8/11/04\", \"Psychology 101\", \"6/28/04\",", "= dict( width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', expand_greatest=True, show_data_labels=True, ) g.__dict__.update(options) g.add_data({'data': [-2,", "open(os.path.join(root, sample_name + '.py.svg'), 'w') as f: f.write(res) if __name__ == '__main__': save_samples()", "True, 'show_x_guidelines': True, } ) g.add_data({'data': [[1, 25], [2, 30], [3, 45]], 'title':", "\"8/9/04\", \"Acting 105\", \"7/7/04\", \"8/16/04\", ] g = schedule.Schedule( dict( width=640, height=480, graph_title=title,", "3, 1], 'title': 'Female'}) g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return", "Samples of the various charts. 
Run this script to generate the reference samples.", "title='intermediate')) g.add_data(dict(data=[2, 26, 106, 193, 105], title='old')) return g @classmethod def vertical_top(cls): g", "0, 'IS'), ], 'title': 'Processed Apple', } ) return g def sample_TimeSeries(): g", "no_css=False, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data': [0,", "sample_PlotTextLabels() yield 'TimeSeries', sample_TimeSeries() yield 'VerticalBar', SampleBar.vertical() yield 'HorizontalBar', SampleBar.horizontal() yield 'VerticalBarLarge', SampleBar.vertical_large()", "True, } ) # Processed Apple production 2015 # Any object with a", "g @classmethod def vertical_large(cls): g = bar.VerticalBar(cls.fields) options = dict( scale_integers=True, stack='side', width=640,", "bar.HorizontalBar(cls.fields) g.stack = 'side' g.scale_integers = True g.width, g.height = 640, 480 g.graph_title", "False, 'min_x_value': 0, 'min_y_value': 0, 'show_x_guidelines': True, } ) # Processed Apple production", "return g def sample_TimeSeries(): g = time_series.Plot({}) g.timescale_divisions = '4 hours' g.stagger_x_labels =", "= namedtuple(\"Datum\", \"x y text\") g.add_data( { 'data': [ Datum(8.24, 80.85, 'ES'), Datum(0.17,", "] g = schedule.Schedule( dict( width=640, height=480, graph_title=title, show_graph_title=True, key=False, scale_x_integers=True, scale_y_integers=True, show_data_labels=True,", "g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data': [0, 2, 1, 5,", "1], 'title': 'Female'}) g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g", "return g def generate_samples(): yield 'Plot', sample_Plot() yield 'PlotTextLabels', sample_PlotTextLabels() yield 'TimeSeries', sample_TimeSeries()", "svg.charts import line def sample_Plot(): g = Plot( { 'min_x_value': 0, 'min_y_value': 0,", "import bar from svg.charts import time_series from svg.charts import pie from svg.charts import", "= 'Question 7' g.show_graph_title = 
True g.add_data({'data': [-2, 3, 1, 3, 1], 'title':", "rotate_x_labels=True, rotate_y_labels=False, x_label_format=\"%m/%d\", timescale_divisions=\"1 week\", popup_format=\"%m/%d/%y\", area_fill=True, min_y_value=0, ) ) g.add_data(dict(data=data1, title=\"Data\")) return", "import os from svg.charts.plot import Plot from svg.charts import bar from svg.charts import", "namedtuple Datum = namedtuple(\"Datum\", \"x y text\") g.add_data( { 'data': [ Datum(8.24, 80.85,", "dict( width=640, height=480, graph_title=title, show_graph_title=True, key=False, scale_x_integers=True, scale_y_integers=True, show_data_labels=True, show_y_guidelines=False, show_x_guidelines=True, # show_x_title=True,", "svg.charts import time_series from svg.charts import pie from svg.charts import schedule from svg.charts", "'title': 'Female'}) g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g def", "import line def sample_Plot(): g = Plot( { 'min_x_value': 0, 'min_y_value': 0, 'area_fill':", "stack='side', width=640, height=480, graph_title='Question 8', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data(dict(data=[2, 22, 98, 143,", "'Line', sample_Line() class SampleBar: fields = ['Internet', 'TV', 'Newspaper', 'Magazine', 'Radio'] @classmethod def", "from svg.charts import bar from svg.charts import time_series from svg.charts import pie from", "g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g @classmethod def horizontal(cls):", "time_series from svg.charts import pie from svg.charts import schedule from svg.charts import line", "area_fill=True, min_y_value=0, ) ) g.add_data(dict(data=data1, title=\"Data\")) return g def save_samples(): root = os.path.dirname(__file__)", "= 640, 480 g.graph_title = 'Question 7' g.show_graph_title = True g.add_data({'data': [-2, 3,", "Datum(0, 0, 'IS'), ], 'title': 'Processed Apple', } ) return g def sample_TimeSeries():", "show_x_guidelines=True, # show_x_title=True, # not yet implemented x_title=\"Time\", 
show_y_title=False, rotate_x_labels=True, rotate_y_labels=False, x_label_format=\"%m/%d\", timescale_divisions=\"1", "yield 'Plot', sample_Plot() yield 'PlotTextLabels', sample_PlotTextLabels() yield 'TimeSeries', sample_TimeSeries() yield 'VerticalBar', SampleBar.vertical() yield", "yet implemented x_title=\"Time\", show_y_title=False, rotate_x_labels=True, rotate_y_labels=False, x_label_format=\"%m/%d\", timescale_divisions=\"1 week\", popup_format=\"%m/%d/%y\", area_fill=True, min_y_value=0, )", "'ES'), Datum(0.17, 6.73, 'IE'), Datum(0, 0, 'IS'), ], 'title': 'Processed Apple', } )", "\"Algebra 011\", \"6/2/04\", \"8/11/04\", \"Psychology 101\", \"6/28/04\", \"8/9/04\", \"Acting 105\", \"7/7/04\", \"8/16/04\", ]", "will do; # we like namedtuple(). from collections import namedtuple Datum = namedtuple(\"Datum\",", "'series 2'}) g.add_data({'data': [[0.5, 35], [1, 20], [3, 10.5]], 'title': 'series 3'}) return", "= bar.VerticalBar(cls.fields, dict(stack='top')) assert g.stack == 'top' g.scale_integers = True g.width, g.height =", "this script to generate the reference samples. 
\"\"\" import os from svg.charts.plot import", "1', } ) return g def generate_samples(): yield 'Plot', sample_Plot() yield 'PlotTextLabels', sample_PlotTextLabels()", "'Female'}) g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g def sample_Pie():", "'show_x_guidelines': True, } ) g.add_data({'data': [[1, 25], [2, 30], [3, 45]], 'title': 'series", "'Female'}) g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g def sample_Line():", "} ) # Processed Apple production 2015 # Any object with a .text", "height=480, fields=SampleBar.fields, graph_title='Question 7', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3,", "26, 106, 193, 105], title='old')) return g @classmethod def vertical_top(cls): g = bar.VerticalBar(cls.fields,", "g.add_data({'data': [[1, 25], [2, 30], [3, 45]], 'title': 'series 1'}) g.add_data({'data': [[1, 30],", "return g def sample_PlotTextLabels(): g = Plot( { 'draw_lines_between_points': False, 'min_x_value': 0, 'min_y_value':", "@classmethod def vertical_top(cls): g = bar.VerticalBar(cls.fields, dict(stack='top')) assert g.stack == 'top' g.scale_integers =", "g.add_data({'data': [[0.5, 35], [1, 20], [3, 10.5]], 'title': 'series 3'}) return g def", "7', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'})", "g def sample_Line(): g = line.Line() options = dict( scale_integers=True, area_fill=True, width=640, height=480,", "height=480, fields=SampleBar.fields, graph_title='Question 7', expand_greatest=True, show_data_labels=True, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3,", ") ) g.add_data(dict(data=data1, title=\"Data\")) return g def save_samples(): root = os.path.dirname(__file__) for sample_name,", "'Plot', sample_Plot() yield 'PlotTextLabels', sample_PlotTextLabels() yield 'TimeSeries', sample_TimeSeries() yield 'VerticalBar', SampleBar.vertical() yield 'HorizontalBar',", 
"g.__dict__.update(options) g.add_data(dict(data=[2, 22, 98, 143, 82], title='intermediate')) g.add_data(dict(data=[2, 26, 106, 193, 105], title='old'))", "[0, 2, 1, 5, 4], 'title': 'Male'}) return g def sample_Line(): g =", "\"5/19/04\", \"6/30/04\", \"Algebra 011\", \"6/2/04\", \"8/11/04\", \"Psychology 101\", \"6/28/04\", \"8/9/04\", \"Acting 105\", \"7/7/04\",", "[2, 31], [3, 40]], 'title': 'series 2'}) g.add_data({'data': [[0.5, 35], [1, 20], [3,", "g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g def sample_Line(): g", "schedule from svg.charts import line def sample_Plot(): g = Plot( { 'min_x_value': 0,", "text\") g.add_data( { 'data': [ Datum(8.24, 80.85, 'ES'), Datum(0.17, 6.73, 'IE'), Datum(0, 0,", "yield 'VerticalBarStackTop', SampleBar.vertical_top() yield 'Pie', sample_Pie() yield 'Schedule', sample_Schedule() yield 'Line', sample_Line() class", "options = dict( width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', expand_greatest=True, show_data_labels=True, ) g.__dict__.update(options) g.add_data({'data':", "vertical(cls): g = bar.VerticalBar(cls.fields) g.stack = 'side' g.scale_integers = True g.width, g.height =", ") g.__dict__.update(options) g.add_data(dict(data=[2, 22, 98, 143, 82], title='intermediate')) g.add_data(dict(data=[2, 26, 106, 193, 105],", "# show_x_title=True, # not yet implemented x_title=\"Time\", show_y_title=False, rotate_x_labels=True, rotate_y_labels=False, x_label_format=\"%m/%d\", timescale_divisions=\"1 week\",", "[1, 20], [3, 10.5]], 'title': 'series 3'}) return g def sample_PlotTextLabels(): g =", "'data': [ Datum(8.24, 80.85, 'ES'), Datum(0.17, 6.73, 'IE'), Datum(0, 0, 'IS'), ], 'title':", "'stagger_x_labels': True, 'stagger_y_labels': True, 'show_x_guidelines': True, } ) g.add_data({'data': [[1, 25], [2, 30],", "2, 1, 5, 4], 'title': 'Male'}) return g def sample_Schedule(): title = \"Billy's", "like namedtuple(). 
from collections import namedtuple Datum = namedtuple(\"Datum\", \"x y text\") g.add_data(", "'Schedule', sample_Schedule() yield 'Line', sample_Line() class SampleBar: fields = ['Internet', 'TV', 'Newspaper', 'Magazine',", "pie.Pie({}) options = dict( width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', expand_greatest=True, show_data_labels=True, ) g.__dict__.update(options)", "from svg.charts import schedule from svg.charts import line def sample_Plot(): g = Plot(", "'title': 'Processed Apple', } ) return g def sample_TimeSeries(): g = time_series.Plot({}) g.timescale_divisions", "[ \"History 107\", \"5/19/04\", \"6/30/04\", \"Algebra 011\", \"6/2/04\", \"8/11/04\", \"Psychology 101\", \"6/28/04\", \"8/9/04\",", "= Plot( { 'min_x_value': 0, 'min_y_value': 0, 'area_fill': True, 'stagger_x_labels': True, 'stagger_y_labels': True,", "g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g def sample_Schedule(): title", "Schedule\" data1 = [ \"History 107\", \"5/19/04\", \"6/30/04\", \"Algebra 011\", \"6/2/04\", \"8/11/04\", \"Psychology", "= bar.VerticalBar(cls.fields) options = dict( scale_integers=True, stack='side', width=640, height=480, graph_title='Question 8', show_graph_title=True, no_css=False,", "vertical_large(cls): g = bar.VerticalBar(cls.fields) options = dict( scale_integers=True, stack='side', width=640, height=480, graph_title='Question 8',", "\"Psychology 101\", \"6/28/04\", \"8/9/04\", \"Acting 105\", \"7/7/04\", \"8/16/04\", ] g = schedule.Schedule( dict(", "options = dict( scale_integers=True, area_fill=True, width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', show_graph_title=True, no_css=False, )", "yield 'PlotTextLabels', sample_PlotTextLabels() yield 'TimeSeries', sample_TimeSeries() yield 'VerticalBar', SampleBar.vertical() yield 'HorizontalBar', SampleBar.horizontal() yield", "g def sample_Schedule(): title = \"Billy's Schedule\" data1 = [ \"History 107\", \"5/19/04\",", 
"'stagger_y_labels': True, 'show_x_guidelines': True, } ) g.add_data({'data': [[1, 25], [2, 30], [3, 45]],", "def sample_Plot(): g = Plot( { 'min_x_value': 0, 'min_y_value': 0, 'area_fill': True, 'stagger_x_labels':", "g.stagger_x_labels = True g.x_label_format = '%d-%b %H:%M' # g.max_y_value = 200 g.add_data( {", "g = line.Line() options = dict( scale_integers=True, area_fill=True, width=640, height=480, fields=SampleBar.fields, graph_title='Question 7',", "Plot from svg.charts import bar from svg.charts import time_series from svg.charts import pie", "\"\"\" Samples of the various charts. Run this script to generate the reference", "not yet implemented x_title=\"Time\", show_y_title=False, rotate_x_labels=True, rotate_y_labels=False, x_label_format=\"%m/%d\", timescale_divisions=\"1 week\", popup_format=\"%m/%d/%y\", area_fill=True, min_y_value=0,", "= '%d-%b %H:%M' # g.max_y_value = 200 g.add_data( { 'data': ['2005-12-21T00:00:00', 20, '2005-12-22T00:00:00',", "'series 1', } ) return g def generate_samples(): yield 'Plot', sample_Plot() yield 'PlotTextLabels',", "1, 5, 4], 'title': 'Male'}) return g @classmethod def vertical_large(cls): g = bar.VerticalBar(cls.fields)", "the reference samples. 
\"\"\" import os from svg.charts.plot import Plot from svg.charts import", "True, } ) g.add_data({'data': [[1, 25], [2, 30], [3, 45]], 'title': 'series 1'})", "g.width, g.height = 640, 480 g.graph_title = 'Question 7' g.show_graph_title = True g.add_data({'data':", "g.add_data(dict(data=[2, 26, 106, 193, 105], title='old')) return g @classmethod def vertical_top(cls): g =", "'Magazine', 'Radio'] @classmethod def vertical(cls): g = bar.VerticalBar(cls.fields) g.stack = 'side' g.scale_integers =", ") g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data': [0, 2,", "[0, 2, 1, 5, 4], 'title': 'Male'}) return g @classmethod def vertical_large(cls): g", "expand_greatest=True, show_data_labels=True, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data':", "def horizontal(cls): g = bar.HorizontalBar(cls.fields) g.stack = 'side' g.scale_integers = True g.width, g.height", "def generate_samples(): yield 'Plot', sample_Plot() yield 'PlotTextLabels', sample_PlotTextLabels() yield 'TimeSeries', sample_TimeSeries() yield 'VerticalBar',", "svg.charts import schedule from svg.charts import line def sample_Plot(): g = Plot( {", "res = sample.burn() with open(os.path.join(root, sample_name + '.py.svg'), 'w') as f: f.write(res) if", "width=640, height=480, graph_title='Question 8', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data(dict(data=[2, 22, 98, 143, 82],", "sample_Schedule(): title = \"Billy's Schedule\" data1 = [ \"History 107\", \"5/19/04\", \"6/30/04\", \"Algebra", "} ) return g def sample_TimeSeries(): g = time_series.Plot({}) g.timescale_divisions = '4 hours'", "= Plot( { 'draw_lines_between_points': False, 'min_x_value': 0, 'min_y_value': 0, 'show_x_guidelines': True, } )", "def save_samples(): root = os.path.dirname(__file__) for sample_name, sample in generate_samples(): res = sample.burn()", "object with a .text attribute will do; # we like 
namedtuple(). from collections", "g.x_label_format = '%d-%b %H:%M' # g.max_y_value = 200 g.add_data( { 'data': ['2005-12-21T00:00:00', 20,", "g.add_data({'data': [[1, 30], [2, 31], [3, 40]], 'title': 'series 2'}) g.add_data({'data': [[0.5, 35],", "attribute will do; # we like namedtuple(). from collections import namedtuple Datum =", "pie from svg.charts import schedule from svg.charts import line def sample_Plot(): g =", "import pie from svg.charts import schedule from svg.charts import line def sample_Plot(): g", "10.5]], 'title': 'series 3'}) return g def sample_PlotTextLabels(): g = Plot( { 'draw_lines_between_points':", ") # Processed Apple production 2015 # Any object with a .text attribute", "g = schedule.Schedule( dict( width=640, height=480, graph_title=title, show_graph_title=True, key=False, scale_x_integers=True, scale_y_integers=True, show_data_labels=True, show_y_guidelines=False,", "with open(os.path.join(root, sample_name + '.py.svg'), 'w') as f: f.write(res) if __name__ == '__main__':", "title='old')) return g @classmethod def vertical_top(cls): g = bar.VerticalBar(cls.fields, dict(stack='top')) assert g.stack ==", "g = time_series.Plot({}) g.timescale_divisions = '4 hours' g.stagger_x_labels = True g.x_label_format = '%d-%b", "Processed Apple production 2015 # Any object with a .text attribute will do;", "graph_title='Question 7', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1], 'title':", "from collections import namedtuple Datum = namedtuple(\"Datum\", \"x y text\") g.add_data( { 'data':", "@classmethod def vertical_large(cls): g = bar.VerticalBar(cls.fields) options = dict( scale_integers=True, stack='side', width=640, height=480,", "5, 4], 'title': 'Male'}) return g def sample_Pie(): g = pie.Pie({}) options =", "== 'top' g.scale_integers = True g.width, g.height = 640, 480 g.graph_title = 'Question", "= pie.Pie({}) options = dict( width=640, height=480, fields=SampleBar.fields, 
graph_title='Question 7', expand_greatest=True, show_data_labels=True, )", "title=\"Data\")) return g def save_samples(): root = os.path.dirname(__file__) for sample_name, sample in generate_samples():", "yield 'HorizontalBar', SampleBar.horizontal() yield 'VerticalBarLarge', SampleBar.vertical_large() yield 'VerticalBarStackTop', SampleBar.vertical_top() yield 'Pie', sample_Pie() yield", "g def sample_PlotTextLabels(): g = Plot( { 'draw_lines_between_points': False, 'min_x_value': 0, 'min_y_value': 0,", "2, 1, 5, 4], 'title': 'Male'}) return g def sample_Line(): g = line.Line()", "\"Billy's Schedule\" data1 = [ \"History 107\", \"5/19/04\", \"6/30/04\", \"Algebra 011\", \"6/2/04\", \"8/11/04\",", "30], [2, 31], [3, 40]], 'title': 'series 2'}) g.add_data({'data': [[0.5, 35], [1, 20],", "os.path.dirname(__file__) for sample_name, sample in generate_samples(): res = sample.burn() with open(os.path.join(root, sample_name +", "g.stack == 'top' g.scale_integers = True g.width, g.height = 640, 480 g.graph_title =", "bar.VerticalBar(cls.fields) g.stack = 'side' g.scale_integers = True g.width, g.height = 640, 480 g.graph_title", "SampleBar.vertical_top() yield 'Pie', sample_Pie() yield 'Schedule', sample_Schedule() yield 'Line', sample_Line() class SampleBar: fields", "[[1, 30], [2, 31], [3, 40]], 'title': 'series 2'}) g.add_data({'data': [[0.5, 35], [1,", "g.show_graph_title = True g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data': [0,", "import time_series from svg.charts import pie from svg.charts import schedule from svg.charts import", "'Female'}) g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g def sample_Schedule():", "generate the reference samples. 
\"\"\" import os from svg.charts.plot import Plot from svg.charts", "[-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data': [0, 2, 1, 5, 4],", "sample_Pie() yield 'Schedule', sample_Schedule() yield 'Line', sample_Line() class SampleBar: fields = ['Internet', 'TV',", "8', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data(dict(data=[2, 22, 98, 143, 82], title='intermediate')) g.add_data(dict(data=[2, 26,", "from svg.charts import line def sample_Plot(): g = Plot( { 'min_x_value': 0, 'min_y_value':", "sample_Pie(): g = pie.Pie({}) options = dict( width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', expand_greatest=True,", "sample_Line(): g = line.Line() options = dict( scale_integers=True, area_fill=True, width=640, height=480, fields=SampleBar.fields, graph_title='Question", "show_y_guidelines=False, show_x_guidelines=True, # show_x_title=True, # not yet implemented x_title=\"Time\", show_y_title=False, rotate_x_labels=True, rotate_y_labels=False, x_label_format=\"%m/%d\",", "from svg.charts import pie from svg.charts import schedule from svg.charts import line def", "fields=SampleBar.fields, graph_title='Question 7', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1],", "fields = ['Internet', 'TV', 'Newspaper', 'Magazine', 'Radio'] @classmethod def vertical(cls): g = bar.VerticalBar(cls.fields)", "from svg.charts.plot import Plot from svg.charts import bar from svg.charts import time_series from", "'title': 'series 3'}) return g def sample_PlotTextLabels(): g = Plot( { 'draw_lines_between_points': False,", "'IS'), ], 'title': 'Processed Apple', } ) return g def sample_TimeSeries(): g =", "= ['Internet', 'TV', 'Newspaper', 'Magazine', 'Radio'] @classmethod def vertical(cls): g = bar.VerticalBar(cls.fields) g.stack", "'min_y_value': 0, 'show_x_guidelines': True, } ) # Processed Apple production 2015 # Any", "[ Datum(8.24, 80.85, 'ES'), Datum(0.17, 6.73, 'IE'), 
Datum(0, 0, 'IS'), ], 'title': 'Processed", "namedtuple(). from collections import namedtuple Datum = namedtuple(\"Datum\", \"x y text\") g.add_data( {", "= time_series.Plot({}) g.timescale_divisions = '4 hours' g.stagger_x_labels = True g.x_label_format = '%d-%b %H:%M'", "'IE'), Datum(0, 0, 'IS'), ], 'title': 'Processed Apple', } ) return g def", "4], 'title': 'Male'}) return g def sample_Pie(): g = pie.Pie({}) options = dict(", "'title': 'Male'}) return g def sample_Line(): g = line.Line() options = dict( scale_integers=True,", "os from svg.charts.plot import Plot from svg.charts import bar from svg.charts import time_series", "# not yet implemented x_title=\"Time\", show_y_title=False, rotate_x_labels=True, rotate_y_labels=False, x_label_format=\"%m/%d\", timescale_divisions=\"1 week\", popup_format=\"%m/%d/%y\", area_fill=True,", "timescale_divisions=\"1 week\", popup_format=\"%m/%d/%y\", area_fill=True, min_y_value=0, ) ) g.add_data(dict(data=data1, title=\"Data\")) return g def save_samples():", "x_title=\"Time\", show_y_title=False, rotate_x_labels=True, rotate_y_labels=False, x_label_format=\"%m/%d\", timescale_divisions=\"1 week\", popup_format=\"%m/%d/%y\", area_fill=True, min_y_value=0, ) ) g.add_data(dict(data=data1,", "107\", \"5/19/04\", \"6/30/04\", \"Algebra 011\", \"6/2/04\", \"8/11/04\", \"Psychology 101\", \"6/28/04\", \"8/9/04\", \"Acting 105\",", "'Male'}) return g @classmethod def vertical_large(cls): g = bar.VerticalBar(cls.fields) options = dict( scale_integers=True,", "import Plot from svg.charts import bar from svg.charts import time_series from svg.charts import", "dict(stack='top')) assert g.stack == 'top' g.scale_integers = True g.width, g.height = 640, 480", "graph_title=title, show_graph_title=True, key=False, scale_x_integers=True, scale_y_integers=True, show_data_labels=True, show_y_guidelines=False, show_x_guidelines=True, # show_x_title=True, # not yet", "{ 'data': ['2005-12-21T00:00:00', 20, '2005-12-22T00:00:00', 21], 'title': 
'series 1', } ) return g", "011\", \"6/2/04\", \"8/11/04\", \"Psychology 101\", \"6/28/04\", \"8/9/04\", \"Acting 105\", \"7/7/04\", \"8/16/04\", ] g", "sample_TimeSeries() yield 'VerticalBar', SampleBar.vertical() yield 'HorizontalBar', SampleBar.horizontal() yield 'VerticalBarLarge', SampleBar.vertical_large() yield 'VerticalBarStackTop', SampleBar.vertical_top()", "the various charts. Run this script to generate the reference samples. \"\"\" import", "'%d-%b %H:%M' # g.max_y_value = 200 g.add_data( { 'data': ['2005-12-21T00:00:00', 20, '2005-12-22T00:00:00', 21],", "g.height = 640, 480 g.graph_title = 'Question 7' g.show_graph_title = True g.add_data({'data': [-2,", "class SampleBar: fields = ['Internet', 'TV', 'Newspaper', 'Magazine', 'Radio'] @classmethod def vertical(cls): g", "do; # we like namedtuple(). from collections import namedtuple Datum = namedtuple(\"Datum\", \"x", "scale_y_integers=True, show_data_labels=True, show_y_guidelines=False, show_x_guidelines=True, # show_x_title=True, # not yet implemented x_title=\"Time\", show_y_title=False, rotate_x_labels=True,", "show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data':", "Plot( { 'draw_lines_between_points': False, 'min_x_value': 0, 'min_y_value': 0, 'show_x_guidelines': True, } ) #", "implemented x_title=\"Time\", show_y_title=False, rotate_x_labels=True, rotate_y_labels=False, x_label_format=\"%m/%d\", timescale_divisions=\"1 week\", popup_format=\"%m/%d/%y\", area_fill=True, min_y_value=0, ) )", "'TV', 'Newspaper', 'Magazine', 'Radio'] @classmethod def vertical(cls): g = bar.VerticalBar(cls.fields) g.stack = 'side'", "2, 1, 5, 4], 'title': 'Male'}) return g def sample_Pie(): g = pie.Pie({})", "0, 'min_y_value': 0, 'area_fill': True, 'stagger_x_labels': True, 'stagger_y_labels': True, 'show_x_guidelines': True, } )", "import schedule from svg.charts import line def sample_Plot(): g = Plot( { 'min_x_value':", 
"143, 82], title='intermediate')) g.add_data(dict(data=[2, 26, 106, 193, 105], title='old')) return g @classmethod def", "return g @classmethod def vertical_top(cls): g = bar.VerticalBar(cls.fields, dict(stack='top')) assert g.stack == 'top'", "collections import namedtuple Datum = namedtuple(\"Datum\", \"x y text\") g.add_data( { 'data': [", "5, 4], 'title': 'Male'}) return g @classmethod def vertical_large(cls): g = bar.VerticalBar(cls.fields) options", "'PlotTextLabels', sample_PlotTextLabels() yield 'TimeSeries', sample_TimeSeries() yield 'VerticalBar', SampleBar.vertical() yield 'HorizontalBar', SampleBar.horizontal() yield 'VerticalBarLarge',", "g.add_data(dict(data=[2, 22, 98, 143, 82], title='intermediate')) g.add_data(dict(data=[2, 26, 106, 193, 105], title='old')) return", "g.add_data(dict(data=data1, title=\"Data\")) return g def save_samples(): root = os.path.dirname(__file__) for sample_name, sample in", "'area_fill': True, 'stagger_x_labels': True, 'stagger_y_labels': True, 'show_x_guidelines': True, } ) g.add_data({'data': [[1, 25],", "we like namedtuple(). 
from collections import namedtuple Datum = namedtuple(\"Datum\", \"x y text\")", "= line.Line() options = dict( scale_integers=True, area_fill=True, width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', show_graph_title=True,", "SampleBar.vertical_large() yield 'VerticalBarStackTop', SampleBar.vertical_top() yield 'Pie', sample_Pie() yield 'Schedule', sample_Schedule() yield 'Line', sample_Line()", "200 g.add_data( { 'data': ['2005-12-21T00:00:00', 20, '2005-12-22T00:00:00', 21], 'title': 'series 1', } )", "193, 105], title='old')) return g @classmethod def vertical_top(cls): g = bar.VerticalBar(cls.fields, dict(stack='top')) assert", "yield 'Schedule', sample_Schedule() yield 'Line', sample_Line() class SampleBar: fields = ['Internet', 'TV', 'Newspaper',", "'HorizontalBar', SampleBar.horizontal() yield 'VerticalBarLarge', SampleBar.vertical_large() yield 'VerticalBarStackTop', SampleBar.vertical_top() yield 'Pie', sample_Pie() yield 'Schedule',", "Apple production 2015 # Any object with a .text attribute will do; #", "dict( scale_integers=True, stack='side', width=640, height=480, graph_title='Question 8', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data(dict(data=[2, 22,", "'VerticalBarLarge', SampleBar.vertical_large() yield 'VerticalBarStackTop', SampleBar.vertical_top() yield 'Pie', sample_Pie() yield 'Schedule', sample_Schedule() yield 'Line',", "g.add_data({'data': [0, 2, 1, 5, 4], 'title': 'Male'}) return g @classmethod def vertical_large(cls):", "{ 'draw_lines_between_points': False, 'min_x_value': 0, 'min_y_value': 0, 'show_x_guidelines': True, } ) # Processed", "\"7/7/04\", \"8/16/04\", ] g = schedule.Schedule( dict( width=640, height=480, graph_title=title, show_graph_title=True, key=False, scale_x_integers=True,", "@classmethod def horizontal(cls): g = bar.HorizontalBar(cls.fields) g.stack = 'side' g.scale_integers = True g.width,", "7', expand_greatest=True, show_data_labels=True, ) 
g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'})", "Datum = namedtuple(\"Datum\", \"x y text\") g.add_data( { 'data': [ Datum(8.24, 80.85, 'ES'),", "5, 4], 'title': 'Male'}) return g def sample_Schedule(): title = \"Billy's Schedule\" data1", "[[1, 25], [2, 30], [3, 45]], 'title': 'series 1'}) g.add_data({'data': [[1, 30], [2,", "6.73, 'IE'), Datum(0, 0, 'IS'), ], 'title': 'Processed Apple', } ) return g", "sample_PlotTextLabels(): g = Plot( { 'draw_lines_between_points': False, 'min_x_value': 0, 'min_y_value': 0, 'show_x_guidelines': True,", "0, 'show_x_guidelines': True, } ) # Processed Apple production 2015 # Any object", "show_y_title=False, rotate_x_labels=True, rotate_y_labels=False, x_label_format=\"%m/%d\", timescale_divisions=\"1 week\", popup_format=\"%m/%d/%y\", area_fill=True, min_y_value=0, ) ) g.add_data(dict(data=data1, title=\"Data\"))", "no_css=False, ) g.__dict__.update(options) g.add_data(dict(data=[2, 22, 98, 143, 82], title='intermediate')) g.add_data(dict(data=[2, 26, 106, 193,", "1, 5, 4], 'title': 'Male'}) return g def sample_Line(): g = line.Line() options", "g.timescale_divisions = '4 hours' g.stagger_x_labels = True g.x_label_format = '%d-%b %H:%M' # g.max_y_value", "'Male'}) return g def sample_Schedule(): title = \"Billy's Schedule\" data1 = [ \"History", "g = bar.VerticalBar(cls.fields, dict(stack='top')) assert g.stack == 'top' g.scale_integers = True g.width, g.height", "return g def sample_Pie(): g = pie.Pie({}) options = dict( width=640, height=480, fields=SampleBar.fields,", "\"\"\" import os from svg.charts.plot import Plot from svg.charts import bar from svg.charts", "time_series.Plot({}) g.timescale_divisions = '4 hours' g.stagger_x_labels = True g.x_label_format = '%d-%b %H:%M' #", "def vertical_large(cls): g = bar.VerticalBar(cls.fields) options = dict( scale_integers=True, stack='side', width=640, height=480, graph_title='Question", "'top' g.scale_integers = True g.width, g.height = 640, 
480 g.graph_title = 'Question 7'", "for sample_name, sample in generate_samples(): res = sample.burn() with open(os.path.join(root, sample_name + '.py.svg'),", "dict( scale_integers=True, area_fill=True, width=640, height=480, fields=SampleBar.fields, graph_title='Question 7', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data({'data':", "return g @classmethod def horizontal(cls): g = bar.HorizontalBar(cls.fields) g.stack = 'side' g.scale_integers =", "scale_x_integers=True, scale_y_integers=True, show_data_labels=True, show_y_guidelines=False, show_x_guidelines=True, # show_x_title=True, # not yet implemented x_title=\"Time\", show_y_title=False,", "= True g.x_label_format = '%d-%b %H:%M' # g.max_y_value = 200 g.add_data( { 'data':", "= dict( scale_integers=True, stack='side', width=640, height=480, graph_title='Question 8', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data(dict(data=[2,", "fields=SampleBar.fields, graph_title='Question 7', expand_greatest=True, show_data_labels=True, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1],", "2, 1, 5, 4], 'title': 'Male'}) return g @classmethod def horizontal(cls): g =", "graph_title='Question 7', expand_greatest=True, show_data_labels=True, ) g.__dict__.update(options) g.add_data({'data': [-2, 3, 1, 3, 1], 'title':", "4], 'title': 'Male'}) return g @classmethod def horizontal(cls): g = bar.HorizontalBar(cls.fields) g.stack =", "graph_title='Question 8', show_graph_title=True, no_css=False, ) g.__dict__.update(options) g.add_data(dict(data=[2, 22, 98, 143, 82], title='intermediate')) g.add_data(dict(data=[2,", "7' g.show_graph_title = True g.add_data({'data': [-2, 3, 1, 3, 1], 'title': 'Female'}) g.add_data({'data':", "rotate_y_labels=False, x_label_format=\"%m/%d\", timescale_divisions=\"1 week\", popup_format=\"%m/%d/%y\", area_fill=True, min_y_value=0, ) ) g.add_data(dict(data=data1, title=\"Data\")) return g", ") g.add_data(dict(data=data1, 
title=\"Data\")) return g def save_samples(): root = os.path.dirname(__file__) for sample_name, sample", "yield 'Line', sample_Line() class SampleBar: fields = ['Internet', 'TV', 'Newspaper', 'Magazine', 'Radio'] @classmethod" ]
[ "worker(num): time.sleep(0.5) print(num) return num if __name__=='__main__': data = list(range(100)) bsize = 10", "num if __name__=='__main__': data = list(range(100)) bsize = 10 reader = data_reader.data_reader(data, worker,", "data = list(range(100)) bsize = 10 reader = data_reader.data_reader(data, worker, bsize) for i", "= list(range(100)) bsize = 10 reader = data_reader.data_reader(data, worker, bsize) for i in", "return num if __name__=='__main__': data = list(range(100)) bsize = 10 reader = data_reader.data_reader(data,", "as tf def worker(num): time.sleep(0.5) print(num) return num if __name__=='__main__': data = list(range(100))", "reader = data_reader.data_reader(data, worker, bsize) for i in range(10): a = reader.get_next_batch() print(a)", "if __name__=='__main__': data = list(range(100)) bsize = 10 reader = data_reader.data_reader(data, worker, bsize)", "data_reader import time import tensorflow as tf def worker(num): time.sleep(0.5) print(num) return num", "time import tensorflow as tf def worker(num): time.sleep(0.5) print(num) return num if __name__=='__main__':", "__name__=='__main__': data = list(range(100)) bsize = 10 reader = data_reader.data_reader(data, worker, bsize) for", "import data_reader import time import tensorflow as tf def worker(num): time.sleep(0.5) print(num) return", "import time import tensorflow as tf def worker(num): time.sleep(0.5) print(num) return num if", "list(range(100)) bsize = 10 reader = data_reader.data_reader(data, worker, bsize) for i in range(10):", "tf def worker(num): time.sleep(0.5) print(num) return num if __name__=='__main__': data = list(range(100)) bsize", "def worker(num): time.sleep(0.5) print(num) return num if __name__=='__main__': data = list(range(100)) bsize =", "tensorflow as tf def worker(num): time.sleep(0.5) print(num) return num if __name__=='__main__': data =", "10 reader = data_reader.data_reader(data, worker, bsize) for i in range(10): a = reader.get_next_batch()", "time.sleep(0.5) 
print(num) return num if __name__=='__main__': data = list(range(100)) bsize = 10 reader", "bsize = 10 reader = data_reader.data_reader(data, worker, bsize) for i in range(10): a", "print(num) return num if __name__=='__main__': data = list(range(100)) bsize = 10 reader =", "import tensorflow as tf def worker(num): time.sleep(0.5) print(num) return num if __name__=='__main__': data", "= 10 reader = data_reader.data_reader(data, worker, bsize) for i in range(10): a =" ]
[ "from .PathSplitter import RegexSplitter __DEFAULT_PATH_SPLITTER__ = RegexSplitter __cajas__ = [ 'Caja', 'CajaMapping', 'CajaMutableMapping',", ".CajaMutableMapping import CajaMutableMapping from .CajaMutableSequence import CajaMutableSequence from .CajaMutableSet import CajaMutableSet from .CajaSequence", "__DEFAULT_PATH_SPLITTER__ = RegexSplitter __cajas__ = [ 'Caja', 'CajaMapping', 'CajaMutableMapping', 'CajaMutableSequence', 'CajaMutableSet', 'CajaSequence', 'CajaSet'", "CajaMutableSet from .CajaSequence import CajaSequence from .CajaSet import CajaSet __DEFAULT_NONE_CAJA__ = CajaMutableMapping __all__", ".CajaMutableSet import CajaMutableSet from .CajaSequence import CajaSequence from .CajaSet import CajaSet __DEFAULT_NONE_CAJA__ =", "CajaSequence from .CajaSet import CajaSet __DEFAULT_NONE_CAJA__ = CajaMutableMapping __all__ = __cajas__ + ['PathSplitter',", "'Caja', 'CajaMapping', 'CajaMutableMapping', 'CajaMutableSequence', 'CajaMutableSet', 'CajaSequence', 'CajaSet' ] from .Caja import Caja from", "= [ 'Caja', 'CajaMapping', 'CajaMutableMapping', 'CajaMutableSequence', 'CajaMutableSet', 'CajaSequence', 'CajaSet' ] from .Caja import", "__cajas__ = [ 'Caja', 'CajaMapping', 'CajaMutableMapping', 'CajaMutableSequence', 'CajaMutableSet', 'CajaSequence', 'CajaSet' ] from .Caja", "[ 'Caja', 'CajaMapping', 'CajaMutableMapping', 'CajaMutableSequence', 'CajaMutableSet', 'CajaSequence', 'CajaSet' ] from .Caja import Caja", ".Caja import Caja from .CajaMapping import CajaMapping from .CajaMutableMapping import CajaMutableMapping from .CajaMutableSequence", "from .CajaSequence import CajaSequence from .CajaSet import CajaSet __DEFAULT_NONE_CAJA__ = CajaMutableMapping __all__ =", "from .CajaMutableMapping import CajaMutableMapping from .CajaMutableSequence import CajaMutableSequence from .CajaMutableSet import CajaMutableSet from", "= RegexSplitter __cajas__ = [ 'Caja', 'CajaMapping', 'CajaMutableMapping', 'CajaMutableSequence', 'CajaMutableSet', 'CajaSequence', 
'CajaSet' ]", "CajaMutableSequence from .CajaMutableSet import CajaMutableSet from .CajaSequence import CajaSequence from .CajaSet import CajaSet", ".CajaMutableSequence import CajaMutableSequence from .CajaMutableSet import CajaMutableSet from .CajaSequence import CajaSequence from .CajaSet", "'CajaMutableSequence', 'CajaMutableSet', 'CajaSequence', 'CajaSet' ] from .Caja import Caja from .CajaMapping import CajaMapping", "import PathSplitter from .PathSplitter import RegexSplitter __DEFAULT_PATH_SPLITTER__ = RegexSplitter __cajas__ = [ 'Caja',", "from .PathSplitter import PathSplitter from .PathSplitter import RegexSplitter __DEFAULT_PATH_SPLITTER__ = RegexSplitter __cajas__ =", "RegexSplitter __cajas__ = [ 'Caja', 'CajaMapping', 'CajaMutableMapping', 'CajaMutableSequence', 'CajaMutableSet', 'CajaSequence', 'CajaSet' ] from", "from .CajaSet import CajaSet __DEFAULT_NONE_CAJA__ = CajaMutableMapping __all__ = __cajas__ + ['PathSplitter', 'RegexSplitter']", "import CajaSequence from .CajaSet import CajaSet __DEFAULT_NONE_CAJA__ = CajaMutableMapping __all__ = __cajas__ +", "'CajaMutableMapping', 'CajaMutableSequence', 'CajaMutableSet', 'CajaSequence', 'CajaSet' ] from .Caja import Caja from .CajaMapping import", "CajaMapping from .CajaMutableMapping import CajaMutableMapping from .CajaMutableSequence import CajaMutableSequence from .CajaMutableSet import CajaMutableSet", ".PathSplitter import PathSplitter from .PathSplitter import RegexSplitter __DEFAULT_PATH_SPLITTER__ = RegexSplitter __cajas__ = [", "] from .Caja import Caja from .CajaMapping import CajaMapping from .CajaMutableMapping import CajaMutableMapping", "Caja from .CajaMapping import CajaMapping from .CajaMutableMapping import CajaMutableMapping from .CajaMutableSequence import CajaMutableSequence", "import CajaMutableSet from .CajaSequence import CajaSequence from .CajaSet import CajaSet __DEFAULT_NONE_CAJA__ = CajaMutableMapping", ".CajaMapping import CajaMapping from .CajaMutableMapping import 
CajaMutableMapping from .CajaMutableSequence import CajaMutableSequence from .CajaMutableSet", "import CajaMutableMapping from .CajaMutableSequence import CajaMutableSequence from .CajaMutableSet import CajaMutableSet from .CajaSequence import", "'CajaMapping', 'CajaMutableMapping', 'CajaMutableSequence', 'CajaMutableSet', 'CajaSequence', 'CajaSet' ] from .Caja import Caja from .CajaMapping", ".PathSplitter import RegexSplitter __DEFAULT_PATH_SPLITTER__ = RegexSplitter __cajas__ = [ 'Caja', 'CajaMapping', 'CajaMutableMapping', 'CajaMutableSequence',", "'CajaSet' ] from .Caja import Caja from .CajaMapping import CajaMapping from .CajaMutableMapping import", "'CajaMutableSet', 'CajaSequence', 'CajaSet' ] from .Caja import Caja from .CajaMapping import CajaMapping from", "import Caja from .CajaMapping import CajaMapping from .CajaMutableMapping import CajaMutableMapping from .CajaMutableSequence import", "'CajaSequence', 'CajaSet' ] from .Caja import Caja from .CajaMapping import CajaMapping from .CajaMutableMapping", "RegexSplitter __DEFAULT_PATH_SPLITTER__ = RegexSplitter __cajas__ = [ 'Caja', 'CajaMapping', 'CajaMutableMapping', 'CajaMutableSequence', 'CajaMutableSet', 'CajaSequence',", "from .CajaMapping import CajaMapping from .CajaMutableMapping import CajaMutableMapping from .CajaMutableSequence import CajaMutableSequence from", "CajaMutableMapping from .CajaMutableSequence import CajaMutableSequence from .CajaMutableSet import CajaMutableSet from .CajaSequence import CajaSequence", "from .CajaMutableSequence import CajaMutableSequence from .CajaMutableSet import CajaMutableSet from .CajaSequence import CajaSequence from", "from .CajaMutableSet import CajaMutableSet from .CajaSequence import CajaSequence from .CajaSet import CajaSet __DEFAULT_NONE_CAJA__", "import CajaMutableSequence from .CajaMutableSet import CajaMutableSet from .CajaSequence import CajaSequence from .CajaSet import", "from .Caja import Caja from .CajaMapping import CajaMapping from 
.CajaMutableMapping import CajaMutableMapping from", "PathSplitter from .PathSplitter import RegexSplitter __DEFAULT_PATH_SPLITTER__ = RegexSplitter __cajas__ = [ 'Caja', 'CajaMapping',", "import RegexSplitter __DEFAULT_PATH_SPLITTER__ = RegexSplitter __cajas__ = [ 'Caja', 'CajaMapping', 'CajaMutableMapping', 'CajaMutableSequence', 'CajaMutableSet',", "import CajaMapping from .CajaMutableMapping import CajaMutableMapping from .CajaMutableSequence import CajaMutableSequence from .CajaMutableSet import", ".CajaSequence import CajaSequence from .CajaSet import CajaSet __DEFAULT_NONE_CAJA__ = CajaMutableMapping __all__ = __cajas__" ]
[ "else: content_type = mimetypes.guess_type(name)[0] if hasattr(content, 'chunks'): content_data = b''.join(chunk for chunk in", "return self._block_blob_service @property def azure_protocol(self): if self.azure_ssl: return 'https' return 'http' if self.azure_ssl", "self.block_blob_service.create_blob_from_bytes( self.azure_container, name, content_data, content_settings=ContentSettings(content_type=content_type)) return name def url(self, name): return self.base_blob_service.make_blob_url( container_name=self.azure_container,", "base_blob_service(self): if self._base_blob_service is None: self._base_blob_service = BaseBlobService( self.account_name, self.account_key) return self._base_blob_service @property", "return blob.properties.content_length def _save(self, name, content): if hasattr(content.file, 'content_type'): content_type = content.file.content_type else:", "from django.conf import settings from azure.storage.blob.models import ContentSettings from azure.storage.blob.baseblobservice import BaseBlobService from", "None self._block_blob_service = None @property def base_blob_service(self): if self._base_blob_service is None: self._base_blob_service =", "def azure_protocol(self): if self.azure_ssl: return 'https' return 'http' if self.azure_ssl is not None", "ContentFile(blob.content) def exists(self, name): return self.base_blob_service.exists(self.azure_container, name) def delete(self, name): try: self.base_blob_service.delete_blob(self.azure_container, name)", "self.azure_ssl is not None else None def _open(self, name, mode=\"rb\"): blob = self.base_blob_service.get_blob_to_bytes(self.azure_container,", "= self.base_blob_service.get_blob_properties(self.azure_container, name) return blob.properties.content_length def _save(self, name, content): if hasattr(content.file, 'content_type'): content_type", "settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key = settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container = azure_container self.azure_ssl = 
settings.AZURE_STATICFILES_SSL self._base_blob_service = None", "= mimetypes.guess_type(name)[0] if hasattr(content, 'chunks'): content_data = b''.join(chunk for chunk in content.chunks()) else:", "content_data = content.read() self.block_blob_service.create_blob_from_bytes( self.azure_container, name, content_data, content_settings=ContentSettings(content_type=content_type)) return name def url(self, name):", "azure.storage.blob.models import ContentSettings from azure.storage.blob.baseblobservice import BaseBlobService from azure.storage.blob.blockblobservice import BlockBlobService from azure.common", "= None self._block_blob_service = None @property def base_blob_service(self): if self._base_blob_service is None: self._base_blob_service", "import BaseBlobService from azure.storage.blob.blockblobservice import BlockBlobService from azure.common import AzureMissingResourceHttpError @deconstructible class AzureStorage(Storage):", "name, content_data, content_settings=ContentSettings(content_type=content_type)) return name def url(self, name): return self.base_blob_service.make_blob_url( container_name=self.azure_container, blob_name=name, protocol=self.azure_protocol,", "import Storage from django.utils.deconstruct import deconstructible from django.conf import settings from azure.storage.blob.models import", "from azure.storage.blob.models import ContentSettings from azure.storage.blob.baseblobservice import BaseBlobService from azure.storage.blob.blockblobservice import BlockBlobService from", "azure.storage.blob.blockblobservice import BlockBlobService from azure.common import AzureMissingResourceHttpError @deconstructible class AzureStorage(Storage): def __init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER,", "else None def _open(self, name, mode=\"rb\"): blob = self.base_blob_service.get_blob_to_bytes(self.azure_container, name) return ContentFile(blob.content) def", "content.chunks()) else: content_data = content.read() 
self.block_blob_service.create_blob_from_bytes( self.azure_container, name, content_data, content_settings=ContentSettings(content_type=content_type)) return name def", "self._base_blob_service = BaseBlobService( self.account_name, self.account_key) return self._base_blob_service @property def block_blob_service(self): if self._block_blob_service is", "self.account_key = settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container = azure_container self.azure_ssl = settings.AZURE_STATICFILES_SSL self._base_blob_service = None self._block_blob_service", "from azure.common import AzureMissingResourceHttpError @deconstructible class AzureStorage(Storage): def __init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER, *args, **kwargs): super(AzureStorage,", "mimetypes from django.core.files.base import ContentFile from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible", "self.azure_container = azure_container self.azure_ssl = settings.AZURE_STATICFILES_SSL self._base_blob_service = None self._block_blob_service = None @property", "if self._base_blob_service is None: self._base_blob_service = BaseBlobService( self.account_name, self.account_key) return self._base_blob_service @property def", "self.account_name, self.account_key) return self._base_blob_service @property def block_blob_service(self): if self._block_blob_service is None: self._block_blob_service =", "return 'https' return 'http' if self.azure_ssl is not None else None def _open(self,", "django.core.files.storage import Storage from django.utils.deconstruct import deconstructible from django.conf import settings from azure.storage.blob.models", "**kwargs): super(AzureStorage, self).__init__(*args, **kwargs) self.account_name = settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key = settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container = azure_container", "not None else None def _open(self, name, mode=\"rb\"): blob = 
self.base_blob_service.get_blob_to_bytes(self.azure_container, name) return", "block_blob_service(self): if self._block_blob_service is None: self._block_blob_service = BlockBlobService( self.account_name, self.account_key) return self._block_blob_service @property", "@deconstructible class AzureStorage(Storage): def __init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER, *args, **kwargs): super(AzureStorage, self).__init__(*args, **kwargs) self.account_name =", "return self.base_blob_service.make_blob_url( container_name=self.azure_container, blob_name=name, protocol=self.azure_protocol, ) def get_modified_time(self, name): blob = self.base_blob_service.get_blob_properties( self.azure_container,", "**kwargs) self.account_name = settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key = settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container = azure_container self.azure_ssl = settings.AZURE_STATICFILES_SSL", "b''.join(chunk for chunk in content.chunks()) else: content_data = content.read() self.block_blob_service.create_blob_from_bytes( self.azure_container, name, content_data,", "cover pass def size(self, name): blob = self.base_blob_service.get_blob_properties(self.azure_container, name) return blob.properties.content_length def _save(self,", "import deconstructible from django.conf import settings from azure.storage.blob.models import ContentSettings from azure.storage.blob.baseblobservice import", "ContentSettings from azure.storage.blob.baseblobservice import BaseBlobService from azure.storage.blob.blockblobservice import BlockBlobService from azure.common import AzureMissingResourceHttpError", "= content.file.content_type else: content_type = mimetypes.guess_type(name)[0] if hasattr(content, 'chunks'): content_data = b''.join(chunk for", "name) except AzureMissingResourceHttpError: # pragma: no cover pass def size(self, name): blob =", "@property def azure_protocol(self): if self.azure_ssl: return 'https' return 'http' if self.azure_ssl is not", "is 
None: self._block_blob_service = BlockBlobService( self.account_name, self.account_key) return self._block_blob_service @property def azure_protocol(self): if", "pass def size(self, name): blob = self.base_blob_service.get_blob_properties(self.azure_container, name) return blob.properties.content_length def _save(self, name,", "content): if hasattr(content.file, 'content_type'): content_type = content.file.content_type else: content_type = mimetypes.guess_type(name)[0] if hasattr(content,", "hasattr(content.file, 'content_type'): content_type = content.file.content_type else: content_type = mimetypes.guess_type(name)[0] if hasattr(content, 'chunks'): content_data", "'content_type'): content_type = content.file.content_type else: content_type = mimetypes.guess_type(name)[0] if hasattr(content, 'chunks'): content_data =", "self._base_blob_service = None self._block_blob_service = None @property def base_blob_service(self): if self._base_blob_service is None:", "self.account_name = settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key = settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container = azure_container self.azure_ssl = settings.AZURE_STATICFILES_SSL self._base_blob_service", "self.account_key) return self._base_blob_service @property def block_blob_service(self): if self._block_blob_service is None: self._block_blob_service = BlockBlobService(", "= BlockBlobService( self.account_name, self.account_key) return self._block_blob_service @property def azure_protocol(self): if self.azure_ssl: return 'https'", "_save(self, name, content): if hasattr(content.file, 'content_type'): content_type = content.file.content_type else: content_type = mimetypes.guess_type(name)[0]", "blob.properties.content_length def _save(self, name, content): if hasattr(content.file, 'content_type'): content_type = content.file.content_type else: content_type", "azure_container=settings.AZURE_STATICFILES_CONTAINER, *args, **kwargs): super(AzureStorage, self).__init__(*args, **kwargs) 
self.account_name = settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key = settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container", "import ContentFile from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible from django.conf import", "if hasattr(content, 'chunks'): content_data = b''.join(chunk for chunk in content.chunks()) else: content_data =", "from azure.storage.blob.blockblobservice import BlockBlobService from azure.common import AzureMissingResourceHttpError @deconstructible class AzureStorage(Storage): def __init__(self,", "AzureStorage(Storage): def __init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER, *args, **kwargs): super(AzureStorage, self).__init__(*args, **kwargs) self.account_name = settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key", "settings from azure.storage.blob.models import ContentSettings from azure.storage.blob.baseblobservice import BaseBlobService from azure.storage.blob.blockblobservice import BlockBlobService", "protocol=self.azure_protocol, ) def get_modified_time(self, name): blob = self.base_blob_service.get_blob_properties( self.azure_container, name ) return blob.properties.last_modified", "self.azure_container, name, content_data, content_settings=ContentSettings(content_type=content_type)) return name def url(self, name): return self.base_blob_service.make_blob_url( container_name=self.azure_container, blob_name=name,", "import BlockBlobService from azure.common import AzureMissingResourceHttpError @deconstructible class AzureStorage(Storage): def __init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER, *args,", "import AzureMissingResourceHttpError @deconstructible class AzureStorage(Storage): def __init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER, *args, **kwargs): super(AzureStorage, self).__init__(*args, **kwargs)", "self.account_key) return self._block_blob_service @property def azure_protocol(self): if self.azure_ssl: return 'https' 
return 'http' if", "= settings.AZURE_STATICFILES_SSL self._base_blob_service = None self._block_blob_service = None @property def base_blob_service(self): if self._base_blob_service", "= settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key = settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container = azure_container self.azure_ssl = settings.AZURE_STATICFILES_SSL self._base_blob_service =", "if self.azure_ssl: return 'https' return 'http' if self.azure_ssl is not None else None", "_open(self, name, mode=\"rb\"): blob = self.base_blob_service.get_blob_to_bytes(self.azure_container, name) return ContentFile(blob.content) def exists(self, name): return", "django.core.files.base import ContentFile from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible from django.conf", "from django.core.files.base import ContentFile from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible from", "deconstructible from django.conf import settings from azure.storage.blob.models import ContentSettings from azure.storage.blob.baseblobservice import BaseBlobService", "self.azure_ssl: return 'https' return 'http' if self.azure_ssl is not None else None def", "name def url(self, name): return self.base_blob_service.make_blob_url( container_name=self.azure_container, blob_name=name, protocol=self.azure_protocol, ) def get_modified_time(self, name):", "delete(self, name): try: self.base_blob_service.delete_blob(self.azure_container, name) except AzureMissingResourceHttpError: # pragma: no cover pass def", "mimetypes.guess_type(name)[0] if hasattr(content, 'chunks'): content_data = b''.join(chunk for chunk in content.chunks()) else: content_data", "self).__init__(*args, **kwargs) self.account_name = settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key = settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container = azure_container self.azure_ssl =", "pragma: no cover pass def size(self, name): blob = 
self.base_blob_service.get_blob_properties(self.azure_container, name) return blob.properties.content_length", "name): blob = self.base_blob_service.get_blob_properties(self.azure_container, name) return blob.properties.content_length def _save(self, name, content): if hasattr(content.file,", "= b''.join(chunk for chunk in content.chunks()) else: content_data = content.read() self.block_blob_service.create_blob_from_bytes( self.azure_container, name,", "chunk in content.chunks()) else: content_data = content.read() self.block_blob_service.create_blob_from_bytes( self.azure_container, name, content_data, content_settings=ContentSettings(content_type=content_type)) return", "blob_name=name, protocol=self.azure_protocol, ) def get_modified_time(self, name): blob = self.base_blob_service.get_blob_properties( self.azure_container, name ) return", "self.base_blob_service.delete_blob(self.azure_container, name) except AzureMissingResourceHttpError: # pragma: no cover pass def size(self, name): blob", "'chunks'): content_data = b''.join(chunk for chunk in content.chunks()) else: content_data = content.read() self.block_blob_service.create_blob_from_bytes(", "self.base_blob_service.make_blob_url( container_name=self.azure_container, blob_name=name, protocol=self.azure_protocol, ) def get_modified_time(self, name): blob = self.base_blob_service.get_blob_properties( self.azure_container, name", "None def _open(self, name, mode=\"rb\"): blob = self.base_blob_service.get_blob_to_bytes(self.azure_container, name) return ContentFile(blob.content) def exists(self,", "name, content): if hasattr(content.file, 'content_type'): content_type = content.file.content_type else: content_type = mimetypes.guess_type(name)[0] if", "except AzureMissingResourceHttpError: # pragma: no cover pass def size(self, name): blob = self.base_blob_service.get_blob_properties(self.azure_container,", "django.conf import settings from azure.storage.blob.models import ContentSettings from 
azure.storage.blob.baseblobservice import BaseBlobService from azure.storage.blob.blockblobservice", "self.base_blob_service.get_blob_to_bytes(self.azure_container, name) return ContentFile(blob.content) def exists(self, name): return self.base_blob_service.exists(self.azure_container, name) def delete(self, name):", "is None: self._base_blob_service = BaseBlobService( self.account_name, self.account_key) return self._base_blob_service @property def block_blob_service(self): if", "azure_container self.azure_ssl = settings.AZURE_STATICFILES_SSL self._base_blob_service = None self._block_blob_service = None @property def base_blob_service(self):", "return self._base_blob_service @property def block_blob_service(self): if self._block_blob_service is None: self._block_blob_service = BlockBlobService( self.account_name,", "def _open(self, name, mode=\"rb\"): blob = self.base_blob_service.get_blob_to_bytes(self.azure_container, name) return ContentFile(blob.content) def exists(self, name):", "import mimetypes from django.core.files.base import ContentFile from django.core.files.storage import Storage from django.utils.deconstruct import", "BlockBlobService from azure.common import AzureMissingResourceHttpError @deconstructible class AzureStorage(Storage): def __init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER, *args, **kwargs):", "azure_protocol(self): if self.azure_ssl: return 'https' return 'http' if self.azure_ssl is not None else", "def size(self, name): blob = self.base_blob_service.get_blob_properties(self.azure_container, name) return blob.properties.content_length def _save(self, name, content):", "content.read() self.block_blob_service.create_blob_from_bytes( self.azure_container, name, content_data, content_settings=ContentSettings(content_type=content_type)) return name def url(self, name): return self.base_blob_service.make_blob_url(", "content_settings=ContentSettings(content_type=content_type)) return name def url(self, name): return 
self.base_blob_service.make_blob_url( container_name=self.azure_container, blob_name=name, protocol=self.azure_protocol, ) def", "def _save(self, name, content): if hasattr(content.file, 'content_type'): content_type = content.file.content_type else: content_type =", "settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container = azure_container self.azure_ssl = settings.AZURE_STATICFILES_SSL self._base_blob_service = None self._block_blob_service = None", "container_name=self.azure_container, blob_name=name, protocol=self.azure_protocol, ) def get_modified_time(self, name): blob = self.base_blob_service.get_blob_properties( self.azure_container, name )", "name): return self.base_blob_service.exists(self.azure_container, name) def delete(self, name): try: self.base_blob_service.delete_blob(self.azure_container, name) except AzureMissingResourceHttpError: #", "= BaseBlobService( self.account_name, self.account_key) return self._base_blob_service @property def block_blob_service(self): if self._block_blob_service is None:", "BaseBlobService from azure.storage.blob.blockblobservice import BlockBlobService from azure.common import AzureMissingResourceHttpError @deconstructible class AzureStorage(Storage): def", "return 'http' if self.azure_ssl is not None else None def _open(self, name, mode=\"rb\"):", "AzureMissingResourceHttpError @deconstructible class AzureStorage(Storage): def __init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER, *args, **kwargs): super(AzureStorage, self).__init__(*args, **kwargs) self.account_name", "is not None else None def _open(self, name, mode=\"rb\"): blob = self.base_blob_service.get_blob_to_bytes(self.azure_container, name)", "None else None def _open(self, name, mode=\"rb\"): blob = self.base_blob_service.get_blob_to_bytes(self.azure_container, name) return ContentFile(blob.content)", "blob = self.base_blob_service.get_blob_to_bytes(self.azure_container, name) return ContentFile(blob.content) def exists(self, name): return 
self.base_blob_service.exists(self.azure_container, name) def", "= None @property def base_blob_service(self): if self._base_blob_service is None: self._base_blob_service = BaseBlobService( self.account_name,", "azure.storage.blob.baseblobservice import BaseBlobService from azure.storage.blob.blockblobservice import BlockBlobService from azure.common import AzureMissingResourceHttpError @deconstructible class", "None @property def base_blob_service(self): if self._base_blob_service is None: self._base_blob_service = BaseBlobService( self.account_name, self.account_key)", "BlockBlobService( self.account_name, self.account_key) return self._block_blob_service @property def azure_protocol(self): if self.azure_ssl: return 'https' return", "if hasattr(content.file, 'content_type'): content_type = content.file.content_type else: content_type = mimetypes.guess_type(name)[0] if hasattr(content, 'chunks'):", "= azure_container self.azure_ssl = settings.AZURE_STATICFILES_SSL self._base_blob_service = None self._block_blob_service = None @property def", "exists(self, name): return self.base_blob_service.exists(self.azure_container, name) def delete(self, name): try: self.base_blob_service.delete_blob(self.azure_container, name) except AzureMissingResourceHttpError:", "blob = self.base_blob_service.get_blob_properties(self.azure_container, name) return blob.properties.content_length def _save(self, name, content): if hasattr(content.file, 'content_type'):", "self._block_blob_service = None @property def base_blob_service(self): if self._base_blob_service is None: self._base_blob_service = BaseBlobService(", "= settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container = azure_container self.azure_ssl = settings.AZURE_STATICFILES_SSL self._base_blob_service = None self._block_blob_service =", "self._block_blob_service is None: self._block_blob_service = BlockBlobService( self.account_name, self.account_key) return self._block_blob_service @property def azure_protocol(self):", 
"self._block_blob_service @property def azure_protocol(self): if self.azure_ssl: return 'https' return 'http' if self.azure_ssl is", "'http' if self.azure_ssl is not None else None def _open(self, name, mode=\"rb\"): blob", "content_data = b''.join(chunk for chunk in content.chunks()) else: content_data = content.read() self.block_blob_service.create_blob_from_bytes( self.azure_container,", "self.azure_ssl = settings.AZURE_STATICFILES_SSL self._base_blob_service = None self._block_blob_service = None @property def base_blob_service(self): if", "name) return ContentFile(blob.content) def exists(self, name): return self.base_blob_service.exists(self.azure_container, name) def delete(self, name): try:", "self._block_blob_service = BlockBlobService( self.account_name, self.account_key) return self._block_blob_service @property def azure_protocol(self): if self.azure_ssl: return", "url(self, name): return self.base_blob_service.make_blob_url( container_name=self.azure_container, blob_name=name, protocol=self.azure_protocol, ) def get_modified_time(self, name): blob =", "content_data, content_settings=ContentSettings(content_type=content_type)) return name def url(self, name): return self.base_blob_service.make_blob_url( container_name=self.azure_container, blob_name=name, protocol=self.azure_protocol, )", "content_type = mimetypes.guess_type(name)[0] if hasattr(content, 'chunks'): content_data = b''.join(chunk for chunk in content.chunks())", "class AzureStorage(Storage): def __init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER, *args, **kwargs): super(AzureStorage, self).__init__(*args, **kwargs) self.account_name = settings.AZURE_STORAGE_ACCOUNT_NAME", "BaseBlobService( self.account_name, self.account_key) return self._base_blob_service @property def block_blob_service(self): if self._block_blob_service is None: self._block_blob_service", "if self.azure_ssl is not None else None def _open(self, name, mode=\"rb\"): blob =", "import ContentSettings from 
azure.storage.blob.baseblobservice import BaseBlobService from azure.storage.blob.blockblobservice import BlockBlobService from azure.common import", "self._base_blob_service is None: self._base_blob_service = BaseBlobService( self.account_name, self.account_key) return self._base_blob_service @property def block_blob_service(self):", "azure.common import AzureMissingResourceHttpError @deconstructible class AzureStorage(Storage): def __init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER, *args, **kwargs): super(AzureStorage, self).__init__(*args,", "no cover pass def size(self, name): blob = self.base_blob_service.get_blob_properties(self.azure_container, name) return blob.properties.content_length def", "content_type = content.file.content_type else: content_type = mimetypes.guess_type(name)[0] if hasattr(content, 'chunks'): content_data = b''.join(chunk", "ContentFile from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible from django.conf import settings", "__init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER, *args, **kwargs): super(AzureStorage, self).__init__(*args, **kwargs) self.account_name = settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key = settings.AZURE_STORAGE_ACCOUNT_KEY", "name) return blob.properties.content_length def _save(self, name, content): if hasattr(content.file, 'content_type'): content_type = content.file.content_type", "content.file.content_type else: content_type = mimetypes.guess_type(name)[0] if hasattr(content, 'chunks'): content_data = b''.join(chunk for chunk", "import settings from azure.storage.blob.models import ContentSettings from azure.storage.blob.baseblobservice import BaseBlobService from azure.storage.blob.blockblobservice import", "AzureMissingResourceHttpError: # pragma: no cover pass def size(self, name): blob = self.base_blob_service.get_blob_properties(self.azure_container, name)", "name) def delete(self, name): try: 
self.base_blob_service.delete_blob(self.azure_container, name) except AzureMissingResourceHttpError: # pragma: no cover", "name): return self.base_blob_service.make_blob_url( container_name=self.azure_container, blob_name=name, protocol=self.azure_protocol, ) def get_modified_time(self, name): blob = self.base_blob_service.get_blob_properties(", "self.base_blob_service.get_blob_properties(self.azure_container, name) return blob.properties.content_length def _save(self, name, content): if hasattr(content.file, 'content_type'): content_type =", "'https' return 'http' if self.azure_ssl is not None else None def _open(self, name,", "settings.AZURE_STATICFILES_SSL self._base_blob_service = None self._block_blob_service = None @property def base_blob_service(self): if self._base_blob_service is", "def delete(self, name): try: self.base_blob_service.delete_blob(self.azure_container, name) except AzureMissingResourceHttpError: # pragma: no cover pass", "for chunk in content.chunks()) else: content_data = content.read() self.block_blob_service.create_blob_from_bytes( self.azure_container, name, content_data, content_settings=ContentSettings(content_type=content_type))", "self.account_name, self.account_key) return self._block_blob_service @property def azure_protocol(self): if self.azure_ssl: return 'https' return 'http'", "hasattr(content, 'chunks'): content_data = b''.join(chunk for chunk in content.chunks()) else: content_data = content.read()", "def base_blob_service(self): if self._base_blob_service is None: self._base_blob_service = BaseBlobService( self.account_name, self.account_key) return self._base_blob_service", "return ContentFile(blob.content) def exists(self, name): return self.base_blob_service.exists(self.azure_container, name) def delete(self, name): try: self.base_blob_service.delete_blob(self.azure_container,", "@property def base_blob_service(self): if self._base_blob_service is None: self._base_blob_service = BaseBlobService( self.account_name, 
self.account_key) return", "None: self._base_blob_service = BaseBlobService( self.account_name, self.account_key) return self._base_blob_service @property def block_blob_service(self): if self._block_blob_service", "@property def block_blob_service(self): if self._block_blob_service is None: self._block_blob_service = BlockBlobService( self.account_name, self.account_key) return", "return name def url(self, name): return self.base_blob_service.make_blob_url( container_name=self.azure_container, blob_name=name, protocol=self.azure_protocol, ) def get_modified_time(self,", "def url(self, name): return self.base_blob_service.make_blob_url( container_name=self.azure_container, blob_name=name, protocol=self.azure_protocol, ) def get_modified_time(self, name): blob", "name, mode=\"rb\"): blob = self.base_blob_service.get_blob_to_bytes(self.azure_container, name) return ContentFile(blob.content) def exists(self, name): return self.base_blob_service.exists(self.azure_container,", "# pragma: no cover pass def size(self, name): blob = self.base_blob_service.get_blob_properties(self.azure_container, name) return", "mode=\"rb\"): blob = self.base_blob_service.get_blob_to_bytes(self.azure_container, name) return ContentFile(blob.content) def exists(self, name): return self.base_blob_service.exists(self.azure_container, name)", "else: content_data = content.read() self.block_blob_service.create_blob_from_bytes( self.azure_container, name, content_data, content_settings=ContentSettings(content_type=content_type)) return name def url(self,", "None: self._block_blob_service = BlockBlobService( self.account_name, self.account_key) return self._block_blob_service @property def azure_protocol(self): if self.azure_ssl:", "try: self.base_blob_service.delete_blob(self.azure_container, name) except AzureMissingResourceHttpError: # pragma: no cover pass def size(self, name):", "in content.chunks()) else: content_data = content.read() self.block_blob_service.create_blob_from_bytes( 
self.azure_container, name, content_data, content_settings=ContentSettings(content_type=content_type)) return name", "super(AzureStorage, self).__init__(*args, **kwargs) self.account_name = settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key = settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container = azure_container self.azure_ssl", "*args, **kwargs): super(AzureStorage, self).__init__(*args, **kwargs) self.account_name = settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key = settings.AZURE_STORAGE_ACCOUNT_KEY self.azure_container =", "= self.base_blob_service.get_blob_to_bytes(self.azure_container, name) return ContentFile(blob.content) def exists(self, name): return self.base_blob_service.exists(self.azure_container, name) def delete(self,", "size(self, name): blob = self.base_blob_service.get_blob_properties(self.azure_container, name) return blob.properties.content_length def _save(self, name, content): if", "from django.core.files.storage import Storage from django.utils.deconstruct import deconstructible from django.conf import settings from", "def block_blob_service(self): if self._block_blob_service is None: self._block_blob_service = BlockBlobService( self.account_name, self.account_key) return self._block_blob_service", "def __init__(self, azure_container=settings.AZURE_STATICFILES_CONTAINER, *args, **kwargs): super(AzureStorage, self).__init__(*args, **kwargs) self.account_name = settings.AZURE_STORAGE_ACCOUNT_NAME self.account_key =", "return self.base_blob_service.exists(self.azure_container, name) def delete(self, name): try: self.base_blob_service.delete_blob(self.azure_container, name) except AzureMissingResourceHttpError: # pragma:", "Storage from django.utils.deconstruct import deconstructible from django.conf import settings from azure.storage.blob.models import ContentSettings", "from django.utils.deconstruct import deconstructible from django.conf import settings from azure.storage.blob.models import ContentSettings from", "from 
azure.storage.blob.baseblobservice import BaseBlobService from azure.storage.blob.blockblobservice import BlockBlobService from azure.common import AzureMissingResourceHttpError @deconstructible", "self.base_blob_service.exists(self.azure_container, name) def delete(self, name): try: self.base_blob_service.delete_blob(self.azure_container, name) except AzureMissingResourceHttpError: # pragma: no", "if self._block_blob_service is None: self._block_blob_service = BlockBlobService( self.account_name, self.account_key) return self._block_blob_service @property def", "name): try: self.base_blob_service.delete_blob(self.azure_container, name) except AzureMissingResourceHttpError: # pragma: no cover pass def size(self,", "django.utils.deconstruct import deconstructible from django.conf import settings from azure.storage.blob.models import ContentSettings from azure.storage.blob.baseblobservice", "def exists(self, name): return self.base_blob_service.exists(self.azure_container, name) def delete(self, name): try: self.base_blob_service.delete_blob(self.azure_container, name) except", "= content.read() self.block_blob_service.create_blob_from_bytes( self.azure_container, name, content_data, content_settings=ContentSettings(content_type=content_type)) return name def url(self, name): return", "self._base_blob_service @property def block_blob_service(self): if self._block_blob_service is None: self._block_blob_service = BlockBlobService( self.account_name, self.account_key)" ]
[ "<NAME> # Painted Harmony Group, Inc # June 26, 2017 # Please See", "import SentimentAnalyzer as analyzer class SentimentAnalyzerTest(unittest.TestCase): def test_analyze_sentiment(self): sa = analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This is", "test_analyze_sentiment(self): sa = analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This is a happy tweet. Have a nice day.\")==\"pos\")", "#-------------------------------------------------------------- import unittest import SentimentAnalyzer as analyzer class SentimentAnalyzerTest(unittest.TestCase): def test_analyze_sentiment(self): sa =", "By <NAME> # Painted Harmony Group, Inc # June 26, 2017 # Please", "self.assertTrue(sa.analyze_sentiment(\"This is a happy tweet. Have a nice day.\")==\"pos\") self.assertTrue(sa.analyze_sentiment(\"I am angry. He", "is a happy tweet. Have a nice day.\")==\"pos\") self.assertTrue(sa.analyze_sentiment(\"I am angry. He is", "Group, Inc # June 26, 2017 # Please See LICENSE.txt #-------------------------------------------------------------- import unittest", "analyzer class SentimentAnalyzerTest(unittest.TestCase): def test_analyze_sentiment(self): sa = analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This is a happy tweet.", "happy tweet. Have a nice day.\")==\"pos\") self.assertTrue(sa.analyze_sentiment(\"I am angry. He is very disonest.", "a happy tweet. Have a nice day.\")==\"pos\") self.assertTrue(sa.analyze_sentiment(\"I am angry. He is very", "analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This is a happy tweet. 
Have a nice day.\")==\"pos\") self.assertTrue(sa.analyze_sentiment(\"I am angry.", "import unittest import SentimentAnalyzer as analyzer class SentimentAnalyzerTest(unittest.TestCase): def test_analyze_sentiment(self): sa = analyzer.SentimentAnalyzer()", "# June 26, 2017 # Please See LICENSE.txt #-------------------------------------------------------------- import unittest import SentimentAnalyzer", "#-------------------------------------------------------------- # By <NAME> # Painted Harmony Group, Inc # June 26, 2017", "Harmony Group, Inc # June 26, 2017 # Please See LICENSE.txt #-------------------------------------------------------------- import", "Inc # June 26, 2017 # Please See LICENSE.txt #-------------------------------------------------------------- import unittest import", "Please See LICENSE.txt #-------------------------------------------------------------- import unittest import SentimentAnalyzer as analyzer class SentimentAnalyzerTest(unittest.TestCase): def", "See LICENSE.txt #-------------------------------------------------------------- import unittest import SentimentAnalyzer as analyzer class SentimentAnalyzerTest(unittest.TestCase): def test_analyze_sentiment(self):", "SentimentAnalyzerTest(unittest.TestCase): def test_analyze_sentiment(self): sa = analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This is a happy tweet. 
Have a", "LICENSE.txt #-------------------------------------------------------------- import unittest import SentimentAnalyzer as analyzer class SentimentAnalyzerTest(unittest.TestCase): def test_analyze_sentiment(self): sa", "2017 # Please See LICENSE.txt #-------------------------------------------------------------- import unittest import SentimentAnalyzer as analyzer class", "# Painted Harmony Group, Inc # June 26, 2017 # Please See LICENSE.txt", "SentimentAnalyzer as analyzer class SentimentAnalyzerTest(unittest.TestCase): def test_analyze_sentiment(self): sa = analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This is a", "June 26, 2017 # Please See LICENSE.txt #-------------------------------------------------------------- import unittest import SentimentAnalyzer as", "Painted Harmony Group, Inc # June 26, 2017 # Please See LICENSE.txt #--------------------------------------------------------------", "unittest import SentimentAnalyzer as analyzer class SentimentAnalyzerTest(unittest.TestCase): def test_analyze_sentiment(self): sa = analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This", "tweet. Have a nice day.\")==\"pos\") self.assertTrue(sa.analyze_sentiment(\"I am angry. He is very disonest. Sad.\")==\"neg\")", "# Please See LICENSE.txt #-------------------------------------------------------------- import unittest import SentimentAnalyzer as analyzer class SentimentAnalyzerTest(unittest.TestCase):", "# By <NAME> # Painted Harmony Group, Inc # June 26, 2017 #", "= analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This is a happy tweet. Have a nice day.\")==\"pos\") self.assertTrue(sa.analyze_sentiment(\"I am", "as analyzer class SentimentAnalyzerTest(unittest.TestCase): def test_analyze_sentiment(self): sa = analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This is a happy", "sa = analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This is a happy tweet. 
Have a nice day.\")==\"pos\") self.assertTrue(sa.analyze_sentiment(\"I", "def test_analyze_sentiment(self): sa = analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This is a happy tweet. Have a nice", "class SentimentAnalyzerTest(unittest.TestCase): def test_analyze_sentiment(self): sa = analyzer.SentimentAnalyzer() self.assertTrue(sa.analyze_sentiment(\"This is a happy tweet. Have", "26, 2017 # Please See LICENSE.txt #-------------------------------------------------------------- import unittest import SentimentAnalyzer as analyzer" ]